#!/usr/bin/env python

# $Id: getLatency.py,v 1.4 2021/03/04 17:25:01 stuart Exp $

# One of our product specifications is position solution latency. This
# script gives an indication of the solution latency. In the I/O library
# the NMEA:ZDA message will provide the current time with a resolution
# of 1ms. Therefore to get an estimate of the latency for years we have
# enabled GGA and ZDA, ZDA is generated after the GGA. Therefore if you
# subtract the GGA time (time of position) from ZDA time (time the ZDA
# message was generated which is very soon after the GGA message is
# generated) it will give an indication of the latency. This does not
# capture any further delays after the IOTX task, for example it does
# not capture delays in the drivers or any physical layer delays
#
# The code has been written as a class. However, the script will work
# standalone. The class structure was used and the general structure of
# "replayCorr.py" used so that a python layer can be added on top of
# this file to in parallel monitor multiple receivers.
#
# pyShark "pcap" based timing
# ---------------------------
#   If the argument "--pyshark_interface ETH_IFACE_NAME" is included in
#   the command line, NMEA arrival times will be based on pcap timing
#   rather than the Python system clock. This significantly improves
#   timing accuracy and eliminates outliers related to system load or
#   periodic I/O performed in this script.
#
#   Notes:
#     - Use "ifconfig" to list Ethernet interfaces.
#     - Currently there must be no other TCP/IP client to the NMEA stream
#       from the receiver to the same host because filtering is done based
#       on remote IP:PORT, and packets for both clients will be processed
#       by this script.
#     - This feature has only been tested on Linux.
#
# Copyright Trimble Inc., 2019-2020
#

usage="""\
To use:
  provide the socket information
  provide the required frequency rate (will setup NMEA)
  optionally provide the prefix to generate more unique file names

  Important: The port must be read/write.
"""

import matplotlib
# Allow running headless from the command line
matplotlib.use("agg")

from numpy import *
from pylab import *

import datetime
#from datetime import timezone
import serial
import argparse
import leapseconds
import subprocess as sp
import re
import sys
import math
import RXTools
import time
import os.path
import signal
import zipfile
import bisect
import threading
import pyshark
import queue
import psutil

# Make the object used for latency measurements global so it is
# available from within signal_handler() to perform an orderly shutdown.
rc = None

def signal_handler(received_signal, frame):
  '''Ctrl-C / SIGTERM handler: stop the global latency monitor, reap any
     child processes left behind by pyShark, then exit the script.'''
  global rc
  print('You pressed Ctrl+C!')
  if rc is not None:
    print("Shutting down...")
    rc.stop()

    # pyShark does not always terminate tcpdump; kill any children we
    # still own (POSIX only - os.kill with SIGTERM is not portable).
    if sys.platform != 'win32':
      print("Killing child processes")
      me = psutil.Process( os.getpid() )
      for kid in me.children(recursive=True):
        os.kill( kid.pid, signal.SIGTERM )

  print("Exiting")
  sys.exit(0)

class pySharkLine():
  '''A single NMEA line paired with its pcap capture timestamp.

     Instances of this class are what pySharkReader().getLine() hands
     back to parse_nmea().
  '''
  def __init__(self, line, timestamp):
    # Raw (CR-LF terminated) NMEA bytes and the pcap arrival time.
    self.line = line
    self.timestamp = timestamp

class pySharkReader():
  '''This class runs the thread for the interface to pyShark and provides
     timestamped lines to the application.
       pyshark_pktCallback() - Handles packets from pyShark.
       pyshark_pktThread() - Used as thread function to run interface.
       getLine() - Returns timestamped NMEA lines to parse_nmea().
  '''
  def __init__(self, interface, host, port):
    '''interface - Ethernet interface name to capture on (e.g. "eth0")
       host      - IP address of the NMEA source (used in the BPF filter)
       port      - TCP port of the NMEA source
    '''
    self.interface = interface
    self.host      = host
    self.port      = port

    # Received packets are submitted to this synchronous Queue for extraction
    # when required.
    self.pktQueue = queue.Queue()

    # NMEA lines already extracted from packets but not yet handed to the
    # caller - one TCP payload may contain several lines.
    self.bufferedLines = []

    ###print("pyshark: Start RemoteCapture( %s %d ) ..." % ( host, port ))
    ###self.capture = pyshark.RemoteCapture( host,
    ###                                      self.interface,
    ###                                      remote_port = port )

    # Set the filter to be applied to pyShark packets.
    pysharkFilter = 'src host %s and tcp port %d' % ( host, port )
    print( 'pyshark: Start LiveCapture( "%s", "%s")' %
           ( self.interface, pysharkFilter ) )

    # Start live capture with that filter.
    self.capture = pyshark.LiveCapture( self.interface,
                                        bpf_filter = pysharkFilter )
    ###print("pyshark: sniff() ...")
    ###self.capture.sniff( timeout=50 )

    # Run the blocking capture loop on its own thread so getLine() can
    # block on pktQueue from the main thread.
    print("pyshark: Create callback thread ...")
    self.pkt_thread = threading.Thread( target=self.pyshark_pktThread )
    self.pkt_thread.start()

    print("pyshark: Initialized.")

  def pyshark_pktCallback(self, pkt):
    '''Callback used by extern pyShark module to pass timestamped packets
       into the application.
    '''
    # Double check host/port before queueing.
    # NOTE(review): this assumes pkt.tcp.port reflects the remote (source)
    # port for inbound packets - confirm against pyshark's field docs.
    if pkt.ip.src == self.host and int(pkt.tcp.port) == self.port:
      #print( 'packetHandler(): enqueue OK' )
      self.pktQueue.put( pkt )

  def pyshark_pktThread(self):
    '''Thread to simply call module implementing the interface to pyShark
       and pass pyshark_pktCallback() as the callback.'''

    # Run the packet handler call back function. It seems like this would
    # set an asynchronous callback function, but apply_on_packets() just
    # blocks as long as packets are arriving while invoking the callback.
    # Note that there is also a "timeout" parameter that could be set, but
    # this just terminates blocking by throwing a Timeout exception after
    # the specified interval. No timeout is set here so that this function
    # just runs continuously until the script is shut down.

    self.capture.apply_on_packets( self.pyshark_pktCallback )

  def getLine(self):
    '''Called from parse_nmea() to convert queued packets into timestamped
       NMEA lines. Returns a (line_bytes, timestamp) pair.'''

    # If we have no buffered lines, read a packet from the queue and process.
    while len(self.bufferedLines) < 1:
      pkt = self.pktQueue.get(block=True)

      if pkt.tcp.payload is not None:
        # Extract the CR-LF terminated lines from each packet and store
        # in bufferedLines() along with the timetag.

        # Encode to ASCII bytes then decode to string for processing.
        payloadStr = pkt.tcp.payload.encode('ascii').decode('ascii')

        # This data is a colon-separated list of 2-digit hex characters
        # for each byte. Convert these and store in "buf" and interpret
        # the LF character as the end-of-line.

        # Buffer to contain bytes in a single CR-LF terminated string.
        buf = bytearray()

        # Scan through colon-separated hex values.
        for s in payloadStr.split(':'):
          c = int(s, 16)
          buf.append( c )

          # Terminated on the LF character.
          if c == 0x0A:
            # Append the completed line together with the packet's pcap
            # capture timestamp (seconds since the epoch).
            timestamp = pkt.sniff_time.timestamp()
            self.bufferedLines.append( pySharkLine( buf, timestamp ) )

            # Clear the buffer for the next line.
            buf = bytearray()

    # Pop the first buffered line from the list and return.
    p = self.bufferedLines.pop(0)
    return p.line, p.timestamp

class getLatency(object):
    count = 0
    oldTime = 'XX'
    def __init__(self, port, prefix=None, log=False, freq=0,
                       start_nmea_output=False, pyshark_interface=None ):
        '''Set up latency collection for one receiver.

        port = Where to send data, e.g. /dev/ttyS0 or socket://10.1.150.x:5018
        freq = Frequency of NMEA in Hz (must be > 0)
        Optional:
          prefix            - prefix to generated files
          log               - True/False - enables logging of NMEA ZDA/GGA
                              to a file. Note the file rolls over every hour
          start_nmea_output - send commands to start NMEA output (the
                              port must be read/write for this)
          pyshark_interface - Ethernet interface name; when given, NMEA
                              arrival times come from pcap rather than
                              the Python system clock

        Raises ValueError if freq is not a positive rate.
        (The original version applied "% usage" to this string, which
        turned it into a discarded expression instead of a docstring.)
        '''

        # Option to enable sending commands to start NMEA output when
        # TCP/IP port is 2-way.
        self.start_nmea_output = start_nmea_output

        # A few variables related only to pyshark timing capture.
        self.pySharkReader     = None               # Capture buffering object
        self.pySharkInterface  = pyshark_interface  # Name of eth interface
        self.capture           = None               # pyshark capture object
        self.io_thread         = None               # Dummy I/O thread
        self.io_thread_enabled = False              # Enables dummy I/O thread
        self.pkt_thread        = None               # pyshark callback thread

        self.freq = int(freq)
        # Fail early with a clear message instead of the ZeroDivisionError
        # the original raised on the next line when freq was 0.
        if self.freq <= 0:
          raise ValueError("freq must be a positive NMEA rate in Hz, got %r" % (freq,))

        # Per-epoch (position-within-the-second) statistics accumulators.
        self.msPerEpoch = 1000/self.freq
        self.minValue = [99999] * self.freq
        self.maxValue = [0] * self.freq
        self.sumValue = [0] * self.freq
        self.numInSum = [0] * self.freq
        # Storage array for all logged data as a function of epoch within
        # the second. We'll use bisect to keep this sorted so we can easily
        # extract the percentiles
        self.history = [[] for x in range(self.freq)]

        self.prefix = prefix if prefix is not None else ''
        # Hourly summary statistics and per-millisecond histogram outputs
        # (both opened in append mode so restarts keep prior data).
        self.summary_fid = open(self.prefix + 'Summary.txt','a')
        self.hist_fid = open(self.prefix + 'Hist.txt','a')

        self.port = port

        if log:
          # Raw NMEA log; rolled over (and zipped) hourly by parse_nmea().
          now = datetime.datetime.utcnow()
          self.logHour = now.hour

          self.NMEA_fid_root = (  self.prefix + '-'
                                 + str(now.year)
                                 + str(now.month).zfill(2)
                                 + str(now.day).zfill(2)
                                 + str(now.hour).zfill(2) + '-NMEA')

          self.NMEA_fid = open(self.NMEA_fid_root + '.txt','wb')
        else:
          self.NMEA_fid = None

        # 5 s timeout so a dead link does not block readline() forever.
        self.s = serial.serial_for_url(port,timeout=5)
        self.s_nmea = None
        self.rcvr_port_idx = -1
        self.FileHandle = None           # "EthernetTime-*" output file
        self.FilePrefix = None           # date string of the open file


    def plotLatency(self,filename):
      '''Plot min/max/avg/68%/95% latency versus millisecond bin and save
         the figure to <filename>.png.

         The small +/- offsets keep overlapping markers distinguishable.
         BUG FIX: the 95th percentile was computed with the 0.68 index
         (copy-paste), so the 95% trace duplicated the 68% trace.
      '''
      fig=figure()
      ax=fig.add_subplot(111)

      sixtyEight = []
      ninetyFive = []
      for i in range(self.freq):
        # history[i] is kept sorted (bisect.insort) and holds milliseconds;
        # convert the percentile samples to seconds. Guard against an
        # epoch bin that received no data (previously an IndexError).
        n = len(self.history[i])
        if n > 0:
          sixtyEight.append(float(self.history[i][int(n*0.68)]) / 1000.0)
          ninetyFive.append(float(self.history[i][int(n*0.95)]) / 1000.0)
        else:
          sixtyEight.append(float('nan'))
          ninetyFive.append(float('nan'))

      plot( array(range(0,self.freq)) * self.msPerEpoch, array(self.minValue)-0.0002,'gx',  label='Min Latency')
      plot( array(range(0,self.freq)) * self.msPerEpoch, array(self.maxValue)+0.0002,'rx',  label='Max Latency')
      plot( array(range(0,self.freq)) * self.msPerEpoch, array(self.sumValue) / array(self.numInSum),'bx',  label='Avg Latency')
      plot( array(range(0,self.freq)) * self.msPerEpoch, array(sixtyEight)-0.0001,'cx',  label='68%')
      plot( array(range(0,self.freq)) * self.msPerEpoch, array(ninetyFive)+0.0001,'mx',  label='95%')

      ax.set_xlim([0,1000])
      ydata = list(ax.get_ylim())
      ydata[0] = 0
      ax.set_ylim(ydata)
      xlabel('Millisecond Bin')
      ylabel('Latency [s]')
      grid(True)
      legend()
      tight_layout()
      # Prevent the axis numbers having an offset
      ax.get_xaxis().get_major_formatter().set_useOffset(False)
      ax.get_yaxis().get_major_formatter().set_useOffset(False)
      show()
      # Save the data as a PNG file
      savefig(filename + '.png',dpi=150)
      close()




    def checksumtest(self,s):
        '''Return True if the NMEA sentence 's' (bytes) passes its checksum.

        The NMEA checksum is the XOR of every byte between the leading
        '$' and the '*', compared against the two hex digits following
        the '*'. Returns False when the sentence carries no '*' field -
        the original code left cs1/cs2 unbound in that case and raised
        UnboundLocalError.
        '''
        tokens = s.split(b'*')
        if len(tokens) < 2:
            return False
        expected = int(tokens[1][:2].decode(),16)
        computed = 0
        for val in tokens[0][1:]:
            computed ^= val
        return (expected == computed)

    def NMEA_to_gps_secs(self,s,processDate):
        '''Converts an NMEA string 's' to full GPS seconds since the start of GPS
           When the data rolls into a new hour save some diagnostics

           s           - NMEA sentence as bytes (GGA or ZDA)
           processDate - True for ZDA, where fields 1-4 carry time and the
                         full date; False for GGA, where only time-of-day
                         is present and the date comes from the host clock

           Returns GPS seconds since the GPS epoch (1980-01-06), or -1.
           when the sentence cannot be parsed. As a side effect, on an
           hour rollover (ZDA only) it writes the summary and histogram
           files, saves the latency plot and resets the accumulators.
        '''
        # Historical filtering, now performed by the caller (parse_nmea):
        #if not re.match(b'\$G.ZDA.*',s):
        #    return -1.
        #if not self.checksumtest(s):
        #    return -1.

        tokens = s.split(b',')
        # Need the talker field plus at least 4 data fields; field 2
        # (ZDA day / GGA latitude) must be non-empty for a usable fix.
        if len(tokens) < 5:
            return -1.
        if len(tokens[2]) == 0:
            return -1.

        if(processDate == True):
          # ZDA: fields 1-4 are "HHMMSS.ss,day,month,year".
          time_str = bytearray(b',').join(tokens[1:5]).decode('ascii')
          utc_date = datetime.datetime.strptime(time_str,'%H%M%S.%f,%d,%m,%Y')
        else:
          # GGA carries no date, so borrow it from the host clock.
          # NOTE(review): assumes the host clock date matches the
          # receiver's UTC date - could mis-date an epoch near midnight.
          now = datetime.datetime.utcnow()
          time_str = tokens[1].decode('ascii') + ' ' + str(now.year) + ' ' + str(now.month) + ' ' + str(now.day)
          utc_date = datetime.datetime.strptime(time_str,'%H%M%S.%f %Y %m %d')

        # convert to GPS time
        gps_epoch = datetime.datetime(1980,1,6)
        gps_time = leapseconds.utc_to_gps(utc_date) - gps_epoch

        # Test to see if we are processing full date (ZDA) and the time
        # has bumped to the next hour. If so plot the data and write
        # information to the summary file
        if( (processDate == True) and (time_str[0:2] != self.oldTime)):
          self.oldTime = time_str[0:2]
          # Only produce output if bin 0 actually accumulated data.
          if(self.numInSum[0] > 0):
            date = str(utc_date.year) + '-' + str(utc_date.month).zfill(2) + '-' + str(utc_date.day).zfill(2) + 'T' + str(utc_date.hour).zfill(2)
            self.plotLatency(self.prefix + '-' + date + '-Latency-' + str(self.freq) + 'Hz')
            self.summary_fid.write("%d %d %d %d %d %d " % (utc_date.year, utc_date.month, utc_date.day, utc_date.hour, min(self.numInSum), self.freq))
            # One min/max/avg/68%/95% group per epoch bin; accumulators
            # are reset as each bin is written out.
            for i in range(self.freq):
              if(self.numInSum[i] > 0):
                sixtyEight = float(self.history[i][int(len(self.history[i])*0.68)]) / 1000.0
                ninetyFive = float(self.history[i][int(len(self.history[i])*0.95)]) / 1000.0
                self.summary_fid.write("%.3f %.3f %.5f %.5f %.5f " % (self.minValue[i],
                                                                      self.maxValue[i],
                                                                      self.sumValue[i]/self.numInSum[i],
                                                                      sixtyEight,
                                                                      ninetyFive))
              else:
                self.summary_fid.write("%.3f %.3f NaN NaN NaN " % (self.minValue[i],self.maxValue[i]))

              # NOTE(review): min is reset to 9999 here but initialized to
              # 99999 in __init__ - confirm which sentinel is intended.
              self.minValue[i] = 9999
              self.maxValue[i] = 0
              self.sumValue[i] = 0
              self.numInSum[i] = 0
            self.summary_fid.write("\n")
            self.summary_fid.flush()

            for i in range(self.freq):
              # Now output the data as a histogram
              #
              # Columns:
              #  Year
              #  Month
              #  Day
              #  Hour
              #  Epoch within the second
              #  Num epochs per second (e.g. 20 for 20Hz data)
              #  Num of data points for this line of data (written later)
              #  Data from ms 0, the last entry is for everything over 100ms
              #
              self.hist_fid.write("%d %d %d %d %d %d " % (utc_date.year, utc_date.month, utc_date.day, utc_date.hour, i, self.freq))
              # 102 bins: 0..100 ms inclusive plus one overflow bin.
              # NOTE(review): "np" relies on the module's "from pylab
              # import *" providing numpy as np - confirm.
              dataBin = [0] * 102
              dataLen = len(self.history[i])
              dataArr = np.array(self.history[i])

              for j in range(0,101):
                idx = np.where(dataArr == j)
                dataBin[j] += len(idx[0])

              # Above we checked for bins 0, 100 inclusive. Now
              # check for any data beyond that range and write to the
              # last element of the list
              idx = np.where(dataArr > 100)
              dataBin[101] += len(idx[0])

              # Clear the history
              self.history[i] = []

              # dataLen should equal the sum of what follows in the output
              self.hist_fid.write("%d  " % dataLen)

              for j in range(0,102):
                self.hist_fid.write("%d " % dataBin[j])
              self.hist_fid.write("\n")
              self.hist_fid.flush()

        return gps_time.total_seconds()

    def full_secs_to_week_sec(self,secs):
        '''Split full GPS seconds into (week number, seconds into week).'''
        SECS_PER_WEEK = 24*7*60*60.
        week = int(secs / SECS_PER_WEEK)
        return week, secs - week * SECS_PER_WEEK

    def parse_nmea(self):
        '''Read NMEA until one GGA/ZDA pair has been processed and fold
        the measured latency into the per-epoch statistics.

        Returns 1 when a pair was processed, 0 if the ZDA time could not
        be converted. Blocks on the input source (pyshark queue or the
        serial/socket readline).
        '''
        ZDA_gps_secs = -1
        gotGGA = False      # set once a valid GGA has been seen
                            # (was misleadingly named "gotZDA")

        while ZDA_gps_secs == -1:
          # One line of NMEA plus its arrival time: pcap timestamps when
          # pyshark timing is enabled, otherwise the system clock.
          if self.pySharkInterface is not None:
            received, t_raw = self.pySharkReader.getLine()
          else:
            received = bytearray(self.s.readline())
            t_raw = time.time()

          s = received.rstrip()

          # Optional raw NMEA logging with hourly zipped rollover.
          if self.NMEA_fid is not None:
            now = datetime.datetime.utcnow()
            if now.hour != self.logHour:
              self.logHour = now.hour
              self.NMEA_fid.close()

              # Zip the finished hour and remove the plain-text copy.
              zipfile.ZipFile(self.NMEA_fid_root + '.zip', mode='w').write(self.NMEA_fid_root + '.txt', compress_type=zipfile.ZIP_DEFLATED)
              os.remove(self.NMEA_fid_root + '.txt')

              self.NMEA_fid_root = (  self.prefix + '-'
                                    + str(now.year)
                                    + str(now.month).zfill(2)
                                    + str(now.day).zfill(2)
                                    + str(now.hour).zfill(2) + '-NMEA')

              self.NMEA_fid = open(self.NMEA_fid_root + '.txt','wb')

            self.NMEA_fid.write(received)

          if re.match(rb'\$G.GGA.*', s) and self.checksumtest(s):
            GGA_gps_secs = self.NMEA_to_gps_secs(s, False)
            gotGGA = True
          elif gotGGA and re.match(rb'\$G.ZDA.*', s) and self.checksumtest(s):
            # We have the GGA and its following ZDA.
            ZDA_gps_secs = self.NMEA_to_gps_secs(s, True)
            gps_epoch = datetime.datetime(1980,1,6)

            # Use "t_raw" and the times encoded in the GGA/ZDA to
            # approximate the Ethernet delay: receiver output delay,
            # network/router delay and host delays up to the
            # timestamping statement above. After this we have:
            #  GGA_gps_secs - GPS seconds of the validity of the position
            #  ZDA_gps_secs - GPS time when IOTX created the NMEA message
            #  Eth_gps_secs - GPS time of when this function parsed the
            #                 data (approximately the receive time)
            Eth_gps_secs = (leapseconds.utc_to_gps(datetime.datetime.utcfromtimestamp(t_raw)) - gps_epoch).total_seconds()
            ZDADelay = ZDA_gps_secs - GGA_gps_secs
            EthDelay = Eth_gps_secs - ZDA_gps_secs
            TotalDelay = Eth_gps_secs - GGA_gps_secs
            Week = int(GGA_gps_secs/(7*86400))
            WeekSeconds = GGA_gps_secs - Week*7*86400

            # "EthernetTime-<date>" file, rolled when the ZDA date changes.
            dateInfo = s.decode('ascii').split(',')
            DateString = dateInfo[4] + '-' + dateInfo[3] + '-' + dateInfo[2]
            if self.FileHandle is None:
              self.FileHandle = open('EthernetTime-' + DateString + '.txt','a')
              self.FilePrefix = DateString
            elif self.FilePrefix != DateString:
              self.FileHandle.close()
              self.FileHandle = open('EthernetTime-' + DateString + '.txt','a')
              # BUG FIX: the original assigned a local "FilePrefix" here,
              # so after a date rollover the file was closed and reopened
              # on every single epoch.
              self.FilePrefix = DateString

            self.FileHandle.write("%d %.2f %.3f %.6f %.6f\n" % (Week,WeekSeconds,ZDADelay,EthDelay,TotalDelay))

        if ZDA_gps_secs < 0.:
          return 0

        delta = ZDA_gps_secs - GGA_gps_secs

        # -1 marks "no valid bin this call". BUG FIX: the original left
        # "epoch" unbound when the sanity check failed; the resulting
        # NameError in the report block below was swallowed by a bare
        # except, self.count was never reset, and the periodic report was
        # permanently disabled.
        epoch = -1

        # Build up a data table if it passes basic sanity tests.
        # It should not be negative, but it might from an inertial
        # system as the data is extrapolated under certain conditions
        if (delta > -0.05) and (delta < 1.00):
          millisecond = round(1000*(GGA_gps_secs - int(GGA_gps_secs)))
          epoch = int(millisecond / self.msPerEpoch)
          if epoch >= 0 and epoch < self.freq:
            if delta < self.minValue[epoch]:
              self.minValue[epoch] = delta
            if delta > self.maxValue[epoch]:
              self.maxValue[epoch] = delta
            self.sumValue[epoch] += delta
            self.numInSum[epoch] += 1
            # One sorted list per epoch; bisect keeps it sorted so the
            # percentiles are cheap to extract.
            bisect.insort(self.history[epoch],int(delta*1000))

        # Output a diagnostic occasionally. Testing against number of
        # bins + 1 makes the reported bin slowly cycle through all epochs.
        self.count += 1
        try:
          if self.count == (self.freq + 1):
            if epoch >= 0 and epoch < self.freq:
              if self.numInSum[epoch] > 0:
                sixtyEight = float(self.history[epoch][int(len(self.history[epoch])*0.68)]) / 1000.0 # Convert to secs
                ninetyFive = float(self.history[epoch][int(len(self.history[epoch])*0.95)]) / 1000.0
                print("%.2f %2d %.3f -- %.3f %.3f %.4f %.3f %.3f %d" % (GGA_gps_secs, epoch, ZDA_gps_secs- GGA_gps_secs,
                                                              self.minValue[epoch],
                                                              self.maxValue[epoch],
                                                              self.sumValue[epoch]/self.numInSum[epoch],
                                                              sixtyEight,
                                                              ninetyFive,
                                                              self.numInSum[epoch]))
              else:
                print("%.2f %2d %.3f -- NaN NaN NaN 0" % (GGA_gps_secs, epoch, ZDA_gps_secs- GGA_gps_secs))
            self.count = 0

          if self.count == 0:
            # Per-second summary across all epoch bins.
            for i in range(self.freq):
              if self.numInSum[i] > 0:
                sixtyEight = float(self.history[i][int(len(self.history[i])*0.68)]) / 1000.0 # Convert to secs
                ninetyFive = float(self.history[i][int(len(self.history[i])*0.95)]) / 1000.0
                print("%d %.3f %.3f %.4f %.4f %.4f %d" % (i,
                                                          self.minValue[i],
                                                          self.maxValue[i],
                                                          self.sumValue[i]/self.numInSum[i],
                                                          sixtyEight,
                                                          ninetyFive,
                                                          self.numInSum[i]))
              else:
                print("%d NaN NaN NaN 0" % i)
        except Exception:
          # Narrowed from a bare "except:" so SystemExit/KeyboardInterrupt
          # still propagate.
          print("Error in report loop")
        return 1

    def do_DCOL(self,cmd_num,cmd_data):
        '''Send DCOL command number 'cmd_num' with payload 'cmd_data' over
        the current receiver port and return the binary response.'''
        self.s.reset_input_buffer()
        self.s.write(RXTools.formDColCommand(cmd_num, cmd_data))
        # Give the receiver a moment to answer before draining the port.
        time.sleep(1)
        response = b''
        # Collect whatever arrived, capped at 1 kB.
        while self.s.in_waiting and len(response) < 1024:
            response += self.s.read()
        return bytearray(response)

    def enable_NMEA(self):
        '''Enable NMEA ZDA/GGA on current receiver port (and disable all
        other outputs on the port).

        Exits the process (SystemExit) if self.freq is not one of the
        supported NMEA rates.'''

        # it is hard to get the port number if a lot of data is already
        # streaming. Start by turning off all data
        self.do_DCOL(0x51,[0xa,0xff])

        # Query the receiver for the index of the port we are talking on.
        data = self.do_DCOL(0x6f,[])
        pat = re.compile(b'PORT,([0-9]+),')
        self.rcvr_port_idx = int(pat.findall(data)[0])+1

        # Convert the frequency to the rate parameter for the NMEA I/O
        # command (was an if/elif ladder; sys.exit replaces the bare
        # site-module exit()).
        freq_to_rate = {100: 16, 50: 15, 20: 13, 10: 1, 5: 2}
        rate = freq_to_rate.get(self.freq)
        if rate is None:
          print("Not a valid frequency " + str(self.freq))
          sys.exit()
        # Enable the GGA (message 26) and ZDA (message 29) at that rate.
        data = self.do_DCOL(0x51,[0x7,self.rcvr_port_idx,26,rate,0])
        data = self.do_DCOL(0x51,[0x7,self.rcvr_port_idx,29,rate,0])

    def disable_NMEA(self):
        '''Turn ZDA/NMEA (in fact every output) off on the receiver port.'''
        # DCOL 0x51 subcommand 0x0a with a port index disables all output.
        self.do_DCOL(0x51, [0x0a, self.rcvr_port_idx])

    def pyshark_dummyReader(self):
        '''Thread function to read from host if pyshark is used for timing.

        Keeps draining the NMEA TCP/IP stream (discarding the lines) so
        the remote host keeps sending while pyshark does the timestamping.
        Runs until stop_pyshark() clears io_thread_enabled.'''
        while self.io_thread_enabled:
          self.s.readline()

    def start_pyshark(self):
        '''Start pyshark live capture plus the dummy NMEA TCP/IP reader.'''
        # Port may have the format "/dev/ttyS0" or "socket://10.1.150.x:5018".
        # Only a socket spec can be captured; bail out on a serial port.
        m = re.match( r"socket://([^:]+):(\d+)$", self.port )
        if m is None:
          print("Error: %s is not a socket spec that can be used with pyshark"%(
                self.port) )
          sys.exit(1)

        host = m.group(1)
        port = int( m.group(2) )

        # A dummy I/O thread keeps reading from the remote host so that
        # the timed packets can be observed by pyshark.
        print("pyshark: Create I/O thread ...")
        self.io_thread = threading.Thread( target=self.pyshark_dummyReader )
        self.io_thread_enabled = True

        print("pyshark: Start I/O thread ...")
        self.io_thread.start()

        self.pySharkReader = pySharkReader( self.pySharkInterface, host, port )
      
    def stop_pyshark(self):
        '''Stop the dummy I/O thread used for pyshark timing.

        Note: pyShark spawns two processes, tshark and tcpdump, but only
        reliably kills tshark; tcpdump is sometimes left running (it is
        cleaned up by signal_handler()).'''
        if self.io_thread is None:
          return

        self.io_thread_enabled = False
        print("Waiting for I/O thread to terminate...")
        self.io_thread.join( 3.0 )
        print("I/O thread terminated.")
        self.io_thread = None


    def start(self):
        '''Optionally command NMEA output on, then start pyshark capture.'''
        if self.start_nmea_output:
          self.enable_NMEA()

        # pcap-based timing only when an interface was supplied.
        if self.pySharkInterface is not None:
          self.start_pyshark()

    def stop(self):
        # Stop pyShark packet capture.
        if self.pySharkInterface is not None:
          self.stop_pyshark()

        # Close the "EthernetTime-*" file.
        if not None == self.FileHandle:
          self.FileHandle.close()

        # Stop pyShark packet capture.
        if self.start_nmea_output:
          self.disable_NMEA()

    def do_loop(self):
        '''Main loop - runs until we have an issue with the NMEA data.

        NOTE(review): parse_nmea() returns 0 or 1, never a negative
        value, so in practice this loop only ends via an exception or
        the signal handler.'''
        self.start()

        while self.parse_nmea() >= 0:
          pass

        # Called here for form; in practice shutdown happens through
        # signal_handler().
        self.stop()

    def close(self):
        '''Call this to turn off ZDA and disconnect from the receiver port'''
        # BUG FIX: the original called self.disable_ZDA(), a method that
        # does not exist (its own comment noted this); disable_NMEA() is
        # the method that actually turns the output off.
        self.disable_NMEA()
        self.s.close()

if __name__ == "__main__":
    # Orderly shutdown on Ctrl-C and on "kill PID".
    signal.signal(signal.SIGINT, signal_handler)
    signal.signal(signal.SIGTERM, signal_handler)

    cmdline = argparse.ArgumentParser(
        description=usage,
        formatter_class=argparse.RawDescriptionHelpFormatter)

    cmdline.add_argument("port",
                         help="socket://10.1.150.xxx:5018).  Must be writeable")
    cmdline.add_argument("freq", help="NMEA frequency in Hz")
    cmdline.add_argument("--prefix", help="file prefix")
    cmdline.add_argument("--log_NMEA",
                         help="Log the NMEA data",
                         action="store_true",
                         default=False)
    cmdline.add_argument("--start_nmea_output",
                         help="Send commands to start NMEA output.",
                         action="store_true",
                         default=False)
    # Presence of --pyshark_interface switches timing to pcap timestamps.
    cmdline.add_argument("--pyshark_interface",
                         help="Enables pyshark timing on specified interface")

    args = cmdline.parse_args()

    # "rc" is module-global so signal_handler() can stop it cleanly.
    rc = getLatency(args.port,
                    prefix=args.prefix,
                    log=args.log_NMEA,
                    freq=args.freq,
                    start_nmea_output=args.start_nmea_output,
                    pyshark_interface=args.pyshark_interface)
    rc.do_loop()
