import sys
import os
import re
import glob
import time
import calendar
import datetime
import signal
import threading
import json
import RXTools
import argparse
import logging
import xmltodict
from logging.handlers import RotatingFileHandler
from flask import Flask, render_template, render_template_string, Markup, Response, request, jsonify, send_from_directory
from ftplib import FTP

# To run from a fresh Anaconda 3.X python install:
# conda install -c conda-forge xmltodict
# conda install -c conda-forge flask-caching
# conda install -c conda-forge flask-compress
#
# You'll also need GPSTools/pythonTools/ (from CVS) in your PYTHONPATH (for RXTools)
#
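# For example (the checkout path below is illustrative):
#   export PYTHONPATH=$PYTHONPATH:/path/to/GPSTools/pythonTools
#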

# Makes sure that the static data doesn't get reloaded - e.g. the graph
# PNG was reloading and flickering on one browser before this was added
try:
  from flask_caching import Cache
  cacheConfig = {
    "DEBUG": True,          # some Flask specific configs
    "CACHE_TYPE": "simple", # Flask-Caching related configs
    "CACHE_DEFAULT_TIMEOUT": 300
  }
  cacheSupported = True
except ImportError:
  cacheSupported = False


# Flask-Compress isn't installed on all systems, so only enable it when
# it is available. It gzips data sent over HTTP
try:
  # This is not installed as standard by the Anaconda python
  # installation. To install:
  #   conda install -c conda-forge flask-compress
  from flask_compress import Compress
  CompressSupported = True
except ImportError:
  CompressSupported = False

try:
  from urllib.request import urlopen
except ImportError:
  from urllib2 import urlopen


# Sleep time in seconds between system data refresh
sysRefreshTime = 1
# Sleep time in seconds between SV data refresh
svRefreshTime  = 1

# Make sure we don't send stale data
svDataTimeout  = 10
sysDataTimeout = 30

# When no requests have occurred for the following number of
# seconds, stop requesting data from the receiver(s)
svTimeout      = 20
stationTimeout = 40

# Set to False by default, you can enable on the command line.
# Note - this can impact latency as it locks the receiver's
# malloc system.
getMemStats = False

# Port on which the service will run - don't run a
# second instance of this script without changing it!
webPort = 81
# We use this framework to get SV tracking data
# from multiple stations and provide it as an
# XML output. If you don't want this feature, set
# this to False
runGetSVData = True
enableServerLogs = True

# Use the current time to initialize a few variables
secondsSysRequest = datetime.datetime.now()
secondsSvRequest = datetime.datetime.now()
ThreadsActive = True

app = Flask(__name__)
if(CompressSupported == True):
  # Print to the terminal as the log hasn't been set up yet
  print("HTTP Compression Installed")
  Compress(app)

if(cacheSupported):
  print("HTTP Cache Installed")
  cache = Cache(app,config=cacheConfig)
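
# With Flask-Caching available, individual views can also be memoized
# explicitly. A minimal sketch (the route below is illustrative, not one
# this app actually serves):
#
#   @app.route("/graph.png")
#   @cache.cached(timeout=300)
#   def graphPng():
#     return send_from_directory(os.path.join(app.root_path, 'static'),
#                                'graph.png', mimetype='image/png')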



def getFileByFTP(filename,ftp,RXNum,remoteName):
  with open(filename, 'wb') as ftpfile:
    if(ThreadsActive):
      startTime = int(time.time())
      s = ftp.retrbinary('RETR ' + remoteName, ftpfile.write) # retrieve file
      if str(s).startswith('226'): # comes from ftp status: '226 Transfer complete.'
        stopTime = int(time.time())
        localSize = os.path.getsize(filename)
        delta = stopTime - startTime
        if(delta == 0):
          delta = 1
        rate = (localSize/delta)/(1024*1024) # use the guarded delta to avoid divide-by-zero
        app.logger.info(  '%s Downloaded %s [size=%d time=%.3f secs, Rate=%.3f MB/sec]' 
                        % (stations[RXNum]['short'], remoteName, localSize, delta, rate))
      else:
        app.logger.info(s) # if error, print retrbinary's return
    else:
      return


# FTP handler
def ftpFiles(RXNum):
  while(ThreadsActive):
    now = time.time()
    if(ThreadsActive == False):
      return

    try:
      ftp = FTP(stations[RXNum]['addr'], timeout = 30)
      ftp.login(stations[RXNum]['user'], stations[RXNum]['pw'])

      ftp.sendcmd('CWD /Internal')
      app.logger.info('Receiver: %s IP: %s' % (stations[RXNum]['short'],stations[RXNum]['addr']))
      h_remote_files = []
      for rfile in ftp.nlst():
        if(rfile.endswith('.T02') or rfile.endswith('.T04')): # Only the T02 or T04's
          tokens = rfile.split()
          fileStr = tokens[-1] + " " + tokens[4]
          h_remote_files.append(fileStr) # populate remote dir list

      app.logger.info(   "%s IP %s has %d T02/T04 files - attempting to sync"
                       %(stations[RXNum]['short'], stations[RXNum]['addr'],len(h_remote_files)) )

      if(args.ReceiverDate == False):
        dateList = []
        # Get a list of the dates from the file names
        for i in range(len(h_remote_files)):
          dateList.append(h_remote_files[i][10:18])
        
        # Get the unique list
        dateList = list(set(dateList))

        # Loop for each day
        for i in range(len(dateList)):
          h_local_files = []

          directory = dateList[i] + '/' + stations[RXNum]['short']
          if not os.path.exists(dateList[i]):
            os.mkdir(dateList[i])

          if not os.path.exists(directory):
            os.mkdir(directory)

          for file_name in os.listdir(directory):
            length = os.path.getsize(directory + '/' + file_name)
            fileStr = file_name + " " + str(length)
            h_local_files.append(fileStr) # populate local dir list

          remote_files = []
          for j in range(len(h_remote_files)):
            if(h_remote_files[j][10:18] == dateList[i]):
              remote_files.append(h_remote_files[j])
          h_diff = sorted(list(set(remote_files) - set(h_local_files))) 

          for h in h_diff:
            if(ThreadsActive == False):
              return

            tokens = h.split()
            filename = os.path.join(directory,tokens[0])

            getFileByFTP(filename,ftp,RXNum,tokens[0])
            if(not ThreadsActive):
              return
      else:
        # Local directory structure is receiverShortName/Date/
        if not os.path.exists(stations[RXNum]['short']):
          os.mkdir(stations[RXNum]['short'])

        h_local_files = []
        for file_name in os.listdir(stations[RXNum]['short']):
          length = os.path.getsize( stations[RXNum]['short'] + '/' + file_name)
          fileStr = file_name + " " + str(length)
          h_local_files.append(fileStr) # populate local dir list

        h_diff = sorted(list(set(h_remote_files) - set(h_local_files))) 
        for h in h_diff:
          if(ThreadsActive == False):
            return

          tokens = h.split()
          # Directory of RXName/Date/
          directory = os.path.join(stations[RXNum]['short'],tokens[0][10:18])
          if not os.path.exists(directory):
            os.mkdir(directory)
          filename = os.path.join(directory,tokens[0])
          getFileByFTP(filename,ftp,RXNum,tokens[0])
          if(not ThreadsActive):
            return

        # End of args.ReceiverDate == True

      ftp.close()
      app.logger.info("Sync complete %s UTC = %s" % (stations[RXNum]['short'],
                                                     datetime.datetime.utcnow().isoformat()) )

    except Exception:
      app.logger.info("FTP download failed Receiver %s IP %s - Check RX!!!!!" % (stations[RXNum]['short'],stations[RXNum]['addr']))

    # End of receiver loop - wait for 60 seconds from when we started the
    # last loop
    delta = 1
    while(ThreadsActive and ((60-delta) > 0.0)):
      newTime = time.time()
      delta = newTime - now 

      if( (60-delta) > 0.0):
        time.sleep(1)

# HTTP handler - we install a thread per receiver. We used to install
# one thread, which is fine if the PC and receivers are almost in sync.
# However, if there are lots of files on the receivers that need
# downloading, a single thread can result in a long lag before
# everything is synchronized. On a local LAN the receivers only deliver
# a maximum of a couple of MB per second; the LAN and PC are much
# faster. Therefore, by pulling data from the receivers in parallel we
# can speed up the overall sync. In steady state the difference is
# negligible.
def httpFiles(RXNum):
  while(ThreadsActive):
    now = time.time()
    if(ThreadsActive == False):
      return

    try:
      dateDirFormat = not args.ReceiverDate
      RXTools.GetLoggedFilesViaHttpNoDisable( stations[RXNum]['addr'], 
                                              stations[RXNum]['user'], 
                                              stations[RXNum]['pw'],
                                              stations[RXNum]['short'],
                                              False,checkLocal=True,flat=False,dateDirFormat=dateDirFormat,timeout=30)
          
      app.logger.info("%s %s Sync complete - %s" % ( stations[RXNum]['short'],
                                                   stations[RXNum]['addr'],
                                                   datetime.datetime.utcnow().isoformat()) )

    except Exception:
      app.logger.info("No route to Receiver %s IP %s - Check RX!!!!!" % (stations[RXNum]['short'],stations[RXNum]['addr']))

    # Wait for 60 seconds from when we started the last loop
    delta = 1
    while(ThreadsActive and ((60-delta) > 0.0)):
      newTime = time.time()
      delta = newTime - now 

      if( (60-delta) > 0.0):
        time.sleep(1)


# This intercepts the web requests; it allows us to filter and
# provides a central place for logging
@app.before_request
def limit_remote_addr():
  if(request.remote_addr == '10.1.187.20'):
    # Block Trimble's scanner which messes up the logs
    return "", 403
  else:
    if(     (not request.url.endswith('svData.json'))
        and (not request.url.endswith('sysData.json')) ):
      # Don't clog the log with the regular web requests
      if(enableServerLogs == True):
        app.logger.info("%s %s" % (request.remote_addr, request.url))

# If we get a 404 (page not found) log an error
@app.errorhandler(404)
def handle404(error):
  if(enableServerLogs == True):
    app.logger.error("%s %s" % (request.remote_addr, request.url))
  return "", 404

@app.route("/")
def main():
  return index()

@app.route('/favicon.ico')
def favicon():
  return send_from_directory(os.path.join(app.root_path, 'static'),
                             'favicon.ico', mimetype='image/vnd.microsoft.icon')

@app.route("/stations.json")
def stationsJson(): # named so the view doesn't shadow the global stations list
  filteredStations = []
  for i in range(len(stations)):
    if(len(groups) == 0):
      filteredStations.append(stations[i])
    elif( int(stations[i]['group']) == groupID):
      filteredStations.append(stations[i])
  jsonData = jsonify(filteredStations)
  return jsonData
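
# Example reset request (illustrative): each query-string key ends in the
# receiver's index within the current group's filtered list and fires when
# its value is 'on', e.g.
#   /ResetReceivers.xml?Reboot0=on&ClearCPU2=on
# reboots receiver 0 and clears the CPU-load statistics of receiver 2.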

@app.route("/ResetReceivers.xml")
def ResetReceivers():
  keys = list(dict(request.args).keys())

  # Get a list of receivers for the current group
  filteredStations = []
  if(len(groups) == 0):
    filteredStations = stations
  else:
    for i in range(len(stations)):
      if( int(stations[i]['group']) == groupID):
        filteredStations.append(stations[i])

  for i in range(len(keys)):
    try:
      rx = int(re.search(r'\d+$', keys[i]).group(0))
    except Exception:
      continue

    reset = (request.args.get(keys[i]) == 'on')

    if(reset):
      app.logger.info("Reset Type " + keys[i] + " Receiver: " + filteredStations[rx]['short'] + " " + filteredStations[rx]['addr']) 
      if(keys[i].startswith('Reboot')):
        RXTools.SoftReset(filteredStations[rx]['addr'], 
                          filteredStations[rx]['user'], 
                          filteredStations[rx]['pw'])
      elif(keys[i].startswith('ClearCPU')):
        cpuResetCmd = "/cgi-bin/resetProcessorLoading.xml?ResetProcessorLoading=1"
        RXTools.SendHttpPostRetry(filteredStations[rx]['addr'], 
                                  cpuResetCmd,
                                  filteredStations[rx]['user'], 
                                  filteredStations[rx]['pw'])
        # irqRate is indexed by position in the global stations list, so map back
        irqRate[stations.index(filteredStations[rx])]['time'] = -1 # Also reset the IRQ rate calculator
      elif(keys[i].startswith('ClearGPSEph')):
        RXTools.DeleteFile(filteredStations[rx]['addr'], 
                           filteredStations[rx]['user'], 
                           filteredStations[rx]['pw'],
                           "/bbffs/gnssData","gpsEph")
        RXTools.SoftReset(filteredStations[rx]['addr'], 
                          filteredStations[rx]['user'], 
                          filteredStations[rx]['pw'])
      elif(keys[i].startswith('ClearGPSAlmEph')):
        RXTools.DeleteFile(filteredStations[rx]['addr'], 
                           filteredStations[rx]['user'], 
                           filteredStations[rx]['pw'],
                           "/bbffs/gnssData","gpsAlm")
        RXTools.DeleteFile(filteredStations[rx]['addr'], 
                           filteredStations[rx]['user'], 
                           filteredStations[rx]['pw'],
                           "/bbffs/gnssData","gpsEph")
        RXTools.SoftReset(filteredStations[rx]['addr'], 
                          filteredStations[rx]['user'], 
                          filteredStations[rx]['pw'])
      elif(keys[i].startswith('ClearData')):
        RXTools.clearGNSSReset(filteredStations[rx]['addr'], 
                               filteredStations[rx]['user'], 
                               filteredStations[rx]['pw'])
      elif(keys[i].startswith('EnableLogging')):
        enableLoggingCmd = "/xml/dynamic/dataLogger.xml?enable=DEFAULT"
        RXTools.SendHttpPostRetry(filteredStations[rx]['addr'], 
                                  enableLoggingCmd,
                                  filteredStations[rx]['user'], 
                                  filteredStations[rx]['pw'])
      elif(keys[i].startswith('DisableLogging')):
        disableLoggingCmd = "/xml/dynamic/dataLogger.xml?disable=DEFAULT"
        RXTools.SendHttpPostRetry(filteredStations[rx]['addr'], 
                                  disableLoggingCmd,
                                  filteredStations[rx]['user'], 
                                  filteredStations[rx]['pw'])
      else:
        app.logger.info("Unhandled Reset")

  return("OK")


@app.route("/index.html")
def index():
  global groupID

  arg = request.args.get('group')
  if(arg is None):
    groupID = 1
  else:
    groupID = int(arg)

  #print(groupID) # debug

  webStr = ("{% extends 'menu.html' %}")
  webStr = webStr + ("{% block main %}")

  webStr = webStr + ("<section id='content'>")
  webStr = webStr + ("<div class='col'>")
  webStr = webStr + "<div class='col-sm-12'>"

  webStr += ("<p>GNSS Receiver Monitoring</p>")
  webStr += ("<span id='SystemStatus'></span>")
  webStr += ("<br/>")

  # If the JSON station file was the legacy format, we don't have a
  # group filter
  if(len(groups) > 0):
    # Script to handle the pull downs
    webStr += ' <script>'
    webStr += ' function ChangeGroup() {'
    webStr += ' var x = document.getElementById("group").value;'
    webStr += ' link = "index.html?group=" + x;'
    webStr += ' window.open(link, "_self");'
    webStr += ' }'
    webStr += ' </script>'

    webStr += ' <label for="stations">Group:</label>'
    webStr += ' <select name="group" id="group" onchange="ChangeGroup()">'
  
    for i in range(len(groups)):
      if(groupID == int(groups[i]['group'])):
        webStr += ' <option value="' + str(groupID) + '" selected="selected">' + groups[i]['name'] + '</option>'
      else:
        webStr += ' <option value="' + str(groups[i]['group']) + '">' + groups[i]['name'] + '</option>'
    webStr += ' </select> '

  webStr += ("<br/>")

  webStr += ("<form name='theForm'>")
  webStr += ("<div id='tabSpan'></div>")
  webStr += ("<div id='trackingSpan'></div>")
  webStr += ("</form>")
  webStr += ("<div id='overall'></div>")
  webStr += ("<br/>")

  webStr = webStr + ("</div></div>")

  webStr = webStr + ("</section>")

  webStr = webStr + ("{% endblock %}")
  webStr = webStr + ("{% block userscript %}")

  webStr = webStr + ("<script src='static/js/app.js'></script>")
  webStr += "<link href='static/trimbleRX.css' rel='stylesheet'>"
  webStr += "<script language='Javascript' src='static/jquery.js'></script>"
  webStr += "<script language='JavaScript' src='static/monitorNew.js'></script>"
  webStr += "<script language='JavaScript' src='static/coreAjax.js'></script>"


  webStr = webStr + ("<script>init();</script>")
  
  webStr = webStr + ("{% endblock %}")
  return render_template_string(Markup(webStr))
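
# For reference, the template string above assumes a 'menu.html' that
# defines 'main' and 'userscript' blocks, along these lines (a minimal
# sketch only, not the actual template shipped with this app):
#
#   <html><head>...</head><body>
#     {% block main %}{% endblock %}
#     {% block userscript %}{% endblock %}
#   </body></html>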


def signal_handler(sig, frame): # 'sig' so the parameter doesn't shadow the signal module
  global ThreadsActive
  print('You pressed Ctrl+C!')
  ThreadsActive = False

  if(runGetSVData == True):
    time.sleep(5)
    for i in range(len(threadsSVdata)):
      threadsSVdata[i].join()
    for i in range(len(threadsSYSdata)):
      threadsSYSdata[i].join()

    if( (enableFTP == True) or (enableHTTP == True) ):
      for i in range(len(downloadThreads)):
        downloadThreads[i].join()
  
  print("Exiting")
  sys.exit(0)


def workerSv(num):
  while(ThreadsActive == True):
    resp = []
    global secondsSvRequest

    now = datetime.datetime.now()
    delta = (datetime.datetime.now() - secondsSvRequest).total_seconds()

    # When processing multiple groups of data, we keep track of when users
    # last requested each group. If a group hasn't been requested for a
    # while we don't need to get the data from the receiver; this reduces
    # the python script load.
    if( groupID is not None):
      # Get the group ID for this station
      index = int(stations[num]['group'])
      # How long ago since this group was requested?
      delta2 = (now - groupRequestTime[index]).total_seconds()
      if(delta2 > delta):
        delta = delta2 # Bump the delta
    
    #print("%d SV-delta = %f %s" % (num,delta,stations[num].get("addr")))
    try:
      if( delta < svTimeout ):
        resp = RXTools.SendHttpGet( stations[num].get("addr"),
                             '/xml/dynamic/svData.xml',
                             stations[num].get("user"),
                             stations[num].get("pw"),
                             verbose=False)
        if(resp):
          data = xmltodict.parse(resp)
          # Strip the fields we don't display, to keep the JSON small.
          # pop() with a default so a missing key isn't an error
          for sv in data['svTrack']['sv']:
            for key in ('chan','azi','doppler','iode','ura','freq','res','wgt'):
              sv.pop(key, None)

          jsonSVData[num] = data
          jsonSVDataTime[num] = datetime.datetime.now()

      time.sleep(svRefreshTime)

    except Exception:
      # Sleep for a second and try again
      time.sleep(1)
  return
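
# For reference, xmltodict turns the receiver's svData.xml into nested
# dicts roughly like the sketch below (only the field names stripped
# above are shown; '...' marks fields that survive into jsonSVData):
#
#   {'svTrack': {'sv': [ {'chan': ..., 'azi': ..., 'doppler': ...,
#                         'iode': ..., 'ura': ..., 'freq': ...,
#                         'res': ..., 'wgt': ..., ...}, ... ]}}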

def workerSys(num):
  while(ThreadsActive == True):
    resp = []
    global secondsSysRequest

    now = datetime.datetime.now()
    delta = (datetime.datetime.now() - secondsSysRequest).total_seconds()

    # When processing multiple groups of data, we keep track of when users
    # last requested each group. If a group hasn't been requested for a
    # while we don't need to get the data from the receiver; this reduces
    # the python script load.
    if( groupID is not None):
      # Get the group ID for this station
      index = int(stations[num]['group'])
      # How long ago since this group was requested?
      delta2 = (now - groupRequestTime[index]).total_seconds()
      if(delta2 > delta):
        delta = delta2 # Bump the delta
    
    #print("%d SYS-delta = %f %s" % (num,delta,stations[num].get("addr")))
    try:
      if( delta < stationTimeout):
        try:
          resp = RXTools.SendHttpGet( stations[num].get("addr"),
                               '/xml/dynamic/sec_merge.xml?sysData=&powerData=&posData=&power=&pos=&options=&config=&time=&dataLogger=&errLog=',
                               stations[num].get("user"),
                               stations[num].get("pw"),
                               verbose=False)
        except Exception:
          try:
            resp = RXTools.SendHttpGet( stations[num].get("addr"),
                                 '/xml/dynamic/merge.xml?sysData=&powerData=&posData=&power=&pos=&options=&config=&time=&dataLogger=&errLog=',
                                 stations[num].get("user"),
                                 stations[num].get("pw"),
                                 verbose=False)
          except Exception:
            app.logger.info("Exception Task(SYS) %d:%s" % (num, stations[num].get("long")))
            irqRate[num]['time'] = -1
        
        if(resp):
          sysData = xmltodict.parse(resp)
          
          # We only want the reserve CPU from the SPA data. As the SPA XML
          # is quite large, parse it to get the information we want, then
          # erase it from the dict() using pop(). Note that for recent
          # releases the SPA data is available within the dataLogger data
          # section!
          count = 0
          res   = 0
          sysData['CPURes'] = '-' # Until we determine otherwise
          if(logSPA):
            if("spa" in sysData['data']['dataLogger'].keys()):
              for i in range(len(sysData['data']['dataLogger']['spa']['spa']['task'])):
                if(sysData['data']['dataLogger']['spa']['spa']['task'][i]['id'] == 'RES'):
                  count += 1
                  res += float(sysData['data']['dataLogger']['spa']['spa']['task'][i]['load'])
              if(count > 0):
                sysData['CPURes'] = res/count

          # Default the latency fields to '-' and override when data is available
          sysData['latencyMean'] = '-'
          sysData['latencyMax'] = '-'
          if(logLatency):
            latencyTotal = 0
            latencySum   = 0
            latencyMax   = 0
            if(     "spa" in sysData['data']['dataLogger'].keys()
                and "latency" in sysData['data']['dataLogger']['spa']['spa'].keys()):
              for i in range(len(sysData['data']['dataLogger']['spa']['spa']['latency'])):
                count = int(sysData['data']['dataLogger']['spa']['spa']['latency'][i]['count'])
                if(count > 0):
                  latencyTotal += count
                  latencySum   += count * float(sysData['data']['dataLogger']['spa']['spa']['latency'][i]['avg'])
                  epochMax      = float(sysData['data']['dataLogger']['spa']['spa']['latency'][i]['peak'])
                  if(epochMax > latencyMax):
                    latencyMax = epochMax
              if(latencyTotal > 0):
                sysData['latencyMean'] = latencySum / float(latencyTotal)
                sysData['latencyMax'] = latencyMax


          # Erase it - along with other large blocks of data
          if("spa" in sysData['data']['dataLogger'].keys()):
            sysData['data']['dataLogger'].pop("spa")    
          
          if("stats" in sysData['data']['dataLogger'].keys()):
            sysData['data']['dataLogger'].pop("stats")    
          
          if("referenceStation" in sysData['data']['pos'].keys()):
            sysData['data']['pos'].pop("referenceStation") # A lot of data, so
          
          if("rtk" in sysData['data']['pos'].keys()):
            sysData['data']['pos'].pop("rtk") # A lot of data, so
          
          if("clock" in sysData['data']['pos'].keys()):
            sysData['data']['pos'].pop("clock") # A lot of data, so
          
          if("SvsUsed" in sysData['data']['pos']['position'].keys()):
            sysData['data']['pos']['position'].pop("SvsUsed") # A lot of data, so remove    
          
          sysData['errors'] = '-'
          sysData['warnings'] = '-'
          if("errLog" in sysData['data'].keys()):
            if("entry" in sysData['data']['errLog'].keys()):
              warnings = 0
              errors   = 0
              if(sysData['data']['errLog']['numEntries'] == '1'):
                # Convert to int and mask the appropriate bits
                flag = int(sysData['data']['errLog']['entry']['flags']) & 3
                if( flag == 3 ):
                  warnings = 1
                else:
                  errors   = 1
              else:
                for i in range(int(sysData['data']['errLog']['numEntries'])):
                  flag = int(sysData['data']['errLog']['entry'][i]['flags']) & 3
                  if( flag == 3 ): 
                    warnings += 1
                  else:
                    errors   += 1
              sysData['errors'] = errors
              sysData['warnings'] = warnings

            sysData['data'].pop("errLog") # A lot of data, so

          if(getMemStats):
            # The memory information cannot be obtained via merge, therefore
            # request the page separately. Extract what we want and add to
            # the dict()
            resp = RXTools.SendHttpGet( stations[num].get("addr"),
                                 '/xml/dynamic/dyn_mem_info.xml',
                                 stations[num].get("user"),
                                 stations[num].get("pw"),
                                 verbose=False)
            data = xmltodict.parse(resp)
            sysData['totalMemUsed'] = int(data['data']['totalUsed'])
            sysData['totalMemFree'] = int(data['data']['totalFree'])
            totalMem = sysData['totalMemUsed'] + sysData['totalMemFree']
            if(sysData['CPURes'] == '-'):
              res = sysData['CPURes']
            else:
              res = '{:.2f}'.format(float(sysData['CPURes']))
            if(sysData['latencyMean'] == '-'):
              print("%s - - %s %d %d %d %d" % 
                    (res,
                     stations[num]['short'],
                     totalMem,
                     512*1024*1024 - totalMem,
                     sysData['totalMemUsed'],
                     sysData['totalMemFree']))
            #else:
            #  print("%.2f %.2f %.0f %s %d %d %d %d" % 
            #        (float(sysData['CPURes']),
            #         sysData['latencyMean'],
            #         sysData['latencyMax'],
            #         stations[num]['short'],
            #         totalMem,
            #         512*1024*1024 - totalMem,
            #         sysData['totalMemUsed'],
            #         sysData['totalMemFree']))
          else:
            sysData['totalMemUsed'] = 'NaN'
            sysData['totalMemFree'] = 'NaN'
            if(sysData['CPURes'] == '-'):
              print(" -    -    -   %s" % (stations[num]['short']))
            elif(sysData['latencyMean'] == '-'):
              print("%.2f    -    - %s" % (float(sysData['CPURes']), stations[num]['short']))
            else:
              print("%.2f %.2f %.0f %s" % (float(sysData['CPURes']), sysData['latencyMean'], sysData['latencyMax'], stations[num]['short']))

          if(logISR):
            # Now get interrupt information. Store the starting count and
            # time, then diff against the current count and time to get the
            # rate (e.g. 600000 more interrupts after 60 seconds gives an
            # irqRate of 10000/sec)
            try:
              resp = RXTools.SendHttpGet( stations[num].get("addr"),
                                 '/xml/dynamic/irq_info.xml',
                                 stations[num].get("user"),
                                 stations[num].get("pw"),
                                 verbose=False)
              irqData = xmltodict.parse(resp)
              irqCount = 0
              for i in range(len(irqData['data']['irq'])):
                if('FECount' in irqData['data']['irq'][i].keys()):
                  irqCount += int(irqData['data']['irq'][i]['FECount'])
              
              currTime = calendar.timegm(time.gmtime())
              if( (irqRate[num]['time'] == -1) or (currTime < irqRate[num]['time']) ):
                irqRate[num]['time'] = currTime
                irqRate[num]['start'] = irqCount
                sysData['irqRate'] = '-'
              else:
                sysData['irqRate'] = (irqCount - irqRate[num]['start']) / (currTime - irqRate[num]['time'])
            except Exception:
              sysData['irqRate'] = 'NaN'
              irqRate[num]['time'] = -1
              app.logger.info("Exception Task(IRQ) %d:%s" % (num, stations[num].get("long")))
          else:
            sysData['irqRate'] = '-'

          # Store the dict() for later use (we'll convert to JSON and
          # output)
          jsonSYSData[num] = sysData
          jsonSYSDataTime[num] = datetime.datetime.now()

          #print 'Worker-SYS[%d]: %s got response Active = %d' % (num,stations[num].get("long"),ThreadsActive)

      time.sleep(sysRefreshTime)
    except Exception:
      # Try again in a second
      time.sleep(sysRefreshTime)
  return

@app.route("/svData.json")
def svData():
  global secondsSvRequest
  global groupRequestTime

  output = []
  for i in range(len(jsonSVData)):
    delta = (datetime.datetime.now() - jsonSVDataTime[i]).total_seconds()
    if( (len(jsonSVData[i]) > 0)  and (delta < svDataTimeout) ):
      if( (groupID is None) or (len(groups) == 0) or (int(stations[i]['group']) == groupID) ):
        jsonSVData[i]['name'] = stations[i]['long']
        output.append( jsonSVData[i] )
  
  # Get the time of the request
  now = datetime.datetime.now()  
  secondsSvRequest = now
  if( groupID is not None ):
    groupRequestTime[groupID] = now

  jsonData = jsonify(output)
  return jsonData

@app.route("/sysData.json")
def sysData():
  global secondsSysRequest

  output = []
  for i in range(len(jsonSYSData)):
    delta = (datetime.datetime.now() - jsonSYSDataTime[i]).total_seconds()
    if( (len(jsonSYSData[i]) > 0)  and (delta < sysDataTimeout) ):
      jsonSYSData[i]['name'] = stations[i]['long']
      output.append( jsonSYSData[i] )

  # Get the time of the request
  secondsSysRequest = datetime.datetime.now()  

  jsonData = jsonify(output)
  return jsonData

if __name__ == "__main__":
  ######################################################################
  # Parse arguments
  parser = argparse.ArgumentParser(description='Web Application providing receiver status')
  parser.add_argument('-s','--stations', help='Filename of the station JSON e.g. --stations global.json')
  parser.add_argument('-p','--port', help='Optional HTTP port, by default 81 e.g. --port 80')

  parser.add_argument('-b','--svRate', help='Optionally control how frequently (in seconds) we request SV data (default 1) e.g. --svRate 1')
  parser.add_argument('-y','--sysRate', help='Optionally control how frequently (in seconds) we request System data (default 1) e.g. --sysRate 1')

  parser.add_argument('-f','--FTP', help='Sync data via FTP', action="store_true")
  parser.add_argument('-w','--HTTP', help='Sync data via HTTP', action="store_true")
  parser.add_argument('-m','--Memory', help='Monitor the receiver memory', action="store_true")
  parser.add_argument('-c','--CPU', help='Monitor the receiver\'s reserve CPU load', action="store_true")
  parser.add_argument('-l','--Latency', help='Monitor the PM layer latency', action="store_true")
  parser.add_argument('-i','--Interrupt', help='Monitor the number of ISRs per second', action="store_true")
  parser.add_argument('-r','--ReceiverDate', help='Set = ReceiverName/Date for download.  Not set = Date/ReceiverName (use with --FTP or --HTTP)', action="store_true")
  args = parser.parse_args()
  ######################################################################
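
  # Typical invocation (a sketch - the script and station file names are
  # illustrative):
  #   python webApp.py --stations global.json --port 80 --HTTP --CPU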

  if(args.svRate):
    svRefreshTime = int(args.svRate)

  if(args.sysRate):
    sysRefreshTime = int(args.sysRate)

  if(args.FTP):
    enableFTP = True
  else:
    enableFTP = False
  
  if(args.Memory):
    getMemStats = True
  else:
    getMemStats = False

  if(args.CPU):
    logSPA = True
  else:
    logSPA = False

  if(args.Interrupt):
    logISR = True
  else:
    logISR = False

  if(args.Latency):
    logLatency = True
  else:
    logLatency = False

  if(args.HTTP):
    enableHTTP = True
  else:
    enableHTTP = False

  if(enableHTTP and enableFTP):
    print("Both HTTP and FTP are set - only one is supported at a time")
    sys.exit(1)

  # Load the list of stations
  if(args.stations):
    with open(args.stations,'r') as f: 
      data = json.load(f)

    stations = []
    groups = []
    gotStations = False
    # New JSON format
    for i in range(len(data)):
      if('groupDef' in data[i]):
        groups = data[i]['groupDef']
      elif('RX' in data[i]):
        stations = data[i]['RX']
        gotStations = True
    
    # Old format
    if(gotStations == False):
      stations = data
  else:
    print('A station JSON file is required, e.g. --stations global.json')
    sys.exit(1)
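
  # For reference, the "new" station JSON format parsed above looks roughly
  # like this (field names taken from the lookups in this script; the
  # values are illustrative only):
  #
  #   [ {"groupDef": [ {"group": "1", "name": "Lab"} ]},
  #     {"RX": [ {"short": "RX01", "long": "Lab Receiver 1",
  #               "addr": "10.0.0.1", "user": "admin", "pw": "password",
  #               "group": "1"} ]} ]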

  longName = []
  for i in range(len(stations)):
    longName.append(stations[i]['long'])

  # By changing to a set and back to a list 
  # we get a unique list
  longUnique = list(set(longName))
  if(len(longUnique) != len(longName)):
    # long name should be unique
    print('Exiting - Long name needs to be unique')
    # When we create a unique list the order will change.
    # Therefore sort both the original and unique lists
    # so they are in the same order and it is easy to
    # find the duplicates
    print('List of input names\n',sorted(longName))
    print('List of unique input names\n',sorted(longUnique))
    duplicates = list(set([x for x in longName if longName.count(x) > 1]))
    print('List of duplicate names\n',sorted(duplicates))
    sys.exit(10)

  now = datetime.datetime.now()
  groupRequestTime = []
  if(len(groups) > 0):
    groupID = int(groups[0]['group'])

    # Find the maximum group ID
    maxID = 0
    for i in range(len(groups)):
      if(int(groups[i]['group']) > maxID):
        maxID = int(groups[i]['group'])

    # Setup the list so we can directly index via the group setting
    for i in range(maxID + 1):
      groupRequestTime.append(now)
  else:
    groupID = None
    groupRequestTime.append(now)

  if(args.port):
    webPort = int(args.port)

  signal.signal(signal.SIGINT, signal_handler)
  print("Setup CTRL-C handler")
  if(runGetSVData == True):
    threadsSVdata = []
    threadsSYSdata = []
    jsonSVData = []
    jsonSYSData = []
    jsonSVDataTime = []
    jsonSYSDataTime = []
    irqRate = []
    # Kick off the threads
    downloadThreads = []
    for i in range(len(stations)):
      # Holds the XML from the receiver
      jsonSVData.append(dict())
      jsonSYSData.append(dict())
      irqRate.append({'time':-1, 'start':0}) # 'start' holds the IRQ count at the start of the window
      # Holds the time we received the XML from the receiver;
      # initialized to a time in the past
      jsonSVDataTime.append(datetime.datetime(2010,1,1,0,0,0))
      jsonSYSDataTime.append(datetime.datetime(2010,1,1,0,0,0))

      t = threading.Thread(target=workerSv, args=(i,))
      threadsSVdata.append(t)
      t.start()
      t = threading.Thread(target=workerSys, args=(i,))
      threadsSYSdata.append(t)
      t.start()
        
      if(enableHTTP == True):
        t = threading.Thread(target=httpFiles, args=(i,))
        downloadThreads.append(t)
        t.start()
      elif(enableFTP == True):
        t = threading.Thread(target=ftpFiles, args=(i,))
        downloadThreads.append(t)
        t.start()

  if(enableServerLogs == True):
    formatter = logging.Formatter(
          "[%(asctime)s] {%(pathname)s:%(lineno)d} %(levelname)s - %(message)s")
    handler = RotatingFileHandler('webApp.log', maxBytes=(1024*1024), backupCount=128)
    handler.setLevel(logging.INFO)
    handler.setFormatter(formatter)
    app.logger.addHandler(handler)
    app.logger.setLevel(logging.INFO)
    log = logging.getLogger('werkzeug')
    log.setLevel(logging.INFO)
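
    # A line written by the formatter above looks like this (illustrative):
    #   [2024-01-01 12:00:00,000] {webApp.py:123} INFO - 10.0.0.5 http://host/index.html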

  
  try:
    app.run(host='0.0.0.0',port=webPort,threaded=True)
  except Exception:
    sys.exit(1)


