# Copyright 2018 Trimble Inc.
# $Id: relock.py,v 1.4 2019/02/06 01:34:20 stuart Exp $

import matplotlib
# Allow running headless from the command line
matplotlib.use("agg")
# Set to zero to stop Matplotlib ever complaining about the number of open
# figures. However, set to a reasonable number here to warn if it gets too
# wild. It's a warning, it doesn't stop anything.
matplotlib.rcParams.update({'figure.max_open_warning':80})

import sys
import os
import glob
import time
import datetime
import signal
import math
from numpy import *
from pylab import *
from mutils import get_sub_type

# Matplotlib format strings (one colour+marker pair per receiver, indexed
# by rx) so each data set is distinguishable on the combined plots.
plotSymb = ["bo", "ro", "co", "go", "ks", "ys", "b*", "r*"]

def find(x):
  """Return the 1-D indices where *x* is non-zero / True.

  Local replacement for the old pylab ``find`` helper (removed in
  Matplotlib 3.1); equivalent to ``np.where(x)[0]``.
  """
  return np.asarray(x).nonzero()[0]

# Data from multiple locations can be combined using the firstRx and lastRx
# flags. firstRx marks the start of data to be combined, lastRx marks the
# end.
# The Wamel, BD970, BD935 have firstRx = lastRx = True so all data comes
# from a single location.
# The BD940 uses data from two locations.
fileroot = ['/net/teller/mnt/data_drive/T01.150.89',
            '/net/teller/mnt/data_drive/T01.150.186',
            '/net/teller/mnt/data_drive/T01.BD935.4',
            '/net/fermion/mnt/data_drive/T01.BD940.AntSwitch',
            '/net/higgs/mnt/data_drive/T01.BD940Prd.AntSwitch']
rxStr = ['Wamel','BD970','BD935','Invalid','BD940']
firstRx = [True, True, True, True, False]
lastRx  = [True, True, True, False, True]

# The four arrays above are parallel (indexed by receiver number), so they
# must all be the same length; a chained comparison checks every one.
if not (len(fileroot) == len(rxStr) == len(firstRx) == len(lastRx)):
  print("Unexpected configuration array sizes")
  # sys.exit() instead of quit(): quit() comes from the site module and is
  # not guaranteed to exist (e.g. when running with python -S).
  sys.exit(1)


# Signal-type identifiers (RT_SatType.X * 10 + signal group #), accumulated
# across all dates for one receiver.
sigTypes = []
# Human-readable signal name strings, index-aligned with sigTypes.
sigStrs = []
# Per-signal plotting state (figures and axes), index-aligned with sigTypes:
# one pair for the reacquisition plots, one pair for the slip plots.
figList, axisList = [], []
figListSlips, axisListSlips = [], []

# Timestamp layout used when parsing the analysis start date.
datetimeformat = "%Y-%m-%d %H:%M:%S"
# Loop for each receiver data set we want to analyze
for rx in range(len(fileroot)):
  # Fixed analysis window: noon 2018-01-01 up to "now"; delta is the span
  # in (fractional) days.
  start  = datetime.datetime.strptime("2018-01-01 12:00:00",datetimeformat)
  stop   = datetime.datetime.now()
  delta = ((stop-start).total_seconds()/86400)

  # Lists that contain the acquisition statistics.
  # Ordered to match sigTypes; (re)initialized only when this receiver
  # starts a new combined data set (firstRx), so receivers with
  # firstRx == False accumulate into the previous receiver's lists.
  if (firstRx[rx]):
    ninetyFive = []
    ninetyFiveTime = []
    numEpochs = []
    timeRange = []
    dateStr = []
    totalSlips = []
    # Pre-seed one empty sub-list per signal type already discovered by
    # earlier receivers, keeping index alignment with sigTypes.
    for i in range(len(sigTypes)):
      totalSlips.append([])
      ninetyFive.append([])
      ninetyFiveTime.append([])
      numEpochs.append([])
      timeRange.append([])
      dateStr.append([])

  # Loop for each day we want to analyze (ceil so a partial last day is
  # still visited; the C-style cast is just int()).
  for index in range((int)(ceil(delta))):
    day   = start.day
    month = start.month
    year  = start.year

    # Daily file name, e.g. <root>/out-2018-01-01.txt
    dashedDate = str(year) + '-' + str(month).zfill(2) + '-' + str(day).zfill(2)
    filename   = fileroot[rx] + '/out-' + dashedDate + '.txt'

    # List of reacquisition times for a given signal type.
    # Ordered to match sigTypes but cleared for each date.
    reacqTimes = []
    for i in range(len(sigTypes)):
      reacqTimes.append([])

    print(filename)
    first = False
    timeDelta = 0
    if(os.path.isfile(filename)):
      with open(filename,'r') as f:
        for line in f:
          # Whitespace-separated columns:
          # [0]=timestamp [2]=system [3]=freq [4]=sigType [5]=acqTime
          data = line.rstrip().split()
          # Make sure the line of data looks correct
          if(len(data)>=6):
            # Remember the first valid timestamp of the day.
            if(first == False):
              startTime = float(data[0])
              first = True

            endTime = float(data[0])
            system  = int(data[2])
            freq    = int(data[3])
            sigType = int(data[4])
            acqTime = float(data[5])

            # Assume that there'll never be more than 10 groups for any
            # satellite system to get a unique identifier for this data
            ident = system*10 + get_sub_type(system, freq, sigType).group

            # Check if this ident already exists in the lists
            # If not, find insertion point in the lists to sort by ident
            if not(ident in sigTypes):
              ins = 0
              for x in sigTypes:
                if x < ident :
                  ins += 1
                else :
                  break

              # Insert new data in to the lists, keeping every parallel
              # list (and the per-signal figures/axes) index-aligned.
              sigTypes.insert(ins,ident)
              sigStrs.insert(ins,get_sub_type(system, freq, sigType).sigstr)
              reacqTimes.insert(ins,[])
              ninetyFive.insert(ins,[])
              ninetyFiveTime.insert(ins,[])
              numEpochs.insert(ins,[])
              totalSlips.insert(ins,[])
              timeRange.insert(ins,[])
              dateStr.insert(ins,[])
              figList.insert(ins,figure())
              axisList.insert(ins,figList[ins].add_subplot(111))
              figListSlips.insert(ins,figure())
              axisListSlips.insert(ins,figListSlips[ins].add_subplot(111))

            # Append the acquisition time to the existing list
            reacqTimes[sigTypes.index(ident)].append(acqTime)

      # NOTE(review): if the first existing file contains no valid rows,
      # startTime/endTime are never assigned and this raises NameError
      # (later files would silently reuse the previous file's values).
      timeDelta = endTime - startTime

      filename   = fileroot[rx] + '/out-' + dashedDate + '.slips'
      NumSlips = [0] * len(sigTypes)

      # Now read the slip file (same column layout as the out file).
      if(os.path.isfile(filename)):
        with open(filename,'r') as f:
          for line in f:
            data = line.rstrip().split()
            if(len(data)>=6):
              endTime = float(data[0])
              system  = int(data[2])
              freq    = int(data[3])
              sigType = int(data[4])

              # Needs to match the ident calculation in the previous section
              ident = system*10 + get_sub_type(system, freq, sigType).group

              # Check if this ident already exists in the lists.
              # A slip for an unseen signal type is reported, not counted.
              if not(ident in sigTypes):
                print("Missing ident = ", ident)
              else:
                NumSlips[sigTypes.index(ident)] += 1

      # We've processed the current file, find the 95% for each signal
      # type we are considering.
      for i in range(len(sigTypes)):
        # Make sure we have at least half a day of data
        if( len(reacqTimes[i]) > 0 and timeDelta >= int(86400.0 * 0.5) ):
          reacqTimes[i].sort()
          # Index of the 95th-percentile sample in the sorted list.
          index95 = int(0.95 * len(reacqTimes[i]))
          print("%s %d %d %d %.3f %d %.3f" % (sigStrs[i],
                                      timeDelta,
                                      len(reacqTimes[i]),
                                      index95,
                                      reacqTimes[i][index95],
                                      NumSlips[i],
                                      float(NumSlips[i]) / float(len(reacqTimes[i])) ))

          # 95th percentile
          ninetyFive[i].append(reacqTimes[i][index95])
          # Day of year (+1 so the 1st day of the year is 1 and not 0)
          ninetyFiveTime[i].append(index + 1)
          # How many measurements for this data type
          numEpochs[i].append(len(reacqTimes[i]))
          # How many slips did we get for the data set?
          totalSlips[i].append(NumSlips[i])
          # Min to Max time stamp - crude indicator of how much of the
          # day the test covers, note missing data in the middle of the
          # data set is not captured with this metric.
          timeRange[i].append(timeDelta)
          # for book keeping save the date which we'll save to a summary
          # file
          dateStr[i].append(dashedDate)

    start = start + datetime.timedelta(days=1)

  if (lastRx[rx]):
    # We've processed all the data for the current receiver, plot each
    # signal type we've processed
    for i in range(len(sigTypes)):
      figure(figList[i].number)
      plot(array(ninetyFiveTime[i]),array(ninetyFive[i]),plotSymb[rx],label=rxStr[rx])

      figure(figListSlips[i].number)
      # Slips per acquisition attempt for each plotted day.
      slipRate = array(totalSlips[i]).astype(float) / array(numEpochs[i]).astype(float)
      # NOTE(review): 389 is day-of-year counted from 2018-01-01, i.e.
      # only plot slip rates from ~2019-01-24 onwards - presumably when
      # slip logging became trustworthy; confirm the intended cutoff.
      k = find(array(ninetyFiveTime[i]) >= 389)

      if(len(k) > 0):
        plot(array(ninetyFiveTime[i])[k],slipRate[k],plotSymb[rx],label=rxStr[rx])

      # Output the time history, we'll use this in the dashboard
      tmp = sigStrs[i].replace(' ','-')
      fid = open(rxStr[rx] + '-' + tmp + "-reAcq.txt",'w')
      for j in range(len(ninetyFiveTime[i])):
        fid.write("%d %.3f %d %d %s %d %.3f\n" % (ninetyFiveTime[i][j],
                                          ninetyFive[i][j],
                                          numEpochs[i][j],
                                          timeRange[i][j],
                                          dateStr[i][j],
                                          totalSlips[i][j],
                                          float(totalSlips[i][j]) / float(numEpochs[i][j]) ))
      fid.close()

# All receivers processed - finish each per-signal figure and write the PNGs.
for idx, sigName in enumerate(sigStrs):
  fileTag = sigName.replace(' ','-')

  # --- 95th-percentile reacquisition-time figure ---
  figure(figList[idx].number)
  xlabel('Time [Days from 2018-01-01]')
  ylabel('95% Time to Acquire [Sec]')
  title('Re-acquisition ' + sigName)
  grid(True)
  legend()
  tight_layout()
  # Prevent the axis numbers having an offset
  for axis in (axisList[idx].get_xaxis(), axisList[idx].get_yaxis()):
    axis.get_major_formatter().set_useOffset(False)
  show()
  # Save the data as a PNG file
  savefig('TTReacquire-' + fileTag + '.png', dpi=150)

  # --- slips-per-acquisition figure ---
  figure(figListSlips[idx].number)
  xlabel('Time [Days from 2018-01-01]')
  ylabel('Slips per Acquisition')
  title('Re-acquisition Slips ' + sigName)
  grid(True)
  legend()
  tight_layout()
  # Prevent the axis numbers having an offset
  for axis in (axisListSlips[idx].get_xaxis(), axisListSlips[idx].get_yaxis()):
    axis.get_major_formatter().set_useOffset(False)
  show()
  # Save the data as a PNG file
  savefig('TTReacquireSlips-' + fileTag + '.png', dpi=150)


