#!/usr/bin/env python

#######################################################
# Copyright Trimble Inc 2021
# Purpose: For plotting out useful plots for record 35:19 from multiple T0x files
#          Usually used for reboot tests that result in multiple T0x files over
#          a long period of time
#
# Plots:
# - Individual CDF and time-series plots for acquisition times for each satellite system
# - A summary plot for average number of signals tracked over time
#
# Usage example:
# - plot_rec35-19_Meas_multi.py "*.T04"
# - plot_rec35-19_Meas_multi.py "*.T04" --label "Alloy FW 6.10"
#
#
# Contributor(s): Joshua_McLean@Trimble.com, 
#######################################################

import argparse
import os
import sys

import glob
import matplotlib
import mutils as m
import numpy as np

import mutils.RTConst as RTConst

from matplotlib.font_manager import (FontProperties)

from matplotlib.ticker import (MultipleLocator, FormatStrFormatter,
                               AutoMinorLocator)

# Command-line interface.
# NOTE: the description previously said "record 35:2"; this script processes
# record 35:19 (see the file header and all plot titles).
parser = argparse.ArgumentParser(description='Generate useful .png plots for record 35:19 for multiple T0x files')
parser.add_argument('pattern', help='T0x file search pattern, i.e. "*.T04"')
parser.add_argument('--label', help='Label to include in plot(s)', default=None, type=str)
parser.add_argument('--fig', help='Output plots in Matlab fig instead of PNG', action='store_true')
parser.add_argument('--signals_max_x', help='Max x value in seconds for the signals_tracked cdf plot', default=120, type=int)
args = parser.parse_args()

# Allow running headless from the command line: select the non-interactive
# "agg" backend unless interactive figure output was requested.
if not args.fig:
    matplotlib.use("agg")

t0x_file_pat = args.pattern
signalsMaxX = args.signals_max_x

secondRolloverStatus = 0        # 0 = normal; set to 1 once second > 600,000 (approaching end-of-week rollover)
secondOffset = 0                # cumulative offset added after each detected GPS week rollover (604800 s per week)
maxGapTime = 150                # max allowed gap in seconds between T04 files (to detect missed reboot)
averageSignalTrackedYMax = 50   # y-axis upper limit for the average-signals-tracked plot
lastFileEndGPSTime = -1         # last GPS time of the previous file (to calc gap duration)
elevCutOff = 20                 # elevation cut off in degrees; SVs below this are ignored for lock-time stats
fileNum = 0                     # file counter (decremented again when a file is skipped)
uniqueTrackIds = []             # unique track ids seen across all files
dataAvg = {}                    # file num -> unique track id -> computed average time-to-lock
dataAcqCombined = {}            # unique track id -> combined list of time-to-lock samples from all files
gapDurations = []               # per-file gap durations; used to compute min/avg/max gap between files
deltaTrackData = {}             # file num -> delta time since reboot (seconds) -> list of sv id + unique track id

# Satellite systems to process, as (RTConst satellite-type id, display name).
svTypeList = (
    (RTConst.RT_SatType_GPS, "GPS"),
    (RTConst.RT_SatType_GLONASS, "GLONASS"),
    (RTConst.RT_SatType_GALILEO, "Galileo"),
    (RTConst.RT_SatType_BEIDOU_B1GEOPhs, "BeiDou"),
    (RTConst.RT_SatType_QZSS, "QZSS"),
    (RTConst.RT_SatType_SBAS, "SBAS"),
    (RTConst.RT_SatType_IRNSS, "IRNSS"))

# Main pass: decode every matching T0x file (in sorted order) and accumulate
# time-to-first-lock and signals-tracked statistics across reboot sessions.
for filename in sorted(glob.glob(t0x_file_pat)):

    # Decode record 35:19 translated into record 27 layout, decimated.
    # The direct 35:19 decode was replaced by the rec27 translation:
    #(ref,k)=m.vd2arr(filename,rec='-d35:19 --dec=1000')
    (ref,k)=m.vd2arr(filename,rec='-d27 --translate_rec35_sub19_to_rec27 --dec=1000')

    fileNum += 1
    skipFile = False
    firstLockTracker = {}   # uniqueSvAndTrackId -> 1 once the first lock was recorded

    fileData = {}           # uniqueTrackId -> list of time-to-first-lock values for this file

    # Column views into the decoded record (rec27 key layout, see below).
    seconds       = ref[:,k.TIME]
    svIDs         = ref[:,k.SV]
    svTypes       = ref[:,k.SAT_TYPE]   # https://wiki.eng.trimble.com/index.php/SV_System
    svFreqBands   = ref[:,k.FREQ]       # http://wiki.eng.trimble.com/index.php/SV_Frequency_Band
    svTrackTypes  = ref[:,k.TRACK]      # https://wiki.eng.trimble.com/index.php/SV_Signal
    svNOs         = ref[:,k.CNO]
    svFlags       = ref[:,k.SV_FLAGS]   # https://wiki.eng.trimble.com/index.php/Dat_file_format#Measurement_header
    svMeasFlags   = ref[:,k.MEAS1]
    svElevs       = ref[:,k.EL]

    #### Record 35:19 keys (for reference) ####
    # % key.WN = 1;         %  GPS Week Number
    # % key.TIME = 2;       %  GPS Time [Sec]
    # % key.CLKBIAS = 3;    %  Clock Offset [ms]
    # % key.RAIM = 4;       %  RAIM INFO
    # % key.SV = 5; % SV ID
    # % key.FDMA = 6; % FDMA Channel (GLONASS)
    # % key.channel = 7; % Receiver Channel
    # % key.SAT_TYPE = 8; % Satellite Type
    # % key.SV_FLAGS = 9; % Satellite Flags
    # % flag.dSV_FLAGS_DELETED = 1;
    # % flag.dSV_FLAGS_UNHEALTHY = 2;
    # % flag.dSV_FLAGS_RAIM_ERR = 4;
    # % flag.dSV_FLAGS_PHS_SMOOTH = 8;
    # % flag.dSV_FLAGS_CODE_SMOOTH = 16;
    # % flag.dSV_FLAGS_VAR_CODE_SMOOTH = 32;
    # % flag.dSV_FLAGS_FAULT_MM_RAIM = 64;
    # % flag.dSV_FLAGS_FAULT_WLS_RAIM = 128;
    # % flag.dSV_FLAGS_FAULT_KF_RAIM = 256;
    # % flag.dSV_FLAGS_FAULT_SBAS_MSG = 512;
    # % flag.dSV_FLAGS_FAULT_CNAV = 1024;
    # % flag.dSV_FLAGS_FAULT_FFT_CORR = 2048;
    # % flag.dSV_FLAGS_FAULT_RTX_POS = 4096;
    # % flag.dSV_FLAGS_FAULT_SYS_RAIM = 8192;
    # % key.AZ = 10; % Azimuth [deg]
    # % key.EL = 11; % Elevation [deg]
    # % key.FREQ = 12; % blockType = Frequency Band
    # % key.TRACK = 13; % trackType = Track Type
    # % key.PHASE = 14; % Phase [Cycles]
    # % key.RANGE = 15; % Pseudorange [m]
    # % key.DOPP = 16; % Doppler [Hz]
    # % key.CNO = 17; % C/No [dBHz]
    # % key.SLIP_CNT = 18; % Cycle Slip Counter
    # % key.MEAS = 19; % Measurement Flags
    # % flag.dMEAS_PHASE_VALID = 1;
    # % flag.dMEAS_DOPP_VALID = 2;
    # % flag.dMEAS_RANGE_VALID = 4;
    # % flag.dMEAS_LOCK_POINT = 8;
    # % flag.dMEAS_POS_LOCK_POINT = 16;
    # % flag.dMEAS_XCORR_WAIT = 32;
    # % flag.dMEAS_UNHEALTHY = 64;
    # % flag.dMEAS_MULTIPATH = 128;
    # % flag.dMEAS_SLIP = 256;
    # % flag.dMEAS_MASTER_SUB = 512;
    # % flag.dMEAS_REC27 = 1024;
    # % flag.dMEAS_JAMMER = 2048;
    # % flag.dMEAS_CODE_PULLIN = 4096;
    # % flag.dMEAS_SCINT_LO = 8192;
    # % flag.dMEAS_SCINT_HI = 16384;
    # % key.ANTENNA = 20; % Antenna #
    # % key.CSTATE = 21; % CSTATE in rec35:19 - not st_iface*.h value

    ### Notes from William Lentz about which flags to use ####
    # For  RTK integer ambiguity processing use mRec35sub19_MeasFlags_LockPointKnown
    #  which implies mRec35sub19_MeasFlags_PhaseValid and mRec35sub19_MeasFlags_RangeValid is set
    #  For code-only processing then use mRec35sub19_MeasFlags_RangeValid
    #  do also check mRec35sub19_SvFlags_Unhealthy and mRec35sub19_SvFlags_RaimFault

    #### Record 27 keys (the layout actually decoded above) ####
    # %key.TIME = 1;       %  GPS Time [Sec]
    # % key.CLKBIAS = 2;    %  Clock Offset [ms]
    # % key.GPSGLNBIAS = 3; %  GPS-GLONASS clock offset [ms]
    # % key.RAIM = 4;       %  RAIM INFO
    # % key.EPOCH = 5; % Epoch Flags (Deleted, System offset present, RAIM info present)
    # % key.SV = 6; % SV ID
    # % key.FDMA = 7; % FDMA Channel (GLONASS) otherwise the hardware channel number
    # % key.SAT_TYPE = 8; % Satellite Type
    # % key.SV_FLAGS = 9; % Satellite Flags (Deleted, Multi-path, Code smoothing, Phase smoothing, healthy, RAIM fault)
    # % key.AZ = 10; % Azimuth [deg]
    # % key.EL = 11; % Elevation [deg]
    # % key.FREQ = 12; % Frequency Band
    # % key.TRACK = 13; % Track Type
    # % key.PHASE = 14; % Phase [Cycles]
    # % key.RANGE = 15; % Pseudorange [m]
    # % key.DOPP = 16; % Doppler [Hz]
    # % key.CNO = 17; % C/No [dBHz]
    # % key.SLIP_FLG = 18; % Cycle Slip Flag
    # % key.SLIP_CNT = 19; % Cycle Slip Counter
    # % key.MEAS1 = 20; % Measurement Flags (Phase, Range, Doppler, Cycle slip, Half-cycle, Lock point known, Positive lock point)
    # % key.ANTENNA = 21; % Antenna #
    # % key.MEAS2 = 22; % Measurement Flags 2
    # % key.MEAS3 = 23; % Measurement Flags 3
    # % key.SVFLAGS2 = 24; % SV Flags2

    firstSecond = -1        # GPS time of the first epoch in this file
    lastSecond = -1         # GPS time of the last epoch in this file
    epochIndex = -1
    prevSecond = -1
    gapDurationSec = -1     # gap between end of previous file and start of this one

    if fileNum not in deltaTrackData and fileNum != 1:
        deltaTrackData[fileNum] = {}

    for index in range(0, len(seconds), 1):
        second = seconds[index]

        # A rollover must have occurred (status was armed and time-of-week wrapped)
        if (secondRolloverStatus == 1 and second < 100_000):
            secondRolloverStatus = 0
            secondOffset += 604800
            print ("GPS second rollover detected. secondOffset is now %d" % secondOffset)

        # We're getting close to a GPS end-of-week rollover:
        # change the status to 1, and then if a later second value
        # is less than 100_000 a rollover must have occurred.
        # We have to do it like this as we don't have
        # the GPS week number in rec 27.
        if (secondRolloverStatus == 0 and second > 600_000):
            secondRolloverStatus = 1

        second += secondOffset
        
        if (firstSecond == -1):
            firstSecond = second

        if (lastSecond  == -1):
            lastSecond = seconds[len(seconds) - 1] + secondOffset

        # First epoch of this file: compute the gap to the previous file
        # and decide whether this file should be processed at all.
        if (gapDurationSec == -1):
            gapDurationSec = firstSecond - lastFileEndGPSTime
            lastFileEndGPSTime = lastSecond
        
            # skip the first file as we haven't yet determined the gap duration
            if fileNum == 1:
                skipFile = True
                break

            # The gap is larger than maxGapTime (150 s). Skip and ignore:
            # this check is in place in case a test was paused somehow
            # and there is a large gap between this and previous T04 file.
            # So.. it is skipped to avoid affecting the stats
            if (gapDurationSec > maxGapTime):    
                print ("Skipping this file due to large gap duration! = %d sec.." % gapDurationSec)
                fileNum -= 1
                skipFile = True
                break

            gapDurations.append(gapDurationSec)    

        # fill in data gaps with previous data
        # to avoid statistics / visual issue in plots.
        # Gaps in data are possible, for example
        # after enabling diagnostics logging while
        # a receiver logging session is going.
        if (prevSecond != -1 and second - prevSecond >= 2):
            print ("%ds data gap detected at %s" % (second - prevSecond, prevSecond))

            # NOTE(review): this loop variable shadows the vd2arr key object `k`;
            # harmless here because all key lookups were done above, but worth renaming.
            for k in np.arange(prevSecond + 1, second, 1):
                deltaTime = (k - firstSecond) #delta time since reboot / reset
                deltaTimeSinceReset = deltaTime + gapDurationSec
                deltaTimePrev = (prevSecond - firstSecond) #delta time since reboot / reset
                deltaTimeSinceResetPrev = deltaTimePrev + gapDurationSec

                if deltaTimeSinceReset not in deltaTrackData[fileNum]:
                    deltaTrackData[fileNum][deltaTimeSinceReset] = [] 

                # clone the data from the previous second (before the gap) to avoid
                # issues when computing average as the times can be different
                deltaTrackData[fileNum][deltaTimeSinceReset].extend(deltaTrackData[fileNum][deltaTimeSinceResetPrev])

        deltaTime = (second - firstSecond) #delta time since reboot / reset
        deltaTimeSinceReset = deltaTime + gapDurationSec

        # Sanity check: should never be negative once the gap is accounted for
        if (deltaTimeSinceReset < 0): 
            print ("err")
        
        if deltaTimeSinceReset not in deltaTrackData[fileNum]:
            deltaTrackData[fileNum][deltaTimeSinceReset] = []      

        if (second != prevSecond):
            epochIndex += 1

        prevSecond = second
        
        svFlag = int(svFlags[index])
        svMeasFlag = int(svMeasFlags[index])        
        
        # Only count healthy satellites with a known lock point
        if (((svFlag & 0x10) == 0) and       # b4 = set = satellite unhealthy / reset = satellite healthy
            ((svMeasFlag & 0x20) != 0)):     # b5 = Data sync/Lock Point Known
             
            svType = int(svTypes[index])
            band   = int(svFreqBands[index])
            track  = int(svTrackTypes[index])
            svID   = int(svIDs[index])

            subtype = m.get_sub_type(svType,band,track)

            # pack type/band/track into one id that is unique per signal type
            uniqueTrackId = (svType << 16) + (band << 8) + track
            
            # unique SV + track combo
            uniqueSvAndTrackId = (svID << 24) + uniqueTrackId
            
            #collect tracking data for each epoch index
            if svElevs[index] >= elevCutOff:
                deltaTrackData[fileNum][deltaTimeSinceReset].append(uniqueSvAndTrackId)

            # only record this SV on this track id
            # if it hasn't been locked before
            if not uniqueSvAndTrackId in firstLockTracker:
                
                # if SV is below the elevation cut off then we
                # ignore this SV for this round
                # as the delta time would be wrong
                # once the SV gets above the elevation mask
                if svElevs[index] >= elevCutOff:

                    #if (deltaTime > 60):               
                    #    print("Long lock time %d for sv %d @ %s " % (deltaTime, svID, subtype.fullstr))

                    if not uniqueTrackId in fileData:
                        fileData[uniqueTrackId] = []                 
                    
                    fileData[uniqueTrackId].append(deltaTime + gapDurationSec)

                  

                # flag this SV regardless of elev mask
                firstLockTracker[uniqueSvAndTrackId] = 1

    if skipFile:
        continue

    #store average time-to-lock for each track id for this file / session
    for uniqueTrackId in sorted(fileData):
        
        if not uniqueTrackId in uniqueTrackIds:
           uniqueTrackIds.append(uniqueTrackId)
         
        if not fileNum in dataAvg:
           dataAvg[fileNum] = {}
        
        if not uniqueTrackId in dataAcqCombined:
            dataAcqCombined[uniqueTrackId] = []

        dataAcqCombined[uniqueTrackId].extend(fileData[uniqueTrackId])

        dataAvg[fileNum][uniqueTrackId] = np.average(fileData[uniqueTrackId])



# Gap statistics between consecutive T04 files (seconds).
avgDataGapDuration = np.average(gapDurations)  # avg gap
minDataGapDuration = min(gapDurations)  # min possible gap
maxDataGapDuration = max(gapDurations)  # max possible gap (comment previously said "min")

print ("T04 files gap: Min: %d, Max: %d, Avg: %d" % ( minDataGapDuration, maxDataGapDuration, avgDataGapDuration ) )

# Subtract the minimum gap duration from all lock durations to get the final
# durations relative to the reset. (The previous code's comment said "average"
# but the minimum is what is actually subtracted, matching the plot labels.)
for num in sorted(dataAvg):
    for id in sorted(dataAvg[num]):
        dataAvg[num][id] -= minDataGapDuration

# Adjust every combined sample. NOTE: the old loop started at index 1,
# leaving the first sample of each list unadjusted -- an off-by-one.
for id in sorted(dataAcqCombined):
    dataAcqCombined[id] = [t - minDataGapDuration for t in dataAcqCombined[id]]


currType = -1  # NOTE(review): appears unused anywhere in this file


def plot_gap_times(numOfResets):
    """Plot the gap duration between consecutive T0x files for each reset interval.

    numOfResets -- number of reset intervals (x axis is 1..numOfResets).
    Reads module-level `gapDurations` (seconds) and `args`; writes a PNG.
    """

    fig = m.figure()
    fig.add_subplot(1, 1, 1)

    title = "T0x Gap Times"
    if args.label is not None:
        title += "\n[%s]" % args.label

    m.title(title)
    m.xlabel('Reset interval')
    # gapDurations holds seconds (see gapDurationSec / the "= %d sec" print);
    # the label previously said "minutes".
    m.ylabel('Gap time (seconds)')

    x = np.arange(1, numOfResets + 1, 1)
    m.plot(x, gapDurations, "X", markersize=4, linestyle='dotted', linewidth=1, label=None)
    m.grid(True)
    m.tight_layout()

    pngName = 'gap_times.png'
    if args.label is not None:
        pngName = "%s_%s" % (args.label, pngName)

    m.savefig(pngName)
    m.close(fig)


def plot_acquistion_average(filterBytype, label, numOfResets):
    """Plot per-signal average time-to-lock across reset intervals.

    filterBytype -- satellite type id to include (others are skipped).
    label        -- satellite system display name, used in title/filename.
    numOfResets  -- number of reset intervals (x axis is 1..numOfResets).
    Reads module-level `dataAvg`, `uniqueTrackIds` and `args`; writes a PNG.
    """

    x = np.arange(1, numOfResets + 1, 1)
    y = {}

    # Build one series per track id; sessions without a lock get NaN so the
    # series stay aligned with the x axis.
    for sessionNum in sorted(dataAvg):
        for trackId in sorted(uniqueTrackIds):
            if (trackId >> 16 & 0xFF) != filterBytype:
                continue
            y.setdefault(trackId, []).append(dataAvg[sessionNum].get(trackId, np.nan))

    # Nothing tracked for this satellite system -> no plot.
    if not y:
        return

    fig = m.figure()
    fig.add_subplot(1, 1, 1)

    title = "Rec 35:19 Signal avg lock times - %s" % label
    if args.label is not None:
        title += "\n[%s]" % args.label

    m.title(title)
    m.xlabel('Reset interval')
    m.ylabel('Average time to lock (s)')

    for trackId in sorted(uniqueTrackIds):
        svType = trackId >> 16 & 0xFF
        if svType != filterBytype:
            continue

        svBand = trackId >> 8 & 0xFF
        svTrack = trackId & 0xFF
        subtype = m.get_sub_type(svType, svBand, svTrack)
        m.plot(x, y[trackId], "X", markersize=4, linestyle='dotted', linewidth=1, label=subtype.fullstr)

    m.grid(True)
    m.legend(loc='best')
    m.tight_layout()

    pngName = 'rec35-19_sv_acq_time_%s.png' % label
    if args.label is not None:
        pngName = "%s_%s" % (args.label, pngName)

    m.savefig(pngName)
    m.close(fig)

def get_max_label_width(filterBytype):
    """Return the longest signal-label length for the given SV type.

    Used to determine string padding for the CDF plot legend; returns 0
    when no track id matches filterBytype.
    """

    widths = [0]

    for trackId in sorted(dataAcqCombined):
        svType = trackId >> 16 & 0xFF
        if svType != filterBytype:
            continue

        subtype = m.get_sub_type(svType, trackId >> 8 & 0xFF, trackId & 0xFF)
        widths.append(len(subtype.fullstr))

    return max(widths)

# Plot the average number of unique signals tracked over time for each
# constellation (i.e. GPS L1 + GPS L2 counts as two signals).
def plot_signal_tracked_over_time(numOfResets):
    """Plot average signals tracked vs time since reset, one curve per system.

    numOfResets -- unused here but kept for call-site symmetry.
    Reads module-level `deltaTrackData`, `svTypeList`, `minDataGapDuration`,
    `signalsMaxX`, `averageSignalTrackedYMax` and `args`; writes a PNG.
    """

    fig = m.figure()
    ax = fig.add_subplot(1, 1, 1)

    for (svTypeId, svTypeName) in svTypeList:

        x = []  # delta seconds since reset
        y = []  # average number of tracked signals at that time

        for i in np.arange(0, signalsMaxX):

            deltaTime = minDataGapDuration + i
            trackedSignals = []

            # One count per file/session that has data at this delta time.
            for num in deltaTrackData:
                if deltaTime in deltaTrackData[num]:
                    # svType lives in bits 16-23 of the packed id
                    count = sum(1 for trackIdWithSV in deltaTrackData[num][deltaTime]
                                if (trackIdWithSV >> 16 & 0xFF) == svTypeId)
                    trackedSignals.append(count)

            if len(trackedSignals) > 0:
                # average tracked-signal count across all sessions at this time
                x.append(deltaTime - minDataGapDuration)
                y.append(np.average(trackedSignals))

        m.plot(x, y, "-", markersize=2, label=svTypeName)

    ax.set_ylim([-2, averageSignalTrackedYMax])
    ax.set_xlim([-2, signalsMaxX])

    title = "Rec 35:19 Average Signals Tracked"
    if args.label is not None:
        title += "\n[%s]" % args.label

    m.title(title)
    m.xlabel('Time elapsed [seconds] [T0x Gap time = %d]' % minDataGapDuration)
    m.ylabel('Number of signals tracked')
    m.grid(True)
    m.legend(loc='upper right')
    m.tight_layout()

    pngName = 'rec35-19_sv_avg_signals_tracked.png'
    if args.label is not None:
        pngName = "%s_%s" % (args.label, pngName)

    m.savefig(pngName)
    print('saved %s' % pngName)

    # Close the figure like the other plot functions do (it was leaked before).
    m.close(fig)


def plot_acquistion_CDF(filterBytype, label):
    """Plot a CDF of acquisition (first-lock) times for one satellite system.

    filterBytype -- satellite type id to include (others are skipped).
    label        -- satellite system display name, used in title/filename.
    Reads module-level `dataAcqCombined`, `minDataGapDuration` and `args`;
    writes a PNG only when at least one matching signal exists.
    """

    fig = m.figure()
    ax = fig.add_subplot(1, 1, 1)

    title = "Rec 35:19 %s Acquisition CDF" % label
    if args.label is not None:
        title += "\n[%s]" % args.label

    m.title(title)
    m.xlabel('Time elapsed [seconds] [T0x Gap time = %d]' % minDataGapDuration)
    m.ylabel('CDF [%]')

    monofont = FontProperties(family="monospace")
    padding = get_max_label_width(filterBytype)

    plotted = 0
    for trackId in sorted(dataAcqCombined):

        svType = trackId >> 16 & 0xFF
        if svType != filterBytype:
            continue

        svBand = trackId >> 8 & 0xFF
        svTrack = trackId & 0xFF
        subtype = m.get_sub_type(svType, svBand, svTrack)

        samples = dataAcqCombined[trackId]
        cx, cy = m.docdf(samples)

        # Stagger marker start/spacing per curve so overlapping CDFs stay readable.
        mrk = (plotted * .05, .1 + plotted * .01)
        lbl = '{:<{pad}} [Avg:{:<3.0f} 95%:{:<3.0f} Cnt:{:<3d}]'.format(
            subtype.fullstr,
            np.round(np.average(samples)),
            np.round(m.get_cdf_percents(cx, cy, [95])[0]),
            len(samples),
            pad=padding)

        m.plot(cx, 100 * cy, '^-', markevery=mrk, fillstyle='none', label=lbl)
        plotted += 1

    # Only save when something was actually plotted.
    if plotted > 0:
        ax.set_xlim([-2, 60])  # first 60 seconds
        ax.set_ylim([0, 100])
        m.grid(True)
        m.legend(loc='lower right', prop=monofont)
        m.tight_layout()

        pngName = 'rec35-19_sv_acq_cdf_%s.png' % label
        if args.label is not None:
            pngName = "%s_%s" % (args.label, pngName)

        m.savefig(pngName)

    m.close(fig)

# The first file only establishes the gap baseline and is skipped, so the
# number of reset intervals is one less than the number of processed files.
numResets = fileNum - 1

plot_gap_times(numResets)
plot_signal_tracked_over_time(numResets)

# One average-lock-time plot and one CDF plot per satellite system.
for satTypeId, satTypeName in svTypeList:
    plot_acquistion_average(satTypeId, satTypeName, numResets)
    plot_acquistion_CDF(satTypeId, satTypeName)