#!/usr/bin/env python

#######################################################
# plotPos.py:
#  For usage details "plotPos.py -h"
#
# Desc:
#  Given a T02 or T04 file the script will generate
#  some simple plots to show the performance along with
#  statistics to stdout.
#
#  Optionally:
#
#  Truth can be provided as either a 
#  static point or a POSPac exported file.
#  
#  The script can call t012kml and generate a KMZ file
#
#  The script can run SNPC and analyze the output of
#  the SNPC engine
#
# Copyright Trimble Inc 2017
#######################################################

import matplotlib
# Allow running headless from the command line
matplotlib.use("agg")

from numpy import *
from pylab import *
import mutils as m
import os;
import sys;
import argparse
import math
import pandas as pd

# Override pylab find() to avoid deprecation warning
# pylint: disable=function-redefined
def find(x):
    """Return indices of the truthy elements of *x*.

    Drop-in replacement for the deprecated pylab ``find``; equivalent to
    ``numpy.where(x)[0]`` for a 1-D condition.
    """
    return nonzero(asarray(x))[0]


# Get the key parameters from the T0X data array.
def getParameters(ref,key,index):
  """Break out the commonly-used columns of the T0X data array.

  Selects the rows given by *index* from *ref*, locating each column via
  the attributes of *key*, and publishes the results as module globals
  for the plotting/statistics code below.
  """
  global Week, Time, TimeLLH, used, tracked, PVTType, correctionAge
  global GPSUsd, SBASUsd, GLOUsd, GALUsd, QZSSUsd, BDSUsd, IRNSSUsd
  global ClkOffset, ClkDrift
  global GPSClkDelta, GLNClkDelta, GALClkDelta, SBASClkDelta, BDSClkDelta
  global sigmaE, sigmaN, sigmaU
  global LLH

  # Small helper: one column of the selected rows.
  def col(c):
    return ref[index, c]

  # Epoch time tags (GPS week / seconds-of-week)
  Week = col(key.WEEK)
  Time = col(key.TIME)
  TimeLLH = Time

  # Solution summary
  used = col(key.NUSED)
  tracked = col(key.NTRK)
  PVTType = col(key.FIXTYPE)
  correctionAge = col(key.corrAge)

  # Satellites used, per constellation
  GPSUsd = col(key.GPSUsd)
  SBASUsd = col(key.SBASUsd)
  GLOUsd = col(key.GLOUsd)
  GALUsd = col(key.GALUsd)
  QZSSUsd = col(key.QZSSUsd)
  BDSUsd = col(key.BDSUsd)
  IRNSSUsd = col(key.IRNSSUsd)

  # Receiver clock states and per-system clock deltas
  ClkOffset = col(key.CLKBIAS)
  ClkDrift = col(key.CLKDRIFT)
  GPSClkDelta = col(key.GPSCLKDELTA)
  GLNClkDelta = col(key.GLNCLKDELTA)
  GALClkDelta = col(key.GALCLKDELTA)
  SBASClkDelta = col(key.SBASCLKDELTA)
  BDSClkDelta = col(key.BDSCLKDELTA)

  # Estimated position sigmas (note: N listed before E in the record)
  sigmaN = col(key.SigN)
  sigmaE = col(key.SigE)
  sigmaU = col(key.SigU)

  # Lat/Lon/Hgt as an (n, 3) slice starting at the LAT column
  LLH = ref[index, key.LAT:key.LAT+3]


#######################################################
# Start of script
#######################################################
if __name__ == "__main__":

  ######################################################################
  # Parse command-line arguments.
  # NOTE: the --stop help text previously showed "-b 20000.0" as the
  # bare-seconds example; corrected to "-e 20000.0".
  parser = argparse.ArgumentParser(description='Parse positions from a T02/T04/Hud, optionally run SNPC')
  parser.add_argument('filename', help='T02/T04 filename')
  parser.add_argument('-t','--truth_lla', help='Truth lat,lon,hgt, e.g. -t 37.3848990528,-122.0054260639,-6.260')
  parser.add_argument('-p','--pospac', help='POSPac filename to use as truth position')
  parser.add_argument('-m','--spirent', help='Spirent motion filename to use as truth position')
  parser.add_argument('-w','--winastra', help='Assume WinAstra pos position file', action="store_true")
  parser.add_argument('-k','--kmz', help='Generate KMZ file', action="store_true")
  parser.add_argument('-s','--SNPC', help='Run SNPC', action="store_true")
  parser.add_argument('-f','--flags', help='SNPC flags')
  parser.add_argument('-b','--start', help='start time tag in GPS week,secs e.g. -b 1234,12345.0 or just secs e.g. -b 12345.0')
  parser.add_argument('-e','--stop', help='stop time tag in GPS week,secs e.g. -e 1234,20000.0 or just secs e.g. -e 20000.0')
  parser.add_argument('-d','--dec', help='Decimate in milliseconds')
  parser.add_argument('-u','--hud', help='Assume TitanPlayer Hud position file',action="store_true")
  parser.add_argument('-r','--record35_16', help='Analyze the record 35:16 instead of the 29 (T02 files) or 35:2 (T04)', action="store_true")
  parser.add_argument('-o','--output_prefix', help='By default use "filename".png as the output for graphs.  With this option, choose a different output prefix.  Useful if input filename is "*.T04"')
  args = parser.parse_args()
  ######################################################################
  ######################################################################

  # Input file; graphs are written as <out_basename>.<plot>.png
  filename = args.filename
  if args.output_prefix:
    out_basename = args.output_prefix
  else:
    out_basename = filename

  # Parse the optional start time tag.  Accepts "week,secs" or bare
  # "secs"; a NaN week means "match any week" in the selection below.
  startWeek = 0
  startSecs = 0
  if(args.start):
    tmp = args.start.split(',')
    if(len(tmp) == 1):
      startWeek = np.nan
      startSecs = float(tmp[0])
    elif(len(tmp) == 2):
      startWeek = int(tmp[0])
      startSecs = float(tmp[1])
    else:
      print("Incorrect start time")
      # BUG FIX: os.exit() does not exist (AttributeError); use sys.exit()
      sys.exit(1)

  # Parse the optional stop time tag, same format as --start.
  # Defaults are effectively "no upper bound".
  stopWeek = 10000
  stopSecs = 9999999999999999
  if(args.stop):
    tmp = args.stop.split(',')
    if(len(tmp) == 1):
      stopWeek = np.nan
      stopSecs = float(tmp[0])
    elif(len(tmp) == 2):
      stopWeek = int(tmp[0])
      stopSecs = float(tmp[1])
    else:
      # BUG FIX: message said "start"; this is the stop tag
      print("Incorrect stop time")
      sys.exit(1)

  if(args.hud):
    # RTK hud file: columns located by header name.
    header, data = m.load_hud(filename)

    print(header)
    Time = data[:,header['GPS_Sec']];

    Lat = data[:,header['Pos_lat']];
    Lon = data[:,header['Pos_lon']];
    Hgt = data[:,header['Pos_hgt']];
    LLH = array([Lat, Lon, Hgt]).T
    print("HUD interface needs bringing up to date")
    # BUG FIX: os.exit() does not exist; use sys.exit()
    sys.exit(1)
  elif (args.winastra):
    # WinAstra .pos file: columns located by header name.
    header, data = m.load_pos(filename)

    print(header)
    Time = data[:,header['Time']];

    Lat = data[:,header['Lat']];
    Lon = data[:,header['Lon']];
    Hgt = data[:,header['Hgt']];
    PVTType = data[:,header['FixType']];
    LLH = array([Lat, Lon, Hgt]).T
    print("WinAstra interface needs bringing up to date")
    # BUG FIX: os.exit() does not exist; use sys.exit()
    sys.exit(1)
  else:
    # T02/T04 path: run viewdat (m.vd2arr) on the file, optionally after
    # running the SNPC engine over it.
    if(args.dec):
      decFlags = '--dec=' + str(args.dec)

    # Issues with IRNSS & BDS
    #flags = "--irnss_broadcast --comp_broadcast --sub19 --rcvr_cc -d99 -e0"
    #flags = "--sub19 --rcvr_cc -d99 -e0"
    #flags = "--sub19 --rcvr_cc -d99 -e5 --excludedSys=r,,e,q,c,i"
    #flags = "--sub19 --rcvr_cc -d99 -e5 --excludedSys=r,e,q,c,i"
    flags = "--sub19 --rcvr_cc --multi_freq_cc=10000 -w -d99 -e5"
    if(args.SNPC):
      if(args.flags):
        # BUG FIX: was "arg.flags" (NameError); extra user flags are
        # appended to the defaults
        flags = flags + " " + args.flags

      print("Running SNPC with flags: %s" % flags)
      # NOTE: filename/flags are passed to a shell unquoted; paths with
      # spaces or shell metacharacters will break this.
      os.system("StingerNavPC " + filename + " " + flags)
      if(args.dec):
        (ref,k)=m.vd2arr("navAPP.t04",rec='-d35:2',opt=[decFlags])
      else:
        (ref,k)=m.vd2arr("navAPP.t04",rec='-d35:2')
    else:
      # ToDo: update viewdat so we can automatically convert record 29's to 35:2's, then
      # all we will need is something like the following
      #(ref,k)=m.vd2arr(filename,rec='-d35:2',opt=['--dec=1000 --translate_rec29_to_rec35_sub2'])

      # T02 inputs are first converted to T04.
      name, file_extension = os.path.splitext(filename)
      if(file_extension == ".T02" or file_extension == ".t02"):
        os.system("t0x2t0x " + filename + " " + name + ".t04")
        filename = name + ".t04"

      if(args.record35_16):
        if(args.dec):
          (ref,k)=m.vd2arr(filename,rec='-d35:16',opt=[decFlags])
        else:
          (ref,k)=m.vd2arr(filename,rec='-d35:16')
      else:
        if(args.dec):
          (ref,k)=m.vd2arr(filename,rec='-d35:2',opt=[decFlags])
        else:
          (ref,k)=m.vd2arr(filename,rec='-d35:2')

    # We have the data, now select the segment requested via -b/-e.
    # A NaN week means "any week" for that bound.
    Week    = ref[:,k.WEEK]
    Time    = ref[:,k.TIME]
    if(np.isnan(startWeek) == False) and (np.isnan(stopWeek) == False):
      i = find( (Time >= startSecs) & (Week >= startWeek) & (Week <= stopWeek) & (Time <= stopSecs) )
    elif(np.isnan(startWeek) == True) and (np.isnan(stopWeek) == False):
      i = find( (Time >= startSecs) & (Week <= stopWeek) & (Time <= stopSecs) )
    elif(np.isnan(startWeek) == False) and (np.isnan(stopWeek) == True):
      i = find( (Time >= startSecs) & (Week >= startWeek) & (Time <= stopSecs) )
    else: #(np.isnan(startWeek) == True) and (np.isnan(stopWeek) == True):
      i = find( (Time >= startSecs) & (Time <= stopSecs) )

    ref = ref[i,:]
    # Make sure time matches the trimmed data
    Week = ref[:,k.WEEK]
    Time = ref[:,k.TIME]

    if(args.kmz):
      name, file_extension = os.path.splitext(filename)
      if(args.SNPC):
        os.system("t012kml --ignore_err -o" + name + " navAPP.t04 " + name + ".kmz")
      else:
        os.system("t012kml --ignore_err -o" + name + " " + filename + " " + name + ".kmz")

    # Break out the parameters from the reference data into globals
    getParameters(ref,k,range(len(ref)))
  
  # Column indices into the POSPac export: time, then lat/lon/hgt
  # starting at column 1 (presumed from usage below -- TODO confirm
  # against the POSPac export format).
  dPP_TIME=0
  dPP_LAT=1

  # Establish the truth reference position LLH_Ref (either per-epoch
  # from POSPac/Spirent, or a single static point).
  have_truth = False
  if(args.pospac):
    # POSPac export used as per-epoch truth; m.intersect matches the
    # common time tags and returns index arrays into each input.
    pospac = m.doload(args.pospac)
    t, i_pp, i_ref = m.intersect( pospac[:,dPP_TIME], Time )
    LLH_Ref = pospac[i_pp,dPP_LAT:dPP_LAT+3]
    
    # Based on the common epochs break out the parameters from the 
    # reference data
    getParameters(ref,k,i_ref)

    have_truth = True
  elif(args.spirent):
    # Spirent motion file used as per-epoch truth. First 2 rows are
    # skipped (header), time is in milliseconds, lat/lon in radians
    # (columns 13:16 -- presumed from usage; verify against the
    # Spirent motion file format).
    filename = args.spirent
    n = 2
    f = open(filename,'rb')
    data = pd.read_csv(f,skiprows=n).values

    # Now filter the data to match between the input and reference
    t, i_pp, i_ref = m.intersect( 0.001 * data[:,0], Time )
    LLH_Ref = data[i_pp,13:16]
    # Convert lat/lon from radians to degrees
    LLH_Ref[:,0]  = LLH_Ref[:,0] * 180.0 / math.pi
    LLH_Ref[:,1]  = LLH_Ref[:,1] * 180.0 / math.pi

    # Based on the common epochs break out the parameters from the 
    # reference data
    getParameters(ref,k,i_ref)
    have_truth = True
  elif args.truth_lla:
    # Static truth point supplied on the command line as "lat,lon,hgt"
    lla_str = args.truth_lla
    LLH_Ref = array([float(x) for x in lla_str.split(',')])
    TimeLLH = Time
    have_truth = True
    print(LLH_Ref)
  else:
    # No truth - use the median as it protects to a first order
    # against outliers
    LLH_Ref = array([median(LLH[:,0]), median(LLH[:,1]), median(LLH[:,2])])
    TimeLLH = Time
    print(LLH_Ref)

  # Ground track: latitude vs longitude scatter
  fig=figure()
  ax=fig.add_subplot(111)
  plot( LLH[:,1], LLH[:,0],'.')
  xlabel(r'Longitude[$^o$]')
  ylabel(r'Latitude[$^o$]')
  grid(True)
  tight_layout()
  # Prevent the axis numbers having an offset
  ax.get_xaxis().get_major_formatter().set_useOffset(False)
  ax.get_yaxis().get_major_formatter().set_useOffset(False)
  show()
  # Save the data as a PNG file
  savefig(out_basename + ".LatLong.png",dpi=150)
  close()
  
  ######################################################################
  # Now plot a few things...
  ######################################################################

  colArray = ['b','r','g','m','c','k','#FF8C00','#FF99FF','#COCOCO','#CCFFCC','#FFCCCC'];
  
  DayNum = floor(TimeLLH[0]/86400)

  # These diagnostic plots need fields (used counts, clock states) that
  # the HUD/WinAstra loaders do not currently provide.
  if(not args.hud and not args.winastra):
    # Satellites used per constellation vs hour-of-day
    figure()
    plot( Time/3600.0 - DayNum*24, used,     label='Total Used',color=colArray[0])
    plot( Time/3600.0 - DayNum*24, GPSUsd,   label='GPS',color=colArray[1])
    plot( Time/3600.0 - DayNum*24, SBASUsd,  label='SBAS',color=colArray[2])
    plot( Time/3600.0 - DayNum*24, GLOUsd,   label='GLONASS',color=colArray[3])
    plot( Time/3600.0 - DayNum*24, GALUsd,   label='GALILEO',color=colArray[4])
    plot( Time/3600.0 - DayNum*24, QZSSUsd,  label='QZSS',color=colArray[5])
    plot( Time/3600.0 - DayNum*24, BDSUsd,   label='BDS',color=colArray[6])
    plot( Time/3600.0 - DayNum*24, IRNSSUsd, label='IRNSS',color=colArray[7])

    xlabel('Time [Hour]')
    ylabel('Num Satellites')
    grid(True)
    legend(loc='best')
    tight_layout()
    show()
    # Save the data as a PNG file
    savefig(out_basename + ".SVsUsed.png",dpi=150)
    close()

    # PVT type and correction age vs hour-of-day.
    # NOTE(review): there is no explicit figure() here; after the close()
    # above, the pylab state machine implicitly opens a fresh figure on
    # the xlabel() call, so this works -- but an explicit figure() would
    # be clearer.
    xlabel('Time [Hour]')
    grid(True)
    plot( TimeLLH/3600.0 - DayNum*24, PVTType, label=r'PVT Type',   color=colArray[0])
    plot( TimeLLH/3600.0 - DayNum*24, correctionAge, label=r'Correction Age',  color=colArray[1])
    legend(loc='best')
    tight_layout()
    ylim(-0.5,10.5)
    show()
    # Save the data as a PNG file
    savefig(out_basename + ".PVTT_and_Age.png",dpi=150)
    close()

    # Receiver clock offset vs hour-of-day
    figure()
    plot( Time/3600.0 - DayNum*24, ClkOffset, label='ClockOffset')
    xlabel('Time [Hour]')
    ylabel('Receiver Clock Offset [ms]')
    grid(True)
    legend(loc='best')
    tight_layout()
    show()
    # Save the data as a PNG file
    savefig(out_basename + ".ClkOffset.png",dpi=150)
    close()

    # Receiver clock drift vs hour-of-day
    figure()
    plot( Time/3600.0 - DayNum*24, ClkDrift, label='ClockDrift')
    xlabel('Time [Hour]')
    ylabel('Receiver Clock Drift [ppm]')
    grid(True)
    legend(loc='best')
    tight_layout()
    show()
    # Save the data as a PNG file
    savefig(out_basename + ".ClkDrift.png",dpi=150)
    close()


    # Inter-system clock deltas (reference system minus each system),
    # only plotted when at least one finite sample exists.
    figure()


    clks = concatenate((GPSClkDelta,GLNClkDelta, GALClkDelta, SBASClkDelta, BDSClkDelta), axis=0)
    i = find(isfinite(clks))
    if(len(i) > 0):
      # Round the y-limits out to the nearest 10 ns
      maxDelta = 10 * ceil(max(clks[i])/10)
      minDelta = 10 * floor(min(clks[i])/10)

      plot( Time/3600.0 - DayNum*24, GPSClkDelta,  label='GPS')
      plot( Time/3600.0 - DayNum*24, GLNClkDelta,  label='GLN')
      plot( Time/3600.0 - DayNum*24, GALClkDelta,  label='GAL')
      plot( Time/3600.0 - DayNum*24, SBASClkDelta, label='SBAS')
      plot( Time/3600.0 - DayNum*24, BDSClkDelta,  label='BDS')
      ylim([minDelta,maxDelta])
      xlabel('Time [Hour]')
      ylabel('Reference Sys - Sys Clock Offset [ns]')
      grid(True)
      legend(loc='best')
      tight_layout()
      show()
      # Save the data as a PNG file
      savefig(out_basename + ".RefMinusSysClkDelta.png",dpi=150)
      close()

  # Convert the LLH solutions into East/North/Up errors relative to the
  # reference position (truth or median).
  (dE, dN, dU) = m.llh2enu( LLH, LLH_Ref, False, False)

  ######################################################################
  # Output a few statistics
  ######################################################################
  
  print("Dir: %s" % os.getcwd())
  print("East   Sigma = %.3fm" % std(dE))
  print("North  Sigma = %.3fm" % std(dN))
  print("Height Sigma = %.3fm" % std(dU))
  
  # 2D (horizontal) error CDF; indices of the 50/68/90/95 percentiles
  err_2d = sqrt( dE**2 + dN**2 )
  cx_2d,cy_2d = m.docdf( err_2d )
  i_50_2d = find( cy_2d >= .5 )[0]
  i_68_2d = find( cy_2d >= .68 )[0]
  i_90_2d = find( cy_2d >= .9 )[0]
  i_95_2d = find( cy_2d >= .95 )[0]
  print('Error:       50%    68%    90%    95%   100%')
  print('2D      : %6.2f %6.2f %6.2f %6.2f %6.2f' % (cx_2d[i_50_2d],cx_2d[i_68_2d],cx_2d[i_90_2d],cx_2d[i_95_2d],cx_2d[-1]))


  # Absolute height error CDF
  cx_hgt,cy_hgt = m.docdf( abs(dU) )
  i_50_hgt = find( cy_hgt >= .5 )[0]
  i_68_hgt = find( cy_hgt >= .68 )[0]
  i_90_hgt = find( cy_hgt >= .9 )[0]
  i_95_hgt = find( cy_hgt >= .95 )[0]
  print('abs(Hgt): %6.2f %6.2f %6.2f %6.2f %6.2f' % (cx_hgt[i_50_hgt],cx_hgt[i_68_hgt],cx_hgt[i_90_hgt],cx_hgt[i_95_hgt],cx_hgt[-1]))

  # 3D error CDF
  err_3d = sqrt( dE**2 + dN**2 + dU**2 )
  cx_3d,cy_3d = m.docdf( err_3d )
  i_50_3d = find( cy_3d >= .5 )[0]
  i_68_3d = find( cy_3d >= .68 )[0]
  i_90_3d = find( cy_3d >= .9 )[0]
  i_95_3d = find( cy_3d >= .95 )[0]
  print('3D      : %6.2f %6.2f %6.2f %6.2f %6.2f' % (cx_3d[i_50_3d],cx_3d[i_68_3d],cx_3d[i_90_3d],cx_3d[i_95_3d],cx_3d[-1]))

  ######################################################################
  # End of statistics
  ######################################################################

  # Set the ENU and 2D to approx the 95% horizontal,
  # rounded up to 0.1m below 0.9m, otherwise to a whole metre
  Lim2D = cx_2d[i_95_2d]
  if(Lim2D < 0.9):
    Lim2D = ceil(Lim2D*10)/10
  else:
    Lim2D = ceil(Lim2D)
  LimitENU = Lim2D
  
  # Set the height to approx 95%
  LimHgt = cx_hgt[i_95_hgt]
  if(LimHgt < 0.9):
    LimHgt = ceil(LimHgt*10)/10
  else:
    LimHgt = ceil(LimHgt)
  
  # ENU error time series with sigma/95%/99% in the legend
  figure()

  e95,e99 = np.nanpercentile(np.abs(dE),[95,99])
  n95,n99 = np.nanpercentile(np.abs(dN),[95,99])
  u95,u99 = np.nanpercentile(np.abs(dU),[95,99])

  plot( TimeLLH/3600.0 - DayNum*24, dE, label=r'East  $\sigma$=%.3fm 95%%=%.3fm 99%%=%.3fm'% (std(dE),e95,e99), color=colArray[0])
  plot( TimeLLH/3600.0 - DayNum*24, dN, label=r'North $\sigma$=%.3fm 95%%=%.3fm 99%%=%.3fm'% (std(dN),n95,n99), color=colArray[1])
  plot( TimeLLH/3600.0 - DayNum*24, dU, label=r'Up    $\sigma$=%.3fm 95%%=%.3fm 99%%=%.3fm'% (std(dU),u95,u99), color=colArray[2])


  # NOTE(review): this unconditionally overrides the LimitENU computed
  # from the 95% statistic above, pinning the y-axis at +/-0.1 m -- looks
  # like a leftover tuning value; confirm it is intended.
  LimitENU = 0.1
  ylim([-LimitENU,LimitENU])

  xlabel('Time [Hour]')
  ylabel('Error [m]')


  if have_truth:
    title('Relative to Truth : 2D 95%% = %.3fm' % cx_2d[i_95_2d])
  else:
    title('Relative to Median');

  grid(True)
  legend(loc='best',prop={'family': 'monospace', 'size': 6})
  tight_layout()
  show()
  # Save the data as a PNG file
  savefig(out_basename + ".ENUErr.png",dpi=300)
  close()


  # Receiver-estimated error (from the reported sigmas) vs hour-of-day
  figure()

  HorzErrEst = sqrt(sigmaE**2 + sigmaN**2)
  plot( TimeLLH/3600.0 - DayNum*24, HorzErrEst, label=r'2D err', color=colArray[0])
  plot( TimeLLH/3600.0 - DayNum*24, sigmaU,     label=r'Up err', color=colArray[1])
  xlabel('Time [Hour]')
  ylabel('Estimated Error [m]')
  ylim([0,0.5])

  title('Estimated Error')
  grid(True)
  legend(loc='best',prop={'family': 'monospace', 'size': 6})
  tight_layout()
  show()
  # Save the data as a PNG file
  savefig(out_basename + ".ErrEst.png",dpi=300)
  close()



  # East vs North error scatter
  figure()
  plot( dE, dN, '.', color=colArray[0])
  #xlim([-Lim2D,Lim2D])
  #ylim([-Lim2D,Lim2D])
  xlabel('East Error [m]');
  ylabel('North Error [m]');
  grid(True)
  tight_layout()
  show()
  # Save the data as a PNG file
  savefig(out_basename + ".EN.png",dpi=150)
  close()

  # 2D error CDF ('%c' with 10 embeds newlines in the legend label)
  figure()
  plot( cx_2d, cy_2d, label='2D Error%c68%%=%.4fm%c95%%=%.4fm%c100%%=%.4fm'%(10,cx_2d[i_68_2d],10,cx_2d[i_95_2d],10,cx_2d[-1]),   color=colArray[0])
  xlim([0,Lim2D])
  ylim([0,1])
  xlabel('Error [m]')
  ylabel('CDF')
  if have_truth:
    title('Relative to Truth');
  else:
    title('Relative to Median');
  grid(True)
  legend(loc='best')
  tight_layout()
  show()
  # Save the data as a PNG file
  savefig(out_basename + ".2DCDF.png",dpi=150)
  close()

  # Absolute height error CDF (x-axis fixed at 0..0.10 m)
  figure()
  plot( cx_hgt, cy_hgt, label='abs(Hgt) Error%c68%%=%.3fm%c95%%=%.3fm%c100%%=%.3fm'%(10,cx_hgt[i_68_hgt],10,cx_hgt[i_95_hgt],10,cx_hgt[-1]), color=colArray[0])
  xlim([0,0.10])
  ylim([0,1])
  xlabel('Error [m]')
  ylabel('CDF')
  if have_truth:
    title('Relative to Truth');
  else:
    title('Relative to Median');
  grid(True)
  legend(loc='best')
  tight_layout()
  show()
  # Save the data as a PNG file
  savefig(out_basename + ".HgtCDF.png",dpi=150)
  close()

  # Per-PVT-type scatter plots: height error (figure 1), 2D error
  # (figure 2) and moving height std-dev (figure 3), coloured by
  # position type.

  # Get a list of unique position types
  uniquePVTTypes = list(set(PVTType))
  # Set up the cmap - one row per position
  cmap = np.zeros(shape=(len(PVTType),4))
  # Set up a fallback colormap, so un-assigned different PVT
  # types are a little more obvious. Later, the PVT type is
  # scaled by the length of the colormap to provide more
  # distinct variation. cm.rainbow is pretty good too.
  # A list of supported colormaps can be generated by
  # colormaps()
  fallbackColormap = cm.jet

  # Map a colormap one row per position type. Default to black,
  # set colors for the position types we care about
  colType = np.zeros(shape=(len(m.PosTypeStrConst),3))
  colType[0,:]  = [1,0,0]   # Auto LSQ
  colType[1,:]  = [1,0,0.3] # DGNSS LSQ
  colType[2,:]  = [1,0,0.6] # SBAS LSQ
  colType[3,:]  = [1,1,0]   # RTK Float
  colType[4,:]  = [1,0.5,0] # RTK Fixed
  colType[9,:]  = [0,0,1]   # Auto KF
  colType[10,:] = [0,1,0]   # DGNSS KF
  colType[11,:] = [1,0,1] # SBAS KF
  colType[12,:] = [0.5,0.5,0.5] # RTX

  HorzErr = sqrt(dE**2 + dN**2)
  for mode in range(len(uniquePVTTypes)):
    # Loop for each unique position type in the data
    PosType = int(uniquePVTTypes[mode])
    tmp =  find(PVTType == PosType)
    
    # if there is no color, pick a color from the fallback map
    # scaling into the map by the size of the PVT types in use
    if ((colType[PosType,:] == [0.,0.,0.]).all()):
      colType[PosType,:] = fallbackColormap(int(PosType*(fallbackColormap.N/len(m.PosTypeStrConst))))[0:3]

    # Assign this type's colour (fully opaque) to all of its epochs
    cmap[tmp,0] = colType[PosType,0]
    cmap[tmp,1] = colType[PosType,1]
    cmap[tmp,2] = colType[PosType,2]
    cmap[tmp,3] = 1

    # Percentage of all epochs having this position type
    PVTyield = 100.0*float(len(tmp))/float(len(PVTType))


    # NOTE(review): stray debug print; consider removing or formatting
    print(len(tmp), len(dU))
    # Hgt Error: approximate 95th percentile via the sorted sample
    errTmp = np.sort(abs(dU[tmp]))
    err95  = errTmp[int(0.95 * len(tmp))]

    if(len(tmp) < 100):
      # IF we don't have many points the 95% have very little
      # confidence
      label = m.PosTypeStrConst[PosType] + " (points= " + str(len(tmp)) + ")"
    else:
      label = m.PosTypeStrConst[PosType] + " 95% = " + "{:.3f}".format(err95) + "m (points= " + str(len(tmp)) + ")"
    print("Hgt - %15s yield = %6.3f 95=%6.3f Points = %d" % (m.PosTypeStrConst[PosType], PVTyield,err95,len(tmp)))
    
    figure(1)
    # We could move the scatter command outside this loop as the
    # cmap is setup for all positions (or will when the loop
    # finishes). However, this makes label generation easier if
    # we call once per position type
    scatter(Time[tmp],dU[tmp], c=cmap[tmp,:], s=5, edgecolor='none',label= label)

    # 2D Error: same approximate 95th percentile treatment
    errTmp = np.sort(HorzErr[tmp])
    err95  = errTmp[int(0.95 * len(tmp))]
    if(len(tmp) < 100):
      label = m.PosTypeStrConst[PosType] + " (points= " + str(len(tmp)) + ")"
    else:
      label = m.PosTypeStrConst[PosType] + " 95% = " + "{:.3f}".format(err95) + "m (points= " + str(len(tmp)) + ")"
    print("2D  - %15s yield = %6.3f 95=%6.3f Points = %d" % (m.PosTypeStrConst[PosType], PVTyield,err95,len(tmp)))

    # Mean tracked/used counts for the epochs of this position type
    print("Tracked Mean    = %.3f" % np.mean(tracked[tmp]))
    print("Used Mean       = %.3f" % np.mean(used[tmp]))
    print("GPS USed Mean   = %.3f" % np.mean(GPSUsd[tmp]))
    print("GLN Used Mean   = %.3f" % np.mean(GLOUsd[tmp]))
    print("GAL Used Mean   = %.3f" % np.mean(GALUsd[tmp]))
    print("BDS Used Mean   = %.3f" % np.mean(BDSUsd[tmp]))
    print("IRNSS Used Mean = %.3f" % np.mean(IRNSSUsd[tmp]))
    print("SBAS Used Mean  = %.3f" % np.mean(SBASUsd[tmp]))
    
    figure(2)
    scatter(Time[tmp],HorzErr[tmp], c=cmap[tmp,:], s=5, edgecolor='none',label= label)

    # Moving std-dev of height error over a window of up to 500 points,
    # padded at the end with the last value to match the series length
    figure(3)
    win_len = int(np.minimum(500,len(tmp)))
    sd = m.moving_std(dU[tmp], win_len)
    sd = np.concatenate((sd, sd[-1]*np.ones(win_len-1)), axis=0)
    scatter(Time[tmp], sd, c=cmap[tmp,:], s=5, edgecolor='none',label= label)
  
  # Finish and save the three accumulated figures
  figure(1)
  xlabel('Time [GPS Seconds]')
  ylabel('Height Error [m]')
  title('Height Error')
  grid(True)
  legend(loc='best',markerscale=4)
  tight_layout()
  ylim(-0.20,0.20)
  show()
  # Save the data as a PNG file
  savefig(out_basename + ".HgtErr.png",dpi=150)
  close()
  
  figure(2)
  xlabel('Time [GPS Seconds]')
  ylabel('2D Error [m]')
  title('2D Error')
  grid(True)
  legend(loc='best',markerscale=4)
  tight_layout()
  ylim(-0.05,0.15)
  show()
  # Save the data as a PNG file
  savefig(out_basename + ".2DErr.png",dpi=150)
  close()

  figure(3)
  xlabel('Time [GPS Seconds]')
  ylabel('Height Std Deviation [m]')
  title('Height Std Deviation')
  grid(True)
  legend(loc='best',markerscale=4)
  tight_layout()
  ylim(-0.02,0.10)
  show()
  # Save the data as a PNG file
  savefig(out_basename + ".HgtStdDev.png",dpi=150)
  close()

