################################################################################
# Copyright Trimble 2022                                                       #
################################################################################

################################################################################
# Global Constants                                                             #
################################################################################

# Command line help text
USAGE_TEXT = """\
Takes output of Anti-Jam and produces some basic statistics for the web/app.
Normally Anti-Jam handles calling this script, but you can call it manually 
for a couple reasons:
  1:  For manual testing, you can run:
        ./ProcessResults.py --reprocess RX#-####
      to force processing a single directory.
  2:  If you've manually modified XML files in ResultsQueue/ the
      script may not figure out that you made a change, so force it with:
        ./ProcessResults.py --force
      to update the web pages.
"""

###################
# Directory paths #
###################

# Directory for completed-processing JSON data
DATA_DIR = 'DataDir'
OUTPUT_DIR = 'OutputResults'
RESULTS_DIR = 'ResultsQueue'

#########################
# Executable tool paths #
#########################

# Tool for converting Septentrio SBF file to T04
SBF_TO_T0X = '../../../sbf2t0x'

# Tool for converting Novatel to T04
NOVATEL_TO_T0X = "../../../novatel2t0x"

###########
# Options #
###########

# Verbose error log
VERBOSE_LOG = False

################################################################################
# Imports                                                                      #
################################################################################

import matplotlib
import matplotlib.pyplot as plt
import multiprocessing as mp
import argparse

if __name__ == "__main__":
    # Running as a script: select the non-interactive Agg backend so plots
    # render without a display (headless command-line operation).
    matplotlib.use("agg")
    # The default 'fork' start method will randomly hang on rare occasions,
    # so force worker processes to be spawned instead.
    mp.set_start_method("spawn")

from pylab import *
from mutils import *
from mutils.PosPacConst import *
import xml.etree.ElementTree as ET
import json
import os
import subprocess
import shutil
import glob
import time
import sys
import csv
from datetime import datetime
import inspect
from scipy.fft import fft, fftshift
import math
import numpy as np
from statistics import mean
from ProcessResultsStorage import init_table, add_data_to_db
import ProcessResultsCalculations
import requests

# Paths to the NoPi postprocessing tools, resolved relative to the directory
# this script is launched from (the NoPi checkouts are expected two levels up).
run_dir = os.path.abspath(os.path.curdir)
nopi_dir = run_dir + "/../../NoPi/"
nopi_exe = "NoPi" # should be in path
nopi_di  = run_dir + "/../../NoPi_Display/NoPiDI_Main.py"

def fn():
    """
    Return the calling function's name as a log prefix, if enabled.

    Only active when the module-level VERBOSE_LOG flag is set; otherwise an
    empty string is returned so log lines stay clean.

    TODO - consider deleting this and the inspect import

    :returns: "<caller>()::" when VERBOSE_LOG is True, else ""
    """
    if not VERBOSE_LOG:
        return ""
    # Frame 1 in the outer-frames list is our immediate caller; element 3
    # of a frame record is the function name.
    caller_record = inspect.getouterframes(inspect.currentframe())[1]
    return caller_record[3] + "()::"

def default_scenario():
    """Return a dotdict pre-populated with the default scenario settings."""
    defaults = {
        'config_filename': "",
        'NoPI_base': None,
        'NoPI_ini': None,
    }
    return dotdict(defaults)

def default_stats():
    """Return a dot dictionary for ant0, ant1, and single-difference results.

    eg. d.sd['GPS']['L1']['CA']['432000_432050']['cno']['mean'] = float
    """
    return dotdict({
        'version': '1.0',
        'sd': {},
        'ant0': {},
        'ant1': {},
        'nopi': {},  # FIXME
    })

def default_combo(sat_sys_name, sat_type, band, track):
    """Return a dotdict bundling one satellite-system/band/track combination."""
    combo = dotdict({
        'sat_sys_name': sat_sys_name,
        'sat_type': sat_type,
        'band': band,
        'track': track,
    })
    return combo

def write_table_html(html_filename='table.html', csv_filename='table.csv'):
    '''
    Create an HTML file that renders a local csv file as a table of any size.

    The generated page fetches the csv with a synchronous XMLHttpRequest and
    builds the table in JavaScript, so it handles arbitrary row counts.

    Input: html_filename = string, path of the html file to write
           csv_filename = string, csv file the page will load (every
                          occurrence of the 'table.csv' placeholder in the
                          template below is replaced with this name)
    '''
    content = """
<!DOCTYPE html>
<html lang='en-ca'>
  <style>
    body
    {
        font-family: 'Helvetica', 'Arial', sans-serif;
        color: #444444;
        font-size: 9pt;
        background-color: #FAFAFA;
    }    
  </style>

  <body>
    <div id='DynamicTable'></div>
  </body>

  <script>
    var filepath = 'table.csv';

    function csvToArray(str, delimiter = ',') {
      const headers = str.slice(0, str.indexOf('\\n')).split(delimiter);
      const rows = str.slice(str.indexOf('\\n') + 1).split('\\n');
      const arr = rows.map(function (row) {
        const values = row.split(delimiter);
        const el = headers.reduce(function (object, header, index) {
          object[header] = values[index];
          return object;
        }, {});
        return el;
      });
      return arr;
    }

    function addTable(arr) {

      var TableDiv = document.getElementById('DynamicTable');

      var table = document.createElement('TABLE');
      table.border = '1';
      table.style.borderCollapse  = 'collapse';

      var tableHead = document.createElement('THEAD');
      table.appendChild(tableHead);

      var tr = document.createElement('TR');
      tableHead.appendChild(tr);
      var keys = Object.keys(arr[0]);
      for (var j = 0; j < keys.length; j++) {
        var th = document.createElement('TH');
        th.width = '75';
        th.appendChild(document.createTextNode(keys[j]));
        tr.appendChild(th);
      }

      var tableBody = document.createElement('TBODY');
      table.appendChild(tableBody);

      for (var i = 0; i < arr.length; i++) {
        var keys = Object.keys(arr[i]);
        var tr = document.createElement('TR');
        tableBody.appendChild(tr);
        for (var j = 0; j < keys.length; j++) {
          var td = document.createElement('TD');
          td.width = '75';
          td.appendChild(document.createTextNode(arr[i][keys[j]]));
          tr.appendChild(td);
        }
      }
      TableDiv.appendChild(table);
    }

    function fileExists(url) {
      if(url){
          var req = new XMLHttpRequest();
          req.onreadystatechange = function(){
            if(req.status == 200 && req.readyState == 4){
              txt = req.responseText;
              if (txt.charAt(txt.length - 1) == '\\n') {txt = txt.slice(0, -1);}
              var arr = csvToArray(txt);
              addTable(arr);
            }
          };
          req.open('GET', url, false);
          req.send();
          return req.status==200;
      } else {
          return false;
      }
    }
    
    var rt = fileExists(filepath);
    // console.log('fileExists(),'+filepath+', returns,'+rt.toString());
  </script>

</html>
    """
    # Point the embedded JavaScript at the requested csv file; this replaces
    # the 'var filepath' placeholder (and any other 'table.csv' occurrence).
    content = content.replace('table.csv', csv_filename)
    with open(html_filename,'w') as f:
        f.write(content)

def do_NoPI( config, data_dir, out_nopi_path, stats, interval = [] ):
    """Run NoPi and NoPi_Display on the given receiver files.

    Inputs: config = default_scenario() = info from scenario XML file
       data_dir = directory containing the T04 receiver files
       out_nopi_path = directory to put NoPI output in
       interval = [t_start, t_end]; if not empty it is passed as -s/-e to do
                  partial postprocessing
    Output: stats = default_stats() = fills in stats.nopi with per-combo epoch
            counts and CNo/carrier/code statistics parsed from
            NoPiDI_Stats_Summary.txt

    NOTE(review): the mutable default ``interval=[]`` is only compared
    against and never mutated here, so it is harmless, but ``interval=None``
    would be safer.
    """
    
    # Safety check: refuse to run anywhere except under OUTPUT_DIR/RX*
    if out_nopi_path.find(OUTPUT_DIR + '/RX') != 0:
        print("Suspicious NoPI output directory",out_nopi_path)
        raise RuntimeError("Suspicious NoPI output directory",out_nopi_path)

    if not os.path.isdir(out_nopi_path):
        # Copy files into place for NoPI
        os.mkdir(out_nopi_path)
    
    # NOTE(review): 'if True:' looks like a leftover debug toggle; the body
    # always runs (even when the output directory already existed).
    if True:
        shutil.copy( nopi_dir+'/NoPi_Default.cfg', out_nopi_path )
        shutil.copyfile( './piline.ini', out_nopi_path+'/piline.ini' )

        # Run NoPI on the receiver files; -s/-e restrict the processed time
        # span when an interval was given. (-ab0 -ar1: antenna selection
        # flags — presumably base=0 / rover=1; confirm against NoPi usage.)
        if interval == []:
            cmd = "cd %s && %s %s/.rcvr.T04 %s/.rcvr.T04 -ab0 -ar1" % (out_nopi_path,
                                               nopi_exe,
                                               os.getcwd() + '/' + data_dir,
                                               os.getcwd() + '/' + data_dir)
        else:
            cmd = "cd %s && %s %s/.rcvr.T04 %s/.rcvr.T04 -s%d -e%d -ab0 -ar1" % (out_nopi_path,
                                               nopi_exe,
                                               os.getcwd() + '/' + data_dir,
                                               os.getcwd() + '/' + data_dir,
                                               interval[0],
                                               interval[1])
        print(cmd)
        subprocess.check_call(cmd,shell=True)

        # Generate plot and html page
        # cmd = "cd %s && %s -p ." % (out_nopi_path, nopi_di)
        # FIXME: hard-coded interpreter path
        cmd = "cd %s && /opt/anaconda/bin/python3 %s -p ." % (out_nopi_path, nopi_di)
        print(cmd)
        subprocess.check_call(cmd,shell=True)

        # Compress the (large) .mtb output files in place
        cmd = "cd %s && gzip *.mtb" % (out_nopi_path)
        print(cmd)
        subprocess.check_call(cmd,shell=True)

    # Parse NoPiDI_Stats_Summary.txt. Header rows begin with '%%'; because
    # that marker is an extra leading token, each data-row column sits one
    # position left of its header index — hence the "-1" offsets below.
    num_pos = -1
    cno_mean_pos = -1
    cno_std_pos = -1
    car_std_pos = -1
    code_std_pos = -1
    combo_type_pos = -1
    valid_pos = -1
    # NOTE(review): file handle from open() is never closed explicitly.
    for line in open(out_nopi_path+'/NoPiDI_Stats_Summary.txt'):
        w = line.rstrip().split()
        if line.startswith('%%'):
            valid_pos = w.index("Valid")-1
            combo_type_pos = w.index("ComboType")-1
            num_pos = w.index("DDCa_#epochs")-1
            cno_mean_pos = w.index("SDCNo_mean")-1
            cno_std_pos = w.index("SDCNo_std")-1
            car_std_pos = w.index("DDCa_std")-1
            code_std_pos = w.index("DDCd_std")-1
            continue
        # Skip rows NoPi marked as invalid
        if w[valid_pos] != '1':
            continue
        combo_type = w[combo_type_pos]
        stats.nopi[combo_type] = {}
        stats.nopi[combo_type]['num'] = int(w[num_pos])
        stats.nopi[combo_type]['cno_mean'] = float(w[cno_mean_pos])
        stats.nopi[combo_type]['cno_std'] = float(w[cno_std_pos])
        stats.nopi[combo_type]['car_std'] = float(w[car_std_pos])
        stats.nopi[combo_type]['code_std'] = float(w[code_std_pos])


def create_table_from_summary(path, interval=None):
  """ Create frequency-band vs. constellation tables (PNG images) from
      NoPiDI_Stats_Summary.txt.
        Input:  path = string, postprocessing location
                interval = list, [start_tow, stop_tow]; used only to build a
                           suffix for the output file names
  """
  
  # Ensure a non-interactive backend so this also runs headless.
  matplotlib.use("agg")

  fname = path+'/NoPiDI_Stats_Summary.txt'
  ComboTyp = []

  # Collect the combo names from column 1 (column 0 is the '%%' marker);
  # the header row carries the literal 'Combo#' token and is skipped.
  with open(fname) as f:
    reader = csv.reader(f, delimiter=' ')
    
    for row in reader:
      if row[1] == 'Combo#':
        continue
      ComboTyp.append(row[1])

  arr2d = np.loadtxt(fname, dtype='float', delimiter=' ', skiprows = 1, usecols = (8,14,18,19,20)) #read cols: DDCa_mav, DDCd_mav, SDCNo_mean, SDCNo_std, SDCNo_mav
  DDCa_mav = arr2d[:,0]
  DDCd_mav = arr2d[:,1]
  SDCNo_mean = arr2d[:,2]
  SDCNo_std = arr2d[:,3]
  SDCNo_mav = arr2d[:,4]
  
  # Sort the rows so entries group by constellation and frequency band later:
  # GPS < GLONASS < GALILEO < BEIDOU (explicit rank below),
  # bands ordered lexically within a constellation (L < G < E < B, 1 < 2 < 5).

  ranked_consellation = {'GPS':1,'GLONASS':2,'GALILEO':3,'BEIDOU':4}
  data = []
  for i in range(len(ComboTyp)):
    data.append( (ComboTyp[i],DDCa_mav[i],DDCd_mav[i],SDCNo_mean[i],SDCNo_std[i]) )
  data.sort(key=lambda cell: ( ranked_consellation[cell[0].split('_')[0].upper()], cell[0].split('_')[1]))

  # Write the sorted tuples back into the parallel arrays
  for i in range(len(data)):
    ComboTyp[i] = data[i][0]
    DDCa_mav[i] = data[i][1]
    DDCd_mav[i] = data[i][2]
    SDCNo_mean[i] = data[i][3]
    SDCNo_std[i] = data[i][4]

  # Create x,y-axis labels and a ComboType -> (row, col) index dictionary,
  # where rows are frequency bands and columns are constellations.
  ComboTyp_table = {}
  constellation_arr = []
  freq_band_arr = []

  for ele in ComboTyp:
    arr_str = ele.split('_')
    constellation, freq = arr_str[0], '_'.join(arr_str[1:])
    i,j = None, None
    try:
      i = freq_band_arr.index(freq)
    except:  # ValueError: first time this band is seen
      i = len(freq_band_arr)
      freq_band_arr.append(freq)
    try:
      j = constellation_arr.index(constellation)
    except:  # ValueError: first time this constellation is seen
      j = len(constellation_arr)
      constellation_arr.append(constellation)
    ComboTyp_table[ele] = (i,j)


  def mergemulticells_clean_edge(table, cells, hide_rest_txt=False):
    '''
    Merge N matplotlib.Table cells by hiding their shared interior edges.

    Parameters
    -----------
    table: matplotlib.Table
        the table
    cells: list[set]
        list of sets of the table coordinates
        - example: [(0,1), (0,0), (0,2)]
    hide_rest_txt: bool
        if True, hide the text of all but the first merged cell

    Notes
    ------
    https://stackoverflow.com/a/53819765/12684122
    '''
    cells_array = [np.asarray(c) for c in cells]
    # Differences between consecutive cell coordinates determine whether the
    # run of cells is horizontal (all row deltas 0) or vertical.
    h = np.array([cells_array[i+1][0] - cells_array[i][0] for i in range(len(cells_array) - 1)])
    v = np.array([cells_array[i+1][1] - cells_array[i][1] for i in range(len(cells_array) - 1)])

    # if it's a horizontal merge, all values for `h` are 0
    if not np.any(h):
        # sort by horizontal coord
        cells = np.array(sorted(list(cells), key=lambda v: v[1]))
        edges = ['BTL'] + ['BT' for i in range(len(cells) - 2)] + ['BTR']
    elif not np.any(v):
        cells = np.array(sorted(list(cells), key=lambda h: h[0]))
        edges = ['TRL'] + ['RL' for i in range(len(cells) - 2)] + ['BRL']
    else:
        raise ValueError("Only horizontal and vertical merges allowed")

    # Keep only the outer border of the merged region visible
    for cell, e in zip(cells, edges):
        table[cell[0], cell[1]].visible_edges = e
        
    txts = [table[cell[0], cell[1]].get_text() for cell in cells]
   
    if hide_rest_txt:
      for txt in txts[1:]:
          txt.set_visible(False)


  def create_table_v2(path, constellation_arr, freq_band_arr, ComboTyp_table, arr, table_name, postfix='', arr_std=None):
    # Render one band-vs-constellation table of `arr` values (optionally
    # with +/- `arr_std`) as a PNG named table_<table_name><postfix>.png.

    import pandas as pd

    merge_cell = True    
    df = pd.DataFrame()

    # define 1st col: first two chars of each band label (e.g. 'L1')
    df[table_name] = [x[:2] for x in freq_band_arr]

    # define col from 2nd to end
    for constellation in constellation_arr:
      df[constellation] = ['' for i in range(len(freq_band_arr))]

    # assign value from sparse matrix
    # NOTE(review): df[j][i] is chained indexing; pandas may warn
    # (SettingWithCopy) and .loc[i, j] would be the canonical form.
    for k,v in ComboTyp_table.items():
      value = arr[ComboTyp.index(k)]
      value_str = "{:.2f}".format(value)
      if arr_std is not None:
        std = arr_std[ComboTyp.index(k)]
        value_str += " +/-" + "{:.2f}".format(std)
      arr_str = k.split('_')
      constellation, freq = arr_str[0], '_'.join(arr_str[1:])
      j = constellation_arr[ constellation_arr.index( constellation ) ]
      i = v[0]
      df[j][i] = freq + ': ' + value_str

    # calculate list of cell lists to merge (runs of equal band labels in
    # column 0; +1 offsets skip the header row drawn by ax.table below)
    # NOTE(review): `arr` is rebound here, shadowing the data parameter.
    total_cell_list = []
    if merge_cell:
      cell_list = []
      arr = df[table_name]
      for i in range(1,len(arr)+1):
        if i != len(arr) and arr[i-1] == arr[i]:
          if cell_list == []:
            cell_list.append( (i,0) )
          cell_list.append( (i+1,0) )
        else:
          if cell_list != []:
            total_cell_list.append(cell_list)
            cell_list = []

    # draw table
    fig = plt.figure() #(figsize=(9,2))
    ax=fig.gca()
    ax.axis('off')
    r,c = df.shape
    # plot the real table (header row shaded, data rows transparent)
    table = ax.table(cellText=np.vstack([df.columns, df.values]), cellColours=[['lightgray']*c]+[['none']*c]*r, bbox=[0, 0, 1, 1])

    # need to draw here so the text positions are calculated
    fig.canvas.draw()

    # After drawing the table, merge cells with the same band: first the
    # band-label column (hiding duplicate text), then each data column.
    if merge_cell:
      for cell_list in total_cell_list:
        mergemulticells_clean_edge(table, cell_list, True)
        for j in range(1, 1+len(constellation_arr)):
          new_cell_list = []
          for cell in cell_list:
            new_cell_list.append( (cell[0],j) )
          mergemulticells_clean_edge(table, new_cell_list)
            
    img_name = path+'/table_'+table_name+postfix+'.png'
    plt.savefig(img_name,dpi=150)
    plt.close()

  # File-name suffix encodes the processed interval, if any
  postfix = '' if interval == None else '_'+str(int(interval[0]))+'_'+str(int(interval[1]))
  create_table_v2(path, constellation_arr, freq_band_arr, ComboTyp_table, DDCa_mav, 'DD_CaPh_MAV[mcycles]', postfix)
  create_table_v2(path, constellation_arr, freq_band_arr, ComboTyp_table, DDCd_mav, 'DD_Code_MAV[m]', postfix)
  create_table_v2(path, constellation_arr, freq_band_arr, ComboTyp_table, SDCNo_mean, 'SD_CNo_mean_std[dB-Hz]', postfix, SDCNo_std)


def gpsTimeToWnTow(time_str):
  """Convert a GPS time string to GPS week number (WN) and time of week (TOW).

  Input:  time_str = string like '06-Jan-1980 00:00:00' (already in the GPS
          time scale, so no leap-second correction is applied here)
  Output: dict {'wn': int GPS week number, 'tow': float seconds into the week}
  """
  # GPS epoch: 1980-01-06 00:00:00
  epoch_time = datetime(1980, 1, 6)
  # number of seconds in a week
  weekSeconds = 60 * 60 * 24 * 7

  gps_time = datetime.strptime(time_str.strip(), '%d-%b-%Y %H:%M:%S')
  # seconds elapsed since the GPS epoch
  total_sec = (gps_time - epoch_time).total_seconds()

  # Floor-divide into whole weeks; divmod guarantees 0 <= tow < weekSeconds
  # even for (unlikely) pre-epoch inputs, where truncating division would
  # have produced a negative time-of-week.
  wn, tow = divmod(total_sec, weekSeconds)

  return {'wn': int(wn), 'tow': tow}


def get_postprocessing_time_range(steps, freq_type, test_name_num):
    """ Parse the steps string into a list of start/stop time entries.
        Input:  steps = string, one python-style dict per line, eg.
                  {'t': 1830.0, 'device_name': 'cw', 'gpib_addr': 18, 'freq': 1600.0, 'pwr': -999, 'msg': 'off_end'}
                freq_type = string in ['Constant', 'Stepped', 'Customized']
                test_name_num = string, scenario id, eg. '1', '2b' etc.
        Output: list of entries; for Constant/Stepped each entry is
                [start_time, stop_time, freq], for Customized scenarios each
                entry also carries both generators' freq and power.
    """
    # Each non-blank line is a python-style dict literal; normalise the
    # quoting so it parses as JSON.
    events = []
    for line in steps.split('\n'):
        if len(line) == 0:
            continue
        events.append(json.loads(line.replace("'", "\"")))

    post_arr = []
    if freq_type in ['Constant', 'Stepped']:
        # Pair each 'on' event with the immediately following 'off' event.
        for cur, nxt in zip(events, events[1:]):
            if cur['msg'] == 'on' and nxt['msg'] == 'off':
                post_arr.append([cur['t'], nxt['t'], cur.get('freq', 0)])
    elif freq_type == 'Customized':
        # Split the log per signal generator (GPIB address 16 vs. 18).
        gpib16 = [e for e in events if e['gpib_addr'] == 16]
        gpib18 = [e for e in events if e['gpib_addr'] == 18]
        if test_name_num == '3b':
            # Both generators toggle on & off: CW at two different freqs.
            # Entry layout: [time_start, time_end, gpib16_starting_freq,
            #   gpib18_starting_freq, gpib16_starting_power, gpib18_starting_power]
            # (the events[i-1] lookback wraps to the last event when i == 0)
            for i in range(len(events) - 1):
                if events[i+1]['t'] > events[i]['t']:
                    post_arr.append([events[i]['t'], events[i+1]['t'],
                                     events[i-1]['freq'], events[i]['freq'],
                                     events[i-1]['pwr'], events[i]['pwr']])
        elif test_name_num == '3c':
            # gpib_18 holds a constant frequency while gpib_16 varies.
            const_freq = gpib18[0]['freq']
            const_pwr = gpib18[0]['pwr']
            for cur, nxt in zip(gpib16, gpib16[1:]):
                if nxt['t'] > cur['t']:
                    post_arr.append([cur['t'], nxt['t'], cur['freq'],
                                     const_freq, cur['pwr'], const_pwr])
        elif test_name_num == '11b':
            # gpib_18 varies frequency and toggles on & off; gpib_16 unused
            # (placeholder freq 1000.0 / power -999.0 are emitted for it).
            for cur, nxt in zip(gpib18, gpib18[1:]):
                if nxt['t'] > cur['t'] and nxt['msg'] == 'off' and cur['msg'] == 'on':
                    post_arr.append([cur['t'], nxt['t'], 1000.0, cur['freq'],
                                     -999.0, cur['pwr']])
    else:
        # chirp, hackrf
        print(fn() + 'Unknown freq_type:', freq_type)
        for cur, nxt in zip(events, events[1:]):
            if cur['msg'] == 'on' and nxt['msg'] == 'off':
                post_arr.append([cur['t'], nxt['t'], cur['freq']])
            else:
                print('Skip.')

    return post_arr


def compute_protection_level(t04, ref_llh, intervals, out_plot_path, time_offset):
    """
    Compute and plot RTK protection-level metrics from a .T04 file.

    Plots 3D error, 3D sigma, number of GPS SVs and FIXTYPE vs. time, shades
    the jammer-on intervals, and writes RTK_check.txt and RTK_check.png into
    out_plot_path.

    Input:    t04 = .T04 file(s)
              ref_llh = a list of reference latitude[degree], longtitude[degree] and height[meter], eg. [37 + 23/60., -(122), 0.] for Auger test
              intervals = list of pairs of on and off time
              out_plot_path = OUTPUT_DIR/plots/x/ = directory where plots go
              time_offset = offset subtracted from time in plots, unit [sec]
    :returns: (fixtype_percentiles, err_3d_percentiles, sigma_3d_percentiles),
              each holding the [95, 99, 99.9] percentiles of that series
    """
    print('Calling compute_protection_level ...')
    err_3d_threshold = 4 #[cm]
    sigma_3d_threshold = 5 #[cm]

    ref_pos = r_[ref_llh[0], ref_llh[1], ref_llh[2]] #[37 + 23/60., -(122), 0.]

    # Calculate 3D error, 3D sigma from the position records (-d35:2);
    # vd2cls/comp_3d_dist come from the mutils star-import.
    d = vd2cls(t04, '-d35:2')
    t = d[:, d.k.TIME]
    fixtype = d[:, d.k.FIXTYPE]
    err_3d = comp_3d_dist( d[:,d.k.LAT:d.k.LAT+3], ref_pos )
    sigma_3d = sqrt( d.SigU**2 + d.SigE**2 + d.SigN**2 )

    # Overall (whole-run) percentiles, returned to the caller
    fixtype_percentiles = np.percentile(fixtype, [95, 99, 99.9])
    err_3d_percentiles = np.percentile(err_3d, [95, 99, 99.9])
    sigma_3d_percentiles = np.percentile(sigma_3d, [95, 99, 99.9])
    
    # Dump the raw series for later inspection
    data_mat = np.concatenate([[t], [err_3d], [sigma_3d], [fixtype]], axis=0)
    fname = out_plot_path+'/'+'RTK_check.txt'
    hdr = "Time[sec] Error_3D[m] Sigma_3D[m] FIXTYPE[-]"
    np.savetxt(fname, data_mat.T, delimiter=' ', header=hdr)

    # Calculate number of unique GPS SVs tracked each second (-d35:19 records)
    d2,k2 = vd2arr(t04, "-d35:19")
    ticol = d2[:, k2['TIME']]
    svcol = d2[:, k2['SV']]
    sycol = d2[:, k2['SAT_TYPE']]

    sec_arr = range(int(min(ticol)), int(max(ticol)))
    nsv_arr = [0]*len(sec_arr)

    for i in range(len(sec_arr)):
        idx = np.where( (ticol==sec_arr[i])&(sycol==RTConst.RT_SatType_GPS) )
        nsv_arr[i] = len(np.unique(svcol[idx]))

    def plot_jammer_on_off(data, intervals, y_min=None, y_max=None):
        # Shade each [on, off] interval as a filled cyan rectangle spanning
        # slightly beyond the data's vertical range.
        x = []
        y = []
        plot_y_min = min(data) if y_min==None else y_min
        plot_y_max = max(data) if y_max==None else y_max
        for pair in intervals:
            for z in [pair[0],pair[0],pair[1],pair[1]]:
                x.append(z)
            for z in [plot_y_min/1.05, plot_y_max*1.05, plot_y_max*1.05, plot_y_min/1.05]:
                y.append(z)
        matplotlib.pyplot.fill( [ xi-time_offset for xi in x], y, 'c', alpha=0.3, label='Jammer on')
        plt.legend(loc=1)
    
    # Calculate RTK metrics
    # Build a boolean mask covering the epochs when the jammer is on
    interval_included_by_union = np.zeros((err_3d.size,), dtype=bool)
    for j,interval in enumerate(intervals):
        interval_included_by_union |= (t>=interval[0])&(t<=interval[1])

    # Calculate 95/99/100% error CDF during jamming (docdf/get_cdf_percents
    # come from the mutils star-import)
    i0 = find( interval_included_by_union )
    cx3, cy3 = docdf( err_3d[i0] )
    cdf95, cdf99, cdf100 = get_cdf_percents( cx3, cy3, [95,99,100] )
    print('cdf95:', cdf95)
    print('cdf99:', cdf99)
    print('cdf100:', cdf100)

    # Calculate protection level: fraction of jammed epochs whose 3D error
    # exceeds the threshold (threshold is in cm, err_3d in m)
    i1 = find( (err_3d > .01*err_3d_threshold) & interval_included_by_union )
    j1 = find( interval_included_by_union )
    len_j1 = len(j1)
    if len_j1 == 0:
        # avoid division by zero when no epoch falls inside any interval
        len_j1 = 1
    msg_3d_error = "3D error events > %d cm: %.1f%% (during jamming)\n %.1f cm @ 95%%cdf (during jamming)"%(err_3d_threshold, len(i1)*100./len_j1, 100*cdf95)
    print(msg_3d_error)

    # Same statistic for the 3D sigma series
    i2 = find( (sigma_3d > .01*sigma_3d_threshold) & interval_included_by_union)
    j2 = find( interval_included_by_union )
    len_j2 = len(j2)
    if len_j2 == 0:
        len_j2 = 1
    msg_3d_sigma = "3D sigma events > %d cm: %.1f%% (during jamming)"%(sigma_3d_threshold, len(i2)*100./len_j2)
    print(msg_3d_sigma)

    # Plot RTK metrics in a 4-row figure: 3D error, 3D sigma, #SV, FIXTYPE.
    plt.subplots(4,1)
    # Only use data past the first 120 s (after offset) for y-axis scaling
    ind = np.where(t[:]-time_offset > 120)

    plt.subplot(4,1,1)
    plt.plot(t-time_offset, 100*err_3d, '.', label='3D error')
    y_max_plot = 1.1*max(100*err_3d[ind])
    plot_jammer_on_off(100*err_3d, intervals, y_min=0.0, y_max=y_max_plot)
    plt.ylim([0, y_max_plot])
    plt.title(msg_3d_error)
    plt.ylabel('3D error[cm]')

    plt.subplot(4,1,2)
    plt.plot(t-time_offset, 100*sigma_3d, '.', label='3D sigma')
    y_max_plot = 1.1*max(100*sigma_3d[ind])
    plot_jammer_on_off(100*sigma_3d, intervals, y_min=0.0, y_max=y_max_plot)
    plt.ylim([0, y_max_plot])
    plt.title(msg_3d_sigma)
    plt.ylabel('3D sigma[cm]')

    plt.subplot(4,1,3)
    plt.plot([x-time_offset for x in sec_arr], nsv_arr, '.', label='#SV(GPS)')
    plot_jammer_on_off(nsv_arr, intervals)
    plt.ylabel('GPS #SV[-]')

    plt.subplot(4,1,4)
    plt.plot(t-time_offset, fixtype, '.', label='FIXTYPE')
    plot_jammer_on_off(fixtype, intervals)
    plt.ylabel('FIXTYPE[-]')

    plt.tight_layout()

    fname = out_plot_path+'/'+'RTK_check.png'
    print('Save', fname)
    save_compressed_png(fname)
    plt.close()
    return fixtype_percentiles, err_3d_percentiles, sigma_3d_percentiles


def plot_cno(d, k, intervals, out_plot_path, sv_elev_mask, time_offset):
    """
    Plot per-second mean CNo vs. tow for each constellation, one figure per
    RF band (L1, L2, L5), for both antennas, and shade the post-processing
    intervals. Figures are saved as cno_vs_t_<band>.png in out_plot_path.

    Input:    d = 2D array loaded from .T04 files
              k = key dictionary from .T04 files  
              intervals = list of pairs of on and off time
              out_plot_path = OUTPUT_DIR/plots/x/ = directory where plots go
              sv_elev_mask = elevation mask in degree (currently unused here)
              time_offset = offset subtracted from time in plots, unit [sec]
    """
    
    # Pull the columns we need out of the record array
    tow = d[:, k['TIME']]
    # sv_ids = d[:, k['SV']]
    rt_ids = d[:, k['SAT_TYPE']]
    rt_types = d[:, k['FREQ']]
    track_col = d[:, k['TRACK']]
    sv_elev = d[:, k['EL']]
    cno = d[:, k['CNO']]
    ant = d[:, k['ANTENNA']]

    # Running y-range over all plotted series, used for interval shading
    plot_y_min = float("inf")
    plot_y_max = -float("inf")
    
    fig = plt.figure()

    # [display name, SAT_TYPE id, band id, track id, matplotlib colour]
    plot_list = []
    plot_list.append(['GPS', RTConst.RT_SatType_GPS, RTConst.dRec27L1Band, RTConst.dRec27Track_CA, 'b'])
    plot_list.append(['GLONASS', RTConst.RT_SatType_GLONASS, RTConst.dRec27L1Band, RTConst.dRec27Track_CA, 'y'])
    plot_list.append(['Galileo', RTConst.RT_SatType_GALILEO, RTConst.dRec27L1Band, RTConst.dRec27Track_BOC_1_1_DP_MBOC, 'm'])
    plot_list.append(['Beidou', RTConst.RT_SatType_BEIDOU_B1GEOPhs, RTConst.dRec27B1Band, RTConst.dRec27Track_B1, 'g'])

    plot_list.append(['GPS', RTConst.RT_SatType_GPS, RTConst.dRec27L2Band, RTConst.dRec27Track_E, 'b'])
    plot_list.append(['GPS', RTConst.RT_SatType_GPS, RTConst.dRec27L2Band, RTConst.dRec27Track_CMCL, 'g'])
    plot_list.append(['GLN', RTConst.RT_SatType_GLONASS, RTConst.dRec27L2Band, RTConst.dRec27Track_CA, 'y'])

    plot_list.append(['GPS', RTConst.RT_SatType_GPS, RTConst.dRec27L5E5ABand, RTConst.dRec27Track_IQ, 'b'])
    plot_list.append(['Galileo', RTConst.RT_SatType_GALILEO, RTConst.dRec27L5E5ABand, RTConst.dRec27Track_BPSK10_PD, 'm'])
    plot_list.append(['Beidou', RTConst.RT_SatType_BEIDOU_B1GEOPhs, RTConst.dRec27E5BBand, RTConst.dRec27Track_B2, 'g'])

    # Group the combos per RF band
    plot_list_L1 = [ plot_list[i] for i in [0,1,2,3] ]
    plot_list_L2 = [ plot_list[i] for i in [4,5,6] ]
    plot_list_L5 = [ plot_list[i] for i in [7,8,9] ]

    for desc, plot_list_i in zip(['L1','L2', 'L5'],[plot_list_L1,plot_list_L2,plot_list_L5]):

        print(fn()+"Processing RF band =", desc)

        for rt_name, rt_id, band, track, clr in plot_list_i:

            # Gather (tow, cno) samples for this combo, per antenna
            tow_vec_ant0 = np.asarray( [] )
            snr_vec_ant0 = np.asarray( [] )
                
            idx = np.where( (rt_ids==rt_id)&(rt_types==band)&(track_col==track)&(ant==0) )
            tow_vec_ant0 = np.concatenate( (tow_vec_ant0, tow[idx]), axis=0 )
            snr_vec_ant0 = np.concatenate( (snr_vec_ant0, cno[idx]), axis=0 )

            tow_vec_ant1 = np.asarray( [] )
            snr_vec_ant1 = np.asarray( [] )
            
            idx = np.where( (rt_ids==rt_id)&(rt_types==band)&(track_col==track)&(ant==1) )
            tow_vec_ant1 = np.concatenate( (tow_vec_ant1, tow[idx]), axis=0 )
            snr_vec_ant1 = np.concatenate( (snr_vec_ant1, cno[idx]), axis=0 )

            # Method 1: plot all data
            # if (len(tow_vec_ant0)>0): plt.plot( tow_vec_ant0, snr_vec_ant0, clr+'--', lw=0.8, label=rt_name+' ant0')
            # if (len(tow_vec_ant1)>0): plt.plot( tow_vec_ant1, snr_vec_ant1, clr+'-',  lw=0.8, label=rt_name+' ant1')

            # Method 2: plot the mean CNo for every second
            for ant_i in [0,1]:
                tow_vec = tow_vec_ant0 if ant_i == 0 else tow_vec_ant1
                snr_vec = snr_vec_ant0 if ant_i == 0 else snr_vec_ant1
                
                if (len(tow_vec)>0):
                    t_arr = range(int(min(tow_vec)), int(max(tow_vec)))
                    s_arr = [0]*len(t_arr)

                    # Average all samples that fall into each 1-second bin
                    for i in range(len(t_arr)):
                        idx = np.where(tow_vec==t_arr[i])
                        if len(snr_vec[idx]) > 0:
                            s_arr[i] = np.mean(snr_vec[idx])

                    s_narr = np.asarray(s_arr[:])
                    index = np.where(s_narr[:]>5)
                    
                    if len(index[0]) == 0:
                        # If no S.D. CNo > 5, eg. sometimes for L2, skip plot
                        # NOTE(review): this returns from plot_cno entirely,
                        # skipping all remaining combos/bands and leaving the
                        # current figure unsaved — confirm this is intended
                        # rather than a `continue`.
                        return

                    plot_y_min = min(plot_y_min, min(s_narr[index]) )
                    plot_y_max = max(plot_y_max, max(s_narr[index]) )

                    if ant_i == 0:
                        plt.plot( [ x-time_offset for x in t_arr], s_arr, clr+'--', lw=0.8, label=rt_name+' '+desc+' ant0')
                    if ant_i == 1:
                        plt.plot( [ x-time_offset for x in t_arr], s_arr, clr+'-', lw=0.5, label=rt_name+' '+desc+' ant1')

        def plot_jammer_on_off(intervals):
            # Shade each [on, off] interval as a filled cyan rectangle using
            # the accumulated plot_y_min/plot_y_max range.
            x = []
            y = []
            for pair in intervals:
                for z in [pair[0],pair[0],pair[1],pair[1]]:
                    x.append(z)
                for z in [plot_y_min/1.05, plot_y_max*1.05, plot_y_max*1.05, plot_y_min/1.05]:
                    y.append(z)
            matplotlib.pyplot.fill( [ xi-time_offset for xi in x], y, 'c', alpha=0.3, label='Post-processing')

        plot_jammer_on_off(intervals)

        plt.ylim([plot_y_min, plot_y_max])
        plt.xlabel(r'Time [sec]')
        plt.ylabel(r'CNo [dB-Hz]')
        plt.legend(loc=3)
        plt.tight_layout()
        fname = out_plot_path+'/'+'cno_vs_t_'+desc+'.png'
        save_compressed_png(fname)
        plt.close()

        print(fn()+'Save to', fname)

    return


def get_sys_name_and_sv_range(sy):
    """Map a SAT_TYPE code to its display name and SV/PRN number range.

    :param sy: integer satellite-system code (as stored in the T04 data)
    :returns: (name_string, sv_range); unknown codes yield ('? ', [])
    """
    # Codes 5, 7 and 10 all map to BeiDou.
    system_table = {
        0: ('GPS ', range(1, 32)),
        1: ('SBAS ', range(120, 169)),
        2: ('GLN ', range(1, 24)),
        3: ('Gal ', range(1, 35)),
        4: ('QZSS ', range(193, 202)),
        9: ('IRNSS ', range(1, 14)),
        5: ('BDS ', range(1, 63)),
        7: ('BDS ', range(1, 63)),
        10: ('BDS ', range(1, 63)),
    }
    systr, sv_range = system_table.get(sy, ('? ', []))
    return systr, sv_range

def get_band_name(band):
    """Return the human-readable name of a rec27 frequency-band constant.

    Input:  band = one of the RTConst.dRec27*Band constants
    Output: short band name string, or 'Unknown' for unrecognised values.
    """
    # Built at call time so RTConst is resolved exactly when the original
    # if/elif chain would have resolved it.
    band_names = {
        RTConst.dRec27L1Band:          'L1',
        RTConst.dRec27L2Band:          'L2',
        RTConst.dRec27L5E5ABand:       'L5E5A',
        RTConst.dRec27E5BBand:         'E5B',
        RTConst.dRec27E5ABCentreBand:  'E5ABCentre',
        RTConst.dRec27E6Band:          'E6',
        RTConst.dRec27B1Band:          'B1',
        RTConst.dRec27B3Band:          'B3',
        RTConst.dRec27E1Band:          'E1',
        RTConst.dRec27G3Band:          'G3',
        RTConst.dRec27XPSBand:         'XPS',
        RTConst.dRec27S1Band:          'S1',
    }
    return band_names.get(band, 'Unknown')

def get_track_name(track):
    """Return the short name for a rec27 track/signal-type code.

    Input:  track = int track code, eg. 0 for CA, 42 for L1CB
    Output: name with the 'dRec27Track_' prefix stripped, eg. 3 -> 'CM',
            or 'Unknown' for unrecognised codes.
    """

    # Verbatim copy of the dRec27Track_* enum from the firmware headers.
    txt = """dRec27Track_CA             = 0
dRec27Track_P              = 1
dRec27Track_E              = 2
dRec27Track_CM             = 3
dRec27Track_CL             = 4
dRec27Track_CMCL           = 5
dRec27Track_I              = 6
dRec27Track_Q              = 7
dRec27Track_IQ             = 8
dRec27Track_Y              = 9
dRec27Track_M              = 10
dRec27Track_BPSK10_PD      = 11
dRec27Track_BPSK10_P       = 12
dRec27Track_BPSK10_D       = 13
dRec27Track_AltBOCCompPD   = 14
dRec27Track_AltBOCCompP    = 15
dRec27Track_AltBOCCompD    = 16
dRec27Track_AltBOCNonComp  = 17
dRec27Track_L2E2           = 18
dRec27Track_L1E            = 19
dRec27Track_BOC_1_1_DP     = 20
dRec27Track_BOC_1_1_P      = 21
dRec27Track_BOC_1_1_D      = 22
dRec27Track_BOC_1_1_DP_MBOC= 23
dRec27Track_BOC_1_1_P_MBOC = 24
dRec27Track_BOC_1_1_D_MBOC = 25
dRec27Track_B1             = 26
dRec27Track_B1_2           = 27
dRec27Track_B2             = 28
dRec27Track_B3             = 29
dRec27Track_SAIF           = 30
dRec27Track_LEX            = 31
dRec27Track_G3_PD          = 32
dRec27Track_G3_P           = 33
dRec27Track_G3_D           = 34
dRec27Track_XPS            = 35
dRec27Track_E6_PD          = 36
dRec27Track_E6_P           = 37
dRec27Track_E6_D           = 38
dRec27Track_L5S_PD         = 39
dRec27Track_L5S_D          = 40
dRec27Track_L5S_P          = 41
dRec27Track_L1CB           = 42"""

    # Parse the table into {code: name}.  BUGFIX: the previous loop reused
    # the name 'arr' for both the list being iterated and each split line,
    # which only worked by accident of Python's iterator semantics.
    names = {}
    for line in txt.split('\n'):
        name, _, value = line.partition('=')
        names[int(value)] = name.replace(" ", "").replace('dRec27Track_', '')
    return names.get(track, 'Unknown')
    
def get_frequency(sat_type, band):
    '''
    Input:  sat_type = int = satellite system
            band = int = frequency band
    Output: frequency [Hz] of the given satellite system + band
    Raises: RuntimeError for any unsupported system/band combination
    '''
    # Band -> frequency tables, one per supported satellite system.  Built
    # at call time so RTConst/GnssConst are resolved exactly as the original
    # if/elif chain resolved them.
    tables = {
        RTConst.RT_SatType_GPS: {
            RTConst.dRec27L1Band:    GnssConst.L1_FREQ,
            RTConst.dRec27L2Band:    GnssConst.L2_FREQ,
            RTConst.dRec27L5E5ABand: GnssConst.L5_FREQ,
        },
        RTConst.RT_SatType_GLONASS: {
            RTConst.dRec27L1Band: GnssConst.GLONASS_L1_BASE_FREQ, # ? GLONASS_L1_FREQ_OFFSET
            RTConst.dRec27L2Band: GnssConst.GLONASS_L2_BASE_FREQ, # ?
        },
        RTConst.RT_SatType_GALILEO: {
            RTConst.dRec27L1Band:           GnssConst.L1_FREQ,
            RTConst.dRec27L5E5ABand:        GnssConst.L5_FREQ,
            RTConst.dRec27E5BBand:          GnssConst.GALILEO_E5B_FREQUENCY,
            RTConst.dRec27E5ABCentreBand:   GnssConst.GALILEO_E5AltBOC_FREQUENCY,
            RTConst.dRec27E6Band:           GnssConst.GALILEO_E6_FREQUENCY,
        },
        RTConst.RT_SatType_BEIDOU_B1GEOPhs: {
            RTConst.dRec27B1Band:    GnssConst.BEIDOU_B1_FREQUENCY,
            RTConst.dRec27E5BBand:   GnssConst.GALILEO_E5B_FREQUENCY,
            RTConst.dRec27B3Band:    GnssConst.BEIDOU_B3_FREQUENCY,
            RTConst.dRec27L1Band:    GnssConst.L1_FREQ,
            RTConst.dRec27L5E5ABand: GnssConst.L5_FREQ,
        },
    }

    band_table = tables.get(sat_type)
    if band_table is None or band not in band_table:
        raise RuntimeError('Unacceptable satellite system + band. Exit')
    return band_table[band]

def cal_single_diff(d, k, rec, sy, freq, sv_range, track,
                    intervals=None, union_intervals=False,
                    jam_start=432360, jam_end=432660,
                    plot_png=False, plot_time_offset=0,
                    out_path='./', max_svs=math.inf):
    """
    Calculate single difference (ant1 - ant0) on CNo, code phase, carrier phase.
    Input:
        d = 2D array loaded from .T04 files
        k = key dictionary from .T04 files
        rec = string, eg. '-d27' or '-d35:19'
        sy = satellite type
        freq = block type (frequency)
        sv_range = list of satellite IDs, eg. [1,2,3,..32]
        track = list of track/signal types, eg. [0]; [] selects all tracks
        intervals = eg. None, [[]] or [[t0, t1],[t2,t3], ...]
                    None (the default) behaves like [[]], ie. one interval
                    covering all data
        union_intervals = True/False, union intervals or not
        jam_start, jam_end = [sec] time window used for the 'jammed' statistics
        plot_png = True/False
        plot_time_offset = offset time in plot, unit [sec]
        out_path = path to save images
        max_svs = process at most this many SVs with data on both antennas
    Returns:
        dotdict of statistics: sample counts (sd_n, ant0_n, ant1_n, sv_count),
        mean/std/MAD of S.D. CNo/code/carrier (overall and averaged by SV),
        99.9-percentile and max CNo drops, code/carrier extremes with the SV
        that produced them, and center_frequency [Hz].
    Raises:
        RuntimeError for an unknown rec, or if no S.D. CNo data was found.
    Notes:
        - tries to remove cycle slips from the carrier single difference
    """

    # Default handled here to avoid the mutable-default-argument pitfall.
    if intervals is None:
        intervals = [[]]

    ticol = d[:, k['TIME']]
    svcol = d[:, k['SV']]
    sycol = d[:, k['SAT_TYPE']]
    # elcol = d[:, k['EL']]
    rtcol = d[:, k['FREQ']]
    tkcol = d[:, k['TRACK']]
    ancol = d[:, k['ANTENNA']] # antenna id
    cncol = d[:, k['CNO']]   # CNo
    cocol = d[:, k['RANGE']] # code phase
    cacol = d[:, k['PHASE']] # carrier phase
    slcol = None # slip flag (rec27 only)
    mscol = None # meas flags (rec35:19 only)
    sccol = d[:, k['SLIP_CNT']]

    if rec == '-d27':
        # The drawback is that some data will be dropped when it is invalid to be converted to rec27.
        slcol = d[:, k['SLIP_FLG']]
    elif rec == '-d35:19':
        # The drawback is that it is time consuming to loop through meas to calculate
        # the slip flag using 'dMEAS_SLIP', 'dMEAS_PHASE_VALID' and 'dMEAS_LOCK_POINT'
        mscol = d[:, k['MEAS']]
    else:
        raise RuntimeError('Unknown record', rec)

    output_tow = np.asarray([])
    output_sd_cno = np.asarray([])
    output_sd_code = np.asarray([])
    output_sd_carrier = np.asarray([])

    output_ant0_cno_list_per_sv = []
    output_ant1_cno_list_per_sv = []

    output_tow_list_per_sv = []
    output_sd_cno_list_per_sv = []
    output_sd_code_list_per_sv = []
    output_sd_carrier_list_per_sv = []
    output_center_frequency = None

    output_max_code = None
    output_max_code_sv = None
    output_min_code = None
    output_min_code_sv = None

    output_max_carr = None
    output_max_carr_sv = None
    output_min_carr = None
    output_min_carr_sv = None

    n_MD = 0 # Missed detection
    n_FA = 0 # False alarm
    systr, _ = get_sys_name_and_sv_range(sy)

    interval_included_by_union = np.ones((ticol.size,), dtype=bool)
    if union_intervals:
        interval_included_by_union = np.zeros((ticol.size,), dtype=bool)
        for interval in intervals:
            interval_included_by_union |= (ticol>=interval[0])&(ticol<=interval[1])
        # Reset intervals to a single span covering all data.
        # BUGFIX: this line used '==' (a no-op comparison) instead of '=',
        # so the loop below re-processed the same unioned data once per
        # original interval, duplicating every appended statistic.
        intervals = [[min(ticol), max(ticol)]]

    for interval in intervals:
        print(fn()+'  Processing time interval:', interval)
        if interval == []:
            interval = [min(ticol), max(ticol)]

        sv_ctr = 0
        for sv in sv_range:
            time_interval_str = '_'+str(int(interval[0]))+'_'+str(int(interval[1]))

            # Calculate unioned track mask ([] selects every track)
            if track == []:
                track_included = np.ones((ticol.size,), dtype=bool)
            else:
                track_included = np.zeros((ticol.size,), dtype=bool)
                for track_j in track:
                    track_included |= (tkcol==track_j)

            # Calculate intervals mask
            if union_intervals:
                interval_included = interval_included_by_union
            else:
                interval_included = (ticol>=interval[0])&(ticol<=interval[1])

            ant0 = np.where( (svcol==sv)&(sycol==sy)&(ancol==0)&(rtcol==freq)&track_included&interval_included )
            ant1 = np.where( (svcol==sv)&(sycol==sy)&(ancol==1)&(rtcol==freq)&track_included&interval_included )

            if len(ant0[0])>0 and len(ant1[0])>0:
                sv_ctr += 1
                if sv_ctr > max_svs:
                    break
                # Calculate S.D. cno, code, carrier where data from both antennas overlaps in time
                def intersect_mtlb(a, b):
                    # MATLAB-style intersect: common values plus the indices of
                    # those values in each (uniquified) input.
                    a1, ia = np.unique(a, return_index=True)
                    b1, ib = np.unique(b, return_index=True)
                    c = np.intersect1d(a1, b1)
                    return c, ia[np.isin(a1, c)], ib[np.isin(b1, c)]

                a, b = ticol[ant0], ticol[ant1]
                c, ia, ib = intersect_mtlb(a, b)

                if len(c)>0:
                    # David wants ant1 - ant0
                    sdcno  = -(cncol[ant0][ia] - cncol[ant1][ib])
                    sdcode = -(cocol[ant0][ia] - cocol[ant1][ib])
                    sdcarr = -(cacol[ant0][ia] - cacol[ant1][ib])

                    slip0 = slip1 = None
                    if rec == '-d27':
                        # Get slip flag for rec27 directly
                        slip0 = slcol[ant0][ia]
                        slip1 = slcol[ant1][ib]
                    else:
                        # Calculate slip flag for rec35:19
                        meas0 = mscol[ant0][ia]
                        meas1 = mscol[ant1][ib]
                        sccol0 = sccol[ant0][ia]
                        sccol1 = sccol[ant1][ib]

                        def get_slip_index(meas_arr, slip_cnt_arr, k):
                            """
                            Condition 1: If reset_ambiguity == true, slip occurs.
                            Condition 2: If slip_cnt changes, slip occurs. (Based on monitor_sub19_slip() in derived_data_utilities.c in t01)
                            Conditions overlapping each other are fine.
                            """
                            flag1 = mask1 = k['dMEAS_SLIP']
                            flag2 = mask2 = k['dMEAS_PHASE_VALID']|k['dMEAS_LOCK_POINT']
                            slip_list = [0]
                            reset_ambiguity = True
                            p_slip = {'count':-1,'flag':0}
                            for j, meas in enumerate(meas_arr.astype('int')):
                                if (meas&mask1) == flag1:
                                    reset_ambiguity = True
                                if (meas&mask2) == flag2:
                                    if reset_ambiguity:
                                        slip_list.append(j)
                                        reset_ambiguity = False

                                if p_slip['count'] == -1:
                                    p_slip['count'] = slip_cnt_arr[j]
                                if p_slip['count'] != slip_cnt_arr[j]:
                                    p_slip['flag'] = 1
                                    p_slip['count'] = slip_cnt_arr[j]
                                if p_slip['flag'] == 1:
                                    p_slip['flag'] = 0
                                    slip_list.append(j)

                            return slip_list

                        slip0 = np.zeros(len(meas0))
                        slip0[get_slip_index(meas0, sccol0, k)] = 1
                        slip1 = np.zeros(len(meas1))
                        slip1[get_slip_index(meas1, sccol1, k)] = 1

                    # superimpose slip array from ant0 ant1
                    slips = slip0 + slip1
                    index_no_slip = np.where( slips[:]==0 )

                    # FA/MD: compare the slip flags against the epoch-to-epoch
                    # S.D. carrier jump to count missed detections and false
                    # alarms.  (BUGFIX: the scratch arrays here were sized with
                    # len(meas1), which is undefined on the '-d27' path and
                    # raised a NameError there.)
                    for i in range(1, len(slip0)-1):
                        delta_sdcarr = abs(sdcarr[i] - sdcarr[i-1])
                        delta_t = c[i] - c[i-1]

                        # Ignore epochs across data gaps (nominal rate ~10 Hz)
                        if delta_t > 0.11:
                            continue

                        # if ref (ant0) has no cycle slips
                        if slip0[i]==0:
                            if slip1[i] == 0 and delta_sdcarr >= 0.6:
                                n_MD += 1
                            elif slip1[i] == 1 and delta_sdcarr < 0.6:
                                n_FA += 1
                        if slip0[i]==1:
                            if slip1[i] == 0 and delta_sdcarr < 0.6:
                                n_FA += 1

                    def remove_mean_on_slips(sdcarr_in, time_in, slips_in):
                        # Split the carrier S.D. at every slip or data gap and
                        # subtract each segment's median, removing the ambiguity
                        # jumps between segments.
                        sdcarr_out = [0] * len(time_in)
                        dt = time_in[1:] - time_in[:-1]
                        data_rate = median(dt[np.where(dt[:]!=0)])
                        tmp1 = np.where( (dt-data_rate) > 1e-6 ) # data gaps
                        tmp2 = np.where( slips_in[:] != 0 )      # slips
                        tmp3 = [0] + list(tmp1[0]) + list(tmp2[0]) + [len(time_in)-1]

                        islips = sorted(set(tmp3)) # unique, ascending segment boundaries

                        for i in range(len(islips)-1):
                            curr_slip = islips[i]
                            next_slip = islips[i+1]
                            tmp = sdcarr_in[curr_slip+1:next_slip]
                            if (len(tmp) > 0):
                                Ncycles = median(tmp) #median(round(tmp)+0.5) - 0.5
                                sdcarr_out[curr_slip+1:next_slip] = sdcarr_in[curr_slip+1:next_slip] - Ncycles

                        return sdcarr_out, len(tmp1[0]), len(tmp2[0])

                    sdcarr, _n_jump, _n_slip = remove_mean_on_slips(sdcarr, c, slips)
                    sdcarr = np.asarray(sdcarr)

                    # NOTE(review): jammed_idx indexes the slip-FILTERED time
                    # array c[index_no_slip], but below it is applied to the
                    # UNfiltered sdcno/sdcarr arrays; it looks like the
                    # filtered arrays were intended -- confirm before changing.
                    jammed_idx = np.where( (c[index_no_slip] >= jam_start)&(c[index_no_slip] <= jam_end) )[0]

                    # Method 1: pool every epoch of every SV
                    output_tow = np.append(output_tow, c)
                    output_sd_cno = np.append(output_sd_cno, sdcno[jammed_idx])

                    output_sd_code = np.append(output_sd_code, sdcode)
                    if len(sdcode) > 0:  # BUGFIX: was len(sdcode > 0)
                        if output_max_code is None or max(sdcode) > output_max_code:
                            output_max_code = max(sdcode)
                            output_max_code_sv = sv
                        if output_min_code is None or min(sdcode) < output_min_code:
                            output_min_code = min(sdcode)
                            output_min_code_sv = sv

                    output_sd_carrier = np.append(output_sd_carrier, sdcarr[jammed_idx])
                    if len(sdcarr[jammed_idx]) > 0:  # BUGFIX: was len(sdcarr[jammed_idx] > 0)
                        if output_max_carr is None or max(np.absolute(sdcarr[jammed_idx])) > output_max_carr:
                            output_max_carr = max(np.absolute(sdcarr[jammed_idx]))
                            output_max_carr_sv = sv
                        if output_min_carr is None or min(np.absolute(sdcarr[jammed_idx])) < output_min_carr:
                            output_min_carr = min(np.absolute(sdcarr[jammed_idx]))
                            output_min_carr_sv = sv

                    # Method 2: keep per-SV series for by-SV statistics
                    output_tow_list_per_sv.append(c)
                    output_sd_cno_list_per_sv.append(sdcno)
                    output_sd_code_list_per_sv.append(sdcode)
                    output_sd_carrier_list_per_sv.append(sdcarr[jammed_idx])

                    output_center_frequency = get_frequency(sy, freq)/1.0

                    output_ant0_cno_list_per_sv.append(cncol[ant0][ia])
                    output_ant1_cno_list_per_sv.append(cncol[ant1][ib])

                    if plot_png:
                        # Per-SV 99.9% statistics shown in the figure text box
                        # (only needed when plotting)
                        sd_cno_sort = np.sort(-sdcno)
                        sd_cno_999 = sd_cno_sort[round(len(sd_cno_sort)*0.999)-1]
                        sd_cno_drop_sort = np.sort(-np.diff(sdcno))
                        sd_cno_drop_999 = sd_cno_drop_sort[round(len(sd_cno_drop_sort)*0.999)-1]

                        tt = 'S.D.cno_vs_t_'+str(systr[:-1])+'_'+get_band_name(freq)+''.join([get_track_name(x) for x in track])+'_sv_'+str(sv).zfill(2)
                        fig = plt.figure()

                        ax = fig.add_subplot(311, title=tt)
                        ax.scatter(c-plot_time_offset, sdcno, s=2, color="green", alpha=0.5, label='cno ant0-ant1')

                        textstr = 'S.D. C/No 99.9%:' + str("{0:.1f}".format(-sd_cno_999)) + '\nD.D. C/No 99.9%:' + str("{0:.1f}".format(-sd_cno_drop_999))
                        props = dict(boxstyle='round', facecolor='white', alpha=0.5)
                        ax.text(0.8, 0.01, textstr, transform=ax.transAxes, fontsize=8, verticalalignment='bottom', horizontalalignment='left', bbox=props)
                        plt.xlim( [ticol[0]-60-plot_time_offset, ticol[-1]+60-plot_time_offset] )
                        plt.ylim( min(-30,ax.get_ylim()[0]), max(5,ax.get_ylim()[1]) )
                        plt.xlabel(r'Time[sec]')
                        plt.ylabel(r'S.D. CNo[dB-Hz]')

                        y0, y1 = ax.get_ylim()
                        plt.plot( c-plot_time_offset, slip0*(y1-y0)+y0, 'm--', lw=1.5, label='slip ant0')
                        plt.plot( c-plot_time_offset, slip1*(y1-y0)+y0, 'k-', lw=0.5, label='slip ant1')

                        ax = fig.add_subplot(312)
                        ax.scatter(c-plot_time_offset, sdcode, s=2, color="blue", alpha=0.5, label='code phase ant0-ant1')

                        plt.xlim( [ticol[0]-60-plot_time_offset, ticol[-1]+60-plot_time_offset] )
                        y0, y1 = ax.get_ylim()
                        y_avg = (y0 + y1)/2
                        plt.ylim( min(y_avg-5,y0), max(y_avg+5,y1) )
                        plt.xlabel(r'Time[sec]')
                        plt.ylabel(r'S.D. Code[m]')

                        y0, y1 = ax.get_ylim()
                        plt.plot( c-plot_time_offset, slip0*(y1-y0)+y0, 'm--', lw=1.5, label='slip ant0')
                        plt.plot( c-plot_time_offset, slip1*(y1-y0)+y0, 'k-', lw=0.5, label='slip ant1')

                        ax = fig.add_subplot(313)
                        # Plot all S.D. carrier phase excluding time when slip
                        # occurs, converted from cycles to mm
                        factor = GnssConst.LIGHT/float(output_center_frequency)*1000

                        ax.scatter(c[index_no_slip]-plot_time_offset, factor*sdcarr[index_no_slip], s=2, color="red", alpha=0.5, label='S.D. carrier')

                        plt.xlim( [ticol[0]-60-plot_time_offset, ticol[-1]+60-plot_time_offset] )
                        y0, y1 = ax.get_ylim()
                        y_avg = (y0 + y1)/2
                        plt.ylim( min(y_avg-10,y0), max(y_avg+10,y1) )
                        plt.xlabel(r'Time[sec]')
                        plt.ylabel(r'S.D. Carrier[mm]')

                        y0, y1 = ax.get_ylim()
                        plt.plot( c-plot_time_offset, slip0*(y1-y0)+y0, 'm--', lw=1.5, label='slip ant0')
                        plt.plot( c-plot_time_offset, slip1*(y1-y0)+y0, 'k-', lw=0.5, label='slip ant1')

                        plt.legend(loc=1)

                        fname = out_path+'/'+tt+time_interval_str+'.png'
                        save_compressed_png(fname)
                        plt.close()

    # Plot cycle slips missed detection and false alarm
    if plot_png:
        tt = str(systr[:-1])+' '+get_band_name(freq)+'- N of Cycle Slips Missed Detection/False Alarm'
        fig = plt.figure()
        ax = fig.add_subplot(111, title=tt)
        xaxis = np.arange(1, 3)
        ax.bar(1, n_MD, label='Missed Detection')
        ax.bar(2, n_FA, label='False Alarm')
        ax.set_xticks(xaxis, ['MD','FA'])
        ax.legend(['MD','FA'])
        ax.set_ylabel('# of counts')
        max_y = max([n_MD, n_FA])
        if max_y == 0:
            max_y = 10
        ax.set_ylim(0, max_y)
        fname = out_path+'/'+str(systr[:-1])+'_'+get_band_name(freq)+'_Cycle_Slips_MD_FA.png'
        save_compressed_png(fname)
        plt.close()

    # Method 1: eg. Sum(cno[i]) / Total number of measurements, cno[i] is cno of epoch.
    if len(output_sd_cno) <= 0:
        raise RuntimeError('empty sd_cno')

    sd_cno_m = output_sd_cno.mean()
    sd_cno_std = output_sd_cno.std()
    sd_cno_mad = np.mean(np.absolute( output_sd_cno - output_sd_cno.mean() ))
    sd_cno_sort = np.sort(-output_sd_cno)
    sd_cno_999 = sd_cno_sort[round(len(sd_cno_sort)*0.999)-1]
    sd_cno_max = sd_cno_sort[-1]
    sd_cno_diff = np.diff(output_sd_cno)
    sd_cno_drop_sort = np.sort(-sd_cno_diff)
    sd_cno_drop_999 = sd_cno_drop_sort[round(len(sd_cno_drop_sort)*0.999)-1]
    sd_cno_drop_max = sd_cno_drop_sort[-1]

    sd_code_m = output_sd_code.mean()
    sd_code_std = output_sd_code.std()
    sd_code_mad = np.mean(np.absolute( output_sd_code - output_sd_code.mean() ))

    sd_carrier_m = output_sd_carrier.mean()
    sd_carrier_std = output_sd_carrier.std()
    sd_carrier_mad = np.mean(np.absolute( output_sd_carrier ))

    # Method 2: eg. Sum(cno[i]) / Total number of SVs, cno[i] is the mean of cno of SV[i].
    def _mean_per_sv(per_sv_lists):
        # One mean per SV; SVs with no data (empty arrays) are dropped.
        arr = np.asarray([ np.nan if xs.size == 0 else np.mean(xs) for xs in per_sv_lists])
        return arr[np.logical_not(np.isnan(arr))]

    arr = _mean_per_sv(output_sd_cno_list_per_sv)
    sd_cno_m__by_sv = np.mean(arr)
    sd_cno_std__by_sv = np.std(arr)
    sd_cno_mad__by_sv = np.mean(np.absolute( arr - arr.mean() ))

    arr = _mean_per_sv(output_sd_code_list_per_sv)
    sd_code_m__by_sv = np.mean(arr)
    sd_code_std__by_sv = np.std(arr)
    sd_code_mad__by_sv = np.mean(np.absolute( arr - arr.mean() ))

    arr = _mean_per_sv(output_sd_carrier_list_per_sv)
    sd_carrier_m__by_sv = np.mean(arr)
    sd_carrier_std__by_sv = np.std(arr)
    sd_carrier_mad__by_sv = np.mean(np.absolute( arr ))

    # Ant0 and Ant1 CNo statistics
    arr = _mean_per_sv(output_ant0_cno_list_per_sv)
    ant0_cno_m__by_sv = np.mean(arr)
    ant0_cno_std__by_sv = np.std(arr)

    arr = _mean_per_sv(output_ant1_cno_list_per_sv)
    ant1_cno_m__by_sv = np.mean(arr)
    ant1_cno_std__by_sv = np.std(arr)

    # Collect everything into the returned stats record.  Named 'res' rather
    # than reusing (and shadowing) the input array parameter 'd'.
    res = dotdict({})
    res.sd_n = len(output_sd_cno)
    res.sd_cno_m = sd_cno_m
    res.sd_cno_std = sd_cno_std
    res.sd_cno_mad = sd_cno_mad
    res.sd_cno_999 = -sd_cno_999
    res.sd_cno_max = -sd_cno_max
    res.sd_cno_drop_999 = -sd_cno_drop_999
    res.sd_cno_drop_max = -sd_cno_drop_max
    res.sd_code_m = sd_code_m
    res.sd_code_std = sd_code_std
    res.sd_code_mad = sd_code_mad
    res.sd_carrier_m = sd_carrier_m
    res.sd_carrier_std = sd_carrier_std
    res.sd_carrier_mad = sd_carrier_mad

    res.sd_cno_m_by_sv = sd_cno_m__by_sv
    res.sd_cno_std_by_sv = sd_cno_std__by_sv
    res.sd_cno_mad_by_sv = sd_cno_mad__by_sv
    res.sd_code_m_by_sv = sd_code_m__by_sv
    res.sd_code_std_by_sv = sd_code_std__by_sv
    res.sd_code_mad_by_sv = sd_code_mad__by_sv
    res.sd_carrier_m_by_sv = sd_carrier_m__by_sv
    res.sd_carrier_std_by_sv = sd_carrier_std__by_sv
    res.sd_carrier_mad_by_sv = sd_carrier_mad__by_sv

    res.center_frequency = output_center_frequency

    res.ant0_n = sum(len(x) for x in output_ant0_cno_list_per_sv)
    res.ant0_cno_m_by_sv = ant0_cno_m__by_sv
    res.ant0_cno_std_by_sv = ant0_cno_std__by_sv

    res.ant1_n = sum(len(x) for x in output_ant1_cno_list_per_sv)
    res.ant1_cno_m_by_sv = ant1_cno_m__by_sv
    res.ant1_cno_std_by_sv = ant1_cno_std__by_sv

    res.sv_count = sv_ctr

    res.max_code = output_max_code
    res.max_code_sv = output_max_code_sv
    res.min_code = output_min_code
    res.min_code_sv = output_min_code_sv

    res.max_carr = output_max_carr
    res.max_carr_sv = output_max_carr_sv
    res.min_carr = output_min_carr
    res.min_carr_sv = output_min_carr_sv

    return res


def loadBBSample(sbf_path = './', sbf_filename = 'log_rx0.sbf'):
    '''
    Input:
        sbf_path     = directory containing the sbf file (with trailing '/')
        sbf_filename = septentrio sbf file name
    Output:
        2d numpy array of all BBSample data, one row per BBSamples_Block:
        [meas_tow, blk_tow, ant_id, lo_freq, q_arr_string, i_arr_string]

    1. Convert .sbf to txt by >> sbf2t0x log_rx0.sbf -b > bb_sample.txt
    2. Load sbf txt to create 2d array
    '''

    bb_sample_filename = 'bb_sample.txt'

    # Run sbf2t0x.  shell=True is needed for the '>' redirection; paths here
    # are locally generated, not untrusted input.
    cmd = SBF_TO_T0X + ' ' + sbf_path + sbf_filename + ' -b > ' + sbf_path + bb_sample_filename
    print('cmd:', cmd)
    subprocess.check_call(cmd,shell=True)

    # Load BBSample txt
    #     BBS meas_epoch_tow blk_info_tow ant LOFreq(Frequency of the local oscillator) i q
    # eg. BBS 432059200 432059001 1 1231000000 -7 8
    #     BBS ...
    #     BBS ...
    # Load txt and create 2d arr, eg.
    # data = [[meas_tow, blk_tow, ant_id, lo_freq, q_arr_string, i_arr_string],
    #       ...]

    data = []
    n_bbsample_block = 0
    new_block = False
    prev_block = {}

    def _flush(block):
        # Emit one accumulated block as a flat row (q/i joined as CSV strings)
        data.append( block['header'] + [','.join(block['q'])] + [','.join(block['i'])] )

    # 'with' guarantees the handle is closed even if parsing raises
    with open(sbf_path+bb_sample_filename, 'r') as f:
        for li in f:
            if li == 'BBSamples_Block\n':
                n_bbsample_block += 1
                new_block = True
                if prev_block:
                    _flush(prev_block)
                    prev_block = {}
            else:
                arr = li.replace('\n','').split(' ')
                if arr[0] == 'BBS':
                    if new_block:
                        # First BBS line of a block carries the header fields
                        prev_block['header'] = arr[1:-2]
                        prev_block['i'] = [arr[-2]]
                        prev_block['q'] = [arr[-1]]
                    else:
                        prev_block['i'].append(arr[-2])
                        prev_block['q'].append(arr[-1])
                new_block = False

    # BUGFIX: flush the final block.  Previously a block was only emitted when
    # the NEXT 'BBSamples_Block' header was seen, so the last block in the
    # file was silently dropped.
    if prev_block:
        _flush(prev_block)

    print('n_bbsample_block =', n_bbsample_block)
    data = np.array(data)

    os.remove( sbf_path + bb_sample_filename )
    return data


def plot_sept_fft_after_input_tow(sbf_path, sbf_filename, out_plot_path, tow = 432501.501, ant_id = 1, lofreq = 1584):
    '''
    Input:
        sbf_path        = full path of .sbf file
        sbf_filename    = file name of .sbf
        out_plot_path   = path to save fig
        tow     =   float,  The closest tow(from block info) after input value will be found and be used. eg. 432000.501
        ant_id  =   int,    Antenna ID. eg. 0 or 1
        lofreq  =   int,    Frequency of the local oscillator MHz. eg. 1188, 1231, 1541, 1584
    Output:
        One block of BBsample with desired antenna id, center freqency and closest tow after
        (None if no matching block exists)
    '''

    samples = loadBBSample(sbf_path, sbf_filename)

    # Keep only the rows for the requested antenna and LO frequency [Hz]
    selected = samples[np.where( (samples[:,2] == str(ant_id)) & (samples[:,3] == str(int(lofreq*1000*1000))) )]

    if len(selected) == 0:
        print('No data found. Exit.')
        return

    # First block whose block-info TOW [msec] lies after the requested TOW
    row = next((r for r in selected if int(r[1]) > tow*1000), None)
    if row is None:
        print('No data found after input tow %f. Exit.' % tow)
        return

    blk_tow = int(row[1])                # block info tow [msec]
    center_freq = int(row[3])/1000000    # LO / center frequency [MHz]

    # Q and I sample streams are stored as comma-separated strings
    septq = [int(v) for v in row[-2].split(',')]
    septi = [int(v) for v in row[-1].split(',')]

    # Tukey window: raised-cosine taper over the first/last 50 of 2000 samples
    taper = [(1 - math.cos(math.pi * (j + 0.5) / 50)) / 2 for j in range(50)]
    window = np.asarray([1.0] * 2000)
    window[:50] = taper
    window[-50:] = taper[::-1]

    iq = np.asarray([complex(septi[j], septq[j]) for j in range(2000)])

    # Zero-padded 2048-point FFT, normalized by half the window length
    A = fft(iq*window, 2048) / (len(window)/2.0)
    sampling_freqency = 60 # [MHz] No other value is seen by septentrio
    freq = np.linspace(center_freq-sampling_freqency/2, center_freq+sampling_freqency/2, len(A))
    response = 20 * np.log10(np.abs(fftshift(A)))

    plt.figure()
    plt.plot(freq, response)
    plt.grid()
    plt.title("TOW[msec]=%d, ANT_ID=%d, Freq[MHz]=%d" % (blk_tow, ant_id, lofreq))
    plt.ylabel("Magnitude [dB]")
    plt.xlabel("Frequency [MHz]")

    fname = out_plot_path+'/'+'FFT.png'
    save_compressed_png(fname)
    plt.close()
    print(fn()+'Save to', fname)

    return row


# Non-interactive function that extracts FFT data from a .sbf and plots it
def plot_fft_sbf_data_png(sbf_path, sbf_filename,fig_title,out_fileroot=None,minpower=30,maxpower=45,dpi=300,antenna_num=1):
    '''
    Plot a 2D FFT (time vs. frequency power map) per RF band from a .sbf file.

    One PNG is produced per band in [L5, L2, B1, L1] for which BBSample
    records exist at the requested antenna.

    :param sbf_path: Directory containing the .sbf file
    :param sbf_filename: Name of the .sbf file
    :param fig_title: Figure title prefix; the band label is appended
    :param out_fileroot: Output PNG path root; if None the name is derived
                         from sbf_filename (band label and .png appended)
    :param minpower: Color scale minimum [dB]
    :param maxpower: Color scale maximum [dB]
    :param dpi: Resolution passed to save_compressed_png
    :param antenna_num: Antenna id to select from the BBSample records
    :returns: None
    '''
    center_freqs = [1188, 1231, 1541, 1584]  # [MHz]
    band_labels = ['L5','L2','B1','L1']

    data = loadBBSample(sbf_path, sbf_filename)

    # The Tukey window edges are loop-invariant: build the window once
    # instead of once per BBSample row.
    tukeywn = [(1 - math.cos(math.pi * (i + 0.5) / 50)) / 2 for i in range(50)]
    window = [1]*2000
    window[:50] = tukeywn
    window[-50:] = tukeywn[::-1]
    window = np.asarray(window)

    for band, lofreq in enumerate(center_freqs):

        # Select rows for this antenna and LO frequency (fields are strings)
        index = np.where( (data[:,2] == str(antenna_num)) & (data[:,3] == str(int(lofreq*1000*1000))) )
        rows = data[index]

        if len(rows) == 0:
            print('No data found at %s. Continue.' % band_labels[band])
            continue

        freq_mhz = None
        sec = []   # block time-of-week [sec], one per row
        d0 = []    # FFT magnitude response [dB], one per row

        for row in rows:

            blk_tow = int(row[1]) # msec
            center_freq = int(row[3])/1000000 # MHz

            # Load Q, I baseband samples (comma-separated ints in the last two fields)
            septq = [int(x) for x in (row[-2]).split(',')]
            septi = [int(x) for x in (row[-1]).split(',')]

            # Complex baseband signal from the first 2000 I/Q pairs
            d = np.asarray(septi[:2000]) + 1j * np.asarray(septq[:2000])

            # fft pads the windowed 2000-sample input with zeros out to 2048
            A = fft(d*window, 2048) / (len(window)/2.0)
            sampling_frequency = 60 # [MHz] No other value is seen by septentrio
            freq = np.linspace(center_freq-sampling_frequency/2, center_freq+sampling_frequency/2, len(A))
            response = 20 * np.log10(np.abs(fftshift(A)))

            freq_mhz = freq
            sec.append(int(blk_tow/1000))
            d0.append(response)

        if out_fileroot is None:
            # Derive the output name from the input name
            png_filename = sbf_filename[:-3] + '-' + band_labels[band] + '.png'
        else:
            png_filename = out_fileroot + '-' + band_labels[band] + '.png'

        fig,ax=subplots()
        X,Y=meshgrid(freq_mhz, sec)
        if 'cb' in dir(ax):  # drop a stale colorbar if the axes carries one
            ax.cb.remove()

        plt_im = ax.pcolormesh(X,Y,d0,vmin=minpower,vmax=maxpower,cmap='rainbow')
        ax.cb = colorbar(plt_im, ax=ax, orientation='vertical')
        ax.cb.set_label('Mean Power [dB]')

        ax.set_xlabel('Freq [MHz]')
        ax.set_ylabel('Time [Sec]')
        title(fig_title + ' - ' + band_labels[band])
        tight_layout()
        print('Save', png_filename)
        save_compressed_png(png_filename, dpi=dpi)
        plt.close()
  

def generate_results_summary(data, rx_run, test_name_num, notes):
    """
    Generates a human readable results summary of the data. Output is saved 
    under DataDir/RX#-####/humanReadable.txt (or humanReadableOutlier if an
    outlier was detected). A copy of the summary is printed to console.

    :param data: The value from stats.sd — nested dict keyed
                 constellation -> band -> track -> interval, whose leaf
                 entries hold the statistics printed below
    :param rx_run: Receiver and run combo in the format: RX#-####
    :param test_name_num: Anti-jam test scenario name (e.g. '1', '2a', ...)
    :param notes: Free-form notes string included in the header
    :returns: None
    """
    summary_txt = (f"{rx_run}\n"
                   f"Test Scenario #{test_name_num}\n"
                   f"desc {'temp'}\n"
                   f"Notes: {notes}\n")

    has_outlier = False
    for sat_type in data:
        summary_txt += (f"\nConstellation:  {sat_type}\n")
        for band in data[sat_type]:
            summary_txt += (f"{'':4}Band:  {band}\n")
            for track in data[sat_type][band]:
                summary_txt += (f"{'':8}Track:  {track}\n")

                # Only the first interval entry per track is summarized
                entry = list(data[sat_type][band][track].items())[0][1]

                max_code_txt = ""
                min_code_txt = ""
                max_carr_txt = ""
                ant0_slip_txt = ""
                ant1_slip_txt = ""
                if entry['max_code'] > entry['code']['mean'] + 2:
                    max_code_txt = f"{'':8}*** EXCEEDS +2 METERS FROM MEAN ***"
                    has_outlier = True
                if entry['min_code'] < entry['code']['mean'] - 2:
                    min_code_txt = f"{'':8}*** EXCEEDS -2 METERS FROM MEAN ***"
                    has_outlier = True
                if entry['max_carr'] > 0.03:
                    max_carr_txt = f"{'':3}*** EXCEEDS +0.03 CYCLE SLIPS ***"
                    has_outlier = True
                # Compare slip counts by cross-multiplication instead of
                # division so a zero count on one antenna cannot raise
                # ZeroDivisionError; a nonzero count against a zero count is
                # an infinite ratio and therefore still flagged.
                if entry['ant0_slips'] > 0 and entry['ant0_slips'] >= 2 * entry['ant1_slips']:
                    ant0_slip_txt = f"{'':19}*** EXCEEDS 2x SLIPS ON ANTENNA #0 ***"
                    has_outlier = True
                if entry['ant1_slips'] > 0 and entry['ant1_slips'] >= 2 * entry['ant0_slips']:
                    ant1_slip_txt = f"{'':19}*** EXCEEDS 2x SLIPS ON ANTENNA #1 ***"
                    has_outlier = True

                summary_txt += (
                    f"{'':12}Number of SVs:  {entry['sv_count']}\n"
                    f"{'':12}Mean single-diff C/N0:   {entry['cno']['mean']:+5.1f} dB\n"
                    f"{'':12}Mean single-diff code:   {entry['code']['mean']:+5.2f} m\n"
                    f"{'':28}Max:     {entry['max_code']:+5.2f} m on SV{entry['max_code_sv']:2}{max_code_txt}\n"
                    f"{'':28}Min:     {entry['min_code']:+5.2f} m on SV{entry['min_code_sv']:2}{min_code_txt}\n"
                    f"{'':12}Mean single-diff carr:   {entry['carrier']['mad']:5.3f} cycles\n"
                    f"{'':28}Max:     {entry['max_carr']:5.3f} cycles on SV{entry['max_carr_sv']:2}{max_carr_txt}\n"
                    f"{'':28}Min:     {entry['min_carr']:5.3f} cycles on SV{entry['min_carr_sv']}\n"
                    f"{'':12}Cycle slips antenna #0:  {int(entry['ant0_slips']):4}{ant0_slip_txt}\n"
                    f"{'':24}antenna #1:  {int(entry['ant1_slips']):4}{ant1_slip_txt}\n")

    print(summary_txt)
    # if it contains outliers, save with Outlier
    if has_outlier:
        output_path = f"DataDir/{rx_run}/humanReadableOutlier.txt"
    else:
        output_path = f"DataDir/{rx_run}/humanReadable.txt"
    with open(output_path, "w") as f: 
        f.write(summary_txt)


def analyze_single_rx( tree, rx_plus_run ):
    """
    Analyzes a single run for a single receiver

    If the scenario config file say so, run NoPi on data.
    Compare position residuals to truth.

    :param tree: ET.ElementTree(RESULTS_DIR/rx_plus_run.xml)
    :param rx_plus_run: Receiver and run combo in the format: RX#-####

    :returns: info on RX/config file and data directory, e.g.:
              (DataDir/latest-RX9-2018-10-02-tree100, DataDir/RX9-567)
    """
    print("This is analyze_single_rx()")
    
    in_file_name  = f'{RESULTS_DIR}/{rx_plus_run}.xml'
    data_dir      = f'{DATA_DIR}/{rx_plus_run}'
    out_nopi_path = f'{OUTPUT_DIR}/{rx_plus_run}'
    out_plot_path = f'{OUTPUT_DIR}/plots/{rx_plus_run}'

    rx_name       = rx_plus_run.split('-')[0]

    output = None

    if tree.find('skipPost') is not None:
        print("Skip processing", in_file_name)
        return None

    data_dir_bytes = sum([d.stat().st_size for d in os.scandir(data_dir) 
                                           if d.is_file()])
    if data_dir_bytes / 1e6 > 500.0:
        print(f"Skip processing {in_file_name} because it is huge")
        return None

    # Process any new data
    print("Processing", in_file_name)

    # TODO consider removing to save space? This gets config at time of processing, not run
    shutil.copy('config.xml', data_dir)

    tmp_all_T04 = data_dir + '/.rcvr.T04'

    # convert or merge T04
    if tree.find('septentrio') is not None:
        print("This is septentrio data.", rx_name)
        tmp_all_T04 = data_dir + '/rcvr.T04'

        # Run sbf2t0x
        rx_name_lower = rx_name.replace("RX", "rx")
        cmd = (f'{SBF_TO_T0X} '
               f'{os.getcwd()}/{data_dir}/log_{rx_name_lower}.sbf '
               f'{os.getcwd()}/{data_dir}/rcvr.T04')
        print(cmd)
        subprocess.check_call(cmd, shell=True)

    elif tree.find('novatel') is not None:
        print("This is novatel data.", rx_name)
        tmp_all_T04 = data_dir + '/rcvr.T04'

        # Run novatel2t0x 
        cmd = (f'{NOVATEL_TO_T0X} '
               f'{os.getcwd()}/{data_dir}/{rx_name}_obs.txt '
               f'{os.getcwd()}/{data_dir}/rcvr.T04')
        print(cmd)
        subprocess.check_call(cmd, shell=True)

    else:
        with open(tmp_all_T04, "wb") as f_rcvr:
            for fin_name in sorted( glob.glob( data_dir + '/*.T0?') ):
                with open(fin_name, 'rb') as fin:
                    shutil.copyfileobj( fin, f_rcvr )

    # Find test start time
    el = tree.find('StartTime')
    if el is not None and el.text:
        start_date = el.text
    else:
        start_date = time.strftime( "%Y-%m-%d %H:%M:%S",
                                    time.localtime(os.path.getmtime(in_file_name)) )

    # Find scenario start time tow
    el = tree.find('ScnStartTime')
    if el is not None and el.text:
        d = gpsTimeToWnTow(el.text) # eg. el.text = '02-Oct-2020 00:00:00'
        scn_start_time_tow = d['tow']
    else:
        scn_start_time_tow = None

    # Find notes
    notes = tree.find('notes').text if tree.find('notes') is not None else ""
    detailed_notes = "" # in run_para below

    # Find 
    # - anti jam test name: 0 is unknown, the rest test name is 1, 2a, 2b, 3a, 3b, 4 etc
    # - test parameter is default or not
    test_name_num = '0'
    test_para_default = False
    test_do_rtk = False
    run_para = tree.find('run_para')
    if run_para is not None:
        txt = run_para.text
        d = json.loads(txt)
        if 'scenario' in d:
            test_name_num = d['scenario']
        if 'default_scenario_parameters' in d:
            test_para_default = d['default_scenario_parameters']
        if 'do_rtk' in d:
            test_do_rtk = True
        if 'detailed_notes' in d:
            detailed_notes = d['detailed_notes']

    # Find frequency type
    el = tree.find('freq_type')
    if el is not None and el.text:
        freq_type = el.text
    else:
        freq_type = 'Unknown'
        
    if freq_type not in ['Constant', 'Stepped', 'Customized']:
        print(f"Unknown frequency type: {freq_type}. Skip.")
        return

    # Find receiver's filter type
    filter_type = 'Unknown'
    el = tree.find('filter_type')
    if el is not None and el.text:
        filter_type = el.text
        print('Receiver filter type =', filter_type)

    if os.path.isdir( out_plot_path ):
        shutil.rmtree( out_plot_path )
    os.makedirs( out_plot_path )

    ###################
    # Post-processing #
    ###################

    stats = default_stats()
    do_nopi = False
    config = None

    if freq_type is None or scn_start_time_tow is None:
        print("Old run without frequency type or scenario start time. Skip.")
        return None
  
    ##################
    # Find intervals #
    ##################
    print("Process scn_start_time_tow:", scn_start_time_tow)
    print(data_dir + "/steps.txt")
    with open(data_dir + '/steps.txt', 'r') as f:  
        data = f.read()

    trimmed_sec = 1
    intervals = []
    intervals_freq = []
    intervals_power = ['']
    try:
        intervals_jamming = get_postprocessing_time_range(data, freq_type, test_name_num)
        intervals = [[intervals_jamming[0][0], intervals_jamming[-1][1]]]
        print("Full time intervals:", str(intervals_jamming))
        intervals_freqs_total = 0
        times_total = 0
        for ele in intervals_jamming:
            # FIXME use dictionary instead of hardcoded format
            intervals_freqs_total += ele[2] * (ele[1] - ele[0])
            times_total += ele[1] - ele[0]
            # scenario 3b, 3c FIXME
            #intervals_freq.append(str(ele[2])+'_'+str(ele[3]))
            #intervals_power.append(str(ele[4])+'_'+str(ele[5]))
        intervals_freq = [str(intervals_freqs_total / times_total)]
    except:
        print("Failed to get time intervals for %s !! " % data_dir)
        raise RuntimeError("Failed to get time intervals")

    t04 = data_dir+'/*.T04'
    if tree.find('septentrio') is not None or tree.find('novatel') is not None:
        t04 = os.getcwd() + '/' + tmp_all_T04

    if not os.path.isdir(out_nopi_path):
        os.mkdir(out_nopi_path)
    
    if not os.path.isdir(out_plot_path): # Todo this is probably unnecessary
        os.mkdir(out_plot_path)

    ##############################
    # Additional Post-processing #
    ##############################

    # 0.b For septentrio, plot FFT from BBSample
    if tree.find('septentrio') is not None:
        sbf_path = os.getcwd() + '/' + data_dir + '/'
        sbf_filename = 'log_%s.sbf' % rx_name.replace("RX","rx")

        AfterIM = -1
        BBSample_mode = 'Unknown'
        try:
            AfterIM = int(tree.find('AfterIM').text)

            if AfterIM == 0:
                BBSample_mode = 'BeforeIM'
            if AfterIM == 1:
                BBSample_mode = 'AfterIM'
            print("BBSampling mode %s" % (BBSample_mode))

            tow = 432000.000 + 500
            ant_id = 1
            lofreq = 1584
            row = plot_sept_fft_after_input_tow(sbf_path, sbf_filename, out_plot_path, tow, ant_id, lofreq)
            if row is not None:
                print("Save fft plot for ant %d at tow = %d msec" % (ant_id, int(row[1])))

            # Plot 2D FFT
            fig_title = '2D_FFT_'+BBSample_mode
            plot_fft_sbf_data_png(sbf_path, sbf_filename,fig_title,out_fileroot=out_plot_path+'/'+fig_title,minpower=-30,maxpower=0,dpi=300,antenna_num=1)

        except Exception as error:
            print('No <AfterIM> tag in xml. Skip plotting FFT.')
            print(error)

    # For Auger data
    if tree.find('septentrio') is None and tree.find('novatel') is None:
        ant_id = 1
        print("Save 2D FFT plot for device %s ant %d at %s" % (rx_name, ant_id, out_plot_path+'/.'))
        try:
            plot_fft_data_png(t04, '2D_FFT_BeforeIM', out_fileroot=out_plot_path+'/2D_FFT_BeforeIM', minpower=10,maxpower=35, sample_point=1, antenna_num=ant_id, rec='-d35:24')
        except:
            print("Calling plot_fft_data_png() with -d35:24 failed. Try -d35:25.")
            try:
                plot_fft_data_png(t04, '2D_FFT_BeforeIM', out_fileroot=out_plot_path+'/2D_FFT_BeforeIM', minpower=10,maxpower=35, sample_point=1, antenna_num=ant_id, rec='-d35:25')
            except:
                print("Calling plot_fft_data_png() with -d35:25 failed. Skip.")
        
        try:
            plot_fft_data_png(t04, '2D_FFT_AfterIM', out_fileroot=out_plot_path+'/2D_FFT_AfterIM', minpower=10,maxpower=35, sample_point=2, antenna_num=ant_id, rec='-d35:24')
        except:
            print("Calling plot_fft_data_png() with -d35:24 failed. Try -d35:25.")
            try:
                plot_fft_data_png(t04, '2D_FFT_AfterIM', out_fileroot=out_plot_path+'/2D_FFT_AfterIM', minpower=10,maxpower=35, sample_point=2, antenna_num=ant_id, rec='-d35:25')
            except:
                print("Calling plot_fft_data_png() with -d35:25 failed. Skip.")

    # 0.c Do additional post-processing for Auger with RTK
    intervals_jamming = [ [pair[0]+scn_start_time_tow, pair[1]+scn_start_time_tow] for pair in intervals_jamming]
    fixtype_percentiles = 0
    err_3d_percentiles = 0
    sigma_3d_percentiles = 0
    if test_do_rtk and tree.find('novatel') == None:
        fixtype_percentiles, err_3d_percentiles, sigma_3d_percentiles = compute_protection_level(t04, [37 + 23/60., -(122), 0.], intervals_jamming, out_plot_path, time_offset=scn_start_time_tow)

    ####################################################################################################
    # Plot CNo vs. time from ant0,1 for L1 RF band with indication of time interval for postprocessing #
    ####################################################################################################
    
    rec = "-d35:19"
    
    try:
        print('Load file at', t04, "with rec =",rec)
        d,k = vd2arr(t04, rec)
        d2 = d
        k2 = k
        if (rx_plus_run.split('-')[0] == 'RX9' or rx_plus_run.split('-')[0] == 'RX10' or rx_plus_run.split('-')[0] == 'RX11'):
            print("changing ant0 for silk antenna to ant1")
            d[:, k['ANTENNA']] = 1
            print("loading fake antenna")
            known_t04 = f'{DATA_DIR}/RX6-{rx_plus_run.split("-")[1]}/*.T04'
            append_d, append_k = vd2arr(known_t04, rec)
            print("vd2arr loaded")
            append_indices = np.where(append_d[:, append_k['ANTENNA']] == 0)
            print(append_indices)
            to_append = append_d[append_indices]
            print(d)
            print(to_append)
            d = np.append(d, to_append, axis=0)
            print("d appended")
    except:
        print('Load file failed. Skip.')
        return None

    # Get time range
    tow = d[:, k['TIME']]
    [tow_start, tow_end] =  [min(tow), max(tow)]

    # 1 Plot CNO from both antenna
    plot_cno(d, k, intervals_jamming, out_plot_path, sv_elev_mask=0, time_offset=scn_start_time_tow)

    #######################################
    # Postprocessing                      #
    # S.D. CNo, code-phase, carrier-phase #
    #######################################

    combos_all = []
    #GPS L1CA, L2E, L2C, L5
    combos_all.append( default_combo('GPS', RTConst.RT_SatType_GPS, 
                       RTConst.dRec27L1Band, [RTConst.dRec27Track_CA]) )
    combos_all.append( default_combo('GPS', RTConst.RT_SatType_GPS, RTConst.dRec27L2Band, [RTConst.dRec27Track_P]) )
    combos_all.append( default_combo('GPS', RTConst.RT_SatType_GPS, RTConst.dRec27L2Band, [RTConst.dRec27Track_E]) )
    combos_all.append( default_combo('GPS', RTConst.RT_SatType_GPS, RTConst.dRec27L2Band, [RTConst.dRec27Track_CM]) )
    combos_all.append( default_combo('GPS', RTConst.RT_SatType_GPS, RTConst.dRec27L2Band, [RTConst.dRec27Track_CL]) )
    combos_all.append( default_combo('GPS', RTConst.RT_SatType_GPS, RTConst.dRec27L2Band, [RTConst.dRec27Track_CMCL]) )
    combos_all.append( default_combo('GPS', RTConst.RT_SatType_GPS, RTConst.dRec27L5E5ABand, [RTConst.dRec27Track_I]) )
    combos_all.append( default_combo('GPS', RTConst.RT_SatType_GPS, RTConst.dRec27L5E5ABand, [RTConst.dRec27Track_Q]) )
    combos_all.append( default_combo('GPS', RTConst.RT_SatType_GPS, RTConst.dRec27L5E5ABand, [RTConst.dRec27Track_IQ]) )
    #GLO G1C, G1P, G2C, G2P
    combos_all.append( default_combo('GLN', RTConst.RT_SatType_GLONASS, RTConst.dRec27L1Band, [RTConst.dRec27Track_CA]) )
    combos_all.append( default_combo('GLN', RTConst.RT_SatType_GLONASS, RTConst.dRec27L1Band, [RTConst.dRec27Track_P]) )
    combos_all.append( default_combo('GLN', RTConst.RT_SatType_GLONASS, RTConst.dRec27L2Band, [RTConst.dRec27Track_CA]) )
    combos_all.append( default_combo('GLN', RTConst.RT_SatType_GLONASS, RTConst.dRec27L2Band, [RTConst.dRec27Track_P]) )
    #GAL E1, E5A, E5B, E5AltBOC, E6
    combos_all.append( default_combo('Gal', RTConst.RT_SatType_GALILEO, RTConst.dRec27L1Band, [RTConst.dRec27Track_BOC_1_1_DP,
                                                                                                RTConst.dRec27Track_BOC_1_1_D,
                                                                                                RTConst.dRec27Track_BOC_1_1_P,
                                                                                                RTConst.dRec27Track_BOC_1_1_DP_MBOC,
                                                                                                RTConst.dRec27Track_BOC_1_1_D_MBOC,
                                                                                                RTConst.dRec27Track_BOC_1_1_P_MBOC]) )
    combos_all.append( default_combo('Gal', RTConst.RT_SatType_GALILEO, RTConst.dRec27L5E5ABand, [RTConst.dRec27Track_BPSK10_PD,
                                                                                                  RTConst.dRec27Track_BPSK10_P,
                                                                                                  RTConst.dRec27Track_BPSK10_D]) )
    combos_all.append( default_combo('Gal', RTConst.RT_SatType_GALILEO, RTConst.dRec27E5BBand, [RTConst.dRec27Track_BPSK10_PD,
                                                                                                RTConst.dRec27Track_BPSK10_P,
                                                                                                RTConst.dRec27Track_BPSK10_D]) )
    combos_all.append( default_combo('Gal', RTConst.RT_SatType_GALILEO, RTConst.dRec27E5ABCentreBand, [RTConst.dRec27Track_AltBOCCompPD,
                                                                                                        RTConst.dRec27Track_AltBOCCompP,
                                                                                                        RTConst.dRec27Track_AltBOCCompD,
                                                                                                        RTConst.dRec27Track_AltBOCNonComp]) )
    combos_all.append( default_combo('Gal', RTConst.RT_SatType_GALILEO, RTConst.dRec27E6Band, [RTConst.dRec27Track_E6_PD,
                                                                                                RTConst.dRec27Track_E6_P,
                                                                                                RTConst.dRec27Track_E6_D]) )
    #BDS B1, B2, B3, B1C, B2A, B2B
    combos_all.append( default_combo('BDS', RTConst.RT_SatType_BEIDOU_B1GEOPhs, RTConst.dRec27B1Band, [RTConst.dRec27Track_B1]) )
    combos_all.append( default_combo('BDS', RTConst.RT_SatType_BEIDOU_B1GEOPhs, RTConst.dRec27E5BBand, [RTConst.dRec27Track_B2]) )
    combos_all.append( default_combo('BDS', RTConst.RT_SatType_BEIDOU_B1GEOPhs, RTConst.dRec27B3Band, [RTConst.dRec27Track_B3]) )
    combos_all.append( default_combo('BDS', RTConst.RT_SatType_BEIDOU_B1GEOPhs, RTConst.dRec27L1Band, [RTConst.dRec27Track_BOC_1_1_DP,
                                                                                                        RTConst.dRec27Track_BOC_1_1_D,
                                                                                                        RTConst.dRec27Track_BOC_1_1_P,
                                                                                                        RTConst.dRec27Track_BOC_1_1_DP_MBOC,
                                                                                                        RTConst.dRec27Track_BOC_1_1_D_MBOC,
                                                                                                        RTConst.dRec27Track_BOC_1_1_P_MBOC]) )
    combos_all.append( default_combo('BDS', RTConst.RT_SatType_BEIDOU_B1GEOPhs, RTConst.dRec27L5E5ABand, [RTConst.dRec27Track_I,
                                                                                                          RTConst.dRec27Track_Q,
                                                                                                          RTConst.dRec27Track_IQ]) )
    combos_all.append( default_combo('BDS', RTConst.RT_SatType_BEIDOU_B1GEOPhs, RTConst.dRec27E5BBand, [RTConst.dRec27Track_I,
                                                                                                        RTConst.dRec27Track_Q,
                                                                                                        RTConst.dRec27Track_IQ]) )
    # 2. Plot data(S.D. cno, code, carrier) per satellite for GPS only currently
    #combo = []#default_combo('GPS', RTConst.RT_SatType_GPS, RTConst.dRec27L1Band, [RTConst.dRec27Track_CA])
    #for combo in combos_all:
    #    print(fn()+"Input combo:", combo)
    #    sat_type, band, track = combo.sat_type, combo.band, combo.track
    #    _, sv_range = get_sys_name_and_sv_range(sat_type)
    #    t0 = time.time()
    #    try:
    #        print("#2")
    #        print(intervals_jamming)
    #        stat_i = cal_single_diff(d, k, rec, sat_type, band, sv_range, track, [[tow_start, tow_end]], jam_start=intervals_jamming[0][0], jam_end=intervals_jamming[-1][1], plot_png=True, plot_time_offset=scn_start_time_tow, out_path=out_plot_path)
    #    except Exception as error:
    #        print("cal_single_diff() failed, Skip.")
    #        print(error)
    #        continue
    #    print('  It takes [sec] ', time.time() - t0)

    # 3. Calculate data(S.D. cno, code, carrier) for all intervals 
    combos = []
    combos.append( default_combo('GPS', RTConst.RT_SatType_GPS,     RTConst.dRec27L1Band, [RTConst.dRec27Track_CA]) )
    combos.append( default_combo('GLN', RTConst.RT_SatType_GLONASS, RTConst.dRec27L1Band, [RTConst.dRec27Track_CA]) )
    combos.append( default_combo('Gal', RTConst.RT_SatType_GALILEO, RTConst.dRec27L1Band, [RTConst.dRec27Track_BOC_1_1_DP_MBOC]) )
    
    stats.version = '1.1'
    try:
        for combo in combos_all:
            print("Input combo:", combo)
            sat_type, band, track = combo.sat_type, combo.band, combo.track
            sat_sys_name = combo.sat_sys_name
            band_name = get_band_name(band)
            track_name = get_track_name(track[0]) # FIXME if needed
            _, sv_range = get_sys_name_and_sv_range(sat_type)

            for interval, interval_freq, interval_power in zip(intervals, intervals_freq, intervals_power):
                try:
                    stat_i = cal_single_diff(d, k, rec, sat_type, band, sv_range, track, [[tow_start, tow_end]], jam_start=intervals_jamming[0][0], jam_end=intervals_jamming[-1][1], plot_png=True, plot_time_offset=scn_start_time_tow, out_path=out_plot_path)
                except Exception as error:
                    print("cal_single_diff() failed at interval",interval, interval_freq,". Skip.")
                    print(error)
                    continue
                try:
                    ###############
                    # cycle slips #
                    ###############
                    slip_t_start = d[0,k.TIME]
                    if slip_t_start < scn_start_time_tow:
                        slip_t_start = scn_start_time_tow
                    slip_t_end = d[-1,k.TIME]
                    slip_interval = [ [slip_t_start, slip_t_end] ]
                    ant0, ant1 = cycle_slip_analysis(d, k, rec='-d35:19',sy=sat_type,freq=band,track=track,out_plot_path=out_plot_path,intervals=slip_interval, plot_time_offset=scn_start_time_tow)
                    if ant0.size == 0:
                        ant0_slips = 0
                        ant0_sliprate = 0
                    else:
                        ant0_slips = sum(ant0[:, 4])
                        ant0_sliprate = mean(ant0[:, 5])

                    if ant1.size == 0:
                        ant1_slips = 0
                        ant1_sliprate = 0
                    else:
                        ant1_slips = sum(ant1[:, 4])
                        ant1_sliprate = mean(ant1[:, 5])

                    total_slips = ant0_slips + ant1_slips # sum total slips from both antennas
                    avg_slip_rate = (ant0_sliprate + ant1_sliprate) / 2 # todo maybe the wrong way to get this
                except Exception as error:
                    print("cycle_slip_analysis() failed at interval",interval, interval_freq,". Skip.")
                    print(error)
                    continue

                if sat_sys_name not in stats.sd:
                    stats.sd[sat_sys_name] = {}
                if band_name not in stats.sd[sat_sys_name]:
                    stats.sd[sat_sys_name][band_name] = {}
                if track_name not in stats.sd[sat_sys_name][band_name]:
                    stats.sd[sat_sys_name][band_name][track_name] = {}

                interval_index = str(int(interval[0])) + '_' + str(int(interval[1]))
                
                stats.sd[sat_sys_name][band_name][track_name][interval_index] = {}
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['interval_freq'] = interval_freq
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['interval_power'] = interval_power
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['n'] = 0 if np.isnan(stat_i.sd_n) else stat_i.sd_n
                
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['cno'] = {}
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['cno']['mean'] = 0 if np.isnan(stat_i.sd_cno_m) else stat_i.sd_cno_m
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['cno']['std'] = 0 if np.isnan(stat_i.sd_cno_std) else stat_i.sd_cno_std
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['cno']['mad'] = 0 if np.isnan(stat_i.sd_cno_mad) else stat_i.sd_cno_mad

                stats.sd[sat_sys_name][band_name][track_name][interval_index]['code'] = {}
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['code']['mean'] = 0 if np.isnan(stat_i.sd_code_m) else stat_i.sd_code_m
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['code']['std'] = 0 if np.isnan(stat_i.sd_code_std) else stat_i.sd_code_std
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['code']['mad'] = 0 if np.isnan(stat_i.sd_code_mad) else stat_i.sd_code_mad

                stats.sd[sat_sys_name][band_name][track_name][interval_index]['carrier'] = {}
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['carrier']['mean'] = 0 if np.isnan(stat_i.sd_carrier_m) else stat_i.sd_carrier_m
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['carrier']['std'] = 0 if np.isnan(stat_i.sd_carrier_std) else stat_i.sd_carrier_std
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['carrier']['mad'] = 0 if np.isnan(stat_i.sd_carrier_mad) else stat_i.sd_carrier_mad

                stats.sd[sat_sys_name][band_name][track_name][interval_index]['cno_over_sv'] = {}
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['cno_over_sv']['mean'] = 0 if np.isnan(stat_i.sd_cno_m_by_sv) else stat_i.sd_cno_m_by_sv
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['cno_over_sv']['std'] = 0 if np.isnan(stat_i.sd_cno_std_by_sv) else stat_i.sd_cno_std_by_sv
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['cno_over_sv']['mad'] = 0 if np.isnan(stat_i.sd_cno_mad_by_sv) else stat_i.sd_cno_mad_by_sv

                stats.sd[sat_sys_name][band_name][track_name][interval_index]['code_over_sv'] = {}
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['code_over_sv']['mean'] = 0 if np.isnan(stat_i.sd_code_m_by_sv) else stat_i.sd_code_m_by_sv
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['code_over_sv']['std'] = 0 if np.isnan(stat_i.sd_code_std_by_sv) else stat_i.sd_code_std_by_sv
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['code_over_sv']['mad'] = 0 if np.isnan(stat_i.sd_code_mad_by_sv) else stat_i.sd_code_mad_by_sv
                
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['carrier_over_sv'] = {}
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['carrier_over_sv']['mean'] = 0 if np.isnan(stat_i.sd_carrier_m_by_sv) else stat_i.sd_carrier_m_by_sv
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['carrier_over_sv']['std'] = 0 if np.isnan(stat_i.sd_carrier_std_by_sv) else stat_i.sd_carrier_std_by_sv
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['carrier_over_sv']['mad'] = 0 if np.isnan(stat_i.sd_carrier_mad_by_sv) else stat_i.sd_carrier_mad_by_sv
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['carrier_over_sv']['center_frequency'] = 1 if stat_i.center_frequency is None else stat_i.center_frequency
                                
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['sv_count'] = 0 if stat_i.sv_count is None else stat_i.sv_count

                stats.sd[sat_sys_name][band_name][track_name][interval_index]['max_code'] = 0 if stat_i.max_code is None else stat_i.max_code
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['max_code_sv'] = 0 if stat_i.max_code_sv is None else stat_i.max_code_sv
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['min_code'] = 0 if stat_i.min_code is None else stat_i.min_code
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['min_code_sv'] = 0 if stat_i.min_code_sv is None else stat_i.min_code_sv
                
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['max_carr'] = 0 if stat_i.max_carr is None else stat_i.max_carr
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['max_carr_sv'] = 0 if stat_i.max_carr_sv is None else stat_i.max_carr_sv
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['min_carr'] = 0 if stat_i.min_carr is None else stat_i.min_carr
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['min_carr_sv'] = 0 if stat_i.min_carr_sv is None else stat_i.min_carr_sv

                stats.sd[sat_sys_name][band_name][track_name][interval_index]['ant0_slips'] = ant0_slips
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['ant0_sliprate'] = ant0_sliprate
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['ant1_slips'] = ant1_slips
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['ant1_sliprate'] = ant1_sliprate
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['total_cycle_slips'] = total_slips
                stats.sd[sat_sys_name][band_name][track_name][interval_index]['avg_slip_rate'] = avg_slip_rate

                # Save additional results from ant0, ant1
                if sat_sys_name not in stats.ant0:
                    stats.ant0[sat_sys_name] = {}
                if band_name not in stats.ant0[sat_sys_name]:
                    stats.ant0[sat_sys_name][band_name] = {}
                if track_name not in stats.ant0[sat_sys_name][band_name]:
                    stats.ant0[sat_sys_name][band_name][track_name] = {}
                

                stats.ant0[sat_sys_name][band_name][track_name][interval_index] = {}
                stats.ant0[sat_sys_name][band_name][track_name][interval_index]['interval_freq'] = interval_freq
                stats.ant0[sat_sys_name][band_name][track_name][interval_index]['n'] = 0 if np.isnan(stat_i.ant0_n) else stat_i.ant0_n

                stats.ant0[sat_sys_name][band_name][track_name][interval_index]['cno_over_sv'] = {}
                stats.ant0[sat_sys_name][band_name][track_name][interval_index]['cno_over_sv']['mean'] = 0 if np.isnan(stat_i.ant0_cno_m_by_sv) else stat_i.ant0_cno_m_by_sv
                stats.ant0[sat_sys_name][band_name][track_name][interval_index]['cno_over_sv']['std'] = 0 if np.isnan(stat_i.ant0_cno_std_by_sv) else stat_i.ant0_cno_std_by_sv
                
                if sat_sys_name not in stats.ant1:
                    stats.ant1[sat_sys_name] = {}
                if band_name not in stats.ant1[sat_sys_name]:
                    stats.ant1[sat_sys_name][band_name] = {}
                if track_name not in stats.ant1[sat_sys_name][band_name]:
                    stats.ant1[sat_sys_name][band_name][track_name] = {}
                
                stats.ant1[sat_sys_name][band_name][track_name][interval_index] = {}
                stats.ant1[sat_sys_name][band_name][track_name][interval_index]['interval_freq'] = interval_freq
                stats.ant1[sat_sys_name][band_name][track_name][interval_index]['n'] = 0 if np.isnan(stat_i.ant1_n) else stat_i.ant1_n
                
                stats.ant1[sat_sys_name][band_name][track_name][interval_index]['cno_over_sv'] = {}
                stats.ant1[sat_sys_name][band_name][track_name][interval_index]['cno_over_sv']['mean'] = 0 if np.isnan(stat_i.ant1_cno_m_by_sv) else stat_i.ant1_cno_m_by_sv
                stats.ant1[sat_sys_name][band_name][track_name][interval_index]['cno_over_sv']['std'] = 0 if np.isnan(stat_i.ant1_cno_std_by_sv) else stat_i.ant1_cno_std_by_sv

    except:
        print("Failed to calculate single difference for %s !! " % data_dir)
    
    ##################################################
    # Plot CNO, code, carrier bias vs. time/freqency #
    ##################################################
    try:
        if True:
            print('Plot CNO, code, carrier bias vs. time for L1')
            tow_offset = scn_start_time_tow
            combos_ = combos[:min(3,len(combos))]

            for i, key, sub_key, name, unit in zip(range(3),['cno_over_sv', 'code_over_sv', 'carrier_over_sv'],['mean','mad','mad'],['Mean difference in CN0','Zero-baseline mean absolute code-phase bias','Zero-baseline mean absolute carrier-phase bias'],['dB-Hz','m','mm']):
                name +=  ' vs. time'
                print('Plot ' + name)
                n_rows = len(combos_)+1
                plotted_freq = False
                # fig = plt.figure()
                fig, ax = plt.subplots(n_rows, 1)
                ylim_min,ylim_max = None,None
                total_num_meas = 0
                for j, combo in enumerate(combos_):
                    # print("Input combo:", combo)
                    sat_sys_name = combo.sat_sys_name
                    band, track = combo.band, combo.track
                    band_name = get_band_name(band)
                    track_name = get_track_name(track[0])
                    if (sat_sys_name in stats.sd) and (band_name in stats.sd[sat_sys_name]) and (track_name in stats.sd[sat_sys_name][band_name]):
                        x, y1, y2, y3_pwr, n = [],[],[],[],[]
                        data_dict = stats.sd[sat_sys_name][band_name][track_name]
                        for interval_index in data_dict:
                            total_num_meas += data_dict[interval_index]['n']
                            x.append([int(x)-tow_offset for x in interval_index.split('_')])
                            if key=='carrier_over_sv':
                                wavelength_mm = GnssConst.LIGHT/float(data_dict[interval_index][key]['center_frequency'])*1000.0
                                y1.append( data_dict[interval_index][key][sub_key]*wavelength_mm) # convert cycle into mm 
                            else:
                                y1.append( data_dict[interval_index][key][sub_key] )
                            y2.append( data_dict[interval_index]['interval_freq'])
                            y3_pwr.append( data_dict[interval_index]['interval_power']) 
                            n.append( data_dict[interval_index]['n'] ) # Number of valid meas for ant1-ant0

                        # Plot frequency vs. time
                        if plotted_freq == False:
                            y = y2
                            for xi,yi,yi_pwr in zip(x,y,y3_pwr):
                                if '_' in yi: # two jammer
                                    yi0 = float( yi.split('_')[0] )
                                    yi1 = float( yi.split('_')[1] )
                                    yi0_text = yi_pwr.split('_')[0] if test_name_num == '3b' else yi0
                                    yi1_text = yi_pwr.split('_')[0] if test_name_num == '3b' else yi1
                                    if yi0 > 1000:
                                        ax[n_rows-1].plot(xi, [yi0,yi0], "r-", lw=1)
                                        matplotlib.pyplot.text(xi[0], yi0, str(yi0_text), fontsize = 6)
                                    if yi1 > 1000:
                                        ax[n_rows-1].plot(xi, [yi1,yi1], "g--", lw=1.5)
                                        matplotlib.pyplot.text(xi[0], yi1, str(yi1_text), fontsize = 6)
                                else: # single jammer
                                    yi0 = float(yi)
                                    if yi0 > 1000:
                                        ax[n_rows-1].plot(xi, [yi0,yi0], color="r", lw=1)
                                        matplotlib.pyplot.text(xi[0], yi0, str(yi0), fontsize = 6)

                            ax[n_rows-1].set_xlim( [tow_start-tow_offset, tow_end-tow_offset] )
                            ax[n_rows-1].set_xlabel('Time[sec]')
                            ax[n_rows-1].set_ylabel('Frequency\n[MHz]')
                            plotted_freq = True
                        
                        # Plot sd.CNO,code,carrier vs. time
                        current_title = None
                        if j == 0:
                            if i == 0: # Plot ant0/1 cno vs. freq 
                                current_title = 'S.D. CNo[dB-Hz]'
                            elif i == 1:
                                current_title = 'Zero-baseline mean absolute code-phase bias'
                            else:
                                current_title = 'Zero-baseline mean absolute carrier-phase bias'
                            ax[j].set_title(current_title)
                        # ax = fig.add_subplot(n_rows,1,j+1,title=current_title)
                        # ax = plt.subplot(n_rows,1,j+1,title=current_title)

                        y = y1
                        for xi,yi,ni in zip(x,y,n):
                            if ni == 0: # no valid meas
                                ax[j].plot(xi, [-50,-50], color="b", lw=1, alpha=0.2)
                                ylim_min = -55
                                ylim_max = yi if ylim_max is None else max(ylim_max, yi)
                            else:
                                ax[j].plot(xi, [yi,yi], color="b", lw=1)
                                ylim_min = yi if ylim_min is None else min(ylim_min, yi)
                                ylim_max = yi if ylim_max is None else max(ylim_max, yi)

                        ax[j].set_xlim( [tow_start-tow_offset, tow_end-tow_offset] )
                        ax[j].get_xaxis().set_visible(False)
                        tk_name = track_name
                        if track[0] in [20,21,22,23,24,25]:
                            tk_name = 'E1'
                        ax[j].set_ylabel(sat_sys_name+' '+band_name+tk_name+'\n'+'['+unit+']')
                
                if ylim_min is not None and ylim_max is not None and ylim_min != ylim_max:
                    dy = ylim_max - ylim_min
                    for j, combo in enumerate(combos_):
                        ax[j].set_ylim([ylim_min-0.1*dy, ylim_max+0.1*dy])

                fname = out_plot_path+'/summary_'+name.replace(' ','_')+'.png'
                plt.tight_layout()
                if total_num_meas > 0:
                    save_compressed_png(fname)
                plt.close()
    except Exception as error:
        print("Failed to create summary time plots for %s !! " % data_dir)
        print(error)

    # Plot CNO, code, carrier bias vs. freq for 2b, 5b
    try:
        if test_name_num in ['1', '2b', '5b']:
            print('Plot CNO, code, carrier bias vs. frequency for L1 for scenario',test_name_num)
            tow_offset = scn_start_time_tow
            combos_ = combos[:min(3,len(combos))]

            for i, key, sub_key, name, unit in zip(range(4),['cno_over_sv','cno_over_sv', 'code_over_sv', 'carrier_over_sv'],['mean','mean','mad','mad'],['Mean Ant0 Ant1 CN0','Mean difference in CN0','Zero-baseline mean absolute code-phase bias','Zero-baseline mean absolute carrier-phase bias'],['dB-Hz','dB-Hz','m','mm']):
                name +=  ' vs. frequnecy'
                print('Plot ' + name)
                n_rows = len(combos_)
                fig, ax = plt.subplots(n_rows, 1)
                ylim_min,ylim_max = None,None
                total_num_meas = 0

                for j, combo in enumerate(combos_):
                    
                    sat_sys_name = combo.sat_sys_name
                    band, track = combo.band, combo.track
                    band_name = get_band_name(band)
                    track_name = get_track_name(track[0])
                    if (sat_sys_name in stats.sd) and (band_name in stats.sd[sat_sys_name]) and (track_name in stats.sd[sat_sys_name][band_name]):
                        t, y, y_freq, y_ant0, y_ant1 = [],[],[],[],[]
                        data_dict = stats.sd[sat_sys_name][band_name][track_name]

                        for interval_index in data_dict:
                            total_num_meas += data_dict[interval_index]['n']
                            t.append([int(x)-tow_offset for x in interval_index.split('_')])
                            if key=='carrier_over_sv':
                                wavelength_mm = GnssConst.LIGHT/float(data_dict[interval_index][key]['center_frequency'])*1000.0
                                y.append( data_dict[interval_index][key][sub_key]*wavelength_mm) # convert cycle into mm 
                            else:
                                y.append( data_dict[interval_index][key][sub_key] )
                            y_freq.append( data_dict[interval_index]['interval_freq'])

                            if i==0: # Additional plot for CNo of ant0,ant1
                                y_ant0.append( stats.ant0[sat_sys_name][band_name][track_name][interval_index][key][sub_key] )
                                y_ant1.append( stats.ant1[sat_sys_name][band_name][track_name][interval_index][key][sub_key] )

                        # Plot sd. CNO,code,carrier vs. frequency
                        current_title = None
                        if j == 0:
                            if i == 0: # Plot ant0/1 cno vs. freq 
                                current_title = 'CN0 [dB-Hz] vs. Jamming Frequency'
                            elif i == 1:
                                current_title = 'Single difference CN0 mean ['+unit+']'
                            elif i == 2:
                                current_title = 'Zero-baseline mean absolute code-phase bias ['+unit+']'
                            elif i == 3:
                                current_title = 'Zero-baseline mean absolute carrier-phase bias ['+unit+']'
                            ax[j].set_title(current_title)
                        # ax = fig.add_subplot(n_rows,1,j+1,title=current_title)
                        # Plot cno, S.D.-cno, -code, -carrier for each freq
                        first_time_plot = True
                        freq_going_down = False
                        prev_freq = 0
                        marker_ = '^'
                        lb = 'pos. steps'
                        clr = 'r'
                        for k, ti, yi, fi in zip(range(len(x)), t, y, y_freq):
                            fi = float(fi)
                            if fi <= prev_freq and not freq_going_down:
                                marker_ = 'v'
                                lb = 'neg. steps'
                                clr = 'b'
                                first_time_plot = True
                                freq_going_down = True
                            if i == 0: # Plot ant0/1 cno vs. freq
                                if fi > 1000:
                                    ylim_min = min(y_ant0[k], y_ant1[k]) if ylim_min is None else min(ylim_min, min(y_ant0[k], y_ant1[k]))
                                    ylim_max = max(y_ant0[k], y_ant1[k]) if ylim_max is None else max(ylim_max, max(y_ant0[k], y_ant1[k]))
                                    if first_time_plot == True:
                                        lb_loc = 'ant0' if len(y_freq)==1 else 'ant0:'+lb
                                        ax[j].plot(fi, y_ant0[k], marker_, color="r", label=lb_loc)
                                        lb_loc = 'ant1' if len(y_freq)==1 else 'ant0:'+lb
                                        ax[j].plot(fi, y_ant1[k], marker_, color="g", label=lb_loc)
                                        # plt.legend(loc=1, fancybox=True, framealpha=0.3)
                                        ax[j].legend(bbox_to_anchor=(1.05, 1.0), loc='upper left')
                                        first_time_plot = False
                                    else:
                                        ax[j].plot(fi, y_ant0[k], marker_, color="r")
                                        ax[j].plot(fi, y_ant1[k], marker_, color="g")
                            else: # Plot S.D. cno, code, carrier vs. freq
                                if fi > 1000:
                                    ylim_min = yi if ylim_min is None else min(ylim_min, yi)
                                    ylim_max = yi if ylim_max is None else max(ylim_max, yi)
                                    if first_time_plot == True:
                                        lb_loc = '' if len(y_freq)==1 else lb
                                        ax[j].plot(fi, yi, marker_, color=clr, label=lb_loc)
                                        # plt.legend(loc=1, fancybox=True, framealpha=0.3)
                                        ax[j].legend(bbox_to_anchor=(1.05, 1.0), loc='upper left')
                                        first_time_plot = False
                                    else:
                                        ax[j].plot(fi, yi, marker_, color=clr)
                            prev_freq = fi

                        # show x-axis value only for input frequency
                        xtk = []
                        for k,yi in enumerate(y_freq):
                            yi = float(yi)
                            if k==0 or (k>0 and yi >= xtk[-1]+10):
                                xtk.append(yi)
                        # plt.xticks(xtk, xtk)
                        # ax[j].set_xticks(xtk, xtk)
                        ax[j].set_xticks(xtk)
                        ax[j].set_xticklabels(xtk)
                        
                        if j!= n_rows-1:
                            ax[j].get_xaxis().set_visible(False)
                        tk_name = track_name
                        if track[0] in [20,21,22,23,24,25]:
                            tk_name = 'E1'
                        # plt.ylabel(sat_sys_name+' '+band_name+tk_name+'\n'+'['+unit+']')
                        ax[j].set_ylabel(sat_sys_name+' '+band_name+tk_name+'\n'+'['+unit+']')

                if ylim_min is not None and ylim_max is not None:
                    # print('Rescale ylim', ylim_min, ylim_max)
                    dy = ylim_max - ylim_min
                    for j, combo in enumerate(combos_):
                        ax[j].set_ylim([ylim_min-0.1*dy, ylim_max+0.1*dy])

                fname = out_plot_path+'/summary_'+name.replace(' ','_')+'.png'
                plt.tight_layout()
                if total_num_meas > 0:
                    save_compressed_png(fname)
                plt.close()
    except:
        print("Failed to create summary frequency plots for %s !! " % data_dir)

    # Create Table for S.D. CNO, code, carrier 
    try:
        if test_name_num in ['1','2a','3a','4','5a','6','7','8a','8b','8c','9','10','11']:
            print('Create Table for S.D. CNO, code, carrier for scenario',test_name_num)
            tow_offset = scn_start_time_tow
            combos_ = combos[:len(combos)]

            filename = 'table.csv'
            fname = out_plot_path+'/'+filename
            print("Write",fname)
            f = open(fname, 'w')
            writer = csv.writer(f)

            table_head_list = [
                'Satellite System','Band', 'Track',
                'S.D. CN0 mean [dB-Hz]',
                'S.D. CN0 std [dB-Hz]',
                'Zero-baseline mean absolute code-phase bias [m]',
                'Zero-baseline mean absolute carrier-phase bias [mm]',
                'Number of measurement Ant0 ∩ Ant1'
                ]
            writer.writerow(table_head_list)

            for j, combo in enumerate(combos_):

                sat_sys_name = combo.sat_sys_name
                band, track = combo.band, combo.track
                band_name = get_band_name(band)
                track_name = get_track_name(track[0])
                tk_name = track_name
                if track[0] in [20,21,22,23,24,25]:
                    tk_name = 'E1' #FIXME
                row_list = [sat_sys_name,band_name,tk_name]
                if (sat_sys_name in stats.sd) and (band_name in stats.sd[sat_sys_name]) and (track_name in stats.sd[sat_sys_name][band_name]):
                    data_dict = stats.sd[sat_sys_name][band_name][track_name]
                    for i, key, sub_key, unit in zip(range(5),['cno', 'cno', 'code_over_sv', 'carrier_over_sv','n'],['mean','std','mad','mad',None],['dB-Hz','dB-Hz','m','mm','#']):
                        for interval_index in data_dict:
                            # Currently only support scenario for single time_interval
                            if key=='carrier_over_sv':
                                wavelength_mm = GnssConst.LIGHT/float(data_dict[interval_index][key]['center_frequency'])*1000.0
                                val = data_dict[interval_index][key][sub_key]*wavelength_mm 
                                row_list.append( "{:.3f}".format(float(val)) )
                            elif key=='n':
                                val = data_dict[interval_index][key]
                                row_list.append( str(int(val)) )
                            else:
                                val = data_dict[interval_index][key][sub_key]
                                row_list.append( "{:.3f}".format(float(val)) )
                            break
                writer.writerow(row_list)
            f.close()
            
            fname = out_plot_path+'/'+'table.html'
            print("Write",fname)
            write_table_html(fname)
    except:
        print("Failed to create table for %s !! " % data_dir)

    ###########
    # Do NoPi #
    ###########
    if do_nopi:
        config = default_scenario()
        config.NoPI_ini = './piline.ini' 
        config.NoPI_base = ''
        try:
            for interval in intervals:
                if interval[1] > interval[0]:
                    print("Do NoPI for time range",interval)
                    do_NoPI( config, data_dir, out_nopi_path, stats, interval)
                    # create summary table
                    create_table_from_summary(out_nopi_path, interval)

                    # Move results
                    out_nopi_sub_path = out_nopi_path+'/tow_'+str(int(interval[0]))+'_'+str(int(interval[1]))
                    os.mkdir( out_nopi_sub_path )
                    for f in os.listdir(out_nopi_path):
                        # print(f, os.path.isfile( os.path.join(out_nopi_path,f)) )
                        if (os.path.isfile( os.path.join(out_nopi_path,f) ) and f != 'CNo_tow.png') or f=='plots' or f=='web':
                            shutil.move( os.path.join(out_nopi_path,f), out_nopi_sub_path)
        except:
            print("NoPi error error for %s !! " % data_dir)

    ###########################
    # Save xml lookup to file #
    ###########################
    rx_run = out_plot_path.split('/')[-1]
    with open(f"DataDir/{rx_run}/{rx_run}.xml", mode="wb") as xml_file:
        response = requests.get(f"http://10.1.148.239:10004/server_main/{RESULTS_DIR}/{rx_run}.xml")
        xml_file.write(response.content)


    #########################################
    # Save notes and detailed notes to file #
    #########################################
    with open(f"DataDir/{rx_run}/notes.txt", mode="w") as notes_file:
        notes_file.write("Notes:\n")
        notes_file.write(f"{notes}\n\n")
        notes_file.write("Detailed Notes:\n")
        notes_file.write(f"{detailed_notes}")
        

    ##################################################
    # Generate summary text file based on sd results #
    ##################################################
    generate_results_summary(stats.sd, rx_run, test_name_num, notes)

    ########################################################
    # Save and return dictionary output for database usage #
    ########################################################
    output = {}
    if os.path.isfile(data_dir + "/custom_fw.txt"):
        output["custom_fw"] = 1
    else:
        output["custom_fw"] = 0
    
    if not isinstance(fixtype_percentiles, np.ndarray):
        fixtype_percentiles = np.array([fixtype_percentiles] * 3)
    elif len(fixtype_percentiles) == 2:
        fixtype_percentiles = np.append(fixtype_percentiles, fixtype_percentiles[-1])
    output["fixtype_percentiles"] = fixtype_percentiles.tolist()
    
    if not isinstance(err_3d_percentiles, np.ndarray):
        err_3d_percentiles = np.array([err_3d_percentiles] * 3)
    elif len(err_3d_percentiles) == 2:
        err_3d_percentiles = np.append(err_3d_percentiles, err_3d_percentiles[-1])
    output["err_3d_percentiles"] = err_3d_percentiles.tolist()

    if not isinstance(sigma_3d_percentiles, np.ndarray):
        sigma_3d_percentiles = np.array([sigma_3d_percentiles] * 3)
    elif len(sigma_3d_percentiles) == 2:
        sigma_3d_percentiles = np.append(sigma_3d_percentiles, sigma_3d_percentiles[-1])
    output["sigma_3d_percentiles"] = sigma_3d_percentiles.tolist()

    output["date"] = start_date
    output["file_base"] = os.path.basename(data_dir)
    # output["config"] = config
    output["unit_desc"] = tree.find('desc').text
    if tree.find('septentrio') is not None:
        output["unit_type"] = 'septentrio'
        output["filter_type"] = 'septentrio.'+filter_type
    elif tree.find('novatel') is not None:
        output["unit_type"] = 'novatel'
        output["filter_type"] = 'novatel.'+filter_type
    else:
        output["unit_type"] = 'auger'
        output["filter_type"] = filter_type
    output["test_num"] = test_name_num
    output["test_para_default"] = test_para_default
    output["freq_type"] = freq_type

    output["stats"] = stats

    output["notes"] = notes

    # output["NoPI"] = out_nopi_path + '/NoPiDI_Top.html'

    if tree.find('septentrio') == None and tree.find('novatel') == None:
        os.remove( tmp_all_T04 )

    return output


############################
# analyze_all_rx() helpers #
############################

def get_unprocessed_runs(results_dir=None, output_dir=None):
    """
    Gets a list of all run files in ResultsQueue without an accompanying
    OutputResults run file.

    TODO may be better to store list of unprocessed runs in the database

    :param results_dir: directory containing RX*.xml run files
                        (defaults to the module-level RESULTS_DIR)
    :param output_dir:  directory containing RX*.json processed outputs
                        (defaults to the module-level OUTPUT_DIR)
    :returns: The list of unprocessed runs sorted by order of lowest run num, 
              then lowest rx, as strings in the format RX#-####
    """
    if results_dir is None:
        results_dir = RESULTS_DIR
    if output_dir is None:
        output_dir = OUTPUT_DIR

    # basename()/splitext() are robust to the platform path separator and
    # extension length, unlike splitting on '/' and slicing fixed offsets.
    all_run_list = [os.path.splitext(os.path.basename(x))[0]
                    for x in glob.glob(results_dir + '/RX*.xml')]
    processed_runs = [os.path.splitext(os.path.basename(x))[0]
                      for x in glob.glob(output_dir + '/RX*.json')]

    unprocessed_runs = set(all_run_list).difference(processed_runs)

    # Sort by run num then RX num
    def f_sort(x):
        rx, run = x.split('-')
        rx = int(rx[2:]) # Strip RX prefix
        run = int(run)
        return (run, rx)
    return sorted(unprocessed_runs, key = f_sort)


def analyze_all_rx():
    """
    Process all ResultsQueue/*.xml files with missing outputs.

    Each unprocessed run is analyzed, stored in the database, and dumped
    to OutputResults/<run>.json.  Runs that raise are quarantined into
    ResultsQueueDisabled/ and reported together at the end.

    :raises Exception: after all runs are attempted, if any of them failed.
    """
    print("Reading ResultsQueue/ RX data..")
    all_rx_plus_run = get_unprocessed_runs()
    print(f"Processing {len(all_rx_plus_run)} new runs: {all_rx_plus_run}")
    runs_failed = []

    for rx_plus_run in all_rx_plus_run:
        # Run information
        in_file_name  = f'{RESULTS_DIR}/{rx_plus_run}.xml'
        out_file_name = f'{OUTPUT_DIR}/{rx_plus_run}.json'

        tree = ET.parse(in_file_name)
        try:
            output = analyze_single_rx(tree, rx_plus_run)
        except Exception as e:
            # Quarantine the failing run so it is not retried every pass.
            os.rename(in_file_name, f'{RESULTS_DIR}Disabled/{rx_plus_run}.xml')
            runs_failed.append(rx_plus_run + " " + str(e))
            # BUGFIX: without this `continue`, `output` is unbound on the
            # first failure (NameError) or stale from the previous run,
            # which would save the previous run's data under this run's name.
            continue
        print("Saving results..")
        if output is None:
            print("output == None. Skip.")
            continue

        print([output])
        add_data_to_db([output])
        with open(out_file_name, 'w') as f:
            # Do we want to get rid of this eventually to save space?
            try:
                json.dump( output, f, allow_nan=False )
            except ValueError as e:
                r = 'Error in converting dict into json. '+str(e) + '. Skip.'
                print(r)
                os.remove(out_file_name)
    if runs_failed:
        exception_message = 'Runs ' + '\n'.join(runs_failed) + ' failed!'
        raise Exception(exception_message)
        

def any_changes():
    """Return True if ResultsQueue/ is newer than OutputResults/ """
    # A directory's ctime updates when an entry is added or removed, so
    # comparing the two directories tells us whether new results arrived.
    return os.path.getctime(RESULTS_DIR) >= os.path.getctime(OUTPUT_DIR)

def get_lock(wait_for_lock):
    """
    Prevent more than 1 person from running this script at a time.

    :param wait_for_lock: if True, wait forever until we have access.
    :returns: True if we got a lock.
              False if failed.  Should stop processing..
    """
    # NOTE: the original placed this import *before* the triple-quoted
    # string, which made that string a no-op expression rather than the
    # function's docstring; the docstring now comes first.
    import zc.lockfile
    have_lock = False
    while True:
        # Without holding a reference to our lock somewhere it gets garbage
        # collected when the function exits.  Put a reference in _lock_ref so
        # we don't drop the lock until this script exits.
        try:
            get_lock._lock_ref = zc.lockfile.LockFile('.LockProcessResultsPython')
            have_lock = True
        except zc.lockfile.LockError:
            # Narrowed from a bare `except:` so that KeyboardInterrupt and
            # genuine errors (e.g. permission problems) are not swallowed.
            print('Someone is already running this script')
        if have_lock or not wait_for_lock:
            break
        else:
            time.sleep(60)
    return have_lock

def cycle_slip_analysis(d, k, rec, sy, freq, track, out_plot_path, intervals=[[]], plot_time_offset=0):
    """
    Get cycle slip yields for GPS L1-C/A and GLO G1C over the jamming periods
    eg. ant_cs0,ant_cs1=cycle_slip_analysis(d, k, '-d35:19', 0, [0], [0], intervals=[[100,200]], plot_time_offset = scn_start_time_tow)

    Input: 
        d = 2D array loaded from .T04 files
        k = key dictionary from .T04 files  
        rec = string '-d35:19'
        sy = satellite type
        freq = block type (frequency), eg. 0
        track = list of track/signal type, eg. [0] 
        out_plot_path = directory the output .png plots are written to
        intervals = eg. [[]] or [[t0, t1],[t2,t3], ...]
                    ([[]] means "use the whole data span".  The mutable
                    default is kept for interface compatibility; it is never
                    mutated in this function.)
        plot_time_offset = offset time in plot [sec]
    Returns:
        ant_cslip0, ant_cslip1 
        [start_tow, end_tow, sys, sv, cslip_epoch_cnt, cslip_yield_rate]
        (ant_cslip1 rows carry an extra trailing loss-of-lock count)

    Notes:
        - length is the same for all returns 
        - also writes up to three .png plots into out_plot_path as a side
          effect (counts vs time, counts per interval, total yield rates)
    """
    import matplotlib.ticker as ticker
    
    # Column views into the raw T04 measurement matrix.
    ticol = d[:, k['TIME']]
    svcol = d[:, k['SV']]
    sycol = d[:, k['SAT_TYPE']]
    rtcol = d[:, k['FREQ']]
    tkcol = d[:, k['TRACK']]
    ancol = d[:, k['ANTENNA']] # antenna id
    mscol = d[:, k['MEAS']] # meas flag bitfield
    sccol = d[:, k['SLIP_CNT']]

    # For 35:19 only currently
    if rec != '-d35:19':
        raise RuntimeError('Not support or unknown record', rec)

    output_ant0_cslip_per_sv = []
    output_ant1_cslip_per_sv = []
    output_tt_cslip_per_sv = []
    sv_list = list()
    interval_list = list()
    tt_sv_list = list()
    t0_slips = []       # slip epoch times, antenna 0
    t1_slips = []       # slip epoch times, antenna 1
    t1_losslock = []    # loss-of-lock epoch times, antenna 1
    cslip_intv_0 = np.zeros(len(intervals), dtype=int)
    cslip_intv_1 = np.zeros(len(intervals), dtype=int)
    loss_lock_intv_1 = np.zeros(len(intervals), dtype=int)
    ant0_cslip_per_sv = np.array([])
    ant1_cslip_per_sv = np.array([])
    
    # make sure the data contains 2 antennas (antenna 0 and antenna 1)
    ant0_data = np.where(ancol==0)
    ant1_data = np.where(ancol==1)
    if len(ant0_data[0]) == 0 or len(ant1_data[0]) == 0:
        print('One of the antenna data are unavailable')
        return ant0_cslip_per_sv, ant1_cslip_per_sv

    systr, sv_range = get_sys_name_and_sv_range(sy)

    for sv in sv_range:
        header = '\t# sy='+str(sy)+' sv='+str(sv)+': '

        # Per-SV totals accumulated across all intervals ("tt" = total).
        cslip_tt_cnt0 = 0
        cslip_tt_rate0 = 0
        cslip_tt_cnt1 = 0
        cslip_tt_rate1 = 0
        cslip_tt_len0 = 0
        cslip_tt_len1 = 0
        loselock_tt_cnt1 = 0
        for ctr,interval in enumerate( intervals ):
            print(fn()+'  Processing time interval:', interval)
            if interval == []:
                # Empty interval means "cover the whole data span".
                interval = [min(ticol), max(ticol)]

            time_interval_str = '_'+str(int(interval[0]))+'_'+str(int(interval[1]))
            
            # Calculate unioned track mask
            if track == []:
                track_included = np.ones((ticol.size,), dtype=bool)
            else:
                track_included = np.zeros((ticol.size,), dtype=bool)
                # FIX: iterate the values directly; the old loop enumerated
                # track_j but then re-indexed with track[j] (same result).
                for track_j in track:
                    track_included |= (tkcol==track_j)
            
            # Calculate intervals mask
            interval_included = np.zeros((ticol.size,), dtype=bool)
            interval_included |= (ticol>=interval[0])&(ticol<=interval[1])

            # Row indices for this SV on each antenna within the interval.
            ant0 = np.where( (svcol==sv)&(sycol==sy)&(ancol==0)&(rtcol==freq)&track_included&interval_included )
            ant1 = np.where( (svcol==sv)&(sycol==sy)&(ancol==1)&(rtcol==freq)&track_included&interval_included )

            # Count epochs where antenna 0 has a measurement but antenna 1
            # has none within 1 ms -> antenna 1 lost lock at that epoch.
            # indm lets the inner scan resume where it stopped, since both
            # index lists are time-ordered.
            indm = 0
            loss_lock_cnt = 0
            for ind0 in range(len(ant0[0])):
                t1 = ticol[ant0[0][ind0]]
                loss_lock = 1
                for ind1 in range(indm,len(ant1[0])):
                    t2 = ticol[ant1[0][ind1]]

                    if (t2 - t1) < 0:
                        continue
                    if (t2 - t1) >= 0.001:
                        indm = ind1
                        break
                    if abs(t2 - t1) < 0.001:
                        loss_lock = 0

                if loss_lock == 1:
                    t1_losslock.append(t1)
                    loss_lock_cnt += 1

            if len(ant0[0])>0 and len(ant1[0])>0:
                # calculate the number of cycle slips over total measurements
                meas0 = mscol[ant0]
                meas1 = mscol[ant1]
                sccol0 = sccol[ant0]
                sccol1 = sccol[ant1]
                 
                # A slip is flagged when the phase is valid and locked AND
                # the slip bit is set in the measurement flag bitfield.
                flag1 = mask1 = k['dMEAS_SLIP']
                flag2 = mask2 = k['dMEAS_PHASE_VALID']|k['dMEAS_LOCK_POINT']
                
                cslip_ep_cnt0 = 0
                cslip_ep_rate0 = 0
                cslip_ep_cnt1 = 0
                cslip_ep_rate1 = 0
                # antenna0
                slip_cnt = -1   # -1 = receiver slip counter not seen yet
                
                for ind, meas in enumerate(meas0.astype('int')):
                    has_slip = False
                    if (meas&mask2) == flag2 and (meas&mask1) == flag1:
                        has_slip = True
                    
                    # Also count a slip whenever the receiver's slip counter
                    # changes between consecutive epochs.
                    if slip_cnt == -1:
                        slip_cnt = sccol0[ind]
                    if slip_cnt != sccol0[ind]:
                        slip_cnt = sccol0[ind]
                        if has_slip == False:
                            has_slip = True
                            
                    if has_slip == True:
                        cslip_ep_cnt0 += 1
                        t0_slips.append(ticol[ant0[0][ind]])
                        cslip_intv_0[ctr] += 1

                cslip_tt_cnt0 += cslip_ep_cnt0
                cslip_tt_len0 += len(meas0)
                if len(meas0) == 0:
                    cslip_ep_cnt0 = 0
                    cslip_ep_rate0 = 0
                else:
                    cslip_ep_rate0 = cslip_ep_cnt0/len(meas0)
                output_ant0_cslip_per_sv.append(np.asarray([interval[0],interval[1],sy,sv,cslip_ep_cnt0,cslip_ep_rate0]))
                
                # antenna1
                slip_cnt = -1
                
                for ind, meas in enumerate(meas1.astype('int')):
                    # FIX: was 'has_slip = 0'; use a bool for consistency
                    # with the antenna-0 loop (behavior unchanged).
                    has_slip = False
                    if (meas&mask2) == flag2 and (meas&mask1) == flag1:
                        has_slip = True
                            
                    if slip_cnt == -1:
                        slip_cnt = sccol1[ind]
                    if slip_cnt != sccol1[ind]:
                        slip_cnt = sccol1[ind]
                        if has_slip == False:
                            has_slip = True
                            
                    if has_slip == True:
                        cslip_ep_cnt1 += 1
                        t1_slips.append(ticol[ant1[0][ind]])
                        cslip_intv_1[ctr] += 1

                loss_lock_intv_1[ctr] += loss_lock_cnt
                loselock_tt_cnt1 += loss_lock_cnt
                cslip_tt_cnt1 += cslip_ep_cnt1
                # Antenna 1 is normalized by antenna 0's epoch count so
                # loss-of-lock epochs (absent from ant1) are accounted for.
                cslip_tt_len1 += len(ant0[0])#len(meas1)
                if len(ant0[0]) == 0:
                    # BUGFIX: this branch zeroed the antenna-0 counters by
                    # copy/paste; zero the antenna-1 ones instead.  (The
                    # branch is unreachable in practice because the enclosing
                    # 'if' guarantees len(ant0[0]) > 0.)
                    cslip_ep_cnt1 = 0
                    cslip_ep_rate1 = 0
                else:
                    cslip_ep_rate1 = (cslip_ep_cnt1 + loss_lock_cnt) /len(ant0[0])
                output_ant1_cslip_per_sv.append(np.asarray([interval[0],interval[1],sy,sv,cslip_ep_cnt1,cslip_ep_rate1, loss_lock_cnt]))
                sv_list.append(str(sv))
                interval_list.append(time_interval_str)

        # Per-SV yield rates over all intervals combined.
        tt_ant0_rate = 0
        tt_ant1_rate = 0
        tt_ant1_ll_rate = 0
        if cslip_tt_len0 > 0:
            tt_ant0_rate = cslip_tt_cnt0/cslip_tt_len0
        if cslip_tt_len1 > 0:
            tt_ant1_rate = cslip_tt_cnt1/cslip_tt_len1
            tt_ant1_ll_rate = loselock_tt_cnt1/cslip_tt_len1
        if cslip_tt_len0 > 0 and cslip_tt_len1 > 0:
            output_tt_cslip_per_sv.append(np.asarray([sy,sv,cslip_tt_cnt0,tt_ant0_rate,cslip_tt_cnt1,tt_ant1_rate,tt_ant1_ll_rate]))
            tt_sv_list.append(str(sv))

    # Collapse the raw slip-time lists into parallel (unique time, count)
    # arrays for the counts-vs-time plot.
    sort_t0_slips = np.sort(t0_slips)
    sort_t1_slips = np.sort(t1_slips)
    sort_t1_losslock = np.sort(t1_losslock)
    slip_t0 = np.asarray([])
    slip_t1 = np.asarray([])
    losslock_t1 = np.asarray([])
    slip_cnt0 = np.asarray([])
    slip_cnt1 = np.asarray([])
    losslock_cnt1 = np.asarray([])
    # get cycle slips counts verse time
    for t0 in sort_t0_slips:
        if t0 not in slip_t0:
            slip_t0 = np.append(slip_t0, t0)
            slip_cnt0 = np.append(slip_cnt0, 1)
        else:
            indx = np.where(slip_t0 == t0)
            slip_cnt0[indx[0]] += 1

    for t1 in sort_t1_slips:
        if t1 not in slip_t1:
            slip_t1 = np.append(slip_t1, t1)
            slip_cnt1 = np.append(slip_cnt1, 1)
        else:
            indx = np.where(slip_t1 == t1)
            slip_cnt1[indx[0]] += 1

    for t1 in sort_t1_losslock:
        if t1 not in losslock_t1:
            losslock_t1 = np.append(losslock_t1, t1)
            losslock_cnt1 = np.append(losslock_cnt1, 1)
        else:
            indx = np.where(losslock_t1 == t1)
            losslock_cnt1[indx[0]] += 1
            
    tt_cslip_per_sv = np.asarray(output_tt_cslip_per_sv)
    ant0_cslip_per_sv = np.asarray(output_ant0_cslip_per_sv)
    ant1_cslip_per_sv = np.asarray(output_ant1_cslip_per_sv)
    width = 0.25    # bar width for the grouped bar charts below
    if len(slip_cnt0)>0 or len(slip_cnt1)>0:
        # plot cycle slips vs time
        tt = str(systr[:-1])+' '+get_band_name(freq)+'_'+get_track_name(track)+' Cycle Slips Counts vs Time'
        plt.figure()
        plt.plot(slip_t0-plot_time_offset, slip_cnt0, '*')
        plt.plot(slip_t1-plot_time_offset, slip_cnt1, 'x')
        plt.plot(losslock_t1-plot_time_offset, losslock_cnt1, 'o')
        plt.title(tt)
        plt.legend(['Ant0','Ant1','Ant1 Loss of Lock'])
        plt.xlabel('Time [sec]')
        plt.ylabel('counts')
        fname = out_plot_path + '/'+str(systr[:-1])+'_'+get_band_name(freq)+'_'+get_track_name(track)+'_cycle_slip_vs_t.png'
        print(fname)
        save_compressed_png(fname)
        plt.close()
        
        # plot total cycle slip counts for each interval
        fig, ax = plt.subplots(1,1)
        tt = str(systr[:-1])+' '+get_band_name(freq)+'_'+get_track_name(track)+' Cycle Slips Counts per Interval'
        xaxis = np.arange(1, len(intervals)+1)
        ax.bar(xaxis-width/2, cslip_intv_0, width)
        ax.bar(xaxis+width/2, cslip_intv_1, width)
        # Loss-of-lock counts are stacked on top of the ant1 slip bars.
        ax.bar(xaxis+width/2, loss_lock_intv_1, width, bottom=cslip_intv_1)
        ax.set_xlim(0, len(intervals)+1)
        ax.legend(['Ant0','Ant1','Ant1 Loss of Lock'])
        ax.set_xlabel('Interval')
        ax.set_ylabel('counts')
        ax.xaxis.set_major_locator(ticker.MultipleLocator(1))
        ax.set_title(tt)
        fname = out_plot_path + '/'+str(systr[:-1])+'_'+get_band_name(freq)+'_'+get_track_name(track)+'_cycle_slip_per_interval.png'
        print(fname)
        save_compressed_png(fname)
        plt.close(fig)

    # Only plot yield rates when at least one rate column is non-zero.
    if len(tt_cslip_per_sv) > 0 and (np.any(tt_cslip_per_sv[:,3]) or np.any(tt_cslip_per_sv[:,5])) :
        fig = plt.figure()
        plt.yscale('log')
        tt = str(systr[:-1])+' '+get_band_name(freq)+'_'+get_track_name(track)+' Cycle Slip and Loss of Lock Yield Rates'
        sv_xaxis = np.arange(len(tt_cslip_per_sv))
        # plot ant0
        if np.any(tt_cslip_per_sv[:,3]):
            plt.bar(sv_xaxis-width/2, tt_cslip_per_sv[:,3]*100, width, label='ant0')
        # plot ant1
        if np.any(tt_cslip_per_sv[:,5]):
            plt.bar(sv_xaxis+width/2, tt_cslip_per_sv[:,5]*100, width, label='ant1')
        if np.any(tt_cslip_per_sv[:,6]):
            plt.bar(sv_xaxis+width/2, tt_cslip_per_sv[:,6]*100, width, label='ant1 loss of lock', bottom=tt_cslip_per_sv[:,5]*100)
        plt.title(tt)
        plt.xlabel('SV')
        plt.ylabel('cycle slip rate %')
        plt.legend()
        plt.xticks(sv_xaxis, tt_sv_list)
        plt.ylim(0,100)
        fname = out_plot_path + '/'+str(systr[:-1])+'_'+get_band_name(freq)+'_'+get_track_name(track)+'_total_cycle_slip_yield_rate.png'
        print(fname)
        save_compressed_png(fname)
        plt.close()

    return ant0_cslip_per_sv, ant1_cslip_per_sv

    
if __name__ == "__main__":
    parser = argparse.ArgumentParser(
        formatter_class = argparse.RawTextHelpFormatter,
        description = USAGE_TEXT)
        
    parser.add_argument(
        '-f', '--force',
        help = 'If a run is deleted, force update of web page',
        action = 'store_true')
    parser.add_argument(
        '-r', '--reprocess',
        help = 'Force reprocessing on a single dir (e.g., RX17-1234)')
    parser.add_argument(
        '-t', '--test_dir',
        help = "Don't update global storage.  Only output results to given directory.  Must be used with --reprocess option.")
    parser.add_argument(
        '-w', '--wait',
        help = 'If ProcessResults is already running, wait for lock. (Normally we exit immediately)',
        action = 'store_true')
    parser.add_argument('-d','--debug', help = 'Force to reprocess')

    args = parser.parse_args()

    # Only one instance of this script may run at a time.
    if not get_lock(args.wait):
        sys.exit()

    if args.debug:
        print('Input:', args.debug)
        
        # Parse debug run list input, e.g. 'RX3-100' or 'RX3-100_110'.
        # BUGFIX: this code referenced an undefined name 's' (NameError
        # whenever --debug was used); bind it to the actual argument.
        s = args.debug
        rx = int(s.split('-')[0].replace('RX', ''))
        from_to = s.split('-')[1].split('_')
        if len(from_to) == 1:
            rnum_start = int(from_to[0])
            rnum_end = int(from_to[0]) + 1
        elif len(from_to) == 2:
            # NOTE(review): unlike the single-run case, the end run here is
            # excluded by range() below -- confirm whether that is intended.
            rnum_start = int(from_to[0])
            rnum_end = int(from_to[1])
        else:
            raise RuntimeError('Invalid debug argument')
        
        target_list = []
        for i in range(rnum_start, rnum_end):
            target_list.append(f'RX{rx}-{i}')
        print('target_list:', target_list)
        time.sleep(2)

        output_dir_local = OUTPUT_DIR
        for rx_run in target_list:
            target_file = '%s/%s.json'%(output_dir_local,rx_run)
            print('Do postprocessing on target_file', target_file)
            if os.path.isfile(target_file):
                # Remove the stale json, then rebuild it from the queued XML.
                os.remove(target_file)
                out_nopi_path = output_dir_local + '/' + rx_run
                out_plot_path = output_dir_local + '/plots/' + rx_run
                in_file_name = f'{RESULTS_DIR}/{rx_run}.xml'
                # BUGFIX: ET.parse() takes the source positionally; it has no
                # 'file' keyword (the old call raised TypeError).
                tree = ET.parse(in_file_name)
                output = analyze_single_rx( tree, rx_run )
        
                # Save to db
                print("Saving results to db.")
                init_table()
                add_data_to_db([output])

                # Save to json
                outFileName = out_nopi_path + ".json"        
                print("Saving results to json.", outFileName)
                with open(outFileName, 'w') as f:
                    try:
                        json.dump( output, f, allow_nan=False )
                    except ValueError as e:
                        # NaN/Inf are not valid JSON; drop the partial file.
                        r = 'Error in converting dict into json. '+str(e) + '. Skip.'
                        print(r)
                        os.remove(outFileName)
            else:
                print("Warning: Can't find "+target_file)

    # eg. rm -rf  OutputResults/RX3-41 && python3a ProcessResults.py -t OutputResults -r RX3-109
    if args.test_dir:
        if not args.reprocess:
            print("--test_dir must be used with --reprocess")
            sys.exit()
        out_nopi_path = args.test_dir + '/' + args.reprocess
        out_plot_path = args.test_dir + '/plots/' + args.reprocess
        in_file_name = f'{RESULTS_DIR}/{args.reprocess}.xml'
        tree = ET.ElementTree(file=in_file_name)
        output = analyze_single_rx( tree, args.reprocess )
        # Save to db
        print("Saving results to db.")
        add_data_to_db([output])

        # Save to json
        outFileName = out_nopi_path + ".json"        
        print("Saving results to json.", outFileName)
        
        with open(outFileName, 'w') as f:
            try:
                json.dump( output, f, allow_nan=False )
            except ValueError as e:
                # NaN/Inf are not valid JSON; drop the partial file.
                r = 'Error in converting dict into json. '+str(e) + '. Skip.'
                print(r)
                os.remove(outFileName)
        sys.exit()

    if args.reprocess:
        # Deleting the cached json forces analyze_all_rx() to redo this run.
        target_file = f'{OUTPUT_DIR}/{args.reprocess}.json'
        if os.path.isfile(target_file):
            os.remove(target_file)
        else:
            print("Warning: Can't find", target_file)

    if args.force or args.reprocess or any_changes():
        analyze_all_rx()
        # BUGFIX: previously printed the misleading "Do nothing" right after
        # actually doing the work.
        print("Done")
    else:
        print("No changes")
