# Copyright Trimble Inc. 2015-2025
# $Id: mutils.py,v 1.66 2024/09/17 23:43:39 wlentz Exp $

from __future__ import absolute_import

from matplotlib import pylab
from pylab import *
from scipy.signal import butter, filtfilt
from six import print_
from six.moves import range
import mutils.GnssConst as GnssConst
import mutils.RcvrConst as RcvrConst
import mutils.RTConst as RTConst
import mutils.PosTypeConst as PosTypeConst
import mutils.PosPacConst as PosPacConst
import struct
import io
import os
from PIL import Image
import numpy as np
import importlib
from glob import glob
from collections import namedtuple
import re

# Override pylab find() to avoid deprecation warning
# pylint: disable=function-redefined
def find(x):
    """Indices of the nonzero/True elements of x (replaces deprecated pylab find)."""
    return nonzero(x)[0]



# Human-readable names for position-solution types, indexed by the numeric
# position-type code.  NOTE(review): assumed to line up with the codes in
# mutils.PosTypeConst -- confirm before inserting/reordering entries.
PosTypeStrConst = [
  "Auto LSQ",
  "GNSS LSQ",
  "SBAS LSQ",
  "RTK Float",
  "RTK Fixed",
  "WA RTK Float",
  "WA RTK Fixed",
  "OmniSTAR",
  "CDGPS - Obs",
  "Auto KF",
  "DGNSS KF",
  "SBAS KF",
  "CDGPS KF - Obs",
  "SBAS+ KF",
  "SBAS/CDPGS+ KF - Obs",
  "RTX",
  "XPS Coarse",
  "XPS Fine",
  "INS Auto",
  "INS DGNSS",
  "INS RTK",
  "INS SBAS",
  "INS RTX",
  "INS GVBS",
  "INS Omni",
  "INS DR",
  "GVBS",
  "RTX Fast",
  "xFillX",
  "RTX Lite",
  "RTX Lite L1",
  "RTX Field Point",
  "RTX Fast RAM -??",
  "Auto TN",
  "GNSS TN",
  "SBAS TN",
  "QZSS SLAS LSQ",
  "QZSS SLAS KF",
  "INS xFillX",
  "CLAS",
  "INS CLAS",
  "HAS",
  "INS HAS",
  "SouthPAN",
  "INS SouthPAN",
  "BEIDOU PPP",
  "INS BEIDOU PPP",
  "INS SLAS",
  "BVRS",
  "INS BVRS"]


def doload(filename, verbose=True, error_bad_lines=True):
    """
    Load a numeric text file, or a 'viewdat --mat' MATLAB-format file.

    filename=text file or viewdat --mat output.
    [verbose=True - print status info when loading file?]
    [error_bad_lines=False - pass error_bad_lines to pandas read_csv?  Skips lines with errors.]
    Accepts gzip'd files ending in '.gz' and bzip'd files ending in '.bz2'
    Returns numpy array with data.
    """
    filename = os.path.expanduser(filename)
    if verbose:
        print("Loading %s..." % filename)

    def myopen(mode):
        # Pick the opener from the file extension.
        if filename.endswith('.gz'):
            import gzip
            return gzip.open(filename, mode)
        elif filename.endswith('.bz2'):
            import bz2
            return bz2.BZ2File(filename, mode)
        return open(filename, mode)

    # Test for a MATLAB file (binary data contains NUL bytes) & then use loadmat()
    with myopen('rb') as f:
        header = f.read(512)
    if b'\0' in header:
        from scipy.io import loadmat
        with myopen('rb') as f:
            raw_d = loadmat(f)
        # raw_d may have several data arrays if the size is > 2GB:
        # first raw_d['data'], then raw_d['data1'], raw_d['data2'], etc.
        all_d = []
        curr_i = 0
        while True:
            var_name = 'data' if curr_i == 0 else 'data%d' % curr_i
            if var_name not in raw_d:
                break
            if b'C library' in header:
                # Files written by the C library store the data transposed.
                all_d.append(raw_d[var_name].T)
            else:
                all_d.append(raw_d[var_name])
            curr_i += 1
        return concatenate(all_d)

    # Count header lines to strip: stop at the first line whose first
    # whitespace-separated token parses as a float.
    def is_string_a_float(s):
        try:
            float(s)
        except ValueError:
            return False
        return True

    n = 0
    with myopen('r') as f:
        for l in f:
            w = l.strip().split()
            if len(w) > 0 and is_string_a_float(w[0]):
                break
            n += 1

    # pandas reads text data really quickly
    import pandas as pd
    from packaging.version import parse as parse_version
    pd_version = parse_version(pd.__version__)
    csv_args = {'skiprows': n, 'header': None}
    if pd_version >= parse_version("2.2.0"):
        # delim_whitespace is deprecated in pandas 2.2 (removed in 3.0).
        csv_args['sep'] = r'\s+'
    else:
        csv_args['delim_whitespace'] = True
    if not error_bad_lines:
        # error_bad_lines was replaced by on_bad_lines in pandas 1.3.
        if pd_version >= parse_version("1.3.0"):
            csv_args['on_bad_lines'] = 'skip'
        else:
            csv_args['error_bad_lines'] = False
    with myopen('rb') as f:
        try:
            return pd.read_csv(f, **csv_args)\
                     .apply(pd.to_numeric, errors='coerce')\
                     .values
        except pd.errors.EmptyDataError:
            return array([])

def _substitute_lines(input_stream, pattern1, pattern2):
    for line in input_stream:
        if pattern1 in line:  # Check if the pattern exists
            new_line = line.replace(pattern1, pattern2)  # Replace and yield the result
            yield new_line.split()

def _doload_sed_help(info,sep):
    """Worker for doload_sed(): stream one file, substitute, parse to array.

    info = (filename, pat1, pat2, cmd) -- see doload_sed for meanings.
    sep is accepted for interface compatibility but is not used here (matched
    lines are whitespace-split before building the DataFrame).
    Returns a numpy array; empty array on parse failure (e.g. no matches).
    """
    import shlex
    import subprocess as sp
    import pandas as pd
    import bz2, gzip

    filename,pat1,pat2,cmd = info
    print("doload_sed loading "+filename)
    # Choose a text-line source based on the requested command.
    if cmd.startswith('viewdat'):
        cat = sp.Popen(shlex.split(cmd+' '+filename), stdout=sp.PIPE, text=True)
        cat = cat.stdout
    elif cmd.startswith('cat'):
        cat = open(filename,'rt')
    elif cmd.startswith('zcat'):
        cat = gzip.open(filename,'rt')
    elif cmd.startswith('bzcat'):
        cat = bz2.open(filename,'rt')
    # only keep lines where the substitution pattern matched
    substituted_lines = _substitute_lines(cat, pat1, pat2)
    try:
        return pd.DataFrame(substituted_lines) \
                 .apply(pd.to_numeric, errors='coerce') \
                 .values
    except Exception:
        # Narrowed from a bare except: an empty/unparseable result yields
        # an empty array instead of crashing the caller.
        return zeros(0)

def doload_sed(filename,pat1,pat2='',cmd=None, parallel=True, sep='\s+'):
    """
    Used to run "sed" on a file before loading it.  This is useful for diagnostic data.
    filename=text file.  Accepts gzip'd files ending in '.gz' and bzip'd files ending in '.bz2'
    pat1 = String(sed pattern) to search for.  Only match lines with this pattern.
    pat2 = default "".  Replace 'pat1' with this string before importing data.
    cmd = default None.  Run "cmd filename" to get data from filename.  (e.g., "viewdat -d16 -mb")
    parallel = default True.  Load multiple files with a thread pool.
    Returns numpy array with data.
    """
    import multiprocessing.pool

    # Expand a glob pattern, or normalize an explicit list of filenames.
    if type(filename) is str:
        files = sorted(glob(os.path.abspath(filename)))
    else:
        files = sorted([os.path.abspath(x) for x in filename])

    # Infer the extraction command from the first file's extension.
    if cmd is None:
        first = files[0]
        if first.endswith('.gz'):
            cmd = 'zcat'
        elif first.endswith('.bz2'):
            cmd = 'bzcat'
        elif first.lower().endswith('.t02') or first.lower().endswith('.t04'):
            cmd = 'viewdat -d99 -mb'
        else:
            cmd = 'cat'

    all_info = [(f, pat1, pat2, cmd) for f in files]

    def _load_one(info):
        return _doload_sed_help(info, sep=sep)

    if len(files) > 1 and parallel:
        # IO-heavy, so ThreadPool is OK
        with multiprocessing.pool.ThreadPool() as pool:
            results = pool.map(_load_one, all_info)
    else:
        results = map(_load_one, all_info)
    # Drop empty per-file results before stacking.
    return concatenate([x for x in results if len(x) > 0])


def load_pos(filename):
    """
    Load a WinAstra .pos file
    Expects rtkPosOutputFormat = LLH and rtkOutputFileHeaders = yes in winastra.ini
    filename=text ".pos" file from WinAstra.exe
    Returns (header,data)
    header = dictionary of column names -> index of data column
    data = numpy array of data.  'nan' values are empty
    Returns (None,None) when the file does not exist.
    """
    if not os.path.isfile(filename):
        return (None,None)

    def _is_number(x):
        # True if x parses as a float.
        try:
            float(x)
            return True
        except ValueError:
            return False

    def _extend_data(all_data,all_hdr):
        # Grow the array by 3600 rows (and out to the current column count),
        # padding the new cells with nan.
        tmp = full([all_data.shape[0]+3600,len(all_hdr)], nan)
        tmp[:all_data.shape[0],:all_data.shape[1]] = all_data
        return tmp

    all_hdr = dotdict({})      # column name -> column index in all_data
    all_data = empty([0,0])    # pre-allocated data; trimmed to 'pos' rows at the end
    pos = 0                    # number of data rows filled so far
    hdr_idx = []               # file column position -> all_data column index
    for line_num,l in enumerate(open(filename)):
        if line_num == 1:
            # NOTE(review): only line index 1 is parsed as the column-name
            # header; presumably line 0 is a title line -- confirm with a
            # sample .pos file.
            w = l.split()[:-1]
            hdr_idx = []
            for x in w:
                x = x.strip()
                if x not in all_hdr:
                    all_hdr[x] = len(all_hdr)
                hdr_idx.append(all_hdr[x])
            if len(all_hdr) > all_data.shape[1]:
                all_data = _extend_data( all_data, all_hdr )
        else:
            w = l.split()[:-1]
            # Data row if the 2nd field is numeric; non-numeric cells -> nan.
            if _is_number(w[1]):
                all_data[pos,hdr_idx] = [float(x) if _is_number(x) else nan for x in w]
                pos += 1
                if pos >= all_data.shape[0]:
                    all_data = _extend_data( all_data, all_hdr )

    # Trim the pre-allocated array down to the rows actually filled.
    all_data = all_data[:pos,:]
    return (all_hdr, all_data)

def load_hud(filename, max_lines=-1):
    """
    Load a Titan .hud file
    filename=text ".hud" file from TitanPlayer.exe
    max_lines = stop after this many data lines (-1 = no limit)
    Returns 'data' dictionary
     keys = column names
     values = numpy array of data.  'nan' values are empty
    For example:
      plot( data['GPS_Sec'], data['Platf13_GnssRcv0_R02_D1C-R_Std'])
    (The files use RINEX encoding for signals)

    NOTE(review): several callers in this file (gen_titan_ref_clock,
    TitanHudPKFEE) unpack the result as a (header, array) pair, which does
    not match this single-dict return -- confirm which variant they expect.
    """
    def _is_number(x):
        # True if x parses as a float.
        try:
            float(x)
            return True
        except ValueError:
            return False

    data = {}               # column name -> list of values, one per data row
    curr_hdrs = []          # column names from the most recent header line
    pos = 0                 # number of data rows consumed so far
    prev_hdr_line=-100      # line number of last header line (units line follows it)
    for line_num,l in enumerate(open(filename)):
        if max_lines > 0 and pos > max_lines:
            break
        w = l.split(';')[:-1]
        if _is_number(w[0]):
            # data line
            pos += 1
            # make sure every data column grows together
            for col in data.values():
                col.append(nan)
            # fill in data we have
            for hdr,val in zip(curr_hdrs,w):
                try:
                    data[hdr][-1] = float(val)
                except:
                    pass
        elif line_num != prev_hdr_line+1:
            # 1st header line = names, 2nd line=units (ignore)
            prev_hdr_line = line_num
            curr_hdrs = [w0.strip() for w0 in w]
            # add any missing headers.  Fill in 'nan' for old missing data.
            for hdr in curr_hdrs:
                if hdr not in data:
                    data[hdr] = [nan]*pos
    # Convert the per-column lists to numpy arrays.
    for key,val in data.items():
        data[key] = array(val)
    return data


def weighted_median(data, weights=None):
    """Calculate the weighted median of a list."""
    if weights is None:
        return median(data)
    midpoint = 0.5 * sum(weights)
    if any([j > midpoint for j in weights]):
        return data[argmax(weights)]
    if any([j > 0 for j in weights]):
        sorted_data, sorted_weights = zip(*sorted(zip(data, weights)))
        cumulative_weight = 0
        below_midpoint_index = 0
        while cumulative_weight <= midpoint:
            below_midpoint_index += 1
            cumulative_weight += sorted_weights[below_midpoint_index-1]
        cumulative_weight -= sorted_weights[below_midpoint_index-1]
        if cumulative_weight == midpoint:
            bounds = sorted_data[below_midpoint_index-2:below_midpoint_index]
            return sum(bounds) / float(len(bounds))
        return sorted_data[below_midpoint_index-1]


def gen_titan_ref_clock(titan_dir, d, k):
    """Generates RefClock.txt for SNPC based on Titan's clock.  Titan
    usually provides a very good clock estimate.

    titan_dir = directory containing PKFEE_States_RcvClkBias.hud
    d,k = rec 35:2 from vd2arr()

    Writes 'RefClock.txt' in the current directory; no return value.
    NOTE(review): load_hud() as defined in this file returns a single dict,
    but this unpacks (header, data) -- confirm the expected load_hud variant.
    """
    import pandas as pd
    hClk, dClk = load_hud(titan_dir + '/PKFEE_States_RcvClkBias.hud')
    t = dClk[:,hClk['GPS_Sec']]
    # The clock column name differs between HUD file versions.
    if 'SatSysDepRcvClk_Platf00_GnssRcv0_G00_Val' in hClk:
        clkkey = 'SatSysDepRcvClk_Platf00_GnssRcv0_G00_'
    else:
        clkkey = 'SatSysDepRcvClk_Platf00_GnssRcv0_G_'
    clk_bias = dClk[:,hClk[clkkey + 'Val']]
    clk_sigma = dClk[:,hClk[clkkey + 'Std']]
    code_bias = dClk[:,hClk['CodeRcvBias_Platf00_GnssRcv0_C1C-G_Val']]
    dt = median(diff(t))
    # Low-pass the code bias (cutoff ~1/(1 hour) relative to the sample rate).
    b,a = butter(3, dt/.5/(60*60), btype='lowpass' )
    code_bias = filtfilt(b, a, code_bias, method="gust" )
    # Outer-join Titan clock epochs with the rec 35:2 epochs on time; rows
    # present on only one side get NaN in the other column.
    df = pd.DataFrame( {'clk':pd.Series(range(len(t)),index=t),
                        'd':pd.Series(range(len(d)),index=d[:,k.TIME]) } )

    print_("Titan/d start_t = %.1f/%.1f, end_t %.1f/%.1f" % (t[0],d[0,k.TIME],t[-1],d[-1,k.TIME]))
    SECS_IN_WEEK = 60.*60.*24.*7.
    with open('RefClock.txt','w') as outfile:
        print_("% clk_types 1", file=outfile )
        print_("% 0 0.0", file=outfile )
        last_clk_t = -10.
        for curr_t,row in df.iterrows():
            ipos = row['d']
            if not isfinite(ipos):
                # No rec 35:2 data at this epoch.
                continue
            iclk = row['clk']
            dt = curr_t - last_clk_t
            # Handle GPS time-of-week wrap-around.
            if dt < -SECS_IN_WEEK*.5:
                dt += SECS_IN_WEEK
            # Skip epochs with no Titan clock unless the gap is short enough
            # (<= 1.1 s) to extrapolate from the last clock value.
            if not isfinite(iclk) and dt > 1.1:
                continue

            ipos = int(ipos)
            # Drift in meters/second; CLKDRIFT presumably in microseconds/s
            # (LIGHT/1e6 converts) -- TODO confirm units.
            mydrift = d[ipos,k.CLKDRIFT]*(GnssConst.LIGHT/1e6)
            if isfinite(iclk):
                iclk = int(iclk)
                mybias = clk_bias[iclk] + code_bias[iclk]
                mysigma = clk_sigma[iclk]
                last_clk_t = curr_t
                last_clk_bias = mybias
                last_clk_sigma = mysigma
            else:
                # Short gap: extrapolate from the last clock using the
                # receiver drift, with inflated sigma.
                mybias = last_clk_bias + mydrift*dt
                mysigma = last_clk_sigma*2
            if mysigma > 100.0:
                # Clock too uncertain to be useful as a reference.
                continue

            print_("%.3f %.1f %.3f %.1f" % (curr_t, -mybias, -mydrift, mysigma), file=outfile )


class TitanHudPKFEE(object):
    """Load the (measurement) Titan PKFEE HUD files in given directory

    dirname = directory holding the PKFEE_*.hud files
    platf   = platform number used in HUD column-name patterns (default 13)

    NOTE(review): load_hud() as defined in this file returns a single dict,
    but this constructor unpacks (header, data) pairs -- confirm which
    load_hud variant this class expects.
    """
    def __init__(self, dirname, platf=13):
        # (header, data) pairs for residuals, ambiguities, receiver/satellite
        # biases (multipath) and receiver clock states.
        self.hRes, self.dRes = load_hud( dirname + '/PKFEE_FilterRes_GNSS.hud')
        self.hAmb, self.dAmb = load_hud( dirname + '/PKFEE_States_Amb.hud')
        self.hMP, self.dMP = load_hud( dirname + '/PKFEE_States_RcvSatBias.hud')
        self.hClk, self.dClk = load_hud(dirname + '/PKFEE_States_RcvClkBias.hud')
        self.amb_median = None
        self.L1_reduced_amb = {}
        self.L1_MP_plus_amb = {}
        self.platf=platf

    def get_ID(self, hdr, pattern ):
        """Get HUD header value that matches pattern"""
        # hdr is a header dict; iterate its column names.
        for h in hdr:
            m = re.match( pattern, h )
            if m:
                return h
        return None

    def get_cde_pat(self,sv_id,sat_type):
        """Return the HUD column-name regex for the L1 code of one satellite."""
        if sat_type==RTConst.RT_SatType_GPS:
            return 'Platf%.2d_.*G%.2d_[PC]1C-G_' % (self.platf,sv_id)
        elif sat_type==RTConst.RT_SatType_GLONASS:
            return 'Platf%.2d_.*R%.2d_[PC]1C-R_' % (self.platf,sv_id)
        elif sat_type==RTConst.RT_SatType_BEIDOU_B1GEOPhs:
            return 'Platf%.2d_.*C%.2d_[PC]1I-C_' % (self.platf,sv_id)
        raise ValueError('unknown sat_type %d' % sat_type)

    def get_phs_pat(self,sv_id,sat_type):
        """Return the HUD column-name regex for the L1 phase of one satellite."""
        if sat_type==RTConst.RT_SatType_GPS:
            return 'Platf%.2d_.*G%.2d_L1C-G_' % (self.platf,sv_id)
        elif sat_type==RTConst.RT_SatType_GLONASS:
            return 'Platf%.2d_.*R%.2d_L1C-R_' % (self.platf,sv_id)
        elif sat_type==RTConst.RT_SatType_BEIDOU_B1GEOPhs:
            return 'Platf%.2d_.*C%.2d_L1I-C_' % (self.platf,sv_id)
        raise ValueError('unknown sat_type %d' % sat_type)

    def get_L1_cde_res(self,sv_id,sat_type,remove_clk=True,filt_Tc=-1.):
        """Returns code residuals: t, cde_val [m], cde_std [m].
        If remove_clk==True (default), remove clock from residuals.
          Currently this is needed to look at code multipath values.
        If filt_Tc > 0, run low pass filter on cde_res with this time constant.
        """
        resPat = self.get_cde_pat(sv_id,sat_type)
        # Receiver code-bias column for the same constellation.
        if sat_type==RTConst.RT_SatType_GPS:
            biasPat = 'CodeRcvBias_Platf00_GnssRcv0_[PC]1C-G_Val'
        elif sat_type==RTConst.RT_SatType_GLONASS:
            biasPat = 'CodeRcvBias_Platf00_GnssRcv0_[PC]1C-R_Val'
        elif sat_type==RTConst.RT_SatType_BEIDOU_B1GEOPhs:
            biasPat = 'CodeRcvBias_Platf00_GnssRcv0_[PC]1I-C_Val'
        else:
            raise ValueError('unknown sat_type %d' % sat_type)

        id1 = self.get_ID(self.hRes, resPat + 'Val' )
        id2 = self.get_ID(self.hRes, resPat + 'Std' )
        if id1 is None or id2 is None:
            # Satellite not present in the residuals file.
            return [], [], []
        t = self.dRes[:,self.hRes['GPS_Sec']]
        cde_val = self.dRes[:,self.hRes[id1]]
        cde_std = self.dRes[:,self.hRes[id2]]
        if remove_clk:
            # Fold the (median-removed) receiver code bias back into the
            # residuals, keeping only epochs where both are valid.
            id1 = self.get_ID(self.hClk, biasPat )
            tclk = self.dClk[:,self.hClk[id1]]
            i = find( ~isnan(cde_val+tclk) )
            t = t[i]
            cde_val = cde_val[i] + (tclk[i]-median(tclk[i]))
            cde_std = cde_std[i]
        else:
            i = find( ~isnan(cde_val) )
            t = t[i]
            cde_val = cde_val[i]
            cde_std = cde_std[i]
        if filt_Tc > 0:
            # Low-pass filter each contiguous time segment separately;
            # segment boundaries are gaps larger than the sample interval.
            dt = min(diff(t))
            b,a = butter(3, dt/.5/filt_Tc, btype='lowpass' )
            start_i = 0
            for end_i in find( diff(t) > dt+1e-3 ):
                if start_i == end_i:
                    # Empty segment: nothing to filter (no-op assignment).
                    cde_val[start_i:end_i] = cde_val[start_i:end_i]
                else:
                    cde_val[start_i:end_i] = filtfilt( b, a, cde_val[start_i:end_i], method="gust")
                start_i = end_i+1
            # Filter the final segment through the end of the data.
            end_i = len(t)
            cde_val[start_i:end_i] = filtfilt( b, a, cde_val[start_i:end_i], method="gust")
        return t, cde_val, cde_std

    def get_L1_phs_res(self,sv_id,sat_type):
        """Returns phase residuals: t, phs_val [m], phs_std [m]"""
        resPat = self.get_phs_pat(sv_id,sat_type)
        id1 = self.get_ID(self.hRes, resPat+'Val' )
        id2 = self.get_ID(self.hRes, resPat+'Std' )
        t = self.dRes[:,self.hRes['GPS_Sec']]
        phs_val = self.dRes[:,self.hRes[id1]]
        phs_std = self.dRes[:,self.hRes[id2]]
        return t, phs_val, phs_std

    def get_amb_svs(self):
        """Returns list of all sv IDs (sv_id, RT_SatType) that have PKFEE_States_amb.hud data"""
        sv_list = []
        # Match GPS (G), GLONASS (R) and BeiDou (C) ambiguity column names.
        for x in self.hAmb:
            m = re.match('.*?_G([0-9][0-9])_[PL]1', x )
            if m:
                sv_list.append( (int(m.group(1)), RTConst.RT_SatType_GPS) )
            m = re.match('.*?_R([0-9][0-9])_[PL]1', x )
            if m:
                sv_list.append( (int(m.group(1)), RTConst.RT_SatType_GLONASS) )
            m = re.match('.*?_C([0-9][0-9])_[PL]1', x )
            if m:
                sv_list.append( (int(m.group(1)), RTConst.RT_SatType_BEIDOU_B1GEOPhs) )

        # De-duplicate by encoding (sv, sat_type) as sv + sat_type*1000.
        svs_combined = unique([sv+sat_type*1000 for sv,sat_type in sv_list])
        return array([(int(x)%1000, int(x/1000)) for x in svs_combined])

    def get_L1_phs_MP(self,sv_id,sat_type,only_valid=False):
        """Get all L1 phase multipath: t [s], phs_val [m], phs_std [m].  Look
        at phs_res to see if data is used.  This doesn't include any drift
        found by the phase AMB states.
        If only_valid=False -> return all MP data.  If True -> set invalid MP data to nan (based on phs_res)."""
        mpPat = 'PhaseMP_' + self.get_phs_pat(sv_id,sat_type)
        id1 = self.get_ID(self.hMP, mpPat+'Val' )
        id2 = self.get_ID(self.hMP, mpPat+'Std' )
        t = self.dMP[:,self.hMP['GPS_Sec']]
        phs_val = self.dMP[:,self.hMP[id1]]
        phs_std = self.dMP[:,self.hMP[id2]]
        if only_valid:
            # NOTE(review): intersect() is not a NumPy builtin -- presumably a
            # MATLAB-style helper (NumPy's equivalent is
            # intersect1d(..., return_indices=True)); confirm where it comes from.
            t2, phs_res, _ = self.get_L1_phs_res(sv_id,sat_type)
            _, i1, i2 = intersect( t, t2[isnan(phs_res)] )
            phs_val[i1] = nan
            phs_std[i1] = nan
        return t, phs_val, phs_std

    def get_L1_cde_MP(self, sv_id, sat_type, only_valid=False):
        """Get all L1 code multipath: t [s], cde_val [m], cde_std [m], cde_res [m].
        If only_valid=False -> return all MP data.  If True -> set invalid MP data to nan (based on cde_res)."""
        mpPat = 'CodeMP_'+self.get_cde_pat(sv_id,sat_type)
        id1 = self.get_ID(self.hMP, mpPat + 'Val')
        id2 = self.get_ID(self.hMP, mpPat + 'Std')
        t1 = self.dMP[:,self.hMP['GPS_Sec']]
        cde_val = self.dMP[:,self.hMP[id1]]
        cde_std = self.dMP[:,self.hMP[id2]]
        t2, cde_res, _ = self.get_L1_cde_res(sv_id,sat_type)
        if only_valid:
            # See NOTE(review) in get_L1_phs_MP about intersect().
            _, i1, i2 = intersect( t1, t2[isnan(cde_res)] )
            cde_val[i1] = nan
            cde_std[i1] = nan
        return t1, cde_val, cde_std, cde_res

# Syntax sugar - allow accessing dictionary entries with "."
# Example of setting elements with dictionary:
#   k = dotdict({'elem2':2})
# Example of setting elements sequentially:
#   k = dotdict(['elem0','elem1','elem2'])
# Example of setting elements after creation:
#   k = dotdict({})
#   k.elem0 = 0
#   k.elem1 = 1
class dotdict(dict):
    """A dict whose entries can also be read/written/deleted as attributes.
    Construct from a dict, or from a list/tuple of names (mapped to their
    positions).  Missing attribute access raises KeyError, like a dict."""
    unique_val = {}  # kept for backward compatibility with earlier versions
    __setattr__ = dict.__setitem__
    __delattr__ = dict.__delitem__
    def __init__(self, idict):
        # A list/tuple of names becomes {name: position}.
        if type(idict) == list or type(idict) == tuple:
            idict = dict((key, pos) for pos, key in enumerate(idict))
        super(dotdict, self).__init__(idict)
    def __getstate__(self):
        # Pickle support: all state lives in the dict itself.
        return {}
    def __setstate__(self, d):
        pass  # needed for pickle
    def __getattr__(self, name):
        # Only called when normal attribute lookup fails: fall back to keys.
        if name in self:
            return dict.__getitem__(self, name)
        raise KeyError(name)

def _vd2arr_help(info_list,verbose=True):
    """Worker for vd2arr(): run 'viewdat --mat' on one file and load the result.

    info_list = (filename, rec_args, opt_args, temp_dir, index)
    Returns the loaded numpy array, or [] when viewdat produced no usable
    .mat file (best-effort: failures yield an empty result, not an exception).
    """
    import subprocess as sp

    f, rec, opt, mydir, n = info_list
    if verbose:
        print_('  loading... %s' % f)
    tmpfile = 'temp%d.mat' % n
    sp.call(['viewdat','--mat=%s'%tmpfile,'-h',f] + rec + opt, cwd=mydir)
    d = []
    try:
        d = doload(mydir + "/" + tmpfile, verbose=False)
    except Exception:
        # Narrowed from a bare except: a missing/corrupt .mat file simply
        # yields an empty result for this input file.
        pass
    return d

def _vd2arr_help_boostpython(info_list):
    """Worker for vd2arr(): load one file via the BoostPythonViewdat module.

    info_list = (filename, rec_args, opt_args, temp_dir, index)
    Returns the data array or, for very large arrays, the name of a .npy
    file holding it (returning huge arrays through multiprocessing pipes is
    slow, so big results are handed back by file instead).
    Raises on unsupported rec/opt or when the native loader reports an error.
    """
    f, rec, opt, mydir, n = info_list
    if ("-d35:2" not in rec and "-d35:19" not in rec) or (len(opt) != 0):
        raise Exception("rec opt unacceptable. Current BoostPythonViewdat module only accepts -d35:2 or -d35:19 with no opt.")

    print_('loading(by boostpython) ... %s' % f)
    d = None

    import BoostPythonViewdat
    path = os.getcwd()
    os.chdir(path+"/"+mydir)
    try:
        d, err_code = BoostPythonViewdat.get_vdata(rec + ["--mat_mem","-h"], f)
    finally:
        # Always restore the working directory, even if get_vdata() raises.
        os.chdir(path)
    if (err_code != 0):
        print("err_code = "+str(err_code))
        raise Exception("BoostPythonViewdat.get_vdata() failed")
    if (len(d.flatten())) > 32000000: # Observing slowdown when len() > 1e6, but not observing slowdown even up to len() == 32e6 on fermion. Now, it will gain >x2 speed up for all 10-min T04 files
        np_fname = mydir+'/tmp.npy'
        # Use a distinct handle name; the original shadowed the filename 'f'.
        with open(np_fname, 'wb') as npy_file:
            np.save(npy_file, d)
        return np_fname
    else:
        return d

def vd2arr(filename,rec='-d35:2',opt=None,parallel=True,combine_flags=True,verbose=True):
    """
    Run viewdat with --mat option.
    filename=T0x file, or list of files, or glob filename pattern
    rec='-d27' or '-d29' or '-d35:2' (default -d35:2).
      Can also pass args here (e.g. '-d27 -s10000 -p1')
    extra flags: opt=['--return_rec27_fll_meas','--whatever'] (default [])
    parallel=True - load multiple files concurrently
    combine_flags=True - merge flag indices into 'key'; if False, returns
      (d,key,flags) with the flags kept separate
    verbose=True - print per-file progress (only honored on the serial
      fallback path)
    Returns (d,key):
     d = raw data
     key = structure with values from any key info generated by '-h'
    """
    import tempfile, shutil, shlex
    import multiprocessing.pool

    rec = shlex.split(rec)
    if opt is None:
        opt = []
    # NOTE(review): these += mutate a caller-supplied 'opt' list in place.
    if '-d27' in rec:
        opt+=['--translate_rec35_sub9_to_rec27','--translate_rec35_sub19_to_rec27']
    if '-d29' in rec:
        opt+=['--translate_rec35_sub2_to_rec29']

    key = dotdict({})
    flags = dotdict({})
    info_list = []
    try:
        # Expand a glob pattern, or normalize an explicit list of filenames.
        if type(filename) is str:
            filename = os.path.expanduser(filename)
            filename = os.path.abspath(filename)
            files = sorted(glob(filename))
        else:
            files = sorted([os.path.abspath(x) for x in filename])
        if len(files) == 0:
            raise Exception("Failed to find any input files of format "+filename)
        # One private temp directory per input file for viewdat's outputs.
        for n,f in enumerate(files):
            mydir=tempfile.mkdtemp(prefix=".tmp",dir='.')
            info_list.append( (f,rec,opt,mydir,n) )

        d = None
        d2 = None
        try:
            # Fast path: the native BoostPythonViewdat loader, when available.
            # check if BoostPythonViewdat module exists
            # NOTE(review): relies on importlib.util being reachable without an
            # explicit 'import importlib.util' -- confirm on the target Python.
            BoostPythonViewdat_spec = importlib.util.find_spec("BoostPythonViewdat")
            found = BoostPythonViewdat_spec is not None
            if not found:
                raise Exception("Skip using BoostPythonViewdat.")
            
            # # check rec is supported or not
            # for info_list_i in info_list:
            #     f, rec, opt, mydir, n = info_list_i
            #     rec = (rec[0]) if (len(rec)==1) else rec
            #     if rec != "-d35:2" and rec != "-d35:19":
            #         raise Exception("Input rec is not supported for BoostPythonViewdat.")
                    
            # If parallel = False, switch back to calling viewdat, because
            # option 1: pool.map() could cause selflock when n_cpu < n_jobs
            # option 2: pool.apply_async() could overcome issue above, but has memory leakage issue if exception occurs and pool.close() is skipped
            if parallel == False:
                raise Exception("Parallel = False, switch back to use default viewdat executable.")

            from multiprocessing import Pool
            n_cpu = int( len(info_list) ) if parallel else 1
            with Pool(n_cpu) as pool:
                d = pool.map( _vd2arr_help_boostpython, info_list )
            for di in range(len(d)):
                if type(d[di]) == str: # if current ele is a filename due to return array too long will lock muliprocessing, load data from file.
                    with open(d[di], 'rb') as f:
                        d[di] = np.load(f)
            d2 = [x for x in d if len(x) > 0]   
        except Exception as e:
            # Fallback path: shell out to the viewdat executable per file.
            # print("Exception = "+ str(e))
            if len(files) > 1 and parallel:
                # multiprocessing.Pool() uses "fork" which has many ways
                # to hang the system.  Switching to "spawn" is safer, but
                # a bit slower.  ThreadPool() is fast and safe but it only
                # works if we are IO-heavy (or doing subprocess.call())
                with multiprocessing.pool.ThreadPool() as pool:
                    d = pool.map( _vd2arr_help, info_list )
            else:
                d = map( lambda info: _vd2arr_help(info,verbose), info_list )
            d2 = [x for x in d if len(x) > 0]
        d = concatenate(d2)
        # Parse key/flag indices from the first temp dir that has mat_key.m.
        for _,_,_,mydir,_ in info_list:
            keyfile = mydir + '/mat_key.m'
            if os.path.isfile(keyfile):
                for l in open(keyfile,'r'):
                    # 'key.NAME = N' lines: 1-based column index -> 0-based.
                    m=re.match(r'\s+key.([^ ]+)\s+=\s+(\d+)',l)
                    if m and len(m.groups()) == 2:
                        key[m.group(1)] = int(m.group(2))-1
                    m=re.match(r'\s+flag.([^ ]+)\s+=\s+(\d+)',l)
                    if m and len(m.groups()) == 2:
                        if combine_flags:
                            key[m.group(1)] = int(m.group(2))
                        else:
                            flags[m.group(1)] = int(m.group(2))
            if len(key) > 0:
                break

    finally:
        # Always clean up the per-file temp directories.
        for _,_,_,mydir,_ in info_list:
            shutil.rmtree(mydir)
    if combine_flags:
        return (d,key)
    else:
        return (d,key,flags)

def cmd2arr(cmd,prefix_str=None,dtype=object):
    """
    Run command "cmd" and return array with data.
    If 'prefix_str' is set, only lines containing it are kept, and it is
    stripped from the text before converting to an array.
    For example: cmd='viewdat -d29 -mb filename.T04'
    vd2arr is faster, but this doesn't need a temporary file and works for more types
    """
    import shlex
    import subprocess as sp
    pipe = sp.Popen(shlex.split(cmd), stdout=sp.PIPE)
    # stdout yields bytes: encode the prefix so find()/replace() work
    # (previously a str prefix raised TypeError against the bytes lines).
    if isinstance(prefix_str, str):
        prefix_str = prefix_str.encode('utf-8')
    d = []
    while True:
        l = pipe.stdout.readline()
        if l == b'': break
        if prefix_str is not None:
            if l.find(prefix_str) < 0:
                continue
            l = l.replace(prefix_str, b'')
        d.append([float(x) for x in l.split()])
    return array(d, dtype=dtype)

def cmd2arr_raw(cmd):
    """
    Run command "cmd" and return array with raw lines as UTF-8 encoding.
    Empty lines (any line-ending style) are dropped.
    For example: cmd='viewdat -d99 -mb filename.T04'
    """
    import shlex
    import subprocess as sp
    proc = sp.Popen(shlex.split(cmd), stdout=sp.PIPE)
    blank_lines = (b'\n', b'\r\n', b'\r\r\n')
    lines = []
    for raw in iter(proc.stdout.readline, b''):
        # don't include empty lines
        if raw in blank_lines:
            continue
        lines.append(raw.rstrip().decode('utf-8'))
    return array(lines)


class BinStruct:
    """Simplify naming struct.unpack/pack data.
    Example for a binary file with a byte and one array of 2 U16s:
      x = BinStruct('flag:<B scales:2H')
      x.decode(b'\x01\x00\x02\x03\x04')
      print(x.d.flag) # 1
      print(x.d.scales) # [512, 1027].  (array because '2H' starts with a number)
    """
    def __init__(self,desc):
        """desc = space-separated 'name:format' fields.  'format' is a struct
        module format; a leading repeat count makes the field decode to a list."""
        data_desc = []
        bin_desc = ''
        self.d = BinStruct.Empty()
        for elem in desc.split():
            name,form = elem.split(':')
            setattr(self.d, name, None )
            # A leading repeat count means the field holds that many values.
            m = re.match(r'\d+', form)
            if m:
                data_desc += [(name,int(m.group(0)))]
            else:
                data_desc += [(name,1)]
            bin_desc += form
        self.data_desc = data_desc   # [(field name, element count), ...]
        self.bin_desc = bin_desc     # concatenated struct format string

    def decode(self,bin_data):
        """Unpack bin_data and store the named fields as attributes of self.d."""
        unpack_d = struct.unpack(self.bin_desc, bin_data)
        pos = 0
        for elem_name,elem_len in self.data_desc:
            if elem_len > 1:
                unpack_curr = list(unpack_d[pos:pos+elem_len])
            else:
                unpack_curr = unpack_d[pos]
            setattr(self.d, elem_name, unpack_curr )
            pos += elem_len

    def encode(self):
        """Pack the current self.d field values back into bytes."""
        out_data = []
        for elem_name,_ in self.data_desc:
            curr_elem = getattr(self.d, elem_name)
            try:
                out_data += curr_elem      # list/array-valued field
            except TypeError:
                # Narrowed from a bare except: scalars are not iterable.
                out_data += [curr_elem]
        return struct.pack(self.bin_desc, *out_data)

    class Empty:
        """Plain attribute container for the decoded fields."""
        def __str__(self):
            return str(self.__dict__)


class RunCmdByLine(object):
    """Run a shell command and get results one line at a time
    without waiting for the entire command to complete.
    Example:
      data = RunCmdByLine("ls -l /")
      while True:
        line = data.readline()
        if line is None: break
        print(line)
    """
    def __init__(self, cmd_str, decode=True, strip=True):
        import subprocess as sp
        import shlex
        import platform

        self.strip = strip
        self.decode = decode
        # On Linux, run under stdbuf so the child's stdout is unbuffered and
        # lines arrive as soon as they are printed.
        argv = shlex.split(cmd_str)
        if platform.system() == "Linux":
            argv = ["stdbuf", "-o0"] + argv
        self.pipe = sp.Popen(argv, stdout=sp.PIPE)

    def readline(self):
        """Return the next output line, or None once the command has exited
        and its output is exhausted."""
        raw = self.pipe.stdout.readline()
        self.pipe.poll()  # refreshes pipe.returncode if the process exited
        if not raw and self.pipe.returncode is not None:
            self.pipe.wait()
            return None
        if self.decode:
            raw = raw.decode()
        if self.strip:
            raw = raw.rstrip()
        return raw

    def close(self):
        """Kill the child process (if still running) and reap it."""
        if self.pipe is not None:
            self.pipe.kill()
            self.pipe.wait()
            self.pipe = None

    def __del__(self):
        self.close()

def get_first_str(cmd_str, search_str):
    """Run cmd_str and return the first stdout line starting with search_str
    (as bytes, trailing whitespace stripped).  Raises ValueError when the
    command finishes without producing a matching line."""
    import subprocess as sp
    import shlex
    import platform

    target = bytearray(search_str,'utf-8')
    argv = shlex.split(cmd_str)
    if platform.system() == "Linux":
        # Unbuffered stdout lets us stop as soon as the line appears.
        argv = ["stdbuf","-o0"] + argv
    pipe = sp.Popen(argv, stdout=sp.PIPE)
    while True:
        line = pipe.stdout.readline()
        pipe.poll()  # refreshes pipe.returncode if the process exited
        if not line and pipe.returncode is not None:
            break
        if line.startswith(target):
            # Found it: stop the child early and return.
            pipe.kill()
            pipe.wait()
            return line.rstrip()
    pipe.wait()
    raise ValueError('could not find search_str: %s' % target)

def get_kv_info(filename, key):
    """
    Get key-value data from viewdat.  Returns the first occurrence as a
    (bytes) string.
    filename = T04 file name
    key = key to return (e.g., AntennaType or RefStationLLH)
    """
    prefix = '  - %s => ' % key
    line = get_first_str("viewdat -d16 " + filename, prefix)
    return line.split(b' => ')[1]

def kv_info2arr(filename, key=None, convert_float=False):
    """
    Convert key-value data (viewdat -d16 output) to an array.
    Options: key - if set, only return data with this key string;
                   if None, return every raw output line
             convert_float - if set, convert data to float instead of returning strings
    """
    import shlex
    import subprocess as sp
    pipe = sp.Popen(shlex.split("viewdat -d16 " + filename), stdout=sp.PIPE)
    d = []
    # BUGFIX: with key=None the old code still built a search string
    # containing the literal text "None", so the documented
    # "return every line" behavior never triggered.
    search_str = None
    if key is not None:
        search_str = bytearray('  - %s => ' % key, 'utf-8')
    while True:
        l = pipe.stdout.readline()
        if l == b'': break
        if search_str is not None:
            if l.find(search_str) < 0:
                continue
            # keep only the value part, split into fields
            l = l.rstrip().split(b' => ')[1].split(b' ')
            if convert_float:
                l = [float(x) for x in l]
        d.append(l)
    return array(d)

def get_meas_week(filename):
    """
    Use viewdat to get the 1st week # present in measurements (rec27, 35:9, 35:19)
    Returns the integer GPS week number, or None if no week is found in
    the first 100 lines of viewdat output.
    """
    import subprocess as sp
    cmd = ['viewdat','-d27','--translate_rec35_sub9_to_rec27','--translate_rec35_sub19_to_rec27',filename]
    pipe = sp.Popen(cmd, stdout=sp.PIPE)
    for _ in range(100):
        l = pipe.stdout.readline()
        # BUGFIX: stdout yields bytes, so the old "l == ''" EOF check
        # never matched and re.search() with a str pattern raised
        # TypeError on bytes input - use a truthiness test and a bytes
        # pattern instead.
        if not l:
            print("Didn't find week number near start of file...")
            break
        m = re.search(rb'Week : ([0-9]+)', l)
        if m:
            return int(m.group(1))
    return None

class OrbitConst(object):
    """
    Orbital constants (WGS-84 ellipsoid).
    """
    PI = 3.14159265358979323846
    A_WGS84 = 6378137.0                     # semi-major axis [m]
    B_WGS84 = 6356752.314245179             # semi-minor axis [m]
    E2_WGS84 = 6.69437999013e-3             # first eccentricity squared (e^2)
    ONE_MIN_E2 = 0.99330562000987           # 1 - e^2
    SQRT_ONE_MIN_E2 = 9.96647189335258e-1   # sqrt(1 - e^2)

def enu2llh( enu, ref_llh, is_ref_rad=True ):
    """
    Add delta east/north/up offsets to a reference lat/lon/height to
    produce absolute lat/lon/height positions.
    enu = delta east/north/up [m] (2D array, one row per point)
    ref_llh = lat[rad], lon[rad], height[m] (if is_ref_rad=False, degrees instead of radians)
    """
    lat0 = ref_llh[0]
    if not is_ref_rad:
        lat0 = radians(lat0)

    # Radii of curvature at the reference latitude
    w = sqrt(1 - OrbitConst.E2_WGS84 * sin(lat0)**2)
    rad_n = OrbitConst.A_WGS84 / w                                   # prime vertical
    rad_m = OrbitConst.A_WGS84 * (1 - OrbitConst.E2_WGS84) / w**3    # meridional

    # Convert metric offsets into angular/height deltas
    dllh = empty(enu.shape)
    dllh[:, 0] = enu[:, 1] / rad_m
    dllh[:, 1] = enu[:, 0] / (rad_n * cos(lat0))
    dllh[:, 2] = enu[:, 2]
    if not is_ref_rad:
        dllh[:, 0] = degrees(dllh[:, 0])
        dllh[:, 1] = degrees(dllh[:, 1])

    return dllh + ref_llh

def aer2ecef(azimuthDeg, elevationDeg, slantRange, obs_lat, obs_long, obs_alt):
    """
    Convert satellite az/el/range (viewed from observer) to ECEF position.
    azimuthDeg = satellite azimuth [deg]
    elevationDeg = satellite elevation [deg]
    slantRange = distance from observer to satellite [m]
    obs_lat = observer latitude [deg] (NOT radians: the value is passed
              to conv_lla_ecef(), which takes degrees, and is converted
              with radians() below)
    obs_long = observer longitude [deg]
    obs_alt = observer height [m]
    Returns array([x, y, z]) = satellite ECEF position [m]
    """
    #site ecef in meters
    sitex, sitey, sitez = conv_lla_ecef(obs_lat,obs_long,obs_alt)

    slat = sin(radians(obs_lat))
    slon = sin(radians(obs_long))
    clat = cos(radians(obs_lat))
    clon = cos(radians(obs_long))

    azRad = radians(azimuthDeg)
    elRad = radians(elevationDeg)

    # az,el,range to sez convertion
    south  = -slantRange * cos(elRad) * cos(azRad)
    east   =  slantRange * cos(elRad) * sin(azRad)
    zenith =  slantRange * sin(elRad)

    # rotate the south/east/zenith vector into ECEF and translate by
    # the observer site position
    x = ( slat * clon * south) + (-slon * east) + (clat * clon * zenith) + sitex
    y = ( slat * slon * south) + ( clon * east) + (clat * slon * zenith) + sitey
    z = (-clat *        south) + ( slat * zenith) + sitez

    return array([x, y, z])

def llh2enu( llh, ref_llh, is_rad=False, is_ref_rad=False ):
    """
    Convert lat/lon/height to delta east/north/up.
    llh = array of lat/lon/height [lat/lon in degrees by default]
    ref_llh = point or array of lat/lon/height [lat/lon in degrees by default]
    is_rad = True -> "llh" lat/lon is radians, else in degrees
    is_ref_rad = True -> "ref_llh" lat/lon is radians, else in degrees
    Returns (dE, dN, dU) arrays [m]
    """
    # Make sure inputs are 2D arrays.  ROBUSTNESS FIX: asarray() first,
    # so plain lists/tuples are accepted too (the old code called
    # .reshape() directly on the raw input, which only works for
    # ndarrays).
    llh = asarray(llh)
    ref_llh = asarray(ref_llh)
    if llh.ndim == 1:
        llh = llh.reshape( (1,len(llh)) )
    if ref_llh.ndim == 1:
        ref_llh = ref_llh.reshape( (1,len(ref_llh)) )

    # Convert to radians?
    scale1 = ones(shape(llh))
    scale2 = ones(shape(ref_llh))
    ref_lat = copy(ref_llh[:,0])
    if not is_rad:
        scale1[:,0:2] *= OrbitConst.PI/180
    if not is_ref_rad:
        scale2[:,0:2] *= OrbitConst.PI/180
        ref_lat *= OrbitConst.PI/180

    # Compute the residuals
    dllh = llh*scale1 - ref_llh*scale2

    # Compute Radii of Curvature (N = prime vertical, M = meridional)
    W = sqrt( 1 - OrbitConst.E2_WGS84 * sin(ref_lat)**2 )
    N = OrbitConst.A_WGS84 / W
    M = OrbitConst.A_WGS84 * ( 1 - OrbitConst.E2_WGS84 ) / W**3

    # Compute Metric Components
    dE = dllh[:,1] * N * cos(ref_lat)
    dN = dllh[:,0] * M
    dU = dllh[:,2]

    return (dE, dN, dU)

def comp_3d_dist(llh, ref_llh, is_rad=False, is_ref_rad=False ):
    """
    Compute the 3D distance between lat/lon/height positions and a
    reference lat/lon/height.
    llh = array of lat/lon/height [lat/lon in degrees by default]
    ref_llh = point or array of lat/lon/height [lat/lon in degrees by default]
    is_rad = True -> "llh" lat/lon is radians, else in degrees
    is_ref_rad = True -> "ref_llh" lat/lon is radians, else in degrees
    """
    de, dn, du = llh2enu(llh, ref_llh, is_rad, is_ref_rad)
    return sqrt(de**2 + dn**2 + du**2)

def intersect( *args, **kwargs ):
    """
    intersect( a, b, c, ..., make_unique=False )
    Find values that appear in both a and b, c, d, ...
    a = array
    b = array
    etc. [optional]
    ret[0] = common values in a,b,c, ... Will be sorted!
    ret[1] = index into a[] (not sorted)
    ret[2] = index into b[] (not sorted)
    etc. [optional]
    Note that any repeated input values will be in the output too
    unless you set make_unique=True.
    If any of c,d,etc. are None, then it won't be used for comparison
    and ret[3],ret[4],etc. will be [].
    """
    from functools import reduce
    make_unique = kwargs.get('make_unique', False)
    make_quick_unique = kwargs.get('make_quick_unique', False)
    duplicated_values = []

    # Pairwise intersection over all non-None inputs
    # (third positional arg of intersect1d is assume_unique=True,
    # matching the historical behavior)
    common = reduce(lambda a, b: intersect1d(a, b, True),
                    (x for x in args if x is not None))
    if make_unique:
        common = unique(common)
    if make_quick_unique:
        # drop values that appear back-to-back in the sorted common list
        zero_gaps = np.where(np.ediff1d(common) == 0)[0]
        duplicated_values = common[zero_gaps]
        common = np.delete(common, zero_gaps)

    def get_arg_idx(arg):
        if arg is None:
            return []
        if make_unique:
            # mask out repeated entries so in1d() matches each value once
            arg = array(arg).astype(object)
            keep = zeros_like(arg, dtype=bool)
            keep[unique(arg, return_index=True)[1]] = True
            arg[~keep] = None
        if make_quick_unique:
            for v in duplicated_values:
                hits = np.where(arg[:] == v)[0]
                arg[hits[:-1]] = None
        idx = in1d(arg, common).nonzero()[0]
        if len(common) != len(idx):
            print_("intersect length mismatch - perhaps there is repeated data?")
        return idx

    # the first 2 args[] are mandatory, further args[] are optional
    ret = [common]
    ret += [get_arg_idx(a) for a in args[:2]]
    ret += [get_arg_idx(a) for a in args[2:]]
    return ret

def docdf( x, do_pdf=False, pdf_sm=0.1, pad=0 ):
    """Compute CDF (and PDF?) for 'x'.
    if do_pdf is False:
      Returns (cx, cy)
        cx = is just 'x' sorted
        cy = value from 0 to 1.0 - shows the % of data below corresponding cx
    if do_pdf is True:
      Returns (cx, cy, px, py )
      cx/cy as above
      px = X values for PDF (correspond to input 'x' values)
      py = Y values for PDF (probability)
      (Modify 'pdf_sm' to get more or less smoothing.  Higher pdf_sm=more smoothing.)
    pad = if > len(x), pad the data out to 'pad' points with +infinity
    plot( cx, cy ) to show CDF plot.
    plot( cx, cy, px, py ) to show CDF and PDF plot.
    """
    if pad > 0 and pad > len(x):
        # pad with +inf so the padded points sort to the end of the CDF.
        # BUGFIX: use 'inf' - the capitalized 'Inf' alias was removed in
        # NumPy 2.0.
        cx = sort( concatenate( (x,[inf]*(pad-len(x))) ) )
    else:
        cx = sort(x)
    cy = (arange(len(cx))+1)/float(len(cx))
    if do_pdf:
        from scipy.stats import gaussian_kde
        fit = gaussian_kde(x,bw_method=pdf_sm)
        # Limit fit length to 1000 points for speed reasons...
        decimate_n = int(ceil(len(cx)/1000.))
        px = cx[::decimate_n]
        py = fit(px)
        max_py = max(py)
        if max_py > 1.:
            py = py/max_py   # reuse the computed max instead of a second pass
        return (cx,cy, px,py)
    else:
        return (cx,cy)

# from s_math.c
def conv_lla_ecef(lat_deg, lon_deg, alt_m):
    """
    Convert geodetic lat/lon/height to ECEF coordinates (WGS-84).
    lat_deg - latitude in deg
    lon_deg - longitude in deg
    alt_m - height in m
    Returns (x,y,z) - ECEF [m]
    """
    lat = lat_deg*pi/180.0
    lon = lon_deg*pi/180.0
    sin_lat = sin(lat)
    # prime-vertical radius of curvature at this latitude
    Rn = OrbitConst.A_WGS84 / sqrt(1.0 - (OrbitConst.E2_WGS84 * sin_lat * sin_lat))
    # distance from the Earth's rotation axis
    rho = (Rn + alt_m) * cos(lat)
    x = rho * cos(lon)
    y = rho * sin(lon)
    z = (Rn * OrbitConst.ONE_MIN_E2 + alt_m) * sin_lat
    return x, y, z

# from s_math.c
def conv_ecef_lla( x, y, z ):
    """
    Convert ECEF coordinates to geodetic lat/lon/height (WGS-84).
    x, y, z = ECEF [m]
    Returns (lat [deg], lon [deg], height [m])
    Iterates on a latitude parameter t (Newton's method), stopping
    after 15 iterations or when the update drops below 1e-7.
    """
    PI = 3.14159265358979323846
    c = (OrbitConst.A_WGS84 * OrbitConst.E2_WGS84)
    foure2 = (4.0*OrbitConst.E2_WGS84)
    e_prime = OrbitConst.SQRT_ONE_MIN_E2
    ae_prime = (OrbitConst.A_WGS84 * OrbitConst.SQRT_ONE_MIN_E2)

    lat = 0
    lon = 0
    hgt = 0
    count = 0
    t = 0.0

    if z < 0.0:
        signOfZ = -1.0
    else:
        signOfZ = 1.0

    # distance from the Earth's rotation axis
    p = sqrt( x * x + y * y );

    if p < 0.01:
        # On (or within 1 cm of) the polar axis: latitude is +/-90 deg,
        # longitude is undefined (report 0), height measured from the pole
        if z >= 0.0:
            lat = PI/2.0
        else:
            lat = -PI/2.0

        lon = 0.0;
        hgt = abs(z) - OrbitConst.B_WGS84;
    else:
        p_inv = 1.0/p;
        z_prime = e_prime * fabs(z);
        z_prime_plus_c    = z_prime + c;
        z_prime_minus_c = z_prime - c;
        u = 2.0 * z_prime_minus_c;
        v = 2.0 * z_prime_plus_c;

        t_m = -z_prime_minus_c * p_inv;

        # Pick a starting value of t on the correct side of the root
        if t_m <= 0.0:
            t = (p + z_prime_minus_c);
            t = t/(t + z_prime);
        elif t_m >= 1.0 :
            t = p/z_prime_plus_c;
        else:
            t_m3 = t_m * t_m * t_m;
            fm = p * t_m3 * t_m + u * t_m3 + v * t_m - p;

            if fm >= 0.0:
                t = p/z_prime_plus_c;
            else:
                t = (p + z_prime_minus_c);
                t = t/(t + z_prime);

        # Newton iteration: delta_t = -f(t)/f'(t) for
        #   f(t) = p*t^4 + u*t^3 + v*t - p
        while True:
            count += 1

            t_m2 = t * t;
            pt = p * t;

            delta_t = (p-((pt+u)*t_m2 + v) * t)/((4.0*pt+3.0*u)*t_m2+v);

            t += delta_t;

            if not ((count < 15) and (abs(delta_t) > 0.0000001)):
                break

        t_m2 = t * t;
        one_plus_t2    = 1.0 + t_m2;
        one_minus_t2 = 1.0 - t_m2;

        # Recover latitude/longitude/height from the converged t
        lat = arctan2( one_minus_t2, 2.0 * e_prime * t ) * signOfZ;

        lon = arctan2 ( y, x );

        hgt=(2.0*p*e_prime*t+fabs(z)*one_minus_t2-ae_prime*one_plus_t2)/ \
                (sqrt( one_plus_t2 * one_plus_t2 - foure2 * t_m2));
    return (lat*180/pi, lon*180/pi, hgt)


def compute_elev_azi(xRX, yRX, zRX, xSV, ySV, zSV):
    """Ported from the Stinger function compute_elev_azi().
    Input:
      xRX, yRX, zRX - ECEF position of the receiver
      xSV, ySV, zSV - ECEF position of the satellite
    Returns:
      (elevation, azimuth) in radians; azimuth is wrapped to [0, 2*pi)
    """
    # Receiver-to-satellite vector and inverse range
    dx = xSV - xRX
    dy = ySV - yRX
    dz = zSV - zRX
    inv_r = 1.0/math.sqrt(dx*dx + dy*dy + dz*dz)

    # Direction cosines of the line of sight
    dcosx = dx * inv_r
    dcosy = dy * inv_r
    dcosz = dz * inv_r

    # Receiver geodetic position; lat/lon needed in radians
    lla = conv_ecef_lla(xRX, yRX, zRX)
    lat = lla[0] * math.pi / 180
    lon = lla[1] * math.pi / 180

    sinlat, coslat = math.sin(lat), math.cos(lat)
    sinlon, coslon = math.sin(lon), math.cos(lon)

    # East/North/Up transform components
    utz = sinlat
    ce = coslat
    et1 = -sinlon
    et2 = coslon

    nt1 = -utz * et2
    nt2 = utz * et1
    utx = ce * et2
    uty = -ce * et1

    # Elevation from the up-component of the direction cosines
    sin_ea = dcosx*utx + dcosy*uty + dcosz*utz
    elev = math.asin(sin_ea)

    # Azimuth east-of-north, wrapped into [0, 2*pi)
    east_dcos = et1*dcosx + et2*dcosy
    north_dcos = nt1*dcosx + nt2*dcosy + ce*dcosz
    azi = math.atan2(east_dcos, north_dcos)
    if azi < 0.0:
        azi += 2.0 * math.pi

    # Return the elevation and azimuth in radians
    return (elev, azi)

def _decode_2bsm( u16, pos ):
    # val = 2*(S^M) + 1 - 4*S
    IS = (u16 >> (4*pos + 3))&1
    IM = (u16 >> (4*pos + 2))&1
    QS = (u16 >> (4*pos + 1))&1
    QM = (u16 >> (4*pos + 0))&1
    return 2*(IS^IM) + 1 - 4*IS + 1j*(2*(QS^QM) + 1 - 4*QS)

def _print_stats( cval ):
    """Print sign/magnitude bit-balance stats for decoded I/Q samples:
    IS/QS = percent of samples with positive I/Q component (sign balance)
    IM/QM = percent of samples at maximum magnitude (|I| or |Q| >= 3)
    """
    print_('IS %%%.1f IM %%%.1f' % (100.*len(find(real(cval)>0))/len(cval), \
                                    100.*len(find(fabs(real(cval))>=3))/len(cval)))
    print_('QS %%%.1f QM %%%.1f' % (100.*len(find(imag(cval)>0))/len(cval), \
                                    100.*len(find(fabs(imag(cval))>=3))/len(cval)))

def decode_2bu16( raw16 ):
    """
    Convert a Maxwell 7/8 U16 logged sample to 4 I/Q pairs
    (one complex value per 4-bit nibble, in nibble order 0..3).
    """
    out = []
    for u16 in raw16:
        for nibble in range(4):
            out.append(_decode_2bsm(u16, nibble))
    return out

def _unpack_u16s( f, u8len ):
    # '<5H' -> 5 little-endian u16
    # '>5H' -> 5 big-endian u16
    import struct
    return struct.unpack('<%dH' % (u8len/2), f.read(u8len))

def read_raw2b(filename,u8len,offset=0,verbose=True):
    """
    Read a raw 2-bit Ettus RF file (or raw Maxwell 6/7 data)
    filename = raw sample file name
    u8len = # of bytes to read
    offset = starting byte offset; positive seeks from the start of the
             file, negative from the end.  Rounded to a multiple of 2
             so reads stay u16-aligned.
    verbose = print sign/magnitude statistics of the decoded samples?
    Returns a list of complex I/Q samples.
    """
    f = open(filename,'rb')
    if offset > 0:
        # round up to an even offset (u16 alignment)
        offset = (offset + 1)& (~0x1)
        f.seek(offset)
    elif offset < 0:
        # round to an even offset and seek relative to the file end
        offset = -((-offset + 1)& (~0x1))
        f.seek(offset,2)
    raw16 = _unpack_u16s( f, u8len )
    cval = decode_2bu16( raw16 )
    if verbose: _print_stats(cval)
    return cval


def read_p16(filename,u8len,offset=0):
    """
    Read a p16 file
    filename = p16 file name
    u8len = # of bytes of sample data to read after the header
    offset = unused (kept for interface compatibility)
    Returns (L1val, L2val) lists of complex samples when a second band
    (L2 or G1) is present, otherwise just L1val.
    """
    import struct
    f = open(filename,'rb')
    # Fixed-layout header: total length, source, encoding, band bitmask,
    # padding byte, sample rate [Hz]
    hdr_len = struct.unpack( 'I', f.read(4) )[0]
    hdr_src = struct.unpack( 'H', f.read(2) )[0]
    hdr_encoding = struct.unpack( 'B', f.read(1) )[0]
    hdr_bands = struct.unpack( 'H', f.read(2) )[0]
    hdr_pad = struct.unpack( 'B', f.read(1) )[0]
    hdr_samp_Hz = struct.unpack( 'd', f.read(8) )[0]
    f.seek(hdr_len)   # skip the remainder of the header
    has_L1 = hdr_bands & 0x1
    has_L2 = hdr_bands & 0x2
    has_G1 = hdr_bands & 0x4
    if hdr_bands & 0xfff8:
        print_("Unknown bands : 0x%x - incorrect read?" % hdr_bands)
    if bin(hdr_bands).count("1") > 2:
        print_("Probably will not handle > 2 bands well..")
    print_("Samp Hz %.1f.  Bands:" % hdr_samp_Hz)
    if has_L1: print_("  L1")
    if has_L2: print_("  L2")
    if has_G1: print_("  G1")
    raw16 = _unpack_u16s( f, u8len )
    if has_L2 or has_G1:
        # two bands: u16 words are interleaved band1/band2
        L1val = decode_2bu16( raw16[0::2] )
        L2val = decode_2bu16( raw16[1::2] )
        return (L1val, L2val)
    else:
        # single band: only every other u16 is decoded here (same
        # stride as the two-band case)
        L1val = decode_2bu16( raw16[0::2] )
        return (L1val)

def _decode_u32( raw32, pos ):
    IS = (raw32[0] >> (31-pos))&1
    QS = (raw32[1] >> (31-pos))&1
    IM = (raw32[2] >> (31-pos))&1
    QM = (raw32[3] >> (31-pos))&1
    return 4*IS - 2*(IS^IM) - 1 +1j*(4*QS - 2*(QS^QM) - 1)

def read_p32(filename,u8len,offset=0):
    """
    Read a p32 file
    filename = p32 file name
    u8len = # of bytes in file to read
    offset = # of bytes to skip at startup.  Rounded to multiple of 16.
             (More precisely: rounded via the 16*n_sig mask below.)
    Returns a list with one list of complex samples per signal.
    """
    import struct
    f = open(filename,'rb')
    n_sig = 0
    # Fixed header: total length, source, L1/L2 presence flags,
    # sample rate, band center frequencies
    hdr_len = struct.unpack( 'I', f.read(4) )[0]
    hdr_src = struct.unpack( 'H', f.read(2) )[0]
    hdr_L1 = struct.unpack( 'B', f.read(1) )[0]
    hdr_L2 = struct.unpack( 'B', f.read(1) )[0]
    samp_Hz = struct.unpack( '<d', f.read(8) )[0]
    print("Sample rate: %.3f MHz" % (samp_Hz*1e-6))
    L1_Hz = struct.unpack( 'd', f.read(8) )[0]
    L2_Hz = struct.unpack( 'd', f.read(8) )[0]

    if hdr_L1:
        n_sig += 1
    if hdr_L2:
        n_sig += 1
    hdr_G1, hdr_B1 = (0, 0)
    # Any remaining header bytes describe extra signals (type + freq):
    # type 3 = G1, type 8 = B1
    while hdr_len > f.tell():
        hdr_X = struct.unpack( 'B', f.read(1) )[0]
        X_Hz = struct.unpack( 'd', f.read(8) )[0]
        n_sig += 1
        if hdr_X == 3:
            hdr_G1 = 1
        elif hdr_X == 8:
            hdr_B1 = 1
        else:
            print("Unknown signal type %d" % hdr_X)

    mask = 16*n_sig - 1  # each p32 block is 16 bytes
    # NOTE(review): this round-up assumes 16*n_sig is a power of two
    # (true for 1, 2 or 4 signals) - confirm for other signal counts
    offset = (offset+mask) & (~mask)
    f.seek( hdr_len + offset, 0 )
    u32len = u8len//4
    raw32 = struct.unpack('<%dI' % u32len, f.read(u8len))

    print("%d signals: L1 %d, L2 %d, G1 %d B1 %d" % (n_sig,hdr_L1, hdr_L2, hdr_G1, hdr_B1))
    sep = 4*n_sig        # 32-bit words per block (4 bit-plane words per signal)
    u32len &= ~(sep-1)   # truncate to whole blocks
    out = []
    for n in range(n_sig):
        val = [_decode_u32(raw32[sep*i+4*n:sep*i+4+4*n],pos) for i in range(0,u32len//sep) for pos in range(0,32)]
        out.append(val)
    return out

def _decode_2bit_gss6450(raw32,n):
    # raw32 = 32-bit I0,Q0,I1,Q1,...I7,Q7
    # Ix,Qx is 2 bit 2's complement? _No_
    # return In + 1j*Qn

    lookup=[1,3,-3,-1]
    #lookup=[1,2,-2,-1]
    In = (raw32 >> (32 - 2 - 4*n)) & 0x3
    Qn = (raw32 >> (32 - 4 - 4*n)) & 0x3
    return lookup[In] + 1j*lookup[Qn]

def _decode_4bit_gss6450(raw32,n):
    # raw32 = 32-bit I0,Q0,I1,Q1,...I3,Q3
    # Ix,Qx is 4 bit 2's complement, but multiply
    # by 2 and add 1 to get rid of DC bias.
    # return In + 1j*Qn

    In = (raw32 >> (32 - 4 - 8*n)) & 0xf
    if In > 7:
        In = (In - 16)*2 + 1
    else:
        In = In*2 + 1
    Qn = (raw32 >> (32 - 8 - 8*n)) & 0xf
    if Qn > 7:
        Qn = (Qn - 16)*2 + 1
    else:
        Qn = Qn*2 + 1
    return In + 1j*Qn

def _decode_8bit_gss6450(raw32,n):
    # raw32 = 32-bit I0,Q0,I1,Q1
    # Ix,Qx is 8 bit 2's complement, but multiply
    # by 2 and add 1 for consistency with other modes.
    # return In + 1j*Qn

    In = (raw32 >> (32 - 8 - 16*n)) & 0xff
    if In > 127:
        In = (In - 256)*2 + 1
    else:
        In = In*2 + 1
    Qn = (raw32 >> (32 - 16 - 16*n)) & 0xff
    if Qn > 127:
        Qn = (Qn - 256)*2 + 1
    else:
        Qn = Qn*2 + 1
    return In + 1j*Qn


def read_gss6450(filename,n_secs,offset_secs=0.,verbose=True):
    """
    Read a Sprint GSS6450 file
    filename = .gns file name (a name ending '.A.gns' selects signal
               numbers 1-2 for the center frequencies, otherwise 3+)
    n_secs = # of seconds in file to read
    offset_secs = # of seconds to skip at startup.
    verbose = print header lines and sync details while parsing?
    Returns 2 things:
      Info: (utc seconds in week at start of samples, etc.)
      array: signal #1 [,signal #2?]
    """
    import struct
    from datetime import datetime, timedelta
    f = open(filename,'rb')
    n_bits = -1
    bandwidth_mhz = -1
    week_secs = 0
    f_mid_mhz = []
    # Parse the ASCII header one line at a time (bounded at 100 lines)
    for _ in range(100):
        s=f.readline(120).rstrip()
        if s.startswith(b"<End of Header>"):
            break
        if verbose:
            print(s)
        if s.startswith(b"<Start Time>"):
            # compute UTC seconds into the week (weeks start on Sunday)
            date_str = s[13:-4].decode('utf-8')
            utc_start = datetime.strptime(date_str,'%H:%M:%S %d/%m/%y')
            week_start = datetime(utc_start.year,utc_start.month,utc_start.day)
            week_start -= timedelta(days=(week_start.isoweekday()%7))
            week_secs = (utc_start - week_start).total_seconds()
        m = re.match(rb'<Signal Recorded> +([0-9]+), .*?, Bandwidth ([0-9]+) MHz, Freq ([.0-9]+) MHz, Bits ([0-9]+)',s)
        if m:
            n_bits = int(m.group(4))
            bandwidth_mhz = int(m.group(2))
            signal_num = int(m.group(1))
            if filename.lower().endswith('.a.gns'):
                if signal_num <= 2:
                    f_mid_mhz.append(float(m.group(3)))
            else:
                if signal_num > 2:
                    f_mid_mhz.append(float(m.group(3)))

    if bandwidth_mhz < 0 or n_bits < 0:
        raise RuntimeError("Unknown freq/bits")

    # sync to next 4-byte boundary (plus skip first few sync patterns)
    sync_pos = f.tell()
    sync_pos = (sync_pos + 3) & (~0x3)
    f.seek(sync_pos + 4*2*10)
    sync1 = struct.unpack( 'I', f.read(4) )[0]
    if sync1 != 0x11111111:
        sync1 = struct.unpack( 'I', f.read(4) )[0]
    if sync1 != 0x11111111:
        print("Error: unknown sync")
        return None
    sync2 = struct.unpack( 'I', f.read(4) )[0]
    if verbose:
        print("start: %x %x" % (sync1,sync2))
    # two identical consecutive sync words -> a single signal stream
    if sync1==sync2:
        n_sigs = 1
    else:
        n_sigs = 2
    if verbose:
        print('# of signals in file: %d' % n_sigs)
    # skip over the remaining sync words (search bounded at 64 KiB)
    while f.tell() < sync_pos + 64*1024:
        sync1 = struct.unpack( 'I', f.read(4) )[0]
        if n_sigs == 2:
            sync2 = struct.unpack( 'I', f.read(4) )[0]
        if sync1 != 0x11111111:
            if verbose:
                print("Found end of sync")
            break
    if verbose:
        print("end: %x %x" % (sync1,sync2))

    f_samp_mhz = bandwidth_mhz/10*10.23
    # n_bits is for I only, so n_bits*2 for I+Q
    bytes_per_sec = n_bits*2*n_sigs*f_samp_mhz*1e6/8.
    offset = int(offset_secs * bytes_per_sec)
    offset = (offset + 7) & (~7)   # round up to an 8-byte boundary
    if verbose:
        print("File offset {}".format(offset))
    f.seek( offset, 1 )

    u8len = int(n_secs * bytes_per_sec)
    u32len = int(u8len/4)
    u8len = u32len * 4
    if verbose:
        print("# of bytes to read {}".format(u8len))
    raw32 = struct.unpack('<%dI' % u32len, f.read(u8len))
    # pick the per-word decoder for this sample depth
    if n_bits == 2:
        decode_func = _decode_2bit_gss6450
        n_in_32b = 8
    elif n_bits == 4:
        decode_func = _decode_4bit_gss6450
        n_in_32b = 4
    elif n_bits == 8:
        decode_func = _decode_8bit_gss6450
        n_in_32b = 2
    else:
        print("Error: unknown # of bits")
        return None

    # decode; with 2 signals the 32-bit words alternate between signals
    sigs = empty((int(u32len/n_sigs)*int(n_in_32b),n_sigs),dtype=complex)
    if n_sigs == 2:
        for i in range(int(u32len/2)):
            for pos in range(n_in_32b):
                sigs[i*n_in_32b+pos,0] = decode_func(raw32[2*i],pos)
                sigs[i*n_in_32b+pos,1] = decode_func(raw32[2*i+1],pos)
    else:
        for i in range(int(u32len)):
            for pos in range(n_in_32b):
                sigs[i*n_in_32b+pos,0] = decode_func(raw32[i],pos)
    start_t = week_secs + offset_secs
    Info = namedtuple('Info',['start_t','f_samp_MHz','n_bits','f_mid_MHz'])
    samp_info = Info(start_t, f_samp_mhz, n_bits, f_mid_mhz)
    return samp_info, sigs


def do_1d_avg_fft( d, flen, max_avg=-1, do_window=False ):
    """
    compute averaged FFT
    d = raw input data
    flen = # of points for each FFT
    max_avg = maximum # of averages to perform (<=0: use all segments)
    do_window = set to True if you want to apply a Hamming window
      (e.g., in case a jammer is present)
    Returns the average over segments of fftshift(|FFT|).
    """
    n_seg = int(len(d)/flen)
    if max_avg > 0:
        n_seg = min(max_avg, n_seg)
    window = hamming(flen) if do_window else 1.
    acc = zeros(flen)
    for seg in range(n_seg):
        chunk = d[seg*flen:(seg+1)*flen]
        acc += fftshift(abs(fft(chunk*window)))
    return acc / n_seg

def _calc_atm( t, FreqLx, FreqLy, PhaseLx, PhaseLy ):
    eta = 2.0*FreqLy*FreqLy/( (FreqLy*FreqLy) - (FreqLx*FreqLx) )
    atm = (PhaseLy - PhaseLx)*eta
    atm -= mean(atm)
    return atm

def remove_seg_means( t, dt, val ):
    """Subtract the mean from each contiguous time segment of 'val'.
    Segment boundaries are where consecutive 't' samples differ by more
    than dt (+1 ms tolerance).  Returns a de-meaned copy of 'val'."""
    out = val.copy()
    gaps = (abs(diff(t)) > dt + .001).nonzero()[0]
    start = 0
    for g in gaps:
        out[start:g+1] -= mean(out[start:g+1])
        start = g + 1
    out[start:] -= mean(out[start:])
    return out

def iono_free_SR( t, FreqLx, PseudoLx, PhaseLx, SlipLx, FreqLy, PseudoLy, PhaseLy, SlipLy, lp_known ):
    """
    Compute iono-free stationary pseudorange (MPx)
    t = time tags (used to detect data gaps for per-segment de-meaning)
    FreqLx = Lx frequency [Hz]
    PseudoLx = Lx pseudorange [m]
    PhaseLx = Lx carrier phase [cyc]
    SlipLx = Lx slip count (unused here)
    FreqLy, PseudoLy, PhaseLy, SlipLy = same for Ly
    lp_known = unused here (kept for interface compatibility)
    If you let PseudoLy == [] then MPLy will be empty too.
    Returns: (MPLx, atmLx, MPLy)
    Note: atmLx is twice the iono because it is a correction to "PR - phase"
    """
    # Convert carrier phase from cycles to meters (negated)
    WavelengthLx = GnssConst.LIGHT / FreqLx
    WavelengthLy = GnssConst.LIGHT / FreqLy
    PhaseLx, PhaseLy  = -WavelengthLx*PhaseLx, -WavelengthLy*PhaseLy
    atmLx = _calc_atm(t, FreqLx, FreqLy, PhaseLx, PhaseLy )
    MPLx = PseudoLx - PhaseLx - atmLx
    # median sample interval, used as the gap threshold when de-meaning
    dt = median(diff(t))
    MPLx = remove_seg_means( t, dt, MPLx )

    if len(PseudoLy) > 0:
        # same atm term is used for both bands (see note in docstring)
        MPLy = PseudoLy - PhaseLy - atmLx
        MPLy = remove_seg_means( t, dt, MPLy )
    else:
        MPLy = []

    atmLx = remove_seg_means(t,dt,atmLx)
    return (MPLx, atmLx, MPLy)


def get_Lx_idx( d, k, sv_id, sat_type, Lx_freq, Lx_trk, elev_mask_deg, min_seg=0 ):
    """
    Similar to get_L1L2_idx(), but for any signal type
    Inputs: d = rec27/35:19 data
            k = rec27/35:19 key (from vd2arr)
            sv_id = satellite ID
            sat_type = satellite type from T0x file
            Lx_freq = "x"/master frequency (e.g., dRec27L1Band)
            Lx_trk = "x"/master track type (e.g., dRec27Track_CA)
            elev_mask_deg = elevation mask [deg]
            min_seg = minimum # of points in each non-slip segment
                      (accepted for interface compatibility; not used here)
    Returns: i_Lx (index array into d; empty int array if no match)
    """
    # Build the row mask one condition at a time
    mask = (d[:,k.SV] == sv_id)
    mask &= (d[:,k.SAT_TYPE] == sat_type)
    mask &= (d[:,k.EL] >= elev_mask_deg)
    mask &= (d[:,k.FREQ] == Lx_freq)
    mask &= (d[:,k.TRACK] == Lx_trk)
    i_Lx = find(mask)
    if len(i_Lx) == 0:
        return array([], dtype=int)
    return i_Lx


def get_LxLy_idx( d, k, sv_id, sat_type, Lx_freq, Lx_trk, Ly_freq, Ly_trk, elev_mask_deg, min_seg=0 ):
    """
    Similar to get_L1L2_idx(), but for any signal type
    Inputs: d = rec27/35:19 data
            k = rec27/35:19 key (from vd2arr)
            sv_id = satellite ID
            sat_type = satellite type from T0x file
            Lx_freq = "x"/master frequency (e.g., dRec27L1Band)
            Lx_trk = "x"/master track type (e.g., dRec27Track_CA)
            Ly_freq = "y"/slave frequency (e.g., dRec27L1Band)
            Ly_trk = "y"/slave track type (e.g., dRec27Track_CA);
                     pass < 0 to auto-select a track on Ly_freq
            elev_mask_deg = elevation mask [deg]
            min_seg = minimum # of points in each non-slip segment
    Returns: i_Lx, i_Ly -> for input into get_MPx()
             (both empty int arrays if either signal has no matches)
    """
    # Master signal: exact SV/type/elevation/freq/track match
    i_Lx = find( (d[:,k.SV] == sv_id) & \
                 (d[:,k.SAT_TYPE] == sat_type) & \
                 (d[:,k.EL] >= elev_mask_deg) & \
                 (d[:,k.FREQ] == Lx_freq) & \
                 (d[:,k.TRACK] == Lx_trk) \
               )
    if Ly_trk < 0:
        # Slave track not specified: first match on frequency only...
        i_Ly = find( (d[:,k.SV] == sv_id) & \
                     (d[:,k.SAT_TYPE] == sat_type) & \
                     (d[:,k.EL] >= elev_mask_deg) & \
                     (d[:,k.FREQ] == Ly_freq) )
        # pick any tracking mode - max() happens to choose L2C over L2E
        if len(i_Ly) > 0:
            track = max( d[i_Ly,k.TRACK] )
            i_Ly = find( (d[:,k.SV] == sv_id) & \
                         (d[:,k.SAT_TYPE] == sat_type) & \
                         (d[:,k.EL] >= elev_mask_deg) & \
                         (d[:,k.FREQ] == Ly_freq) & \
                         (d[:,k.TRACK] == track) \
                       )
        else:
            i_Ly = array([])
    else:
        # Slave signal: exact freq+track match
        i_Ly = find( (d[:,k.SV] == sv_id) & \
                     (d[:,k.SAT_TYPE] == sat_type) & \
                     (d[:,k.EL] >= elev_mask_deg) & \
                     (d[:,k.FREQ] == Ly_freq) & \
                     (d[:,k.TRACK] == Ly_trk) \
                   )
    if len(i_Lx) == 0 or len(i_Ly) == 0:
        return (array([],dtype=int), array([],dtype=int))

    # Keep only the epochs where both signals were observed
    _, ix, iy = intersect( d[i_Lx,k.TIME], d[i_Ly,k.TIME] )
    i_Lx=i_Lx[ix]
    i_Ly=i_Ly[iy]

    def remove_slips( d, k, iLx, iLy, min_seg ):
        """Remove any data with a slip on either frequency.
        If min_seg > 0, then make sure data segments are at least 'min_seg' points long.
        """
        # keep only points whose slip count did not change on either signal
        i_noslip = find( (diff(d[iLx,k.SLIP_CNT]) == 0) & (diff(d[iLy,k.SLIP_CNT]) == 0) ) + 1
        if min_seg > 0:
            # segment boundaries = discontinuities in the no-slip index list
            i_jump = concatenate(([0],find( diff(i_noslip) > 1 )+1,[len(iLx)]))
            i_noslip_ma = ma.array( i_noslip, mask=[False]*len(i_noslip) )
            # mask out short segments
            for i in range(len(i_jump)-1):
                start,end = i_jump[i], i_jump[i+1]
                if end - start < min_seg:
                    i_noslip_ma.mask[start:end] = True
            i_noslip = i_noslip_ma[~i_noslip_ma.mask].data
        iLx, iLy = iLx[i_noslip], iLy[i_noslip]

        return iLx, iLy

    i_Lx, i_Ly = remove_slips( d, k, i_Lx, i_Ly, min_seg )
    return (i_Lx, i_Ly)


class get_sat_type(object):
  """
  Return the SAT_TYPE_ for the supplied RT_SatType
  Also returns a system name string
  get_sat_type(RT_SatType_GPS).SAT_TYPE
  get_sat_type(RT_SatType_GALILEO).sysstr
  Raises KeyError for satellite types not in the table.
  """
  # RT_SatType -> (RcvrConst SAT_TYPE_* value, system name string)
  sat_type_dict = {
        RTConst.RT_SatType_GPS : (RcvrConst.SAT_TYPE_GPS, "GPS"),
        RTConst.RT_SatType_SBAS : (RcvrConst.SAT_TYPE_SBAS, "SBAS"),
        RTConst.RT_SatType_GALILEO : (RcvrConst.SAT_TYPE_GALILEO, "GALILEO"),
        RTConst.RT_SatType_QZSS : (RcvrConst.SAT_TYPE_QZSS, "QZSS"),
        RTConst.RT_SatType_BEIDOU_B1GEOPhs : (RcvrConst.SAT_TYPE_BEIDOU, "BEIDOU"),
        RTConst.RT_SatType_GLONASS : (RcvrConst.SAT_TYPE_GLONASS, "GLONASS"),
        RTConst.RT_SatType_IRNSS : (RcvrConst.SAT_TYPE_IRNSS, "IRNSS"),
        RTConst.RT_SatType_XONA : (RcvrConst.SAT_TYPE_XONA, "XONA") }

  def __init__(self, RT_SatType):
    # one lookup, unpacked into both attributes
    self.SAT_TYPE, self.sysstr = self.sat_type_dict[RT_SatType]

class get_sub_type(object):
  """
  Return the SUB_TYPE_ for the supplied RT_SatType, dRec27..Band and dRec27Track
  Also returns a signal name string and sorting order number
  get_sub_type(RT_SatType_GPS,dRec27L1Band,dRec27Track_CA).SUBTYPE
  get_sub_type(RT_SatType_GALILEO,dRec27L1Band,dRec27Track_BOC_1_1_DP).sigstr
  - this returns, for example, 'GAL E1'
  get_sub_type(RT_SatType_GALILEO,dRec27L1Band,dRec27Track_BOC_1_1_DP).trkstr
  - this returns, for example, 'DP BOC11'
    it may be empty for some signal types
  get_sub_type(RT_SatType_GALILEO,dRec27L1Band,dRec27Track_BOC_1_1_DP).fullstr
  - concatenation of sigstr and trkstr, separated by a '-', for example 'GAL E1-DP BOC11'
  get_sub_type(RT_SatType_IRNSS,dRec27S1Band,dRec27Track_CA).group
  - used to group common types, for example, L2CM/L2CL/L2CMCL are all the same group
  """
  # (RT_SatType, FREQ, TRACK) : (Short desc, data/pilot, subtype, priority)
  RTSat = namedtuple('RTSat',['SatType','band','track'])
  SatInfo = namedtuple('SatInfo',['desc','IQ','subtype','order'])
  sub_type_dict = {
    RTSat(RTConst.RT_SatType_GPS, RTConst.dRec27L1Band, RTConst.dRec27Track_CA): SatInfo('GPS L1CA','',RcvrConst.SUBTYPE_L1CA, 0),
    RTSat(RTConst.RT_SatType_GPS, RTConst.dRec27L2Band, RTConst.dRec27Track_E): SatInfo('GPS L2E','',RcvrConst.SUBTYPE_L2E, 1),
    RTSat(RTConst.RT_SatType_GPS, RTConst.dRec27L2Band, RTConst.dRec27Track_CMCL): SatInfo('GPS L2C','CMCL',RcvrConst.SUBTYPE_L2C, 2),
    RTSat(RTConst.RT_SatType_GPS, RTConst.dRec27L2Band, RTConst.dRec27Track_CM): SatInfo('GPS L2C','CM',RcvrConst.SUBTYPE_L2C, 2),
    RTSat(RTConst.RT_SatType_GPS, RTConst.dRec27L2Band, RTConst.dRec27Track_CL): SatInfo('GPS L2C','CL',RcvrConst.SUBTYPE_L2C, 2),
    RTSat(RTConst.RT_SatType_GPS, RTConst.dRec27L5E5ABand, RTConst.dRec27Track_IQ): SatInfo('GPS L5','IQ',RcvrConst.SUBTYPE_L5, 3),
    RTSat(RTConst.RT_SatType_GPS, RTConst.dRec27L5E5ABand, RTConst.dRec27Track_I): SatInfo('GPS L5','I',RcvrConst.SUBTYPE_L5, 3),
    RTSat(RTConst.RT_SatType_GPS, RTConst.dRec27L5E5ABand, RTConst.dRec27Track_Q): SatInfo('GPS L5','Q',RcvrConst.SUBTYPE_L5, 3),
    RTSat(RTConst.RT_SatType_GPS, RTConst.dRec27L1Band, RTConst.dRec27Track_BOC_1_1_DP): SatInfo('GPS L1C','DP BOC11',RcvrConst.SUBTYPE_L1C, 4),
    RTSat(RTConst.RT_SatType_GPS, RTConst.dRec27L1Band, RTConst.dRec27Track_BOC_1_1_D): SatInfo('GPS L1C','D BOC11',RcvrConst.SUBTYPE_L1C, 4),
    RTSat(RTConst.RT_SatType_GPS, RTConst.dRec27L1Band, RTConst.dRec27Track_BOC_1_1_P): SatInfo('GPS L1C','P BOC11',RcvrConst.SUBTYPE_L1C, 4),
    RTSat(RTConst.RT_SatType_GPS, RTConst.dRec27L1Band, RTConst.dRec27Track_BOC_1_1_DP_MBOC): SatInfo('GPS L1C','DP MBOC',RcvrConst.SUBTYPE_L1C, 4),
    RTSat(RTConst.RT_SatType_GPS, RTConst.dRec27L1Band, RTConst.dRec27Track_BOC_1_1_D_MBOC): SatInfo('GPS L1C','D MBOC',RcvrConst.SUBTYPE_L1C, 4),
    RTSat(RTConst.RT_SatType_GPS, RTConst.dRec27L1Band, RTConst.dRec27Track_BOC_1_1_P_MBOC): SatInfo('GPS L1C','P MBOC',RcvrConst.SUBTYPE_L1C, 4),
    RTSat(RTConst.RT_SatType_GPS, RTConst.dRec27L1Band, RTConst.dRec27Track_L1E): SatInfo('GPS L1E','',RcvrConst.SUBTYPE_L1E, 5),

    RTSat(RTConst.RT_SatType_SBAS, RTConst.dRec27L1Band, RTConst.dRec27Track_CA): SatInfo('SBAS L1','',RcvrConst.SUBTYPE_L1CA, 0),
    RTSat(RTConst.RT_SatType_SBAS, RTConst.dRec27L5E5ABand, RTConst.dRec27Track_I): SatInfo('SBAS L5','I',RcvrConst.SUBTYPE_L5, 1),

    RTSat(RTConst.RT_SatType_GALILEO, RTConst.dRec27L1Band, RTConst.dRec27Track_BOC_1_1_DP_MBOC): SatInfo('GAL E1','DP MBOC',RcvrConst.SUBTYPE_E1, 0),
    RTSat(RTConst.RT_SatType_GALILEO, RTConst.dRec27L1Band, RTConst.dRec27Track_BOC_1_1_D_MBOC): SatInfo('GAL E1','D MBOC',RcvrConst.SUBTYPE_E1, 0),
    RTSat(RTConst.RT_SatType_GALILEO, RTConst.dRec27L1Band, RTConst.dRec27Track_BOC_1_1_P_MBOC): SatInfo('GAL E1','P MBOC',RcvrConst.SUBTYPE_E1, 0),
    RTSat(RTConst.RT_SatType_GALILEO, RTConst.dRec27L1Band, RTConst.dRec27Track_BOC_1_1_DP): SatInfo('GAL E1','DP BOC11',RcvrConst.SUBTYPE_E1, 0),
    RTSat(RTConst.RT_SatType_GALILEO, RTConst.dRec27L1Band, RTConst.dRec27Track_BOC_1_1_D): SatInfo('GAL E1','D BOC11',RcvrConst.SUBTYPE_E1, 0),
    RTSat(RTConst.RT_SatType_GALILEO, RTConst.dRec27L1Band, RTConst.dRec27Track_BOC_1_1_P): SatInfo('GAL E1','P BOC11',RcvrConst.SUBTYPE_E1, 0),
    RTSat(RTConst.RT_SatType_GALILEO, RTConst.dRec27L5E5ABand, RTConst.dRec27Track_BPSK10_PD): SatInfo('GAL E5A','DP',RcvrConst.SUBTYPE_E5A, 1),
    RTSat(RTConst.RT_SatType_GALILEO, RTConst.dRec27L5E5ABand, RTConst.dRec27Track_BPSK10_D): SatInfo('GAL E5A','D',RcvrConst.SUBTYPE_E5A, 1),
    RTSat(RTConst.RT_SatType_GALILEO, RTConst.dRec27L5E5ABand, RTConst.dRec27Track_BPSK10_P): SatInfo('GAL E5A','P',RcvrConst.SUBTYPE_E5A, 1),
    RTSat(RTConst.RT_SatType_GALILEO, RTConst.dRec27E5BBand, RTConst.dRec27Track_BPSK10_PD): SatInfo('GAL E5B','DP',RcvrConst.SUBTYPE_E5B, 2),
    RTSat(RTConst.RT_SatType_GALILEO, RTConst.dRec27E5BBand, RTConst.dRec27Track_BPSK10_D): SatInfo('GAL E5B','D',RcvrConst.SUBTYPE_E5B, 2),
    RTSat(RTConst.RT_SatType_GALILEO, RTConst.dRec27E5BBand, RTConst.dRec27Track_BPSK10_P): SatInfo('GAL E5B','P',RcvrConst.SUBTYPE_E5B, 2),
    RTSat(RTConst.RT_SatType_GALILEO, RTConst.dRec27E5ABCentreBand, RTConst.dRec27Track_AltBOCCompPD): SatInfo('GAL E5Alt','DP',RcvrConst.SUBTYPE_E5AltBOC, 3),
    RTSat(RTConst.RT_SatType_GALILEO, RTConst.dRec27E5ABCentreBand, RTConst.dRec27Track_AltBOCCompD): SatInfo('GAL E5Alt','D',RcvrConst.SUBTYPE_E5AltBOC, 3),
    RTSat(RTConst.RT_SatType_GALILEO, RTConst.dRec27E5ABCentreBand, RTConst.dRec27Track_AltBOCCompP): SatInfo('GAL E5Alt','P',RcvrConst.SUBTYPE_E5AltBOC, 3),
    RTSat(RTConst.RT_SatType_GALILEO, RTConst.dRec27E5ABCentreBand, RTConst.dRec27Track_AltBOCNonComp): SatInfo('GAL E5Alt','Full',RcvrConst.SUBTYPE_E5AltBOC, 3),
    RTSat(RTConst.RT_SatType_GALILEO, RTConst.dRec27E6Band, RTConst.dRec27Track_E6_PD): SatInfo('GAL E6','DP',RcvrConst.SUBTYPE_E6, 4),
    RTSat(RTConst.RT_SatType_GALILEO, RTConst.dRec27E6Band, RTConst.dRec27Track_E6_D): SatInfo('GAL E6','D',RcvrConst.SUBTYPE_E6, 4),
    RTSat(RTConst.RT_SatType_GALILEO, RTConst.dRec27E6Band, RTConst.dRec27Track_E6_P): SatInfo('GAL E6','P',RcvrConst.SUBTYPE_E6, 4),

    RTSat(RTConst.RT_SatType_QZSS, RTConst.dRec27L1Band, RTConst.dRec27Track_CA): SatInfo('QZSS L1CA','',RcvrConst.SUBTYPE_L1CA, 0),
    RTSat(RTConst.RT_SatType_QZSS, RTConst.dRec27L1Band, RTConst.dRec27Track_L1CB): SatInfo('QZSS L1CB','',RcvrConst.SUBTYPE_L1CB, 0),
    RTSat(RTConst.RT_SatType_QZSS, RTConst.dRec27L1Band, RTConst.dRec27Track_BOC_1_1_DP): SatInfo('QZSS L1C','DP BOC11',RcvrConst.SUBTYPE_L1C, 1),
    RTSat(RTConst.RT_SatType_QZSS, RTConst.dRec27L1Band, RTConst.dRec27Track_BOC_1_1_D): SatInfo('QZSS L1C','D BOC11',RcvrConst.SUBTYPE_L1C, 1),
    RTSat(RTConst.RT_SatType_QZSS, RTConst.dRec27L1Band, RTConst.dRec27Track_BOC_1_1_P): SatInfo('QZSS L1C','P BOC11',RcvrConst.SUBTYPE_L1C, 1),
    RTSat(RTConst.RT_SatType_QZSS, RTConst.dRec27L1Band, RTConst.dRec27Track_BOC_1_1_DP_MBOC): SatInfo('QZSS L1C','DP MBOC',RcvrConst.SUBTYPE_L1C, 1),
    RTSat(RTConst.RT_SatType_QZSS, RTConst.dRec27L1Band, RTConst.dRec27Track_BOC_1_1_D_MBOC): SatInfo('QZSS L1C','D MBOC',RcvrConst.SUBTYPE_L1C, 1),
    RTSat(RTConst.RT_SatType_QZSS, RTConst.dRec27L1Band, RTConst.dRec27Track_BOC_1_1_P_MBOC): SatInfo('QZSS L1C','P MBOC',RcvrConst.SUBTYPE_L1C, 1),
    RTSat(RTConst.RT_SatType_QZSS, RTConst.dRec27L1Band, RTConst.dRec27Track_SAIF): SatInfo('QZSS L1SAIF','',RcvrConst.SUBTYPE_L1SAIF, 2),
    RTSat(RTConst.RT_SatType_QZSS, RTConst.dRec27L2Band, RTConst.dRec27Track_CMCL): SatInfo('QZSS L2C','CMCL',RcvrConst.SUBTYPE_L2C, 3),
    RTSat(RTConst.RT_SatType_QZSS, RTConst.dRec27L2Band, RTConst.dRec27Track_CM): SatInfo('QZSS L2C','CM',RcvrConst.SUBTYPE_L2C, 3),
    RTSat(RTConst.RT_SatType_QZSS, RTConst.dRec27L2Band, RTConst.dRec27Track_CL): SatInfo('QZSS L2C','CL',RcvrConst.SUBTYPE_L2C, 3),
    RTSat(RTConst.RT_SatType_QZSS, RTConst.dRec27L5E5ABand, RTConst.dRec27Track_IQ): SatInfo('QZSS L5','IQ',RcvrConst.SUBTYPE_L5, 4),
    RTSat(RTConst.RT_SatType_QZSS, RTConst.dRec27L5E5ABand, RTConst.dRec27Track_I): SatInfo('QZSS L5','I',RcvrConst.SUBTYPE_L5, 4),
    RTSat(RTConst.RT_SatType_QZSS, RTConst.dRec27L5E5ABand, RTConst.dRec27Track_Q): SatInfo('QZSS L5','Q',RcvrConst.SUBTYPE_L5, 4),
    RTSat(RTConst.RT_SatType_QZSS, RTConst.dRec27E6Band, RTConst.dRec27Track_LEX): SatInfo('QZSS LEX','',RcvrConst.SUBTYPE_LEX, 5),

    RTSat(RTConst.RT_SatType_BEIDOU_B1GEOPhs, RTConst.dRec27B1Band, RTConst.dRec27Track_B1): SatInfo('BDS B1','',RcvrConst.SUBTYPE_B1, 0),
    RTSat(RTConst.RT_SatType_BEIDOU_B1GEOPhs, RTConst.dRec27L1Band, RTConst.dRec27Track_BOC_1_1_DP): SatInfo('BDS B1C','DP BOC11',RcvrConst.SUBTYPE_B1C, 1),
    RTSat(RTConst.RT_SatType_BEIDOU_B1GEOPhs, RTConst.dRec27L1Band, RTConst.dRec27Track_BOC_1_1_D): SatInfo('BDS B1C','D BOC11',RcvrConst.SUBTYPE_B1C, 1),
    RTSat(RTConst.RT_SatType_BEIDOU_B1GEOPhs, RTConst.dRec27L1Band, RTConst.dRec27Track_BOC_1_1_P): SatInfo('BDS B1C','P BOC11',RcvrConst.SUBTYPE_B1C, 1),
    RTSat(RTConst.RT_SatType_BEIDOU_B1GEOPhs, RTConst.dRec27L1Band, RTConst.dRec27Track_BOC_1_1_DP_MBOC): SatInfo('BDS B1C','DP MBOC',RcvrConst.SUBTYPE_B1C, 1),
    RTSat(RTConst.RT_SatType_BEIDOU_B1GEOPhs, RTConst.dRec27L1Band, RTConst.dRec27Track_BOC_1_1_D_MBOC): SatInfo('BDS B1C','D MBOC',RcvrConst.SUBTYPE_B1C, 1),
    RTSat(RTConst.RT_SatType_BEIDOU_B1GEOPhs, RTConst.dRec27L1Band, RTConst.dRec27Track_BOC_1_1_P_MBOC): SatInfo('BDS B1C','P MBOC',RcvrConst.SUBTYPE_B1C, 1),
    RTSat(RTConst.RT_SatType_BEIDOU_B1GEOPhs, RTConst.dRec27E5BBand, RTConst.dRec27Track_B2): SatInfo('BDS B2','',RcvrConst.SUBTYPE_B2, 2),
    RTSat(RTConst.RT_SatType_BEIDOU_B1GEOPhs, RTConst.dRec27L5E5ABand, RTConst.dRec27Track_IQ): SatInfo('BDS B2A','DP',RcvrConst.SUBTYPE_B2A, 3),
    RTSat(RTConst.RT_SatType_BEIDOU_B1GEOPhs, RTConst.dRec27L5E5ABand, RTConst.dRec27Track_I): SatInfo('BDS B2A','D',RcvrConst.SUBTYPE_B2A, 3),
    RTSat(RTConst.RT_SatType_BEIDOU_B1GEOPhs, RTConst.dRec27L5E5ABand, RTConst.dRec27Track_Q): SatInfo('BDS B2A','P',RcvrConst.SUBTYPE_B2A, 3),
    RTSat(RTConst.RT_SatType_BEIDOU_B1GEOPhs, RTConst.dRec27E5BBand, RTConst.dRec27Track_IQ): SatInfo('BDS B2B','DP',RcvrConst.SUBTYPE_B2B, 4),
    RTSat(RTConst.RT_SatType_BEIDOU_B1GEOPhs, RTConst.dRec27E5BBand, RTConst.dRec27Track_I): SatInfo('BDS B2B','D',RcvrConst.SUBTYPE_B2B, 4),
    RTSat(RTConst.RT_SatType_BEIDOU_B1GEOPhs, RTConst.dRec27E5BBand, RTConst.dRec27Track_Q): SatInfo('BDS B2B','P',RcvrConst.SUBTYPE_B2B, 4),
    RTSat(RTConst.RT_SatType_BEIDOU_B1GEOPhs, RTConst.dRec27E5ABCentreBand, RTConst.dRec27Track_AltBOCCompPD): SatInfo('BDS B2Ace','DP',RcvrConst.SUBTYPE_B2AceBOC, 4),
    RTSat(RTConst.RT_SatType_BEIDOU_B1GEOPhs, RTConst.dRec27E5ABCentreBand, RTConst.dRec27Track_AltBOCCompD): SatInfo('BDS B2Ace','D',RcvrConst.SUBTYPE_B2AceBOC, 4),
    RTSat(RTConst.RT_SatType_BEIDOU_B1GEOPhs, RTConst.dRec27E5ABCentreBand, RTConst.dRec27Track_AltBOCCompP): SatInfo('BDS B2Ace','P',RcvrConst.SUBTYPE_B2AceBOC, 4),
    RTSat(RTConst.RT_SatType_BEIDOU_B1GEOPhs, RTConst.dRec27B3Band, RTConst.dRec27Track_B3): SatInfo('BDS B3','',RcvrConst.SUBTYPE_B3, 6),

    RTSat(RTConst.RT_SatType_GLONASS, RTConst.dRec27L1Band, RTConst.dRec27Track_CA): SatInfo('GLN G1C','',RcvrConst.SUBTYPE_G1C, 0),
    RTSat(RTConst.RT_SatType_GLONASS, RTConst.dRec27L1Band, RTConst.dRec27Track_P): SatInfo('GLN G1P','',RcvrConst.SUBTYPE_G1P, 1),
    RTSat(RTConst.RT_SatType_GLONASS, RTConst.dRec27L2Band, RTConst.dRec27Track_CA): SatInfo('GLN G2C','',RcvrConst.SUBTYPE_G2C, 2),
    RTSat(RTConst.RT_SatType_GLONASS, RTConst.dRec27L2Band, RTConst.dRec27Track_P): SatInfo('GLN G2P','',RcvrConst.SUBTYPE_G2P, 3),
    RTSat(RTConst.RT_SatType_GLONASS, RTConst.dRec27G3Band, RTConst.dRec27Track_G3_PD): SatInfo('GLN G3','PD',RcvrConst.SUBTYPE_G3, 4),
    RTSat(RTConst.RT_SatType_GLONASS, RTConst.dRec27G3Band, RTConst.dRec27Track_G3_D): SatInfo('GLN G3','D',RcvrConst.SUBTYPE_G3, 4),
    RTSat(RTConst.RT_SatType_GLONASS, RTConst.dRec27G3Band, RTConst.dRec27Track_G3_P): SatInfo('GLN G3','P',RcvrConst.SUBTYPE_G3, 4),

    RTSat(RTConst.RT_SatType_IRNSS, RTConst.dRec27L5E5ABand, RTConst.dRec27Track_CA): SatInfo('IRN L5','',RcvrConst.SUBTYPE_L5CA, 0),
    RTSat(RTConst.RT_SatType_IRNSS, RTConst.dRec27S1Band, RTConst.dRec27Track_CA): SatInfo('IRN S1','',RcvrConst.SUBTYPE_S1CA, 1),

    RTSat(RTConst.RT_SatType_XONA, RTConst.dRec27L1Band,    RTConst.dRec27Track_X1_PD): SatInfo('XON X1','',RcvrConst.SUBTYPE_X1, 1),
    RTSat(RTConst.RT_SatType_XONA, RTConst.dRec27L5E5ABand, RTConst.dRec27Track_X5_PD): SatInfo('XON X5','',RcvrConst.SUBTYPE_X5, 0) }

  def __init__(self, RT_SatType, dRec27Band, dRec27Track):
    # Use a single .get() lookup so an unknown tracking combo (i.e. maybe a
    # bug in the input data) degrades to placeholder values instead of
    # raising an exception.
    info = self.sub_type_dict.get( (RT_SatType, dRec27Band, dRec27Track) )
    if info is not None:
        self.SUBTYPE = info.subtype
        self.sigstr = info.desc
        self.trkstr = info.IQ
        self.group = info.order
        self.fullstr = info.desc
        if self.trkstr:
            self.fullstr += '-'+self.trkstr
    else:
        self.SUBTYPE = -1
        self.sigstr  = "??? signal"
        self.trkstr  = "??? track"
        self.group   = 99
        self.fullstr = "??? (%d %d %d)" % (RT_SatType, dRec27Band, dRec27Track)

def get_subtype_to_subband(subtype):
    """
    Map a rec35:28 signal subtype code to its subband code.
    Returns -1 for unknown or invalid subtypes.
    See stinger/aj_task.c::aj_subtype_to_subband_decoder()
    """
    subtype_to_subband = {
        # --> SubBand_GPS_L1_CA_CODE
         3:  0,     # L1CA
        28:  0,     # L1CB
        29:  0,     # L1SLAS
        # --> SubBand_GPS_L1_AND_GALILEO_E1
         4:  1,     # L1C
        15:  1,     # E1
        24:  1,     # B1C
         5:  1,     # L1E
        # --> SubBand_GLONASS_G1_FDMA
        10:  2,     # G1C
        11:  2,     # G1P
        21:  3,     # B1       --> SubBand_BEIDOU_B1
        # --> SubBand_GPS_L2 / SubBand_GPS_L2C
         6:  4,     # L2E
         8:  4,     # L2E_OR_C
         7: 11,     # L2C
        # --> SubBand_GLONASS_G2_FDMA
        12:  5,     # G2C
        13:  5,     # G2P
        # --> SubBand_GPS_L5_AND_GALILEO_E5A
         9:  6,     # L5
        32:  6,     # L5CA
        16:  6,     # E5A
        25:  6,     # B2A
        # --> SubBand_GLONASS_G3_AND_BEIDOU_B2
        17:  7,     # E5B
        14:  7,     # G3
        22:  7,     # B2
        26:  7,     # B2B
        # --> SubBand_GALILEO_E6
        19:  8,     # E6
        20:  8,     # E6_2ND
        30:  8,     # LEX
        31:  8,     # LEX_L6E
        23:  9,     # B3       --> SubBand_BEIDOU_B3
        33: 10,     # S1CA     --> SubBand_IRNSS_S1_CA_CODE
        # wideband combinations with no single subband
        18: -1,     # E5AltBOC --> invalid
        27: -1      # B2AceBOC --> invalid
        }
    return subtype_to_subband.get(subtype, -1)

def rt_band_to_label(rt_band):
    """Return the short display label for an RT band code."""
    labels = {
        0: 'LG1',
        1: 'LG2',
        4: 'LE5',
        5: ' E6',
        6: ' B1',
        11: ' S ',
    }
    return labels.get(rt_band, "Invalid rt_band!!")

def rt_subband_to_label(rt_subband):
    """Return the display label for an RT subband code."""
    labels = {
        0: 'GPS C/A',
        1: 'L1 & E1',
        2: 'Glonass G1',
        3: 'Beidou B1',
        4: 'GPS L2',
        5: 'Glonass G2',
        6: 'L5 & E5A',
        7: 'G3 & B2',
        8: 'Galileo E6',
        9: 'Beidou B3',
        10: 'S1 C/A',
    }
    return labels.get(rt_subband, "Invalid rt_subband!!")

def rt_subtype_to_label(rt_subtype):
    """Return a fixed-width display label for an RT signal subtype code."""
    labels = {
        0: 'unknown',
        3: 'L1-C/A ',
        4: '  L1C  ',
        5: '  L1E  ',
        6: '  L2E  ',
        7: '  L2C  ',
        8: 'L2-EorC',
        9: '  L5   ',
        10: '  G1C  ',
        11: '  G1P  ',
        12: '  G2C  ',
        13: '  G2P  ',
        14: '  G3   ',
        15: '  E1   ',
        16: '  E5A  ',
        17: '  E5B  ',
        18: 'AltBOC ',
        19: '   E6  ',
        20: 'E6 2nd ',
        21: '  B1   ',
        22: '  B2   ',
        23: '  B3   ',
        24: '  B1C  ',
        25: '  B2A  ',
        26: '  B2B  ',
        27: 'AceBOC ',
        28: ' L1CB  ',
        29: 'L1SLAS ',
        30: '  LEX  ',
        31: 'LEX-L6E',
        32: 'L5-C/A ',
        33: 'S1-C/A ',
        34: '  X1   ',
        35: '  X5   ',
        36: 'L1-SBOC',
        37: '  X5D  ',
    }
    return labels.get(rt_subtype, "unknown")

def sv_to_Lx_info( d, k, sv, rt_sat_type ):
    """
    Helper function for get_LxLy_idx()
    Inputs: d = rec27/35:19 data
            k = rec27/35:19 key (from vd2arr)
            sv = satellite ID
            rt_sat_type = satellite type from T0x file
    Returns a list covering every signal tracked for the satellite:
       [ (Lx_freq, Lx_trk, label), ... ]
    (e.g., [(dRec27L1Band, dRec27Track_CA, 'GPS L1'), ...])
    """
    # Restrict to the rows belonging to this satellite only.
    sat_rows = find( (d[:,k.SV]==sv) & (d[:,k.SAT_TYPE]==rt_sat_type) )
    sat_signals = get_signals( d[sat_rows,:], k )
    return [ (freq, track, get_sub_type(rt_sat_type, freq, track).fullstr)
             for freq, track in sat_signals[rt_sat_type] ]

def sv_to_LxLy_info( d, k, sv, rt_sat_type ):
    """
    Helper function for get_LxLy_idx()
    Inputs: d = rec27/35:19 data
            k = rec27/35:19 key (from vd2arr)
            sv = satellite ID
            rt_sat_type = satellite type from T0x file
    Returns: list for all satellite signals:
       [ (Lx_freq, Lx_trk, Ly_freq, Ly_trk, label), ... ]
    (e.g., [(dRec27L1Band, dRec27Track_CA, dRec27L2Band, dRec27Track_CMCL, 'GPS L1'), ...])
    """
    # Restrict to the rows belonging to this satellite only.
    i = find( (d[:,k.SV]==sv) & (d[:,k.SAT_TYPE]==rt_sat_type) )
    signals = get_signals( d[i,:], k )
    info = []
    for freq,track in signals[rt_sat_type]:
        # Look for a partner signal (freq2,track2) on a *different* band;
        # the inner loop breaks on the first acceptable candidate.
        # NOTE(review): signals are in group order, so the first different
        # band wins - confirm that is the intended pairing priority.
        for freq2,track2 in signals[rt_sat_type]:
            if sorted((freq,freq2))==sorted((RTConst.dRec27L1Band,RTConst.dRec27B1Band)):
                # B1 and B1C are too close in frequency for use by MPx
                continue
            if freq2 != freq:
                break
        # Loop exhausted without a break: no usable partner on a different
        # band, so skip this signal.
        if freq == freq2:
            continue
        info.append( (freq, track, freq2, track2, get_sub_type(rt_sat_type, freq, track).fullstr) )
    return info

def get_L1L2_idx( d, k, sv_id, sat_type, elev_mask_deg, min_seg=0 ):
    """
    Get index for intersecting GPS L1 and L2E/L2C times (or GLN G1CA and G2CA,
    or Galileo E1 & E5A, or Beidou B1 & B2)
    If there is no L2E/L2C data then L2E/L2C is ignored and i_L2E/i_L2C is empty.
    d = rec27 data
    k = rec27 key (e.g., from vd2arr)
    sv_id = satellite ID
    sat_type = RT_SatType_GPS, or GLONASS, or ...
    elev_mask_deg = elevation mask [deg]
    min_seg = minimum # of points in each non-slip segment (otherwise fill in Nan)
    Returns: (i_L1, i_L2)
    (i_L2 is for L2C if available, otherwise L2E)
    Raises ValueError for an unhandled sat_type.
    """
    if sat_type == RTConst.RT_SatType_GPS:
        # track = -1 on the second signal lets get_LxLy_idx pick the best
        # available L2 track (L2C if present, otherwise L2E).
        return get_LxLy_idx( d, k, sv_id, sat_type, RTConst.dRec27L1Band, RTConst.dRec27Track_CA, \
                             RTConst.dRec27L2Band, -1, elev_mask_deg, min_seg )
    elif sat_type == RTConst.RT_SatType_GLONASS:
        # GLONASS: pair G1 C/A with G2 C/A.
        return get_LxLy_idx( d, k, sv_id, sat_type, RTConst.dRec27L1Band, RTConst.dRec27Track_CA, \
                             RTConst.dRec27L2Band, RTConst.dRec27Track_CA, elev_mask_deg, min_seg )
    elif sat_type == RTConst.RT_SatType_QZSS:
        # QZSS: pair L1 C/A with L2 CMCL.
        return get_LxLy_idx( d, k, sv_id, sat_type, RTConst.dRec27L1Band, RTConst.dRec27Track_CA, \
                             RTConst.dRec27L2Band, RTConst.dRec27Track_CMCL, elev_mask_deg, min_seg )
    elif sat_type == RTConst.RT_SatType_GALILEO:
        # Galileo: pair E1 MBOC with the best available E5A track (-1).
        return get_LxLy_idx( d, k, sv_id, sat_type, RTConst.dRec27L1Band, RTConst.dRec27Track_BOC_1_1_DP_MBOC, \
                             RTConst.dRec27L5E5ABand, -1, elev_mask_deg, min_seg )
    elif sat_type==RTConst.RT_SatType_BEIDOU_ICDPRNs or sat_type==RTConst.RT_SatType_BEIDOU_B1GEOPhs:
        # Beidou: scan the signals actually present for this satellite and
        # take the first second-frequency band in B2A/B2B/B3 order as the
        # partner for B1.
        i=find( (d[:,k.SV] == sv_id) & (d[:,k.SAT_TYPE]==sat_type) )
        sigs = get_signals(d[i,:],k)[sat_type]
        band2, track2 = -1, -1
        if len(sigs) > 1:
          for (tmp_band2,tmp_track2) in sigs:
              if tmp_band2 == RTConst.dRec27L5E5ABand or tmp_band2 == RTConst.dRec27E5BBand or tmp_band2 == RTConst.dRec27B3Band:
                  band2, track2 = tmp_band2, tmp_track2
                  break
        return get_LxLy_idx( d, k, sv_id, sat_type,
                             RTConst.dRec27B1Band, RTConst.dRec27Track_B1,
                             band2, track2, elev_mask_deg, min_seg )
    else:
        raise ValueError('unknown sat_type %d'%sat_type)

def get_sv_list( d, k, desc=False ):
    """
    Return the unique satellites found in rec27/35:9 data.
    d = rec27 data
    k = rec27 key (e.g., from vd2arr)
    desc = False -> [(sv, sat_type), ...]
    desc = True  -> [(sv, sat_type, system name string), ...]
    """
    # Pack (sat_type, sv) into one number so unique() de-duplicates pairs.
    combined = unique(d[:,k.SV]+1000*d[:,k.SAT_TYPE])
    sats = [ (int(c) % 1000, int(c / 1000)) for c in combined ]
    if desc:
        return [ (sv, sat_type, get_sat_type(sat_type).sysstr) for sv, sat_type in sats ]
    return sats

def get_signals( d, k=None, desc=False ):
    """
    Collect the unique signals present in rec27/35:9 data.
    d = rec27 data (from vd2cls or vd2arr)
    k = rec27 key (defaults to d.k when None)
    desc = False:
      Return a dictionary of unique signals {sat_type: [(freq, track), ...]}
    desc = True:
      Return {sat_type: [(freq, track, description), ...]}
      where "description" is something like 'GPS L1CA'
    Each satellite type's signal list is sorted by subtype group.
    """
    if k is None:
        k = d.k
    # Pack (sat_type, freq, track) into one integer per row so that
    # unique() can de-duplicate the combinations in a single pass.
    packed = unique( (d[:,k.SAT_TYPE].astype(int)<<16)
                     + (d[:,k.FREQ].astype(int)<<8)
                     + (d[:,k.TRACK].astype(int)<<0) )
    out = {}
    for code in packed:
        sat_type = (code>>16)&0xff
        freq = (code>>8)&0xff
        track = code&0xff
        if desc:
            entry = (freq, track, get_sub_type(sat_type, freq, track).fullstr)
        else:
            entry = (freq, track)
        out.setdefault(sat_type, []).append(entry)

    # Order each satellite type's signals by their subtype group number.
    for sat_type in out:
        out[sat_type] = sorted(out[sat_type], key=lambda sig: get_sub_type(sat_type, sig[0], sig[1]).group)
    return out


def do_double_diff( dr, kr, db, kb, ref_sv, tst_sv, sat_type, freq, track ):
    """
    Do a manual zero-baseline double-diff calculation.
    dr = rover rec27 data
    kr = rover rec27 key (e.g., from vd2arr)
    db = base rec27 data
    kb = base rec27 key (e.g., from vd2arr)
    ref_sv = reference satellite ID
    tst_sv = test satellite ID
    sat_type = satellite type
    freq = block type (frequency)
    track = track/signal type
    Returns: (t, code double diff[m], carrier double diff[cycles])
    Note - tries to remove cycle slips from carrier double diff...
    """
    # Select rows for the requested (satellite, signal) in each of the four
    # series: base/ref, base/test, rover/ref, rover/test.
    ibr = find( (db[:,kb.SV]==ref_sv) & (db[:,kb.SAT_TYPE]==sat_type) & \
                (db[:,kb.FREQ]==freq) & (db[:,kb.TRACK]==track) )
    ibt = find( (db[:,kb.SV]==tst_sv) & (db[:,kb.SAT_TYPE]==sat_type) & \
                (db[:,kb.FREQ]==freq) & (db[:,kb.TRACK]==track) )
    irr = find( (dr[:,kr.SV]==ref_sv) & (dr[:,kr.SAT_TYPE]==sat_type) & \
                (dr[:,kr.FREQ]==freq) & (dr[:,kr.TRACK]==track) )
    irt = find( (dr[:,kr.SV]==tst_sv) & (dr[:,kr.SAT_TYPE]==sat_type) & \
                (dr[:,kr.FREQ]==freq) & (dr[:,kr.TRACK]==track) )
    # Keep only the epochs common to all four series.
    t, ibr1, ibt1, irr1, irt1 = intersect( db[ibr,kb.TIME], db[ibt,kb.TIME], \
                                           dr[irr,kr.TIME], dr[irt,kr.TIME] )
    ibr=ibr[ibr1]
    ibt=ibt[ibt1]
    irr=irr[irr1]
    irt=irt[irt1]
    # steer code & carrier
    # Remove the receiver clock bias (CLKBIAS, ms) from the code [m] and
    # carrier [cycles] observables.
    # NOTE(review): both helpers use the L1 wavelength regardless of the
    # requested band - confirm this is intended for non-L1 signals.
    def steer_code( d, k ):
        dt = -d[:,k.CLKBIAS]*1e-3
        wavelength = GnssConst.L1_WAVELENGTH
        return d[:,k.RANGE] + d[:,k.DOPP] * dt * wavelength + dt*GnssConst.LIGHT
    def steer_carr( d, k ):
        dt = -d[:,k.CLKBIAS]*1e-3
        wavelength = GnssConst.L1_WAVELENGTH
        return d[:,k.PHASE] - d[:,k.DOPP]*dt - dt*GnssConst.LIGHT/wavelength

    # Double difference = (base ref - base test) - (rover ref - rover test).
    base_ref_code = steer_code(db[ibr,:], kb )
    rovr_ref_code = steer_code(dr[irr,:], kr )
    base_tst_code = steer_code(db[ibt,:], kb )
    rovr_tst_code = steer_code(dr[irt,:], kr )
    diff_code = base_ref_code-base_tst_code - (rovr_ref_code-rovr_tst_code)
    #diff_code = db[ibr,kb.RANGE]-db[ibt,kb.RANGE] - (dr[irr,kr.RANGE]-dr[irt,kr.RANGE])
    base_ref_carr = steer_carr(db[ibr,:], kb )
    rovr_ref_carr = steer_carr(dr[irr,:], kr )
    base_tst_carr = steer_carr(db[ibt,:], kb )
    rovr_tst_carr = steer_carr(dr[irt,:], kr )
    diff_carr = base_ref_carr-base_tst_carr - (rovr_ref_carr-rovr_tst_carr)
    #diff_carr = db[ibr,kb.PHASE]-db[ibt,kb.PHASE] - (dr[irr,kr.PHASE]-dr[irt,kr.PHASE])
    #diff_carr = remove_jumps(t,diff_carr,db[ibr,kb.SLIP_CNT]+db[ibt,kb.SLIP_CNT]+dr[irr,kr.SLIP_CNT]+dr[irt,kr.SLIP_CNT],thresh=1.)
    return t, diff_code, diff_carr



def get_freq( d, k ):
    """
    Return the carrier frequency for one rec27/35:19 row.
    d = a single row of rec27 data
    k = rec27 key (e.g., from vd2arr)
    GLONASS FDMA bands add the per-channel offset scaled by d[k.FDMA];
    XONA uses its own L1/L5 frequencies.
    NOTE(review): falls through to an implicit None for any band code
    not handled below.
    """
    band = d[k.FREQ]
    sat_type = d[k.SAT_TYPE]
    if band == RTConst.dRec27L1Band:
        if sat_type == RTConst.RT_SatType_XONA:
            return GnssConst.XONA_L1_FREQ
        if sat_type == RTConst.RT_SatType_GLONASS:
            return GnssConst.GLONASS_L1_BASE_FREQ + GnssConst.GLONASS_L1_FREQ_OFFSET*d[k.FDMA]
        return GnssConst.L1_FREQ
    if band == RTConst.dRec27L2Band:
        if sat_type == RTConst.RT_SatType_GLONASS:
            return GnssConst.GLONASS_L2_BASE_FREQ + GnssConst.GLONASS_L2_FREQ_OFFSET*d[k.FDMA]
        return GnssConst.L2_FREQ
    if band == RTConst.dRec27L5E5ABand:
        if sat_type == RTConst.RT_SatType_XONA:
            return GnssConst.XONA_L5_FREQ
        return GnssConst.L5_FREQ
    if band == RTConst.dRec27E5BBand:
        return GnssConst.GALILEO_E5B_FREQUENCY
    if band == RTConst.dRec27E5ABCentreBand:
        return GnssConst.GALILEO_E5AltBOC_FREQUENCY
    if band == RTConst.dRec27E6Band:
        return GnssConst.GALILEO_E6_FREQUENCY
    if band == RTConst.dRec27B1Band:
        return GnssConst.BEIDOU_B1_FREQUENCY
    if band == RTConst.dRec27B3Band:
        return GnssConst.BEIDOU_B3_FREQUENCY
    if band == RTConst.dRec27E1Band:
        return GnssConst.BEIDOU_E1_FREQUENCY
    if band == RTConst.dRec27G3Band:
        return GnssConst.GLONASS_L3_FREQUENCY
    if band == RTConst.dRec27S1Band:
        return GnssConst.IRNSS_S1_FREQUENCY

def get_lp_known( d, k ):
    """
    Return a boolean array: True where the lock point is known for each row.
    Uses the MEAS bitfield with the key's dMEAS_LOCK_POINT mask when the key
    defines MEAS, otherwise falls back to bit 0x20 of the MEAS1 field.
    NOTE(review): assumes `'MEAS' in k` tests field presence on the key
    object - confirm the key type supports that containment check.
    """
    if 'MEAS' in k:
        lp_mask = k.dMEAS_LOCK_POINT
        return (d[:,k.MEAS].astype(int) & lp_mask) == lp_mask
    return (d[:,k.MEAS1].astype(int) & 0x20) == 0x20

def get_MPx( d, k, i_Lx, i_Ly, filt_Tc=-1., do_MPy=False, use_CCFilt=False, rm_MPHat=False ):
    """
    Compute iono-free stationary pseudorange (MPx)
    d = rec27/rec35:19 data
    k = rec27/rec35:19 key (e.g., from vd2arr)
    i_Lx = best Lx index for satellite - see get_GPS_L1L2_idx or get_LxLy_idx
    i_Ly = best Ly index for satellite
    filt_Tc = if > 0, lowpass filter MPx data with this time constant. 15s is OK.
    do_MPy = if True, return MPy.  If False returns [].
    use_CCFilt = if True use code/carrier filtered PR (need rec 35:19 diagnostic data)
    rm_MPHat = if True, add the MPHat column back onto the Lx pseudorange
    Returns: (MPx, atmx, MPy)
      MPx  = Lx code multipath
      atmx = Lx atmospheric correction
      MPy  = Ly code multipath
    """
    # Indices must be non-empty and pairwise matched (one Ly row per Lx row).
    if len(i_Lx) == 0 or len(i_Ly) != len(i_Lx):
        return (array([]), array([]), array([]))

    if do_MPy:
        rng_Ly = d[i_Ly,k.RANGE]
    else:
        rng_Ly = []

    if use_CCFilt:
        # CCDiff = unsmoothed - smoothed, thus smoothed = unsmoothed - CCDiff
        rng_Lx = d[i_Lx,k.RANGE] - d[i_Lx,k.CCDiff]
    else:
        rng_Lx = d[i_Lx,k.RANGE]
    if rm_MPHat:
        rng_Lx += d[i_Lx,k.MPHat]
    freq_x = get_freq( d[i_Lx[0],:], k )
    freq_y = get_freq( d[i_Ly[0],:], k )
    freq_ratio = freq_x/freq_y
    if freq_ratio >= 0.99 and freq_ratio <= 1.01:
        # The iono-free combination is numerically unusable when the two
        # frequencies are nearly identical.
        print("get_MPx frequencies are too close - ratio %.3f" % freq_ratio)
        return (nan*ones(rng_Lx.shape), array([]), array([]))

    # BUGFIX: the lock-point mask previously AND'ed the Lx mask with itself;
    # it must combine the Lx AND Ly lock-point masks.
    MPx, atmx, MPy \
        = iono_free_SR( d[i_Lx,k.TIME], \
                        freq_x, rng_Lx, d[i_Lx,k.PHASE], d[i_Lx,k.SLIP_CNT], \
                        freq_y, rng_Ly, d[i_Ly,k.PHASE], d[i_Ly,k.SLIP_CNT], \
                        get_lp_known(d[i_Lx,:],k) & get_lp_known(d[i_Ly,:],k) )
    if filt_Tc > 0.:
        dt = median(diff(d[i_Lx,k.TIME]))
        b,a = butter(3, dt/.5/filt_Tc, btype='lowpass' )
        # filtfilt's Gustafsson padding needs enough samples; skip short series.
        if len(MPx) > 20:
            MPx = filtfilt( b, a, MPx, method="gust" )
        if len(MPy) > 20:
            MPy = filtfilt( b, a, MPy, method="gust" )

    return (MPx, atmx, MPy)

def signal_envelope( x, N ):
    """
    Estimate the envelope of a random signal.
    x = input signal (array-like; NaN samples decay the envelope)
    N = number of points the envelope decays over
    Returns the pointwise maximum of a forward and a backward
    exponentially-decaying peak tracker applied to abs(x).
    """
    decay = exp(-1./N)

    def _track_peaks( mag ):
        # One-directional envelope: jump up to new peaks, decay toward
        # smaller samples, and pure-decay across NaN samples.
        out = empty(mag.shape)
        env = nan
        for idx, sample in enumerate(mag):
            if isnan(env):
                env = sample
            elif isnan(sample):
                env = env*decay
            elif sample < env:
                env = env*decay + (1-decay)*sample
            else:
                env = sample
            out[idx] = env
        return out

    mag = abs(x)
    forward = _track_peaks( mag )
    backward = _track_peaks( mag[::-1] )[::-1]
    return maximum( forward, backward )

def rms( x, ignore_nan=False ):
    """
    Return the root-mean-square of x.
    If ignore_nan==True, NaN entries are excluded from the mean.
    """
    mean_fn = nanmean if ignore_nan else mean
    return sqrt( mean_fn( square( x )))


def detrend_poly( y, order, x=None ):
    """
    Remove a best-fit polynomial of the given order from y.
    Similar to detrend_linear but supports any polynomial order.
    x defaults to the sample index 0..len(y)-1.
    """
    if x is None:
        x = arange(len(y))
    coeffs = polyfit( x, y, order )
    return y - polyval( coeffs, x )

def get_cdf_percents( cx, cy, percents ):
    """
    Read percentile crossing points off a CDF curve.
    cx = CDF x values
    cy = CDF y values (fractions, 0..1)
    percents = percentages to look up (e.g. [50, 95])
    Returns, for each percentage, the first cx value where cy reaches it,
    or 0.0 when the curve never gets there.
    """
    crossings = []
    for pct in percents:
        hits = where( cy >= .01*pct )[0]
        if len(hits) > 0:
            crossings.append( cx[hits[0]] )
        else:
            crossings.append( 0.0 )
    return crossings

def plt_pospac_truth( txt, pp, dkl_list, do_3d=True, do_2d=True, do_hgt=True, do_track=False, print_all=False, do_pad=False, do_vel=False ):
    """
Give quick summary of T0x vs POSPAC results.  Example usage:
pp = doload('POSLV-PPSmartBase.txt') # or pp=[lat,lon,hgt]
d1,k1=vd2arr('f1.T04')
d2,k2=vd2arr('f2.T04')
plt_pospac_truth( 'some title', pp, [(d1,k1,'ref'), (d2,k2,'tst')] )
show()

Inputs:
  txt = title text added to each figure
  pp = POSPAC truth array, or a bare [lat,lon,hgt] for a static truth point
  dkl_list = list of (data, keys, label) tuples to compare against truth
  do_3d/do_2d/do_hgt = generate 3D/2D/height error + CDF figures
  do_track = also plot NTRK/NUSED vs time
  print_all = print full CDF statistics for the all-data pass too
  do_pad = not referenced in the body at present -- kept for call
           compatibility (verify before removing)
  do_vel = also compare velocities (uses d.VLON/VLAT/VHGT attribute access,
           so data must be vd2cls-style -- TODO confirm)
"""
    # Shorthand for the POSPAC column indexes used below.
    dPP_TIME=PosPacConst.dPP_TIME
    dPP_LAT=PosPacConst.dPP_LAT
    dPP_velE=PosPacConst.dPP_velE
    dPP_velN=PosPacConst.dPP_velN
    dPP_velU=PosPacConst.dPP_velU

    # A bare [lat,lon,hgt] means static truth; fake a one-row array with a
    # dummy time tag so the same indexing works below.
    is_static = False
    if shape(pp) == (3,):
        pp = array([-1.,pp[0],pp[1],pp[2]]).reshape(1,4)
        is_static = True

    # Pre-create all requested figures/axes; both show_data() passes below
    # draw into these handles via the enclosing scope.
    if do_track:
        f_Tr=figure()
        axTr_1=subplot(211)
        grid(True)
        axTr_2=subplot(212,sharex=axTr_1)
        grid(True)
        tight_layout()
    if do_3d:
        f_3d_err=figure()
        ax3d_err=subplot(111)
        grid(True)
        f_3d_cdf=figure()
        ax3d_cdf=subplot(111)
        grid(True)
        if do_vel:
            f_3d_cdf_v=figure()
            ax3d_cdf_v=subplot(111)
            grid(True)
    if do_2d:
        f_2d_err=figure()
        ax2d_err=subplot(111)
        grid(True)
        f_2d_cdf=figure()
        ax2d_cdf=subplot(111)
        grid(True)
        if do_vel:
            f_2d_cdf_v=figure()
            ax2d_cdf_v=subplot(111)
            grid(True)
    if do_hgt:
        f_hgt_err=figure()
        axhgt_err=subplot(111)
        grid(True)
        f_hgt_cdf=figure()
        axhgt_cdf=subplot(111)
        grid(True)
        if do_vel:
            f_hgt_cdf_v=figure()
            axhgt_cdf_v=subplot(111)
            grid(True)

    # show data for each input
    # Runs twice: for_all=True plots errors vs time over each data set's full
    # span; for_all=False plots CDFs over only the epochs common to all sets.
    def show_data( for_all ):
        t,i_d,i_pp=(None,None,None)
        if is_static:
            # NOTE(review): assumes intersect() returns a mutable list of
            # [times, idx1, idx2, ...]; slice(None) is inserted where the
            # truth-row index would go, selecting the single static row.
            common_i = intersect( *(d[:,k.TIME] for d,k,_ in dkl_list) )
            common_i.insert(1,slice(None))
        else:
            common_i = intersect( pp[:,dPP_TIME], *(d[:,k.TIME] for d,k,_ in dkl_list) )
        for n,(d,k,l) in enumerate(dkl_list):
            # Resolve time vector and the truth/data row selectors for this set.
            if for_all:
                if is_static:
                    t = d[:,k.TIME]
                    i_pp = 0
                    i_d = slice(None)
                else:
                    t, i_pp, i_d = intersect( pp[:,dPP_TIME], d[:,k.TIME] )
            else:
                t = common_i[0]
                i_pp = common_i[1]
                i_d = common_i[2+n]
            if do_vel:
                # Velocity errors in ENU; requires attribute-style columns.
                dVE = d.VLON[i_d] - pp[i_pp,dPP_velE]
                dVN = d.VLAT[i_d] - pp[i_pp,dPP_velN]
                dVU = d.VHGT[i_d] - pp[i_pp,dPP_velU]
                v_err_3d = sqrt( dVE**2 + dVN**2 + dVU**2 )
                v_err_2d = sqrt( dVE**2 + dVN**2 )
                v_err_hgt = abs( dVU )
                v_cx3,v_cy3 = docdf( v_err_3d )
                v_cx2,v_cy2 = docdf( v_err_2d )
                v_cxh,v_cyh = docdf( v_err_hgt )
            # Position errors: lat/lon/hgt differenced against truth in ENU.
            (dE, dN, dU) = llh2enu( d[i_d,k.LAT:k.LAT+3], pp[i_pp,dPP_LAT:dPP_LAT+3] )
            err_3d = sqrt( dE**2 + dN**2 + dU**2 )
            err_2d = sqrt( dE**2 + dN**2 )
            err_hgt = abs( dU )
            cx3,cy3 = docdf( err_3d )
            cx2,cy2 = docdf( err_2d )
            cxh,cyh = docdf( err_hgt )
            # Error-vs-time plots come from the all-data pass only...
            if do_track and for_all:
                axTr_1.plot( t, d[i_d,k.NTRK], '-', label=l )
                axTr_2.plot( t, d[i_d,k.NUSED], '-', label=l )
            if for_all and do_3d:
                ax3d_err.plot( t, err_3d, '-x', label=l )
            if for_all and do_2d:
                ax2d_err.plot( t, err_2d, '-x', label=l )
            if for_all and do_hgt:
                axhgt_err.plot( t, err_hgt, '-x', label=l )
            # ...while CDF plots come from the common-epochs pass only.
            if not for_all:
                if do_3d:
                    ax3d_cdf.plot( cx3, 100*cy3, label=l )
                    if do_vel:
                        ax3d_cdf_v.plot( v_cx3, 100*v_cy3, label=l )
                if do_2d:
                    ax2d_cdf.plot( cx2, 100*cy2, label=l )
                    if do_vel:
                        ax2d_cdf_v.plot( v_cx2, 100*v_cy2, label=l )
                if do_hgt:
                    axhgt_cdf.plot( cxh, 100*cyh, label=l )
                    if do_vel:
                        axhgt_cdf_v.plot( v_cxh, 100*v_cyh, label=l )

            # Text summary: full statistics are always printed for the common
            # pass, and for the all-data pass only when print_all is set.
            if for_all and not print_all:
                print_("All '%s' len %d" % (l,len(err_3d)))
                continue
            if for_all:
                print_("All '%s' data err (len %d):" % (l,len(err_3d)))
            else:
                print_("Common '%s' data err (len %d):" % (l,len(err_3d)))
            percent3 = get_cdf_percents( cx3, cy3, [50, 68, 90, 95, 99] )
            percent2 = get_cdf_percents( cx2, cy2, [50, 68, 90, 95, 99] )
            print_(' 3D max/rms/std %.1f %.2f %.2f' % (max(err_3d),rms(err_3d),std(err_3d)))
            print_(' 3D 50%%/68%%/90%%/95%%/99%% %.3f %.3f %.3f %.3f %.3f' % tuple(percent3))
            print_(' 2D max/rms/std %.1f %.2f %.2f' % (max(err_2d),rms(err_2d),std(err_2d)))
            print_(' 2D 50%%/68%%/90%%/95%%/99%% %.3f %.3f %.3f %.3f %.3f' % tuple(percent2))
            if do_vel:
                percent3 = get_cdf_percents( v_cx3, v_cy3, [50, 68, 90, 95, 99] )
                percent2 = get_cdf_percents( v_cx2, v_cy2, [50, 68, 90, 95, 99] )
                print_(' 3D vel max/rms/std %.1f %.2f %.2f' % (max(v_err_3d),rms(v_err_3d),std(v_err_3d)))
                print_(' 3D vel 50%%/68%%/90%%/95%%/99%% %.3f %.3f %.3f %.3f %.3f' % tuple(percent3))
                print_(' 2D vel max/rms/std %.1f %.2f %.2f' % (max(v_err_2d),rms(v_err_2d),std(v_err_2d)))
                print_(' 2D vel 50%%/68%%/90%%/95%%/99%% %.3f %.3f %.3f %.3f %.3f' % tuple(percent2))
            # Breakdown of the fix types seen over the compared epochs.
            print_(' fixmode:',end="")
            for fm in unique(d[i_d,k.FIXTYPE]):
                i=find(d[i_d,k.FIXTYPE] == fm)
                print_(' %d=%.1f%%' % (fm, len(i)*100./len(d[i_d,k.TIME])),end="")
            print_('')


    show_data( True )
    show_data( False )

    # finalize graphs:
    if do_track:
        axTr_1.set_title('NTRK')
        axTr_1.legend(loc='best')
        axTr_2.set_title('NUSED')
    if do_3d:
        figure(f_3d_err.number)
        xlabel('GPS time [s]')
        ylabel('3D error [m]')
        legend(loc='best')
        title(txt)
        figure(f_3d_cdf.number)
        xlabel('3D error [m]')
        ylabel('Common data CDF probability %')
        grid(True)
        legend(loc='best')
        title(txt)
        if do_vel:
            figure(f_3d_cdf_v.number)
            xlabel('3D velocity error [m/s]')
            ylabel('Common data CDF probability %')
            grid(True)
            legend(loc='best')
            title(txt)
    if do_2d:
        figure(f_2d_err.number)
        xlabel('GPS time [s]')
        ylabel('2D error [m]')
        legend(loc='best')
        title(txt)
        figure(f_2d_cdf.number)
        xlabel('2D error [m]')
        ylabel('Common data CDF probability %')
        grid(True)
        legend(loc='best')
        title(txt)
        if do_vel:
            figure(f_2d_cdf_v.number)
            xlabel('2D velocity error [m/s]')
            ylabel('Common data CDF probability %')
            grid(True)
            legend(loc='best')
            title(txt)

    if do_hgt:
        figure(f_hgt_err.number)
        xlabel('GPS time [s]')
        ylabel('Height error [m]')
        legend(loc='best')
        title(txt)
        figure(f_hgt_cdf.number)
        xlabel('Height error [m]')
        ylabel('Common data CDF probability %')
        grid(True)
        legend(loc='best')
        title(txt)
        if do_vel:
            figure(f_hgt_cdf_v.number)
            xlabel('Height velocity error [m/s]')
            ylabel('Common data CDF probability %')
            grid(True)
            legend(loc='best')
            title(txt)


def find_SNR_dips( d, k, thresh=5 ):
    """
Look for sudden SNR jumps in data.  Must only pass in data from master subchannel.
Usage:
(d,k) = vd2arr('file.T0x',rec='-d27',opt=['-t'])
find_SNR_dips(d,k)

thresh = minimum C/No change (between consecutive epochs) to flag, in the
         units of the CNO column.
Prints 'sv <sv>/<sat_type>: <times>' for each satellite with flagged jumps.
"""
    sv_list = get_sv_list( d,k )
    dt = None
    for sv,sat_type in sv_list:
        # Select this SV's epochs that land on (nearly) integer seconds:
        # (TIME*1000+2)%1000 <= 4 keeps samples within about +/-2 ms of a
        # whole second.
        i = find( (d[:,k.SV]==sv) & (d[:,k.SAT_TYPE]==sat_type) & ((int_(d[:,k.TIME]*1000 + 2)%1000)<=4) )
        if dt is None:
            # Nominal epoch interval, estimated once from the first SV seen.
            dt = min(diff(d[i,k.TIME]))
        cno_diff = diff(d[i,k.CNO])
        ijump = find( abs(cno_diff) > thresh )
        # Screen the candidate jumps; rejected entries are marked -1 and
        # removed after the loop.
        for pos, ia in enumerate(ijump):
            if ia-1 < 0 or ia+1 > len(i):
                # Too close to the data edges to inspect both neighbors.
                # NOTE(review): 'ia+1 > len(i)' reads like it was meant to be
                # '>= len(i)', but ia indexes diff() output so it can never
                # exceed len(i)-2 -- the condition is harmless as written.
                ijump[pos] = -1
                continue
            if abs(d[i[ia-1],k.TIME] - d[i[ia+1],k.TIME]) > dt*3:
                # Neighbors span a data gap; a jump across an outage doesn't count.
                ijump[pos] = -1
                continue
            if min(d[i[ia-1:ia+2],k.CNO]) < 32.:
                # Ignore jumps at low C/No where the SNR is naturally noisy.
                ijump[pos] = -1
                continue
        ijump = ijump[ ijump >= 0 ]

        if len(ijump) > 0:
            print_('sv %d/%d:' % (sv,sat_type), d[i[ijump],k.TIME])
            #plot( d[i,k.TIME], d[i,k.CNO], label='sv %d/%d'%(sv,sat_type))
            #plot( d[i[ijump],k.TIME], d[i[ijump],k.CNO], 'ro' )
    #legend(bbox_to_anchor=(1.1,1.05))
    #show()


def legend_outside(ax=None, x0=1, y0=.9, direction = "h", padpoints = 3, **kwargs):
    """
Put legend on outside of plot.
Only tested example: legend_outside() # legend on right
Inputs: ax = axes to shrink (default: current axes)
        x0,y0 = legend anchor point in figure coordinates
        direction = "h" shrinks the axes horizontally (legend at right),
                    "v" shrinks vertically (legend below/above)
        padpoints = gap between axes and legend, in points
        **kwargs = passed through to ax.legend()
See: https://stackoverflow.com/questions/42994338/creating-figure-with-exact-size-and-no-padding-and-legend-outside-the-axes/43001737
"""
    if ax is None:
        ax=gca()
    fig = ax.get_figure()
    # Anchor the legend in figure coordinates so it sits outside the axes.
    otrans = ax.figure.transFigure
    t = ax.legend(bbox_to_anchor=(x0,y0), loc=1, bbox_transform=otrans,**kwargs)
    #plt.tight_layout(pad=0)
    # Force a draw so the legend's window extent queried below is valid.
    renderer = ax.figure.canvas.get_renderer()
    ax.figure.draw(renderer=renderer)
    # Translate by the pad (converted from points via dpi_scale_trans), then
    # map the legend's extent back into figure coordinates.
    ppar = [0,-padpoints/72.] if direction == "v" else [-padpoints/72.,0]
    trans2=matplotlib.transforms.ScaledTranslation(ppar[0],ppar[1],fig.dpi_scale_trans)+\
             ax.figure.transFigure.inverted()
    tbox = t.get_window_extent().transformed(trans2 )
    bbox = ax.get_position()
    # Shrink the axes so it stops where the (padded) legend begins.
    if direction=="v":
        ax.set_position([bbox.x0, bbox.y0,bbox.width, tbox.y0-bbox.y0])
    else:
        ax.set_position([bbox.x0, bbox.y0,tbox.x0-bbox.x0, bbox.height])


def plot_tracking(d, k, sat_types=None, title_txt='',marker='o'):
    """
Plot tracking & cycle slip info from a T04 file.
Example:
  d,k=vd2arr(filename, rec='-d35:19')
  plot_tracking(d,k)
Inputs:
  sat_types = list of satellite types to plot, e.g., [0,2] for GPS/GLN
  title_txt = add extra info on title
  marker = what marker to use for the cycle slips
Returns: [(fig,sat_type),...]

One figure is produced per satellite type; each SV gets a horizontal
tracking line (one per signal, stacked with a small vertical offset) and
cycle slips are overlaid with 'marker'.
"""
    if sat_types is None:
        sat_types = unique( d[:,k.SAT_TYPE] )

    # Detect the record format: -d35:19 data carries a MEAS bitfield, older
    # formats carry a SLIP_FLG column instead.
    combinedFlag = False
    if 'MEAS' in k:
      is_rec35_19 = True
      try:
        # flags are a little different if we used vd2cls()
        if(d.kf):
          combinedFlag = True
      except:
        pass
    else:
        is_rec35_19 = False

    all_results = []
    for loop,sat_type in enumerate(sat_types):
        fig=figure()
        # Share the time axis across all satellite-type figures.
        if loop==0:
            ax = subplot(111)
        else:
            subplot(111,sharex=ax)
        tot_len = 0
        i_st = find(d[:,k.SAT_TYPE] == sat_type)
        sv_list = unique(d[i_st,k.SV])
        sig_list = get_signals( d[i_st,:], k )[sat_type]
        sig_colors = {}
        for sig_n,(freq, track) in enumerate(sig_list):
            for sv in sv_list:
                # Get unique color for each signal type, and don't label things twice on legend
                try:
                    color=sig_colors[(freq,track)]
                    label='_nolegend_'
                except:
                    color=None
                    label = get_sub_type(sat_type,freq,track).fullstr

                i = find( (d[i_st,k.SV] == sv)
                          & (d[i_st,k.FREQ]==freq)
                          & (d[i_st,k.TRACK]==track) )
                if len(i) == 0:
                    continue
                i=i_st[i]
                # Cycle-slip sample indices for this SV/signal.
                if is_rec35_19:
                    if(combinedFlag):
                      i2 = find( (d[i,k.MEAS].astype(int)&d.kf.dMEAS_SLIP)!=0 )
                    else:
                      i2 = find( (d[i,k.MEAS].astype(int)&k.dMEAS_SLIP)!=0 )
                else:
                    i2 = find( d[i,k.SLIP_FLG] )

                # set proper spot on SV # y-axis
                sv_y = d[i,k.SV].copy()
                sv_y[isclose(sv_y,sv)] = find(sv_list==sv)[0]
                # Each additional signal is drawn slightly above the previous one.
                spacing = sig_n*.17

                # insert NaN for time gaps.  That way we get gaps for tracking outages.
                dt = abs(diff(d[i,k.TIME]))
                expected_dt = median(dt)+.001
                igap = find( dt > expected_dt )+1
                if len(igap) > 0:
                    sv_xgap = insert(d[i,k.TIME], igap, nan )
                    sv_ygap = insert(sv_y, igap, nan )
                else:
                    sv_xgap = d[i,k.TIME]
                    sv_ygap = sv_y

                # plot tracking line
                p,=plot( sv_xgap, sv_ygap+spacing, '.-', markersize=1, label=label, color=color)
                if color is None:
                    # First time this signal was drawn: remember its auto color.
                    color = p.get_color()
                    sig_colors[(freq,track)] = color

                # plot cycle slips
                plot( d[i[i2],k.TIME], sv_y[i2]+spacing, marker, mfc='none', label='_nolegend_', color=color)

                tot_len += len(i)
        if tot_len == 0:
            # Nothing plotted for this sat_type; discard the empty figure.
            close()
            continue
        txt = 'track/slip info'
        if len(title_txt) > 0:
            txt += ' : ' + title_txt
        title(txt)
        yticks( arange(len(sv_list)), sv_list.astype(int) )
        grid(True)
        legend_outside()
        all_results.append( (fig,sat_type) )
    return all_results


def plot_subchan_raim(d, k, sat_types=None, title_txt='', show_all=False):
    """
Plot RAIM flags for each subchannel/signal from a T04 file.
Example:
  d,k=vd2arr(filename, rec='-d35:19 -t')
  plot_subchan_raim(d,k)
Inputs:
  sat_types = list of satellite types to plot, e.g., [0,2] for GPS/GLN
  title_txt = add extra info on title
  show_all = if False(default), hide plots with no RAIM faults.
Returns: [(fig,sat_type),...]

One figure per satellite type, with one stacked subplot per signal; each
SV gets a black tracking line and per-fault-type markers offset above it.
Total fault counts per label are printed at the end.
"""
    from collections import Counter
    if sat_types is None:
        sat_types = unique( d[:,k.SAT_TYPE] )

    # (legend label, SUBCH_FLAGS fault bit) for every fault source plotted.
    raims = ( ('MM',k.dSV_FLAGS_FAULT_MM_RAIM),
              ('WLS',k.dSV_FLAGS_FAULT_WLS_RAIM),
              ('KF',k.dSV_FLAGS_FAULT_KF_RAIM),
              ('SBAS',k.dSV_FLAGS_FAULT_SBAS_MSG),
              ('CNAV',k.dSV_FLAGS_FAULT_CNAV),
              ('FFT',k.dSV_FLAGS_FAULT_FFT_CORR),
              ('RTX',k.dSV_FLAGS_FAULT_RTX_POS),
              ('SYS',k.dSV_FLAGS_FAULT_SYS_RAIM),
              ('EPH',k.dSV_FLAGS_FAULT_EPH),
              ('NMA',k.dSV_FLAGS_FAULT_NMA),
              ('CNO',k.dSV_FLAGS_FAULT_CNO_DETECT_SPOOF),
              ('DUAL_ANT',k.dSV_FLAGS_FAULT_DUAL_ANT_SPOOF)
            )
    raims_cnt = Counter()  # total fault hits per label, printed at the end

    all_results = []
    for loop,sat_type in enumerate(sat_types):
        # Find all signal types for this sat_type
        i_st = find(d[:,k.SAT_TYPE] == sat_type)
        sig_list = get_signals(d[i_st,:], k)[sat_type]
        n_signals = len(sig_list)
        if n_signals == 0:
            continue
        # One stacked subplot per signal, sharing the time axis.
        fig, axes = subplots(n_signals, 1, sharex=True, figsize=(10, 2.5*n_signals))
        if n_signals == 1:
            axes = [axes]  # subplots() returns a bare Axes when n==1
        sv_list = unique(d[i_st,k.SV])
        tot_len_all = 0
        for sig_idx, (freq, track) in enumerate(sig_list):
            ax = axes[sig_idx]
            tot_len = 0
            # plot RAIM info for this signal type
            for n,(label, flag) in enumerate(raims):
                i = find( (d[:,k.SAT_TYPE]==sat_type)
                          & (d[:,k.FREQ]==freq)
                          & (d[:,k.TRACK]==track)
                          &((d[:,k.SUBCH_FLAGS].astype(int)&flag)!=0))
                # Map SV numbers onto consecutive y-axis slots.
                sv_y = d[i,k.SV].copy()
                for sv_pos,sv in enumerate(sv_list):
                    sv_y[isclose(sv_y,sv)] = sv_pos
                if len(i) == 0:
                    label='_nolegend_'  # don't clutter legend with non-existent stuff
                # Offset each fault type slightly above the tracking line.
                ax.plot(d[i,k.TIME], sv_y+(n+1)/(len(raims)+2), 'o', markersize=2, label=label)
                tot_len += len(i)
                if len(i) > 0:
                    raims_cnt[label] += len(i)
            tot_len_all += tot_len

            # plot tracking info for this signal type
            for sv_pos,sv in enumerate(sv_list):
                i = find( (d[:,k.SV]==sv)
                          & (d[:,k.SAT_TYPE]==sat_type)
                          & (d[:,k.FREQ]==freq)
                          & (d[:,k.TRACK]==track))
                # Insert NaN at time gaps so tracking outages show as breaks.
                dt = abs(diff(d[i,k.TIME]))
                igap = find(dt > median(dt)+.001)+1
                if len(igap) > 0:
                    sv_xgap = insert(d[i,k.TIME], igap, nan )
                    sv_ygap = insert(sv_pos*ones(i.shape), igap, nan )
                else:
                    sv_xgap = d[i,k.TIME]
                    sv_ygap = sv_pos*ones(i.shape)
                ax.plot(sv_xgap, sv_ygap, 'k.-', markersize=1)
            txt = get_sub_type(sat_type, freq, track).fullstr
            if len(title_txt) > 0:
                txt = title_txt + ' : ' + txt
            ax.set_title(txt)
            ax.set_yticks(arange(len(sv_list)))
            ax.set_yticklabels(sv_list.astype(int))
            ax.grid(True)
            if tot_len > 0:
                legend_outside(ax)
        if tot_len_all == 0 and not show_all:
            # No faults on any signal of this sat_type; drop the figure.
            close(fig)
            continue

        fig.tight_layout()
        all_results.append( (fig,sat_type) )
    print(raims_cnt)
    return all_results


def plot_raim(d, k, sat_types=None, title_txt='', show_all=False):
    """
Plot RAIM flags from a T04 file.
Example:
  d,k=vd2arr(filename, rec='-d35:19 -t')
  plot_raim(d,k)
Inputs:
  sat_types = list of satellite types to plot, e.g., [0,2] for GPS/GLN
  title_txt = add extra info on title
  show_all = if False(default), hide plots with no RAIM faults.
Returns: [(fig,sat_type),...]

One figure per satellite type: each SV gets a black tracking line, with
per-fault-type markers offset slightly above it.  Total fault counts per
label are printed at the end.
"""
    from collections import Counter
    if sat_types is None:
        sat_types = unique( d[:,k.SAT_TYPE] )

    # (legend label, SV_FLAGS fault bit) for every fault source plotted.
    raims = ( ('MM',k.dSV_FLAGS_FAULT_MM_RAIM),
              ('WLS',k.dSV_FLAGS_FAULT_WLS_RAIM),
              ('KF',k.dSV_FLAGS_FAULT_KF_RAIM),
              ('SBAS',k.dSV_FLAGS_FAULT_SBAS_MSG),
              ('CNAV',k.dSV_FLAGS_FAULT_CNAV),
              ('FFT',k.dSV_FLAGS_FAULT_FFT_CORR),
              ('RTX',k.dSV_FLAGS_FAULT_RTX_POS),
              ('SYS',k.dSV_FLAGS_FAULT_SYS_RAIM),
              ('EPH',k.dSV_FLAGS_FAULT_EPH),
              ('NMA',k.dSV_FLAGS_FAULT_NMA),
              ('CNO',k.dSV_FLAGS_FAULT_CNO_DETECT_SPOOF),
              ('DUAL_ANT',k.dSV_FLAGS_FAULT_DUAL_ANT_SPOOF)
            )
    raims_cnt = Counter()  # total fault hits per label, printed at the end

    all_results = []
    for loop,sat_type in enumerate(sat_types):
        fig=figure()
        # Share the time axis across all satellite-type figures.
        if loop==0:
            ax = subplot(111)
        else:
            subplot(111,sharex=ax)
        tot_len = 0
        i_st = find(d[:,k.SAT_TYPE] == sat_type)
        sv_list = unique(d[i_st,k.SV])
        # plot RAIM info
        for n,(label, flag) in enumerate(raims):
            i = find( (d[:,k.SAT_TYPE]==sat_type) & ((d[:,k.SV_FLAGS].astype(int)&flag)!=0) )
            # Map SV numbers onto consecutive y-axis slots.
            sv_y = d[i,k.SV].copy()
            for sv_pos,sv in enumerate(sv_list):
                sv_y[isclose(sv_y,sv)] = sv_pos
            if len(i) == 0:
                label='_nolegend_' # don't clutter legend with non-existent stuff
            # Offset each fault type slightly above the tracking line.
            plot( d[i,k.TIME], sv_y+(n+1)/(len(raims)+2), 'o', markersize=2, label=label)
            tot_len += len(i)
            if len(i) > 0:
                raims_cnt[label] += len(i)
        if tot_len == 0 and not show_all:
            close()
            continue
        # plot tracking info
        for sv_pos,sv in enumerate(sv_list):
            i = find( (d[:,k.SV]==sv) & (d[:,k.SAT_TYPE]==sat_type) )
            # insert NaN for time gaps.  That way we get gaps for tracking outages.
            dt = abs(diff(d[i,k.TIME]))
            igap = find( dt > median(dt)+.001 )+1
            if len(igap) > 0:
                sv_xgap = insert(d[i,k.TIME], igap, nan )
                sv_ygap = insert(sv_pos*ones(i.shape), igap, nan )
            else:
                sv_xgap = d[i,k.TIME]
                sv_ygap = sv_pos*ones(i.shape)
            plot( sv_xgap, sv_ygap, 'k.-', markersize=1 )
        # NOTE(review): 'i' here is left over from the last SV of the loop
        # above; this assumes it is non-empty and that its first sample's
        # freq/track represents the whole figure -- verify.
        txt = get_sub_type(sat_type,d[i[0],k.FREQ],d[i[0],k.TRACK]).fullstr
        if len(title_txt) > 0:
            txt = title_txt + ' : ' + txt
        title(txt)
        yticks( arange(len(sv_list)), sv_list.astype(int) )
        grid(True)
        if tot_len > 0:
            legend_outside()
        all_results.append( (fig,sat_type) )
    print(raims_cnt)
    return all_results


def plot_pos_err( d, k, truth, txt='' ):
    """Inputs: d,k = rec 35:2/16 position data & index info
    truth = an array of secs,lat,lon,height truth points
            (a single-row truth is treated as a static point for all epochs)
    txt = optional string to prepend to all plot titles
    Generate a height error plot and 2D error plot vs. time, color-coded
    by position fix type, with 95th-percentile stats in the legend.
    Returns [ fig_height_err, fig_2d_err ]
    """
    unique_fixtypes = unique( d[:,k.FIXTYPE] )
    # Map fix type -> RGBA color.  Unknown fix types fall back to blue ('b').
    color_mapping = {}
    color_mapping[0]  = [1,0,0,1]   # Auto LSQ
    color_mapping[1]  = [1,0,0.3,1] # DGNSS LSQ
    color_mapping[2]  = [1,0,0.6,1] # SBAS LSQ
    color_mapping[3]  = [1,1,0,1]   # RTK Float
    color_mapping[4]  = [1,0.5,0,1] # RTK Fixed
    color_mapping[9]  = [0,0,1,1]   # Auto KF
    color_mapping[10] = [0,1,0,1]   # DGNSS KF
    color_mapping[11] = [1,0,1,1]   # SBAS KF
    color_mapping[12] = [0.5,0.5,0.5,1] # RTX

    start_week = d[0,k.WEEK]
    # Unwrap week rollovers into a continuous time axis.
    t = d[:,k.TIME] + 60*60*24*7*(d[:,k.WEEK]-start_week)

    all_figs = []
    if len(truth) == 1:
        # Static truth: compare every epoch against the single truth row.
        i_d = slice(None)
    else:
        _,i_d,i_tr = intersect( d[:,k.TIME], truth[:,0] )
        truth = truth[i_tr,1:4]
    (dE, dN, dU) = llh2enu( d[i_d,k.LAT:k.LAT+3], truth )
    err_2d = sqrt( dE**2 + dN**2 )
    for err_val, err_name in [ (dU,'Height'), (err_2d,'2D') ]:
        fig = figure()
        for fixtype in unique_fixtypes:
            fixtype = int(fixtype)
            i = find( d[i_d,k.FIXTYPE] == fixtype )
            if len(i) < 100:
                # With few points the 95th percentile has very little
                # confidence, so don't quote it.  (Also avoids indexing an
                # empty sorted array when this fix type has 0 points in the
                # compared epochs.)
                label = PosTypeStrConst[fixtype] + " (points= " + str(len(i)) + ")"
            else:
                errTmp = np.sort(abs(err_val[i]))
                err95  = errTmp[int(0.95 * len(i))]
                label = PosTypeStrConst[fixtype] + " 95% = " + "{:.3f}".format(err95) + "m (points= " + str(len(i)) + ")"
            color = color_mapping.get(fixtype, 'b')
            scatter( t[i], err_val[i], s=5, edgecolor='none', c=color, label=label )
        xlabel('Time [GPS Seconds]')
        ylabel('%s Error [m]'%err_name)

        title(txt + ' %s Error'%err_name)
        grid(True)
        legend(loc='best',markerscale=4)
        tight_layout()
        all_figs.append(fig)
    return all_figs


def make_legend_interactive( fig ):
    """
    From http://matplotlib.org/examples/event_handling/legend_picking.html
    Given figure, make clicking on legend elements enable/disable lines.
    Assumes a single legend, so only label lines on a single subplot().
    If multiple subplots have the same number of lines as the legend, then
    clicking will affect all subplots.
    May want to use "figlegend()" or "legend(bbox_to_anchor=(1.1,1.05))"
    """
    # we will set up a dict mapping legend line to original plot line,
    # and enable picking on the legend line
    ax = fig.get_axes()[0]
    # Look for the legend on the first axes; fall back to a figure legend.
    leg = [x for x in ax.get_children() if type(x) == matplotlib.legend.Legend]
    if len(leg) == 0:
        leg = [x for x in fig.get_children() if type(x) == matplotlib.legend.Legend]
    all_leglines = leg[0].get_lines()
    for legline in all_leglines:
        legline.set_picker(10) # how big is click zone on each legend element?

    # Map each legend line to its data line(s).  Every subplot whose line
    # count matches the legend contributes, which is why clicking can affect
    # multiple subplots (per the docstring).
    lined = dict()
    for ax in fig.get_axes():
        lines = ax.get_lines()
        if len(lines) != len(all_leglines):
            continue
        for legline, origline in zip(all_leglines, lines):
            lined.setdefault(legline,[]).append( origline )

    def onpick(event):
        # on the pick event, find the orig line corresponding to the
        # legend proxy line, and toggle the visibility
        legline = event.artist
        for origline in lined[legline]:
            vis = not origline.get_visible()
            origline.set_visible(vis)
        # Change the alpha on the line in the legend so we can see what lines
        # have been toggled
        # NOTE(review): 'vis' carries the last loop value here; this assumes
        # lined[legline] is non-empty, else it would raise NameError.
        if vis:
            legline.set_alpha(1.0)
        else:
            legline.set_alpha(0.2)
        fig.canvas.draw()

    fig.canvas.mpl_connect('pick_event', onpick)


class VdCls(np.ndarray):
    # Combine data and keys from vd2arr for easier usage.
    # A thin ndarray subclass: the raw viewdat data array plus its
    # column-index ('keys') and flag dicts, so columns can be read as
    # attributes (e.g. d.TIME instead of d[:,k.TIME]).

    def __new__(cls, input_array, keys, flags ):
        # From numpy documentation on wrapping ndarray:
        obj = np.asarray(input_array).view(cls)
        # add new attributes:
        obj.k = keys    # column name -> column index
        obj.f = flags   # flag name -> value
        if 'is_data_transposed' in keys:
            # Bookkeeping entry from vd2arr; not a real column, so drop it.
            del keys['is_data_transposed']
        obj.kf = dotdict(keys)  # merged keys+flags, for convenience
        obj.kf.update(flags)
        return obj

    def __array_finalize__(self, obj):
        # From numpy documentation on wrapping ndarray:
        # Called for every view/slice; propagate the index attributes from
        # the source array (they are None when numpy builds from scratch).
        if obj is None: return
        self.k = getattr(obj, 'k', None)
        self.f = getattr(obj, 'f', None)
        self.kf = getattr(obj, 'kf', None)

    def __getattr__(self,attr):
        # Unknown attribute names are resolved as column names, so d.SV is
        # d[:,k.SV] for 2-D data, or d[k.SV] for a single (1-D) row.
        if attr in self.k:
            if len(self.shape) == 1:
                return self[self.k[attr]]
            else:
                return self[:,self.k[attr]]
        raise AttributeError("'VdCls' object has no attribute '%s'"%attr)

    def add_keys(self, key_vals):
        # Register extra column names after construction (updates both the
        # key dict and the merged keys+flags dict).
        self.k.update( key_vals )
        self.kf.update( key_vals )

    def get_sv_list(self, desc=False):
        # Convenience wrapper around the module-level get_sv_list().
        return get_sv_list( self, self.k, desc=desc )

def vd2cls(filename, rec, verbose=True ):
    """Load a file like vd2arr(), but bundle the result into one VdCls object:
      d = vd2cls( 'file.T04', '-d35:19' )
      print( d.SV )  # instead of "print( d[:,k.SV] )"
    Column indexes are available at "d.k", flags at "d.f", and the merged
    set at "d.kf".
    """
    data, keys, flags = vd2arr( filename, rec, combine_flags=False, verbose=verbose )
    return VdCls( data, keys, flags )

def sv_intersect( *args, **kwargs ):
    """Grab a satellite from multiple "vd2cls" data sets.
    Example: md1, md2 = sv_intersect( d1, d2, sv=(14,10) )
             # sv=(sv,sat_type) or sv=(sv,sat_type,freq,track)
             plot( md1.TIME, md1.RANGE )
    md1/md2 are just "views" of d1/d2, so they take minimal memory"""
    sel = kwargs['sv']
    if len(sel) == 2:
        sv, sat_type = sel
        freq = track = None
    else:
        sv, sat_type, freq, track = sel

    # Per-dataset row selections for the requested satellite (and,
    # optionally, the requested signal), plus their time tags.
    all_idx = []
    all_times = []
    for d in args:
        mask = (d.SV == sv) & (d.SAT_TYPE == sat_type)
        if freq is not None:
            mask = mask & (d.TRACK == track) & (d.FREQ == freq)
        rows = where(mask)[0]
        all_idx.append(rows)
        if len(args) > 1:
            all_times.append(d.TIME[rows])

    if len(args) == 1:
        # Nothing to intersect against; return the single selection.
        return args[0][all_idx[0]]

    # Keep only the epochs present in every data set.
    matched = intersect(*all_times)
    return [d[rows[sub]] for d, rows, sub in zip(args, all_idx, matched[1:])]


def snpc_res_fields():
    """Return indexes for StingerNavPC residuals.txt file"""
    # Column names in file order; the index of each name is its column number.
    field_order = (
        'week',
        'TIME',
        'SV',
        'SAT_TYPE',
        'CNO',
        'CSTATE',
        'pr_avail',   # called pr_used in code...
        'pr_res',
        'pr_sig',
        'clk_bias_type',
        'clk_bias',
        'clk_diff',
        'dop_avail',
        'dop_res',
        'clk_drift',
        'clk_drift_diff',
        'EL',
        'used',
        'AZ',
        'pos_err_est',
        'iono_mode',
        'smooth_pr_res',
        'cc_filt_cnt',
        'FDMA',
    )
    k = dotdict({})
    for col, name in enumerate(field_order):
        setattr(k, name, col)
    return k


def get_ttff_all( filenames ):
    """Given list of T0x files, return DataFrame of: [[start_t, meas_dt,pos_dt],...]
    See get_ttff()
    Example:
      from glob import glob
      d = get_ttff_all(glob('/mnt/data_drive/TTFF/data/*20190430*.T04'))
      plot( d.start_t, d.meas_dt )
    """
    import multiprocessing as mp
    import pandas as pd
    # Farm one get_ttff() call per file out to a worker pool; collecting in
    # submission order keeps rows aligned with the sorted filenames.
    with mp.Pool() as pool:
        pending = [pool.apply_async(get_ttff, args=(fname,))
                   for fname in sorted(filenames)]
        rows = [list(job.get()) for job in pending]
    return pd.DataFrame(rows, columns=['start_t','meas_dt','pos_dt'])


def get_ttff( filename ):
    """Given a T0x file, return: (start_gps_time, meas_dt,pos_dt)
    start_t = GPS secs in week that the receiver booted
    meas_dt = # of seconds after boot that first measurement was logged
    pos_dt = # of seconds after boot that first position was logged

    Works by scraping viewdat text output (get_first_str returns the first
    matching line); the split()[...] indexes below depend on the exact
    viewdat line formats.
    """
    # Receiver uptime reported in the first -d16 record.
    up_secs = get_first_str("viewdat -d16 "+filename,"  - SystemUptime =>")
    up_secs = int(up_secs.split()[-1])
    # GPS time tag of that same record.
    gps_secs = get_first_str("viewdat -d16 "+filename,"  - Time tag      :")
    gps_secs = int(gps_secs.split()[-2])
    # Time of the first logged position.
    pos_secs = get_first_str("viewdat -d29,35:2 "+filename,"Week :")
    pos_secs = float(pos_secs.split()[5])
    # Time of the first logged measurement; record type depends on file format.
    if filename.lower().endswith('t02'):
        meas_secs = get_first_str("viewdat -d27 "+filename,"TYPE 27:  SV")
    else:
        meas_secs = get_first_str("viewdat -d35:19 "+filename,"TYPE 35:19 SV")
    meas_secs = float(meas_secs.split()[-1])
    # Boot time = first logged GPS time minus how long the receiver had
    # already been up at that point.
    start_gps_time = gps_secs - up_secs
    pos_dt = pos_secs - start_gps_time
    meas_dt = meas_secs - start_gps_time
    # If the boot straddled a GPS week rollover the deltas come out roughly
    # a week negative; wrap them back into range.
    SECS_IN_WEEK = 60.*60.*24.*7.
    if pos_dt < -SECS_IN_WEEK*.5:
        pos_dt += SECS_IN_WEEK
    if meas_dt < -SECS_IN_WEEK*.5:
        meas_dt += SECS_IN_WEEK
    return start_gps_time, meas_dt, pos_dt


def calc_cpu_reserve(info, extstr=''):
    """Utility function for parse_load_diags()
    Calculates the CPU reserve from all of the background tasks
    Inputs: info - dictionary of the CPU load data
            extstr - Extension string for each CPU
                     Empty string for CPU0, otherwise 'CPUn'
    Returns: The CPU reserve
    """
    # Plain attribute lookup instead of eval() on a built-up string:
    # same fields, no dynamic code execution.
    val = getattr(info.data, 'StingerBIT' + extstr)
    for task in ('CheetahBackgroundTest',   # only in older files?
                 'CoreBuildBackground'):
        try:
            # 'val = val + x' (not 'val += x') so array-valued fields in
            # info.data are not mutated in place as a side effect.
            val = val + getattr(info.data, task + extstr)
        except (AttributeError, KeyError, TypeError):
            # Task not present in this file (or lookup returned None); skip.
            pass
    return val

def parse_load_diags_task_names( filename ):
    """Parse processor load filename info.
    Inputs: filename = T04 file (since 5.20)
    Returns: task_IDs = ID of each task (-1 for placeholder/invalid rows)
             task_names = name of each task (for quick lookup by 35:258 index)
             task_cores = core # of each task
    Example:
    IDs, names,cores = parse_load_diags_task_names("file.T04")
    IDs = [0,1,...]
    names = ['cyg_start','StingerLM',...]
    cores = [0,0,...]
    """
    import shlex
    import subprocess as sp
    import platform

    # Get task names
    cmd = "viewdat -d35:259 "+filename
    if platform.system() == "Linux":
        # Unbuffer viewdat's stdout so the table can be streamed line-by-line.
        cmd = "stdbuf -o0 " + cmd
    pipe = sp.Popen(shlex.split(cmd), stdout=sp.PIPE)
    task_IDs = []
    task_names = []
    task_cores = []
    got_task_names = False
    while True:
        l = pipe.stdout.readline().decode()
        if l == '': break  # EOF
        if not got_task_names:
            # Skip everything up to (and including) the "Task Names:" header.
            if l.startswith('Task Names:'):
                got_task_names = True
            continue
        # Expected row format: <row#> <ID> <core> <name> ...
        w = l.rstrip().split()
        if len(w) < 4:
            break  # end of the task table
        curr_ID = int(w[1])
        if int(w[0]) > 0 and curr_ID == 0:
            # NOTE(review): rows beyond the first that report ID 0 appear to
            # be placeholder/invalid entries and are flagged -1 -- verify.
            curr_ID = -1
        task_IDs.append(curr_ID)
        task_names.append(w[3])
        task_cores.append(int(w[2]))
    return task_IDs, task_names, task_cores

def parse_load_diags( filename, opts="" ):
    """Parse processor load diagnostic info.
    Inputs: filename = T04 file (since 5.20)
            opts = any other viewdat opts (e.g. "-e260000" to end early)
    Example:
    d = parse_load_diags("file.T04")
    plot( d.TIME, d.data.StingerLM )   # plots one line
    plot( d.TIME, d.data.HTTPDworker ) # plots several lines
    (d.core.StingerLM, etc. -> core #)
    (d.RESERVE is an alias for CBT+BIT CPU0)
    (d.RESERVECPUn is an alias for CBT+BIT CPUn, if present)
    """
    IDs, task_names, task_cores = parse_load_diags_task_names(filename)

    # Raw per-task counters; column 0 is time in msec.
    raw = cmd2arr("viewdat -d35:258 -mb " + opts + " " + filename, dtype=float)
    info = dotdict({})
    info.TIME = raw[1:, 0] * 1e-3
    info.data = dotdict({})
    counters = raw[:, 1:]
    for col in range(raw.shape[1] - 1):
        name = task_names[col]
        core = task_cores[col]
        # Columns belonging to this task's core; the per-interval total of
        # their counters normalizes this task's count to a percentage.
        # (It would be better to compute this once per CPU.)
        core_cols = [j for j, c in enumerate(task_cores) if c == core]
        core_total = diff( sum(counters[:, core_cols], axis=1) )
        load = 100. * diff(counters[:, col]) / core_total
        load = load.reshape(len(load), 1)
        # Tasks sharing one name accumulate as extra columns.
        if name in info.data:
            info.data[name] = append(info.data[name], load, axis=1)
        else:
            info.data[name] = load

    info.RESERVE = calc_cpu_reserve(info)
    if max(task_cores) >= 1:
        info.RESERVECPU1 = calc_cpu_reserve(info, 'CPU1')
    if max(task_cores) >= 2:
        info.RESERVECPU2 = calc_cpu_reserve(info, 'CPU2')
    if max(task_cores) >= 3:
        info.RESERVECPU3 = calc_cpu_reserve(info, 'CPU3')

    # Map task name -> core number for quick lookup.
    info.core = dotdict({})
    for name, core in zip(task_names, task_cores):
        info.core[name] = core

    return info


def parse_compact3(filename):
    """The UNAVCO "teqc" program can generate raw data if you give it
    the options "+qc +plot".  For example, it may generate some_file.m12
    for raw MP12 data.  To plot MP12 for GPS SV #10 data you could then do:
      d = parse_compact3("some_file.m12")
      plot( d['G10'][:,0], d['G10'][:,1] )
    """
    from datetime import datetime, timedelta
    from collections import defaultdict
    with open(filename, 'r') as fh:
        if not fh.readline().startswith('COMPACT3'):
            raise RuntimeError("Not a COMPACT3 file")
        header = fh.readline()
        if not header.startswith('GPS_START_TIME'):
            raise RuntimeError("Unexpected 2nd line")
        stamp = header[len('GPS_START_TIME '):].rstrip()
        start_time = datetime.strptime(stamp, '%Y %m %d %H %M %S.%f')
        # Midnight of the most recent Sunday, i.e. the start of the GPS week.
        week_start = start_time.replace(hour=0, minute=0, second=0, microsecond=0) \
                   - timedelta(days=(start_time.isoweekday() % 7))
        secs_in_week = 7 * 24 * 60 * 60
        base_offset = (start_time - week_start).total_seconds() % secs_in_week
        epoch_offset = base_offset
        per_sv = defaultdict(list)
        active_svs = []
        for row_idx, text in enumerate(fh):
            fields = text.rstrip().split()
            if row_idx % 2 == 0:
                # Even rows are epoch headers: "secs_offset sv_count sv_list"
                # (an sv_count of -1 means the SV list is unchanged).
                epoch_offset = float(fields[0]) + base_offset
                if fields[1] != "-1":
                    active_svs = fields[2:]
            else:
                # Odd rows carry one value per active SV; tokens ending in
                # 'S' are stored as NaN.
                for col_idx, token in enumerate(fields):
                    value = nan if token.endswith('S') else float(token)
                    per_sv[active_svs[col_idx]].append( (epoch_offset, value) )
        for sv in per_sv.keys():
            per_sv[sv] = array(per_sv[sv])
        return per_sv


def getSerialNumber(filename,reportRates=False):
  """Returns the serial number from a T02/T04/DAT file
  If reportRates is True, the session measurement and position rates
  are returned as well.
  """
  serial = None
  rx_id = None
  product = None
  pos_rate = None
  meas_rate = None
  have_pos = False
  have_meas = False
  have_id = False

  stream = RunCmdByLine('viewdat -d16 ' + filename)
  while True:
    line = stream.readline()
    if line is None:
      # Note some fields below are not always reported. Hence, wait for a
      # trailing line before exiting.
      break
    if (not reportRates) and have_id:
      break
    if reportRates and have_id and have_meas and have_pos:
      break
    if "SessionMeasIntervalMsecs" in line:
      tokens = line.strip().split(' ')
      print(tokens)
      meas_rate = int(tokens[3])
      have_meas = True
    elif "SessionPosIntervalMsecs" in line:
      tokens = line.strip().split(' ')
      print(tokens)
      pos_rate = int(tokens[3])
      have_pos = True
    elif "ReceiverId" in line:
      tokens = line.rstrip().split('=>')[1].split(',')
      print(tokens)
      serial = tokens[2]
      rx_id = int(tokens[0])
      product = tokens[1]
      have_id = True

  if reportRates:
    print(serial, rx_id, product, meas_rate, pos_rate)
    return (serial, rx_id, product, meas_rate, pos_rate)
  return (serial, rx_id, product)



def getFirmwareVersion(filename,numericDate=False,reportProduct=False):
  """Returns the firmware version number and date from a T02/T04/DAT file
  commas are removed from the date in case this is saved in a CSV file.
  Only part of the data file is processed (up until we get a record 12),
  so this is pretty quick to execute.
  """
  date = ''
  version = None
  subversion = None
  rxType = None

  stream = RunCmdByLine('viewdat -d12 ' + filename)
  while True:
    line = stream.readline()
    if (line is None) or ("GPS week" in line):
      # Note some fields below are not always reported. Hence, wait for a
      # trailing line before exiting.
      break
    if "Firmware date" in line:
      # Note - files from Terrasat (generated from an RT27 -> T0X) probably
      # by a Pivot derivative, do not have the date. Looking at viewdat,
      # this is because the year in the record 12 is 0.
      # Strip commas so the date is safe to embed in a CSV file.
      date = line.rstrip().split(':')[1].strip().replace(',', '')
    elif "Navigation processor" in line:
      subversion = line.rstrip().split(':')[1].strip()
    elif "Release target" in line:
      version = line.rstrip().split(':')[1].strip()
    elif "Receiver type" in line:
      rxType = line.rstrip().split(':')[1].strip()

  # Some released versions (all?) don't report a "Release target"; there
  # the navigation processor field is the version. In betas the navigation
  # processor field reports the beta number, the release target the full
  # version.
  if numericDate:
    monthLUT = {'Jan':1,  'Feb':2,  'Mar':3,
                'Apr':4,  'May':5,  'Jun':6,
                'Jul':7,  'Aug':8,  'Sep':9,
                'Oct':10, 'Nov':11, 'Dec':12}
    parts = date.split()
    fw_year = int(parts[-1])
    fw_month = monthLUT[parts[-3]]
    fw_day = int(parts[-2])
    verStr = "%s %4d-%02d-%02d" % (subversion, fw_year, fw_month, fw_day)
  elif version is None:
    verStr = "%s %s" % (subversion, date)
  else:
    verStr = "%s-%s %s" % (version, subversion, date)

  if reportProduct:
    verStr += ' ' + rxType
  return verStr

def getRefPosition(filename,reportTime=False):
  """Returns the reference position (Lat, Lon, Hgt) from the first
  Lat/Lon/Hgt line of the 35:2 records in a T02/T04/DAT file.
  Inputs: filename = T02/T04/DAT file
          reportTime = also return the Week/Time preceding the position?
  Returns: (Lat, Lon, Hgt), or (Week, Time, Lat, Lon, Hgt) if reportTime.
  NOTE(review): with reportTime=True, Week/Time are only bound if a
  'Week ... Time ...' line appears before the position line; and since the
  end-of-stream break below is commented out, a file with no position
  record would never leave the loop -- confirm RunCmdByLine's
  end-of-stream behavior before relying on this.
  """

  data = RunCmdByLine('viewdat -d35:2 ' + filename)

  Lat = None
  Lon = None
  Hgt = None

  while(True):
    line = data.readline()
    tokens = line.rstrip().split()
    
    if(line is None):
      pass
    #  break
    elif(     (reportTime == True)
          and (len(tokens) == 9)
          and (tokens[0] == 'Week')
          and (tokens[3] == 'Time') ):
      # Week/Time line precedes the position line in the record dump.
      Week = int(tokens[2])
      Time = float(tokens[5])
    elif(     (len(tokens) == 12)
          and (tokens[0] == 'Lat')
          and (tokens[4] == 'Lon')
          and (tokens[8] == 'Hgt')):
      Lat = float(tokens[2])
      Lon = float(tokens[6])
      Hgt = float(tokens[10])
      break


  # Most Terrasat have a 0 record 22, disable for now
  if(False):
    # There are various records that store the reference position. If this
    # fails, extend this function to extract alternate records
    data = RunCmdByLine('viewdat -d22 ' + filename)

    Lat = None
    Lon = None
    Hgt = None

    while(True):
      line = data.readline()
      if(line is None):
        break
      elif("Reference Latitude" in line):
        tokens = line.rstrip().split()
        Lat = float(tokens[3])
      elif("Reference Longitude" in line):
        tokens = line.rstrip().split()
        Lon = float(tokens[3])
      elif("Reference Height" in line):
        tokens = line.rstrip().split()
        Hgt = float(tokens[3])
        break

  if(reportTime == True):
    return(Week,Time,Lat,Lon,Hgt)
  else:
    return(Lat,Lon,Hgt)


def getTime(filename):
  """Returns the time from the first 35:2 position record
  """

  stream = RunCmdByLine('viewdat -d35:2 ' + filename)
  week = []
  tow = []

  while(True):
    line = stream.readline()
    # NOTE: tokens are split before the None check, matching the
    # long-standing behavior of this loop.
    tokens = line.rstrip().split()
    if line is None:
      pass
    elif (len(tokens) == 9) and (tokens[0] == 'Week') and (tokens[3] == 'Time'):
      week = int(tokens[2])
      tow = float(tokens[5])
      break

  return (week, tow)


def getMasks(filename):
  """Extract the elevation and PDOP mask values from the record 30 dump."""
  foundMaskToken = False
  elev_mask = None
  pdop_mask = None

  print("In getMasks",filename)
  stream = RunCmdByLine('viewdat -d30 ' + filename)
  while True:
    line = stream.readline()
    if line is None:
      # Note some fields below are not always reported. Hence, wait for a
      # trailing line before exiting.
      break
    if "MASKS" in line:
      print("Found Masks")
      foundMaskToken = True
    elif foundMaskToken and ("Elevation:" in line):
      tokens = line.rstrip().split(':')
      print("Found Elev",tokens)
      elev_mask = int(tokens[1])
    elif foundMaskToken and ("PDOP" in line):
      tokens = line.rstrip().split(':')
      print("Found PDOP",tokens)
      pdop_mask = int(tokens[1])
      break

  return (elev_mask, pdop_mask)

def plot_fft_data( d, antenna_num=0, sample_point=0, spectrogram=False,
                   plot_max=False, autoscale=False, title='' ):
    """Plot FFT rec 35:25 (or rec 35:24) data interactively.
    antenna_num = 0 or 1
    sample_point = 0:default, 1=pre-miti, 2=post-miti
    spectrogram = show spectrogram?
    plot_max = plot max FFT data by default?
    autoscale = autoscale plots?
    title = plot title
    Usage:
     d=vd2cls("file.T04","-d35:25")
     plot_fft_data( d )
    Note: "." and "," keys can be used to step through time.
    Returns (fft_radio, fft_slider, fft_check); callers should keep these
    references alive so the widgets stay responsive.
    """
    from matplotlib.widgets import Slider, Button, RadioButtons, CheckButtons

    # Restrict to the requested antenna (and sample point, when the
    # record carries a SAMP_PT field).
    d=d[d.ANTNUM==antenna_num]
    if 'SAMP_PT' in d.k:
        d=d[d.SAMP_PT==sample_point]
    has_max = 'MAX_FREQDATA' in d.k
    center_hz = unique(d.CENTERFREQ)
    center_labels = ['%.1f'%f for f in center_hz*1e-6]
    # Column index where the 2048 FFT bins start within each record.
    freq_i = d.k['FREQDATA']
    if has_max:
        freq_i2 = d.k['MAX_FREQDATA']
    secs_in_week=60*60*24*7
    # handle week roll. Try to keep most times in range 0->secs_in_week
    secs_unwrap = around(unwrap(d.SEC/secs_in_week*2*pi)*secs_in_week/(2*pi))
    if median(secs_unwrap) >= secs_in_week:
        secs_unwrap -= secs_in_week

    curr_freq_hz = d.CENTERFREQ[0]
    curr_sec = secs_unwrap[0]

    fig,ax=subplots()
    subplots_adjust(left=0.25, bottom=0.25)
    ax.set_title(title)
    ax.set_xlabel('Freq[MHz]')
    ax.set_ylabel('FFT[dB]')
    ax.grid(True)

    # Main trace plus a second trace for the optional max-FFT overlay.
    plt_line, = plot(0,0)
    plt_line2, = plot(0,0)

    # Radio buttons listing the available center frequencies [MHz].
    rax = axes([0.025, 0.25, 0.15, 0.5])
    i_active = argmin( abs(center_hz - curr_freq_hz) )
    fft_radio = RadioButtons(rax, center_labels, active=i_active)
    if has_max:
        # Checkbox to toggle the max-FFT overlay on/off.
        rax2 = axes([0.0, 0.85, 0.15, 0.15])
        fft_check = CheckButtons(rax2, labels=['max'], actives=[plot_max])
    else:
        fft_check = None

    # Time slider axis below the main plot.
    axtime = axes([0.25, 0.1, 0.65, 0.03])

    if spectrogram:
        # Separate figure for the time-vs-frequency spectrogram.
        fig1,ax1 = subplots()
        ax1.set_title(title)
        ax1.set_xlabel('GPS TOW [sec]')
        ax1.set_ylabel('Frequency [MHz]')

    # It'd be nice to try and detect valstep without hardcoded values...
    if max(diff(d.SEC)) < 30.0:
        valstep = 1.0
    else:
        valstep = 60.0
    fft_slider = Slider(axtime,
                        'Time',
                        min(secs_unwrap),
                        max(secs_unwrap),
                        valinit=min(secs_unwrap),
                        valstep=valstep )

    def update_freq(_):
        # Rebuild both the frequency axis and the traces for the newly
        # selected center frequency; also redraws the spectrogram.
        curr_freq_hz = center_hz[ center_labels.index(fft_radio.value_selected) ]
        d0 = d[d.CENTERFREQ == curr_freq_hz]
        # Record nearest in time to the slider position.
        i = argmin( abs(d0.SEC - fft_slider.val) )
        # Bin offsets -> absolute frequency axis in MHz.
        freq_mhz = (r_[0:2048]-1024)*d0.BINSIZE[i]*1e-3 + curr_freq_hz*1e-6
        plt_line.set_xdata(freq_mhz)
        plt_line.set_ydata(d0[i,freq_i:freq_i+2048])
        if has_max and fft_check.get_status()[0]:
            plt_line2.set_xdata(freq_mhz)
            plt_line2.set_ydata(d0[i,freq_i2:freq_i2+2048])
        else:
            # Hide the overlay by plotting all-NaN.
            plt_line2.set_ydata(np.full(2048,nan))
        ax.relim()
        # Default color/amplitude limits [dB] unless autoscaling.
        vmin=10
        vmax=80
        if autoscale:
            vmin = np.min(d0[:,d0.k.FREQDATA:d0.k.FREQDATA_LAST+1])
            vmax = np.max(d0[:,d0.k.FREQDATA:d0.k.FREQDATA_LAST+1])
        ax.set_xlim(freq_mhz[0],freq_mhz[-1])
        ax.set_ylim(min([nanmin(d0[:,freq_i:]),vmin]),
                    max([nanmax(d0[:,freq_i:]),vmax]))
        fig.canvas.draw_idle()
        if spectrogram:
            X,Y = meshgrid(freq_mhz,d0.SEC)
            pcm = ax1.pcolormesh(Y,X,
                                 d0[:,d0.k.FREQDATA:d0.k.FREQDATA_LAST+1],
                                 cmap='rainbow',vmin=vmin,vmax=vmax)
            # Based on
            # https://stackoverflow.com/questions/19816820/how-to-retrieve-colorbar-instance-from-figure-in-matplotlib
            # Reuse the existing colorbar if one was already created,
            # otherwise attach a new one.
            found_cbar = False
            for col in ax1.collections:
                cb = col.colorbar
                if cb is not None:
                    cb.update_normal(pcm)
                    found_cbar = True
                    break
            if not found_cbar:
                cbar = fig1.colorbar(pcm, ax=ax1)
            ax1.set_ylim(curr_freq_hz*1e-6-25,curr_freq_hz*1e-6+25)
            fig1.canvas.draw_idle()

    def update_time(_):
        # Slider moved: update only the y-data for the record nearest the
        # selected time (the frequency axis is unchanged).
        curr_freq_hz = center_hz[ center_labels.index(fft_radio.value_selected) ]
        d0 = d[d.CENTERFREQ == curr_freq_hz]
        i = argmin( abs(d0.SEC - fft_slider.val) )
        plt_line.set_ydata(d0[i,freq_i:freq_i+2048])
        if has_max and fft_check.get_status()[0]:
            plt_line2.set_ydata(d0[i,freq_i2:freq_i2+2048])
        else:
            plt_line2.set_ydata(np.full(2048,nan))
        ax.relim()
        ax.autoscale_view(True,True,True)
        fig.canvas.draw_idle()

    # Wire the widgets to their callbacks.
    fft_radio.on_clicked(update_freq)
    if has_max:
        fft_check.on_clicked(update_freq)
    fft_slider.on_changed(update_time)

    def on_key(event):
        # "," / "." step the time slider backward / forward one step.
        if event.key == ',':
            fft_slider.set_val(fft_slider.val - valstep)  # Decrease slider value
        elif event.key == '.':
            fft_slider.set_val(fft_slider.val + valstep)  # Increase slider value

    # Connect the key event to the handler
    fig.canvas.mpl_connect('key_release_event', on_key)

    # Draw the initial selection.
    update_freq(fft_radio.value_selected)
    return (fft_radio, fft_slider, fft_check)

def plot_history_fft_data( d, antenna_num=0, do_max=False ):
    """Plot history of FFT rec 35:25 data interactively.
    Shows a frequency-vs-time pcolormesh for one center frequency; the
    radio buttons switch between the center frequencies present in d.
    antenna_num = antenna to plot (0 or 1)
    do_max = use the MAX_FREQDATA bins instead of FREQDATA
    Usage:
     d = vd2cls("file.T04","-d35:25")
     plot_history_fft_data( d )
    Returns the RadioButtons widget (keep a reference so it stays active).
    """
    from matplotlib.widgets import RadioButtons

    # Restrict to the requested antenna.
    d=d[d.ANTNUM==antenna_num]
    center_hz = unique(d.CENTERFREQ)
    center_labels = ['%.1f'%f for f in center_hz*1e-6]
    # Column index where the 2048 FFT bins start within each record.
    if do_max:
        freq_i = d.k['MAX_FREQDATA']
    else:
        freq_i = d.k['FREQDATA']

    curr_freq_hz = d.CENTERFREQ[0]

    fig,ax=subplots()
    subplots_adjust(left=0.35)
    ax.set_xlabel('Freq[MHz]')
    ax.set_ylabel('Time[secs]')
    ax.grid(True)

    # Radio buttons listing the available center frequencies [MHz].
    rax = axes([0.025, 0.25, 0.15, 0.5])
    i_active = argmin( abs(center_hz - curr_freq_hz) )
    fft_radio = RadioButtons(rax, center_labels, active=i_active)

    def update_freq(label):
        # Redraw the mesh for the newly selected center frequency.
        curr_freq_hz = center_hz[ center_labels.index(label) ]
        d0 = d[d.CENTERFREQ==curr_freq_hz,:]
        # Bin offsets -> absolute frequency axis in MHz.
        freq_mhz = (r_[0:2048]-1024)*d0.BINSIZE[0]*1e-3 + curr_freq_hz*1e-6
        X,Y=meshgrid(freq_mhz,d0.SEC)
        # Drop the colorbar left over from the previous selection, if any.
        if 'cb' in dir(ax):
            ax.cb.remove()
        plt_im = ax.pcolormesh(X,Y,d0[:,freq_i:freq_i+2048])
        ax.cb = colorbar(plt_im, ax=ax, orientation='horizontal')
        ax.relim()
        ax.autoscale_view(True,True,True)
        fig.canvas.draw_idle()

    fft_radio.on_clicked(update_freq)

    # Draw the initial selection.
    update_freq(fft_radio.value_selected)
    show()
    return (fft_radio)

def save_compressed_png(filename, dpi=None, make_dir=True):
    """Save current plot as a PNG file. Matplotlib isn't very good at
    compressing PNG image files. Therefore, use PIL functions to
    compress a memory version of the file. The compressed images are
    close to half the size of the Matplotlib generated version.

    Note: this reduces the image to 256 colors, so in images with
    gradients or lots of colors you may notice the difference.

    Inputs: filename = output PNG name
    Optional inputs: dpi = resolution of image (default None)
                     make_dir = create directory if needed? (default True)
    """

    if make_dir:
        target_dir = os.path.abspath(os.path.dirname(filename))
        if not os.path.exists(target_dir):
            os.makedirs(target_dir)

    # Render the current figure into an in-memory PNG first.
    buf = io.BytesIO()
    savefig(buf, format='png', dpi=dpi)
    buf.seek(0)
    # Re-encode through PIL with an adaptive 256-color palette, which
    # compresses much better than matplotlib's own PNG writer.
    img = Image.open(buf)
    if hasattr(Image, 'ADAPTIVE'):
        palette = Image.ADAPTIVE            # older Pillow releases
    else:
        palette = Image.Palette.ADAPTIVE    # newer Pillow releases
    img.convert('RGB').convert('P', palette=palette).save(filename, format='PNG')


# Convert a calendar date to the 1-based day number within its year.
def date_to_doy(year,month,day):
  return (datetime.date(year, month, day) - datetime.date(year, 1, 1)).days + 1

# Convert the year and 1-based day number within the year to (year, month, day).
def doy_to_date(year,doy):
  # ToDo - test for a stupid number?
  result = datetime.date(year, 1, 1) + datetime.timedelta(days=doy - 1)
  return (result.year, result.month, result.day)

# viewdat's UTC conversion does not appear
# to account for leap seconds, so this doesn't either
def gps_to_utc(gps_weeks, gps_sow) -> datetime.datetime:
    """Convert a GPS week / seconds-of-week pair to a datetime (no leap seconds)."""
    gps_epoch = datetime.datetime(1980, 1, 6, 0, 0, 0)
    # Seconds-of-week is rounded to 10 ms before building the timedelta.
    offset = datetime.timedelta(weeks=gps_weeks, seconds=round(gps_sow, 2))
    return gps_epoch + offset

# Non-interactive function that extracts FFT data from a T04 and plots it
def plot_fft_data_png(filename,fig_title,out_fileroot=None,minpower=30,maxpower=45,dpi=300,sample_point=0,antenna_num=0, rec='-d35:25',use_max=False):
  """Extract FFT spectra (rec 35:25 by default) from a T04 file and save one
  spectrogram PNG per known frequency band.
  Inputs: filename = T04 file
          fig_title = title prefix for each plot
  Optional inputs: out_fileroot = root name for the PNGs (default: from filename)
                   minpower/maxpower = color scale limits [dB]
                   dpi = PNG resolution
                   sample_point = 0:default, 1=pre-miti, 2=post-miti
                   antenna_num = 0 or 1
                   rec = viewdat record selector
                   use_max = use the MAX_FREQDATA bins instead of FREQDATA
  Returns: list of dicts with per-band mean/sigma FFT summaries
           (None if filename is not a .t04 file)
  """
  centerFreqs = [1190000000,1240000000,1280000000,1583000000,1590000000,1550000000]
  bandLabels = ['L5','L2','E6','L1','L1','B1']
  summary = []

  if not filename.lower().endswith('.t04'):
    return

  d=vd2cls(filename, rec)

  d=d[d.ANTNUM==antenna_num]
  if 'SAMP_PT' in d.k:
    d=d[d.SAMP_PT==sample_point]

  # Get the start index for the freq data
  if use_max:
    freq_i = d.k['MAX_FREQDATA']
  else:
    freq_i = d.k['FREQDATA']

  for band in range(len(centerFreqs)):
    try:
      # filter the data to extract this band only
      d0 = d[d.CENTERFREQ == centerFreqs[band]]

      if len(d0) == 0:
        continue

      # Mean and sigma over this dataset (often a day), vectorized over
      # the 2048 FFT bins instead of the old per-bin Python loops.
      fft_block = np.asarray(d0[:, freq_i:freq_i+2048], dtype=float)
      avFFT = fft_block.mean(axis=0)
      # One-pass sigma: E[x^2] - E[x]^2
      var = (fft_block * fft_block).mean(axis=0) - avFFT * avFFT
      sigFFT = np.sqrt(var)

      summary.append({'Freq':centerFreqs[band],
                      'FreqStr':bandLabels[band],
                      'NumFFTs':len(d0),
                      'Average':avFFT.tolist(),
                      'Sigma':sigFFT.tolist()})

      if out_fileroot is None:
        # Use the input name
        png_filename = filename[:-4] + '-' + bandLabels[band] + '.png'
      else:
        png_filename = out_fileroot + '-' + bandLabels[band] + '.png'

      # For an unknown reason the last minute from the previous week rolls into the next week,
      # this causes issues with the axis so dump it.
      i = find(d0.SEC < (86400*7 - 60))
      d0 = d0[i]

      curr_freq_hz = d0.CENTERFREQ[0]

      fig,ax=subplots()

      freq_mhz = (np.r_[0:2048]-1024)*d0.BINSIZE[0]*1e-3 + curr_freq_hz*1e-6
      X,Y=meshgrid(freq_mhz,d0.SEC)
      if 'cb' in dir(ax):
        ax.cb.remove()
      plt_im = ax.pcolormesh(X,Y,d0[:,freq_i:freq_i+2048],vmin=minpower,vmax=maxpower,cmap='rainbow')
      ax.cb = colorbar(plt_im, ax=ax, orientation='vertical')
      ax.cb.set_label('Mean Power [dB]')

      ax.set_xlabel('Freq [MHz]')
      ax.set_ylabel('Time [Sec]')
      title(fig_title + ' - ' + bandLabels[band])
      tight_layout()
      save_compressed_png(png_filename, dpi=dpi)
      close()
    except Exception as e:
      # Best effort per band, but report the failure instead of the old
      # bare "except: pass" that silently swallowed every error.
      print("plot_fft_data_png: skipping band %s: %s" % (bandLabels[band], e))

  return(summary)

# Non-interactive function that extracts MSS FFT data from a T04 and plots it
def plot_mss_fft_data_png(
    filename,
    fig_title,
    out_fileroot=None,
    minpower=50,
    maxpower=100,
    dpi=300,
    mss_hw_idx=0,
    use_avg=False,
):
    """Extract MSS FFT spectra from a T04 file and save one spectrogram PNG
    per observed base frequency.
    Inputs: filename = T04 file
            fig_title = title prefix for each plot
    Optional inputs: out_fileroot = root name for the PNGs (default: from filename)
                     minpower/maxpower = color scale limits [dB]
                     dpi = PNG resolution
                     mss_hw_idx = MSS hardware index to select
                     use_avg = use record 35:38 instead of 35:42
    Returns: list of dicts with per-band mean/sigma FFT summaries
             (None if not a .t04 file or the record is absent)
    """
    summary = []

    if not filename.lower().endswith(".t04"):
        return

    if use_avg:
        rec = "-d35:38"
    else:
        rec = "-d35:42"

    try:
        d = vd2cls(filename, rec)
    except ValueError:
        print(f"No MSS FFT data ({rec}) found in file?")
        return

    d = d[d.HW_IDX == mss_hw_idx]

    freq_i = d.k.FREQDATA

    # Get the center frequencies
    centerFreqs = np.unique(d.BASE_FREQ)
    # TODO: Find the nearest known RTX frequency and
    # bin it that way? Not clear what the long-term behavior is here...

    NFFT = 512  # MSS FFTs are all currently 512 points

    for band in centerFreqs:
        d0 = d[d.BASE_FREQ == band]

        if len(d0) == 0:
            continue

        # Mean and sigma over the whole dataset, vectorized across the
        # NFFT bins. BUG FIX: the old loop computed sigFFT for the last
        # bin only, because the sigma lines were accidentally indented
        # outside the per-bin loop (compare plot_fft_data_png, where the
        # same computation sits inside the loop).
        fft_block = np.asarray(d0[:, freq_i : freq_i + NFFT], dtype=float)
        avFFT = fft_block.mean(axis=0)
        # One-pass sigma: E[x^2] - E[x]^2
        var = (fft_block * fft_block).mean(axis=0) - avFFT * avFFT
        sigFFT = np.sqrt(var)

        summary.append(
            {
                "Freq": band,
                "NumFFTs": len(d0),
                "Average": avFFT.tolist(),
                "Sigma": sigFFT.tolist(),
            }
        )

        if out_fileroot is None:
            # Use the input name
            png_filename = filename[:-4] + "-" + str(band) + ".png"
        else:
            png_filename = out_fileroot + "-" + str(band) + ".png"

        curr_freq_hz = band

        # The averaged record reports whole seconds; the raw record msecs.
        if use_avg:
            time = d0.SECS
        else:
            time = d0.MSECS / 1000.0

        fig, ax = subplots()

        freq_mhz = (np.r_[0:NFFT] - NFFT / 2) * d0.BIN_SIZE[
            0
        ] * 1e-3 + curr_freq_hz * 1e-6
        X, Y = meshgrid(freq_mhz, time)
        if "cb" in dir(ax):
            ax.cb.remove()
        plt_im = ax.pcolormesh(
            X,
            Y,
            d0[:, freq_i : freq_i + NFFT],
            vmin=minpower,
            vmax=maxpower,
            cmap="rainbow",
        )
        ax.cb = colorbar(plt_im, ax=ax, orientation="vertical")
        ax.cb.set_label("Mean Power [dB]")

        ax.set_xlabel("Freq [MHz]")
        ax.set_ylabel("Time [Sec]")
        title(fig_title + " - " + str(band))
        tight_layout()
        save_compressed_png(png_filename, dpi=dpi)
        close()

    return summary

def icdf(cx,cy,max_x):
  """Integrate the curve (cx, cy) from cx[0] up to max_x and normalize by
  max_x, extending the final segment flat from the last sample below max_x.
  Inputs: cx = increasing x samples (array-like)
          cy = y samples at cx (array-like)
          max_x = upper integration limit (at least one cx must be below it)
  Returns: trapezoidal integral of cy over [cx[0], max_x], divided by max_x
  """
  cx = np.asarray(cx, dtype=float)
  cy = np.asarray(cy, dtype=float)
  index = np.nonzero(cx < max_x)[0]
  sel_cy = np.concatenate( (cy[index], [cy[index[-1]]]) )
  sel_cx = np.concatenate( (cx[index], [max_x]) )
  # Trapezoidal rule written out explicitly: np.trapz was removed in
  # NumPy 2.0 (renamed np.trapezoid), so avoid it for portability.
  area = 0.5 * np.sum((sel_cx[1:] - sel_cx[:-1]) * (sel_cy[1:] + sel_cy[:-1]))
  return area / max_x

def moving_std(arr, window):
    """
    Compute moving standard deviation over a 1D NumPy array.

    Parameters:
        arr (np.ndarray): Input 1D array.
        window (int): Size of the moving window (must be >= 1).
            NumPy integer types (e.g. np.int64) are accepted too.

    Returns:
        np.ndarray: Array of length len(arr) - window + 1 holding the
        population standard deviation of each window.

    Raises:
        TypeError: If arr is not a NumPy array.
        ValueError: If arr is not 1-D, or window is not a positive
        integer no larger than len(arr).
    """
    # Input validation
    if not isinstance(arr, np.ndarray):
        raise TypeError("Input must be a NumPy array.")
    if arr.ndim != 1:
        raise ValueError("Input array must be 1-dimensional.")
    # Accept numpy integer scalars as well as plain int (the old
    # isinstance(window, int) check rejected np.int64 etc.)
    if not isinstance(window, (int, np.integer)) or window < 1:
        raise ValueError("Window size must be a positive integer.")
    if window > len(arr):
        raise ValueError("Window size cannot be larger than array length.")

    # sliding_window_view is the supported, bounds-checked, copy-free
    # replacement for the hand-rolled as_strided construction.
    windows = np.lib.stride_tricks.sliding_window_view(arr, window)

    # Compute std along the window axis
    return windows.std(axis=1, ddof=0)  # ddof=0 for population std, ddof=1 for sample std

