#!/usr/bin/env python
#
# Generates summary plots and web pages for antenna switch units.
# See 'cuttime' and 'min_ontime' for antenna switch parameters.
#
# Example usage:
#  ./plot_reacq.py "*.T04" 2019-01-01 "My test unit"
# (or see runall_plot_reacq.sh)

import matplotlib
# Allow running headless from the command line
matplotlib.use("agg")

from mutils import *
import shlex
import subprocess as sp
import glob
import os.path
from six import print_

# Antenna switch parameters
# (both values are in seconds: they are subtracted from / compared
# against the epoch-time deltas computed in analyze_t0x)
cuttime = 10      # [s] dead time discounted around each antenna switch
min_ontime = 120  # [s] re-acq deltas at or above this are treated as a new outage

def pipe_data( file_pat ):
    """Yield raw rec27 lines (as bytes) from T0x files.

       file_pat = glob pattern of input files (e.g., '*.T04')

       Each matching file is run through the external 'viewdat' tool and
       its stdout is yielded one line at a time.
    """
    for filename in sorted(glob.glob(file_pat)):
        print_("Processing %s"%filename)
        # Build argv directly (instead of formatting a shell-style string
        # and shlex-splitting it) so filenames containing spaces or quote
        # characters are passed through intact.
        cmd = ['viewdat', '-d27', '-mb', '--dec=100',
               '--translate_rec35_sub19_to_rec27', filename]
        pipe = sp.Popen(cmd, stdout=sp.PIPE)
        try:
            # readline() returns b'' only at EOF, so iter(..., b'')
            # stops exactly when the child closes its stdout.
            for line in iter(pipe.stdout.readline, b''):
                yield line
        finally:
            # Close the pipe and reap the child even if the consumer
            # abandons the generator early.
            pipe.stdout.close()
            pipe.wait()

def analyze_t0x( file_pat, elev_mask, dbg_name, slip_name, out_name ):
    """Analyze antenna switch T0x data.
       file_pat = glob pattern of files to process (e.g., '*.T04')
       elev_mask = lowest elevation angle to process [deg]
       dbg_name = output filename for debug/CDF data (e.g., out-2017-01-01.txt)
       slip_name = output filename for slip after acq data (e.g., out-2017-01-01.slip)
       out_name = output filename for summary report data (e.g., 2017-01-01.rpt)
    """
    # Indices into the per-signal state list stored in Svs:
    k_init_flag = 0
    k_secs = 1
    # minimum re-acq time (to lock point known):
    k_min_time = 2
    # maximum re-acq time (to lock point known):
    k_max_time = 3
    # number of re-acqs (to lock point known):
    k_num = 4
    k_restore_flag = 5
    # slip_cnt when we first get lock-point after an outage:
    k_lp_slip_cnt = 6
    # time when we first get lock-point after an outage:
    k_lp_secs = 7

    Svs = {}              # (Type,SV,band,Track) -> state list (see k_* above)
    TotalTrack = {}       # (Type,band,Track) -> accumulated initial-track delta [s]
    NumTrack = {}         # (Type,band,Track) -> count of initial-track re-acqs
    NumLockPoint = {}     # (Type,band,Track) -> count of lock-point re-acqs
    TotalLockPoint = {}   # (Type,band,Track) -> accumulated lock-point delta [s]

    # Open all three outputs with context managers so they are flushed
    # and closed before the caller reads them back.  (Previously the
    # handles were never closed, relying on interpreter shutdown / GC
    # to flush the buffers.)
    with open(dbg_name,'w') as f_debug, \
         open(slip_name,'w') as f_slip, \
         open(out_name,'w') as f_out:
        lastEpochTime = 0
        firstTrackSecs = -1. # time that first satellite got lock-point after an outage
        for l in pipe_data( file_pat ):
            w = l.strip().split()
            if len(w) < 20:
                continue
            elev = float(w[10])
            if elev < elev_mask:
                continue
            sv_flags = int(w[8])
            if (sv_flags & 0x10) != 0:
                # unhealthy SV
                continue

            # Fields of a rec27 line (whitespace-separated)
            secs = float(w[0])
            SV = int(w[5])
            Type = int(w[7])
            band = int(w[11])
            Track = int(w[12])
            CNO = float(w[16])
            slip_cnt = int(w[18])
            Meas1 = int(w[19])

            Svs_key = (Type,SV,band,Track)
            Svs.setdefault(Svs_key, [0, 0, 1e9, 0, 0, 0, 0, -1.]) # see k_min_time
            if secs != lastEpochTime:
                if secs - lastEpochTime > 2:
                    # Data gap (> 2 s between epochs): flag every known
                    # signal so the re-acq logic below treats the next
                    # lock as following an outage.
                    for key in Svs.keys():
                        Svs[key][k_restore_flag] = 1
                        Svs[key][k_lp_secs] = -1.
                    firstTrackSecs = -1.
                lastEpochTime = secs

            # Output diagnostics for satellites that slip after they get to
            # lock point known.  Usually this shouldn't happen, but it can
            # happen when there is a bug (or the SV is low SNR)
            if Svs[Svs_key][k_lp_secs]>=0. and slip_cnt != Svs[Svs_key][k_lp_slip_cnt]:
                Svs[Svs_key][k_lp_slip_cnt] = slip_cnt

                # compute the approximate time since the antenna on time
                on_delta = secs - firstTrackSecs + cuttime
                if on_delta < min_ontime - cuttime:
                    delta = secs - Svs[Svs_key][k_lp_secs]
                    print_("%.1f %d %d %d %d %.1f %.1f %.1f" %
                           (secs, SV, Type, band, Track, delta, CNO, elev),
                           file=f_slip )

            if Svs[Svs_key][k_init_flag] == 0:
                # First time we've seen this signal: record the epoch
                Svs[Svs_key][k_secs] = secs
                Svs[Svs_key][k_init_flag] = 1 # We've initialized
                continue

            delta = secs - Svs[Svs_key][k_secs] - cuttime
            is_dt1 = (secs != (Svs[Svs_key][k_secs] + 1)) # why?
            is_init2 = (Svs[Svs_key][k_init_flag] == 2)
            is_restore1 = (Svs[Svs_key][k_restore_flag] == 1)
            if not( (is_dt1 or is_init2) and is_restore1 ):
                # Not a re-acq after an outage: just track the epoch
                Svs[Svs_key][k_secs] = secs
                continue

            if delta <= 1.:
                continue

            if delta >= min_ontime:
                # We've exceeded our maximum time so reset everything and try
                # again.

                # Reset the flag for next time
                Svs[Svs_key][k_init_flag] = 1

                # Update the time
                Svs[Svs_key][k_secs] = secs
                continue

            Stats_key = (Type,band,Track)
            TotalLockPoint.setdefault(Stats_key, 0)
            NumLockPoint.setdefault(Stats_key, 0)
            TotalTrack.setdefault(Stats_key, 0)
            NumTrack.setdefault(Stats_key, 0)

            if Meas1 & 0x20:  # have lock point
                print_("%.1f %d %d %d %d %.1f %.1f %.1f" %
                       (secs, SV, Type, band, Track, delta, CNO, elev),
                       file=f_debug )
                if firstTrackSecs < 0:
                    firstTrackSecs = secs
                Svs[Svs_key][k_lp_slip_cnt] = slip_cnt
                Svs[Svs_key][k_lp_secs] = secs
                # Clear the data file gap detector
                Svs[Svs_key][k_restore_flag] = 0
                if delta > Svs[Svs_key][k_max_time]:
                    Svs[Svs_key][k_max_time] = delta
                if delta < Svs[Svs_key][k_min_time]:
                    Svs[Svs_key][k_min_time] = delta
                Svs[Svs_key][k_num] += 1
                TotalLockPoint[Stats_key] += delta
                NumLockPoint[Stats_key] += 1
                # We got lock point on the first epoch so mark this
                # as also getting initial lock on the same epoch
                if Svs[Svs_key][k_init_flag] == 1:
                    TotalTrack[Stats_key] += delta
                    NumTrack[Stats_key] += 1

                # Reset the flag for next time
                Svs[Svs_key][k_init_flag] = 1
                # Update the time
                Svs[Svs_key][k_secs] = secs
            elif Meas1 > 0 and Svs[Svs_key][k_init_flag] == 1:
                TotalTrack[Stats_key] += delta
                NumTrack[Stats_key] += 1

                # Mark that we've already got the initial lock time
                Svs[Svs_key][k_init_flag] = 2

        # ---- Summary report (written to out_name) ----
        print_("Using Elev. Mask = %.0f" % elev_mask,
               file=f_out)
        print_("=====================================================",
               file=f_out, end='')

        # Per-satellite table: one row per (sys, SV, band, track)
        last_Type, last_SV = -1, -1
        for Svs_key in sorted( Svs.keys() ):
            Type,SV,band,Track = Svs_key
            if Svs[Svs_key][k_num] == 0:
                continue

            # New header per system type; blank separator per new SV
            if Type != last_Type:
                print_("\nSYS SV BAND TRK   NUM  MIN  MAX",
                       file=f_out)
            elif SV != last_SV:
                print_("", file=f_out)
            last_Type,last_SV = Type,SV

            print_(" %d %3d    %d  %2d %5ld %4d %4d " %
                   (Type,
                    SV,
                    band,
                    Track,
                    Svs[Svs_key][k_num],
                    Svs[Svs_key][k_min_time],
                    Svs[Svs_key][k_max_time]),
                   file=f_out)

        print_("\n\n",file=f_out)
        print_("=====================================================", file=f_out)
        print_("=====================================================", file=f_out)
        print_("=====================================================", file=f_out)

        # Per-signal-class averages (Track and Lock-Point re-acq times)
        for Type,band,Track in sorted(NumTrack.keys()):
            Stats_key = (Type,band,Track)
            if NumTrack[Stats_key] == 0:
                continue

            av = TotalTrack[Stats_key] / NumTrack[Stats_key]
            print_("Sys=%d / Band=%d / Track Type=%2d (Track)       = %5.1f secs %4d relocks"%
                   (Type,band,Track,av,NumTrack[Stats_key]),
                   file=f_out)
            av = TotalLockPoint[Stats_key] / NumLockPoint[Stats_key]
            print_("Sys=%d / Band=%d / Track Type=%2d (Lock-Point)  = %5.1f secs %4d relocks" %
                   (Type,band,Track,av,NumLockPoint[Stats_key]),
                   file=f_out)
            print_("",file=f_out)

def gen_plots( dbg_filename, slip_filename, prefix ):
    """Generate CDF plots of reacquisition data.
       dbg_filename = dbg_name from analyze_t0x()
       slip_filename = slip_name from analyze_t0x()
       prefix = prefix for generated output plot files

       Returns a list of (cdf, slip-histogram, violin, slip-by-elev)
       PNG filename tuples, one tuple per satellite type.

       NOTE(review): doload/dotdict/VdCls/get_signals/docdf/
       get_cdf_percents/find come from mutils (star-imported); their
       behavior is assumed from usage here — confirm against mutils
       before restructuring.
    """
    d = doload( dbg_filename )
    dslp = doload( slip_filename )
    # Column indices of the records written by analyze_t0x():
    # time, SV, sat type, band(freq), track type, re-acq delta, CNO, elev
    k = dotdict({})
    k.TIME = 0
    k.SV = 1
    k.SAT_TYPE = 2
    k.FREQ = 3
    k.TRACK = 4
    k.dt = 5
    k.snr = 6
    k.elev = 7
    d=VdCls(d,k,[])
    dslp=VdCls(dslp,k,[])
    out_list = []
    d_elev = 10                 # elevation bin width [deg]
    all_elev = r_[5:90:d_elev]  # lower edge of each elevation bin [deg]
    for sat_type in unique(d.SAT_TYPE):
        d0 = d[ d.SAT_TYPE==sat_type ]
        signals = get_signals( d0, desc=True )
        # fig1: acquisition CDF, fig2: slip histogram,
        # fig3: re-acq violin by elevation, fig4: slip count by elevation
        fig1,ax1 = subplots(1,1,figsize=(8,6))
        fig2,ax2 = subplots(1,1,figsize=(8,6))
        fig3,ax3 = subplots(1,1,figsize=(8,6))
        fig4,ax4 = subplots(1,1,figsize=(8,6))
        all_desc = []
        # NOTE(review): integer division under Python 2 (file uses six,
        # so py2 is plausible) — confirm intended bar/violin widths.
        width = d_elev/len(signals[sat_type])
        spacing = d_elev/len(signals[sat_type])
        for n, (freq, track, txt) in enumerate(signals[sat_type]):
            txt = txt.split()
            # Subset for this (frequency band, track type) signal
            d1 = d0[ (d0.FREQ==freq) & (d0.TRACK==track) ]
            cx,cy = docdf( d1.dt )
            # Stagger markers per signal so overlapping curves stay legible
            mrk = (n*.05,.1+n*.01)
            lbl = '%s: %d (50/95%%=%.1f/%.1fs)'%(txt[1],len(d1),get_cdf_percents(cx,cy,[50])[0],get_cdf_percents(cx,cy,[95])[0])
            ax1.plot( cx, 100*cy, '^-', markevery=mrk, fillstyle='none', label=lbl )

            if len(dslp) == 0:
                islp = []
            else:
                islp = find( (dslp.SAT_TYPE==sat_type) & (dslp.FREQ==freq) & (dslp.TRACK==track) )
            lbl = '%s: %d slip(s)'%(txt[1],len(islp))
            if len(islp) == 0:
                # Still plot (empty) so the legend shows "0 slip(s)"
                ax2.plot([],label=lbl)
            else:
                y,binEdges = histogram(dslp.dt[islp], range=[0,min_ontime])
                # Normalize slip counts by the number of re-acqs [%]
                y = y * (100./len(d1))
                binCenters = 0.5*(binEdges[1:]+binEdges[:-1])
                ax2.plot( binCenters, y, '.-', label=lbl )

            # Collect per-elevation-bin re-acq time distributions
            v_data = []
            v_elev = []
            for elev in all_elev:
                d11 = d1[(d1.elev>=elev)&(d1.elev<elev+d_elev)]
                # Skip bins too sparse for a meaningful violin
                if len(d11) < 2: continue
                v_elev.append( elev )
                v_data.append( d11.dt )
            v_elev = array(v_elev)
            if len(v_data) > 0:
                # Offset each signal by n*spacing so violins don't overlap
                violin = ax3.violinplot( v_data, v_elev+n*spacing, widths=width )
                all_desc.append( (violin['bodies'][0], txt[1] ) )

            # Slip counts per elevation bin for this signal
            v_data = []
            dslp_el = dslp.elev[islp]
            for elev in all_elev:
                if len(dslp_el) == 0:
                    v_data.append( 0 )
                    continue
                dslp_el0 = dslp_el[(dslp_el>=elev)&(dslp_el<elev+d_elev)]
                v_data.append( len(dslp_el0) )
            ax4.bar( all_elev+n*spacing, v_data, width=width, label=lbl )

        # NOTE(review): txt deliberately leaks from the last loop
        # iteration above; txt[0] is used as the sat-type label in all
        # titles and output filenames below.
        ax1.grid(True)
        ax1.set_xlim([0,min_ontime])
        ax1.set_xticks(r_[0:min_ontime+1:10])
        ax1.set_yticks(r_[0:100+1:10])
        ax1.set_xlabel('Time [seconds]')
        ax1.set_ylabel('CDF [%]')
        ax1.set_title('%s Acquisition CDF'%txt[0])
        ax1.legend()
        out_filename = '%s_%s.png'%(prefix,txt[0])
        figure(fig1.number)
        savefig(out_filename)
        close(fig1)

        figure(fig2.number)
        ax2.grid(True)
        ax2.set_title('%s Slip Histogram'%txt[0])
        ax2.set_xlim([0,min_ontime])
        ax2.set_ylim([-1,101])
        ax2.set_xlabel('Time after 1st lock point [seconds]')
        ax2.set_ylabel('# of slips / Total # of re-acqs [%]')
        ax2.legend()
        out2_filename = '%s_%s_slip.png'%(prefix,txt[0])
        savefig(out2_filename)
        close(fig2)

        figure(fig3.number)
        ax3.grid(True)
        ax3.set_xticks(all_elev)
        # Clamp y-axis; re-acq times above 15 s are off-scale by design
        ax3.set_ylim([1,15])
        ax3.set_title('%s Acquisition by Elevation (%d deg bins)'%(txt[0],d_elev))
        ax3.set_xlabel('Elev [deg]')
        ax3.set_ylabel('Re-acq time [s] (max 15s)')
        # Violin plots don't auto-legend; use collected (body, name) pairs
        ax3.legend( *zip(*all_desc) )
        out3_filename = '%s_%s_violin.png'%(prefix,txt[0])
        savefig(out3_filename)
        close(fig3)

        figure(fig4.number)
        ax4.grid(True)
        ax4.set_xticks(all_elev)
        ax4.set_title('%s Slips by Elevation (%d deg bins)'%(txt[0],d_elev))
        ax4.set_xlabel('Elev [deg]')
        ax4.set_ylabel('# slips')
        ax4.legend()
        out4_filename = '%s_%s_slip_elev.png'%(prefix,txt[0])
        savefig(out4_filename)
        close(fig4)

        out_list.append((out_filename,out2_filename,out3_filename,out4_filename))

    return out_list

def gen_html( prefix, rpt_filename, fig_list ):
    """Generate HTML file with summary report and all plots.
       prefix = output file name (e.g., "2017-01-01" generates "2017-01-01.html")
       rpt_filename = name of summary report file (e.g., 2017-01-01.rpt)
       fig_list = list of PNG files to include on HTML page
    """
    # Use context managers so the HTML output is flushed/closed and the
    # report handle is released (previously neither file was closed).
    with open('%s.html'%prefix, 'w') as f_out:
        print_('<html><body><pre>',file=f_out)
        with open(rpt_filename,'r') as f_rpt:
            for line in f_rpt:
                # Report lines keep their own '\n'; end='' avoids doubling
                print_(line, file=f_out, end='')
        print_("</pre><br/><hr><br/>", file=f_out)
        # One row of images per sat type, separated by horizontal rules
        for figname_list in fig_list:
            for figname in figname_list:
                print_('<img src="%s"/>' % figname, file=f_out)
            print_('<br/><hr/><br/>', file=f_out)
        print_("</body></html>", file=f_out)

def update_list_all(sys_desc, time_str):
    """Update list_all.txt and generate index.html
       sys_desc = Text description of system for top of index.html
       time_str = year-month-day (e.g., 2017-01-01)
    """
    time_space = time_str.replace('-',' ')

    # Append "year month day" if it is not already in list_all.txt
    # NOTE(review): prefix match, so "2019 1 1" would also match a
    # "2019 1 10" line — assumed dates are consistently formatted.
    found = False
    if os.path.isfile('list_all.txt'):
        with open('list_all.txt','r') as f_list:
            for line in f_list:
                if line.startswith(time_space):
                    found = True
                    break

    if not found:
        with open('list_all.txt','a') as f_list:
            print_(time_space, file=f_list)

    # Regenerate index.html from list_all.txt, but only include
    # files from current year.  (index.html is now opened with a
    # context manager; previously the handle was never closed.)
    curr_year = time_space.split()[0]
    with open('index.html','w') as f_index:
        print_("<html><body>", file=f_index)
        print_("<h1>%s Reacquisition</h1>"%sys_desc, file=f_index)
        print_("<ul>", file=f_index)
        with open('list_all.txt','r') as f_list:
            for line in f_list:
                if not line.startswith(curr_year):
                    continue
                w = line.split()
                # Expect exactly "year month day"; skip malformed lines
                if len(w) < 3:
                    continue
                link_base = '-'.join(w)
                print_("<li><a href=\"%s.html\">%s</a></li>"%(link_base,link_base), file=f_index)
        print_("</ul>", file=f_index)
        print_("</body></html>", file=f_index)


if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser(description='Analyze antenna switch reacquisition performance.')
    parser.add_argument('t0x_file_pat', help='T02/T04 filename (or pattern like "*.T04")')
    parser.add_argument('out_day', help='Info on test day (e.g., "2019-02-06")')
    parser.add_argument('sys_desc', help='Description of test/system (e.g., "Test BD940")')
    args = parser.parse_args()
    t0x_file_pat = args.t0x_file_pat
    out_day = args.out_day
    sys_desc = args.sys_desc
    # Use six's print_ for consistency with the rest of the file (a bare
    # print(...) with multiple args renders as a tuple under Python 2).
    print_( t0x_file_pat, out_day, sys_desc )

    elev_mask = 5  # [deg] discard observations below this elevation
    # Intermediate/output filenames derived from the test day
    dbg_name = 'out-%s.txt' % out_day
    slip_name = 'out-%s.slips' % out_day
    rpt_name = '%s.rpt' % out_day

    # Skip the (slow) raw T0x analysis when all intermediate files from
    # a previous run already exist.
    if not (os.path.isfile(dbg_name)
            and os.path.isfile(slip_name)
            and os.path.isfile(rpt_name)):
        analyze_t0x( t0x_file_pat, elev_mask, dbg_name, slip_name, rpt_name )

    # Generate plots, the per-day HTML page, and the index page
    out_list = gen_plots( dbg_name, slip_name, out_day )

    gen_html( out_day, rpt_name, out_list )

    update_list_all(sys_desc, out_day)
