#!/usr/bin/env python
usage="""\
1. Create table in ./OutputResults/all_data.db
2. Takes json output of ProcessResults.py insert into ./OutputResults/all_data.db
3. Load ./OutputResults/RX?-?.json into ./OutputResults/all_data.db
    eg. python3 ProcessResultsStorage.py -d RX3-178_179
"""

######################################################################
# Copyright Trimble 2022
######################################################################

import sqlite3
from tkinter import S
import dateutil.parser as du
import json
from collections import defaultdict
from datetime import datetime
import os
import numpy as np
import numpy
from collections import OrderedDict

# Absolute directory of this script, so the db path works from any cwd.
curr_path = os.path.dirname(os.path.realpath(__file__))

# Temporary change!
# NOTE(review): the marker above suggests this location is provisional --
# confirm the intended permanent path before relying on it.
db_filename = curr_path + '/OutputResults/all_data.db'

def get_db(write=False):
    """Open a connection to the results database.

    write=False opens the file read-only via an SQLite URI; write=True opens
    it read-write and ensures all tables exist before returning.
    Rows are returned as sqlite3.Row so callers can convert them to dicts.
    """
    if not write:
        db = sqlite3.connect('file:%s?mode=ro'%db_filename,uri=True)
        db.row_factory = sqlite3.Row
        return db
    db = sqlite3.connect(db_filename)
    db.row_factory = sqlite3.Row
    create_tables(db.cursor())
    return db


def insert_into(c, table_name, entries):
    """Insert *entries* (dict of column name -> value) as one row of *table_name*.

    Values are bound as SQL parameters instead of being spliced into the
    statement with f-string quoting.  The old form broke (or allowed SQL
    injection) whenever a value contained a single quote, e.g. free-form
    notes text.  Each value is stringified first, matching the old behaviour
    of inserting str(value) quoted literals (SQLite column affinity still
    coerces numeric columns back to integer/real).

    c          -- sqlite3 cursor
    table_name -- table to insert into (trusted, internal identifier)
    entries    -- mapping of column name to value
    """
    columns = list(entries.keys())
    placeholders = ','.join('?' * len(columns))
    c.execute(
        f"INSERT INTO {table_name} ({','.join(columns)}) VALUES ({placeholders})",
        [str(v) for v in entries.values()],
    )

def replace_into(c, table_name, values):
    """REPLACE one row into *table_name*; *values* supplies every column in order."""
    placeholders = ','.join(['?'] * len(values))
    c.execute(f"REPLACE INTO {table_name} VALUES ({placeholders})", values)

def delete_from(c, table_name, values, print_msg=False):
    """Delete every row of *table_name* matching all conditions in *values*.

    values    -- list of SQL condition strings, AND-joined (trusted, internal)
    print_msg -- echo the generated statement before executing it
    """
    statement = f"DELETE FROM {table_name} WHERE {' AND '.join(values)}"
    if print_msg:
        print(statement)
    c.execute(statement)

def select_from(c, table_name, values, print_msg=False):
    """SELECT * from *table_name*, optionally filtered, returning rows as dicts.

    values    -- list of SQL condition strings AND-joined into the WHERE
                 clause; an empty list selects every row
    print_msg -- echo the generated statement before executing it

    Requires the connection's row_factory to be sqlite3.Row so each row can
    be converted to a plain dict.
    """
    if values:
        statement = f"SELECT * FROM {table_name} WHERE {' AND '.join(values)}"
    else:
        statement = f"SELECT * FROM {table_name} "
    if print_msg:
        print(statement)
    c.execute(statement)
    return [dict(row) for row in c.fetchall()]


def get_runs(c, table_name, num_results=250,
             sat_type='GLN', band='L1', track='CA', version=1.1):
    """Return up to *num_results* newest runs from *table_name* as dicts.

    Generalized from the original hard-coded query: the filter values are now
    keyword parameters whose defaults (GLN / L1 / CA / version 1.1) reproduce
    the old behaviour exactly.  Filter values and the LIMIT are bound as SQL
    parameters instead of being interpolated into the statement.

    Rows are ordered newest-first (run_num DESC, rx_num DESC).  Requires the
    connection's row_factory to be sqlite3.Row.
    """
    c.execute(
        f"""
        SELECT * FROM {table_name}
        WHERE
            sat_type = ? AND
            band = ? AND
            track = ? AND
            version = ?
        ORDER BY run_num DESC, rx_num DESC
        LIMIT ?;
        """,
        (sat_type, band, track, version, int(num_results)),
    )
    return [dict(row) for row in c.fetchall()]


def create_tables(c):
    """Create all database tables if they do not already exist.

    Idempotent: safe to call on every writable connection.  Also migrates
    older databases by adding the `valid` column to SD_ALL_RESULTS.

    c -- sqlite3 cursor on a writable connection
    """
    # Table of droplist menu options for Sat. Type, Band, Track
    c.execute("""CREATE TABLE IF NOT EXISTS ALL_COMBOS (
                 scenario_ID text,
                 sat_type text,
                 band text,
                 track text
              ); """)

    # Single difference results. Note we have multiple results for each run_num+rx_num.
    c.execute("""CREATE TABLE IF NOT EXISTS SD_ALL_RESULTS (
                        run_date timestamp,
                        run_num integer,
                        rx_num integer,
                        rx_desc text,
                        rx_type text,
                        rx_filter_type text,
                        custom_fw integer,
                        notes_ID integer,
                        scenario_ID text,
                        default_para integer,
                        freq_type text,
                        version real,
                        sat_type text,
                        band text,
                        track text,
                        sd_cno_mean real,
                        sd_cno_std real,
                        sd_code_mav real,
                        sd_carrier_mav real,
                        sd_cycle_slips integer DEFAULT 0,
                        sd_cycle_slip_rate real DEFAULT 0,
                        fixtype_percentile_95 real DEFAULT 0,
                        fixtype_percentile_99 real DEFAULT 0,
                        fixtype_percentile_999 real DEFAULT 0,
                        err_3d_percentile_95 real DEFAULT 0,
                        err_3d_percentile_99 real DEFAULT 0,
                        err_3d_percentile_999 real DEFAULT 0,
                        sigma_3d_percentile_95 real DEFAULT 0,
                        sigma_3d_percentile_99 real DEFAULT 0,
                        sigma_3d_percentile_999 real DEFAULT 0
                    ); """)
    try:
        # Schema migration: older databases predate the `valid` column.
        c.execute("ALTER TABLE SD_ALL_RESULTS ADD valid integer NOT NULL DEFAULT 1")
        print(' add column valid to SD_ALL_RESULTS with default value 1')
    except sqlite3.OperationalError:
        # Column already exists.  (Narrowed from a bare `except Exception`,
        # which also hid unrelated errors such as a locked database.)
        pass

    # Unique notes_id
    c.execute("""CREATE TABLE IF NOT EXISTS NOTES_DESC (
                 notes_ID integer UNIQUE,
                 notes text
              ); """)

def init_table():
    """Create the database file and all tables if they do not exist yet."""
    with get_db(True) as db:
        create_tables(db.cursor())

def get_combos():
    """Return every distinct (sat_type, band, track) combination in
    SD_ALL_RESULTS as a nested dict: combos[sat_type][band] = [track, ...].
    """
    with get_db(True) as db:
        cur = db.cursor()
        cur.execute("""
            SELECT DISTINCT sat_type, band, track 
            FROM SD_ALL_RESULTS 
            ORDER BY sat_type, band, track;
        """)
        combos = {}
        for row in cur:
            combos.setdefault(row['sat_type'], {}) \
                  .setdefault(row['band'], []) \
                  .append(row['track'])
        return combos
        


def add_data_to_db(all_new_data, overwrite_existing_data=True):
    '''
    Update table SD_ALL_RESULTS with run information and S.D. results.
    Update table NOTES_DESC with new notes.
    Update table ALL_COMBOS with new combo of sat_type, band, and track.
    Input:
        all_new_data = list of dictionary such as from info.output in ProcessResults.py
        overwrite_existing_data = True/False. If True, this function overwrites rows matching rx_num + run_num
    '''

    def insert_into_with_mask(c, table_name, entries, mask, overwrite=False):
        # Insert `entries` unless a row matching `mask` already exists.
        # With overwrite=True a single existing match is replaced; multiple
        # matches mean the mask is ambiguous, so refuse to overwrite.
        rows_from_db = select_from(c, table_name, mask, print_msg=False)
        if len(rows_from_db) == 0:
            insert_into(c, table_name, entries)
        elif len(rows_from_db) == 1:
            if overwrite:
                delete_from(c, table_name, mask)
                insert_into(c, table_name, entries)
        else:
            if overwrite:
                raise RuntimeError('Found multiple rows matching mask. Please refine mask. Error.')

    def find_combos(sd_stats):
        # Flatten the nested stats dict {sat_type: {band: {track: ...}}} into
        # three parallel lists of every (sat_type, band, track) combination.
        # (Hoisted out of the per-entry loop; it captures no loop state.)
        sy, fr, tr = [], [], []
        for k_sy in sd_stats:
            for k_fr in sd_stats[k_sy]:
                for k_tr in sd_stats[k_sy][k_fr]:
                    sy.append(k_sy)
                    fr.append(k_fr)
                    tr.append(k_tr)
        return sy, fr, tr

    with get_db(True) as db:
        c = db.cursor()

        for d in all_new_data:

            if overwrite_existing_data:
                # Remove any previous rows for this rx_num + run_num first.
                mask = []
                mask.append( 'run_num='+str(d['file_base'].split('-')[1]) )
                mask.append( 'rx_num='+str(d['file_base'].split('-')[0].replace('RX','')) )
                delete_from(c, 'SD_ALL_RESULTS', mask, print_msg=True)

            # sat_type_list = ['GPS','GPS','GLN','GLN','Gal']
            # band_list = ['L1','L2','L1','L2','L1']
            # track_list = ['CA','CMCL','CA','CA','BOC_1_1_DP_MBOC']
            sat_type_list,band_list,track_list = find_combos(d['stats']['sd'])

            # If no S.D. results, a row of invalid data is saved to db, so that "Run List" tab could show this run.
            # The cause could be no measurement or jammer was never on.
            # If there are S.D. results, this row will be overwritten later.
            if len(sat_type_list) == 0:
                print("Empty sat_type_list!!! Save an invalid row in db.")
                # 1. Insert notes into table NOTES_DESC
                notes_ID = run_num = int(d['file_base'].split('-')[1])
                d_row = {'notes_ID': notes_ID, 'notes': d['notes']}
                mask = []
                mask.append( 'notes_ID='+str(d_row['notes_ID']) )
                insert_into_with_mask(c, 'NOTES_DESC', d_row, mask, overwrite=True)

                # 2. Insert empty (valid=0) placeholder result into table SD_ALL_RESULTS
                d_row = {
                    'run_date': datetime.strptime(d['date'],'%Y-%m-%d %H:%M:%S'),
                    'run_num': int(d['file_base'].split('-')[1]),
                    'rx_num': int(d['file_base'].split('-')[0].replace('RX','')),
                    'rx_desc': d['unit_desc'],
                    'rx_type': d['unit_type'],
                    'rx_filter_type': d['filter_type'],
                    'custom_fw': int(d['custom_fw']),
                    'notes_ID': int(d['file_base'].split('-')[1]),
                    'scenario_ID': str(d['test_num']),
                    'default_para': int(d['test_para_default']),
                    'freq_type': d['freq_type'],
                    'version': float(d['stats']['version']),
                    'sat_type': 'GPS',
                    'band': 'L1',
                    'track': 'CA',
                    'sd_cno_mean': 0.0,
                    'sd_cno_std': 0.0,
                    'sd_code_mav': 0.0,
                    'sd_carrier_mav': 0.0,
                    'valid': 0,
                    'sd_cycle_slips': 0,
                    'sd_cycle_slip_rate': 0.0,
                    'fixtype_percentile_95': 0.0,
                    'fixtype_percentile_99': 0.0,
                    'fixtype_percentile_999': 0.0,
                    'err_3d_percentile_95': 0.0,
                    'err_3d_percentile_99': 0.0,
                    'err_3d_percentile_999': 0.0,
                    'sigma_3d_percentile_95': 0.0,
                    'sigma_3d_percentile_99': 0.0,
                    'sigma_3d_percentile_999': 0.0
                }

                mask = []
                mask.append( 'run_num='+str(d_row['run_num']) )
                mask.append( 'rx_num='+str(d_row['rx_num']) )
                mask.append( "sat_type='"+str(d_row['sat_type'])+"'" )
                mask.append( "band='"+str(d_row['band'])+"'" )
                mask.append( "track='"+str(d_row['track'])+"'" )
                insert_into_with_mask(c, 'SD_ALL_RESULTS', d_row, mask, overwrite=True)

                # 3. No need to update table ALL_COMBOS

                # BUG FIX: this was `return`, which silently dropped every
                # remaining entry of all_new_data; only skip this one entry.
                continue

            for i, (sat_type, band, track) in enumerate(zip(sat_type_list, band_list, track_list)):
                if sat_type not in d['stats']['sd']:
                    continue
                if band not in d['stats']['sd'][sat_type]:
                    continue
                if track not in d['stats']['sd'][sat_type][band]:
                    continue

                track_stats = d['stats']['sd'][sat_type][band][track]
                if not track_stats:
                    # Skip if no data.  (BUG FIX: the old guard compared
                    # len(first_pair) == 0, which can never be true for a
                    # 2-tuple dict item, and indexing items()[0] on an empty
                    # dict raised IndexError before the guard even ran.)
                    continue

                if d['freq_type'] == 'Constant':
                    # Single step frequency test: use the first time interval.
                    time_interval = list(track_stats.keys())[0]
                    sd_cno_mean = float(track_stats[time_interval]['cno']['mean'])
                    sd_cno_std = float(track_stats[time_interval]['cno']['std'])
                    sd_code_mav = float(track_stats[time_interval]['code_over_sv']['mad'])
                    sd_carrier_mav = float(track_stats[time_interval]['carrier_over_sv']['mad'])
                    sd_cycle_slips = track_stats[time_interval]["total_cycle_slips"]
                    sd_cycle_slip_rate = track_stats[time_interval]["avg_slip_rate"]
                    fixtype_percentile_95, fixtype_percentile_99, fixtype_percentile_999 = d["fixtype_percentiles"]
                    err_3d_percentile_95, err_3d_percentile_99, err_3d_percentile_999 = d["err_3d_percentiles"]
                    sigma_3d_percentile_95, sigma_3d_percentile_99, sigma_3d_percentile_999 = d["sigma_3d_percentiles"]

                    if int(track_stats[time_interval]['n']) == 0:
                        # Keep one empty novatel row (i == 0) so the run still shows up.
                        if d['unit_type'] != 'novatel' or (d['unit_type'] == 'novatel' and i>0):
                            # Skip if len data = 0
                            continue
                else:
                    # Stepped or customized frequency test: aggregate across
                    # all time intervals.
                    list_sd_cno_mean = []
                    list_sd_code_mad = []
                    list_sd_carrier_mad = []
                    num_meas = 0
                    for time_interval, val in track_stats.items():
                        list_sd_cno_mean.append(val['cno']['mean'])
                        list_sd_code_mad.append(val['code']['mad'])
                        list_sd_carrier_mad.append(val['carrier_over_sv']['mad'])
                        num_meas += int(val['n'])
                    sd_cno_mean = numpy.mean(list_sd_cno_mean)
                    sd_cno_std = numpy.std(list_sd_cno_mean)
                    sd_code_mav = numpy.mean(list_sd_code_mad)
                    sd_carrier_mav = numpy.mean(list_sd_carrier_mad)
                    # Cycle-slip stats come from the last iterated interval only.
                    sd_cycle_slips = track_stats[time_interval]["total_cycle_slips"]
                    sd_cycle_slip_rate = track_stats[time_interval]["avg_slip_rate"]
                    fixtype_percentile_95, fixtype_percentile_99, fixtype_percentile_999 = d["fixtype_percentiles"]
                    err_3d_percentile_95, err_3d_percentile_99, err_3d_percentile_999 = d["err_3d_percentiles"]
                    sigma_3d_percentile_95, sigma_3d_percentile_99, sigma_3d_percentile_999 = d["sigma_3d_percentiles"]
                    if num_meas == 0:
                        if d['unit_type'] != 'novatel' or (d['unit_type'] == 'novatel' and i>0):
                            # Skip if len data = 0
                            continue

                # After loading data from json:
                # 1. Insert notes to table NOTES_DESC
                notes_ID = run_num = int(d['file_base'].split('-')[1])
                d_row = {'notes_ID': notes_ID,
                         'notes': d['notes']}

                mask = []
                mask.append( 'notes_ID='+str(d_row['notes_ID']) )
                insert_into_with_mask(c, 'NOTES_DESC', d_row, mask, overwrite=True)

                # 2. Insert all results to table SD_ALL_RESULTS
                d_row = {
                    'run_date': datetime.strptime(d['date'],'%Y-%m-%d %H:%M:%S'),
                    'run_num': int(d['file_base'].split('-')[1]),
                    'rx_num': int(d['file_base'].split('-')[0].replace('RX','')),
                    'rx_desc': d['unit_desc'],
                    'rx_type': d['unit_type'],
                    'rx_filter_type': d['filter_type'],
                    'custom_fw': int(d['custom_fw']),
                    'notes_ID': int(d['file_base'].split('-')[1]),
                    'scenario_ID': str(d['test_num']),
                    'default_para': int(d['test_para_default']),
                    'freq_type': d['freq_type'],
                    'version': float(d['stats']['version']),
                    'sat_type': sat_type,
                    'band': band,
                    'track': track,
                    'sd_cno_mean': sd_cno_mean,
                    'sd_cno_std': sd_cno_std,
                    'sd_code_mav': sd_code_mav,
                    'sd_carrier_mav': sd_carrier_mav,
                    'valid': 1,
                    'sd_cycle_slips': sd_cycle_slips,
                    'sd_cycle_slip_rate': sd_cycle_slip_rate,
                    'fixtype_percentile_95': fixtype_percentile_95,
                    'fixtype_percentile_99': fixtype_percentile_99,
                    'fixtype_percentile_999': fixtype_percentile_999,
                    'err_3d_percentile_95': err_3d_percentile_95,
                    'err_3d_percentile_99': err_3d_percentile_99,
                    'err_3d_percentile_999': err_3d_percentile_999,
                    'sigma_3d_percentile_95': sigma_3d_percentile_95,
                    'sigma_3d_percentile_99': sigma_3d_percentile_99,
                    'sigma_3d_percentile_999': sigma_3d_percentile_999
                }

                mask = []
                mask.append( 'run_num='+str(d_row['run_num']) )
                mask.append( 'rx_num='+str(d_row['rx_num']) )
                mask.append( "sat_type='"+str(d_row['sat_type'])+"'" )
                mask.append( "band='"+str(d_row['band'])+"'" )
                mask.append( "track='"+str(d_row['track'])+"'" )
                insert_into_with_mask(c, 'SD_ALL_RESULTS', d_row, mask, overwrite=True)

                # 3. Update table ALL_COMBOS
                d_row = {'scenario_ID': str(d['test_num']),
                         'sat_type': sat_type,
                         'band': band,
                         'track': track
                }

                mask = []
                mask.append( "scenario_ID='"+str(d_row['scenario_ID'])+"'" )
                mask.append( "sat_type='"+str(d_row['sat_type'])+"'" )
                mask.append( "band='"+str(d_row['band'])+"'" )
                mask.append( "track='"+str(d_row['track'])+"'" )
                insert_into_with_mask(c, 'ALL_COMBOS', d_row, mask, overwrite=False)


def read_rows_from_db(d, mask):
    '''
    Select rows from db filtered by mask.
    Parse each row and insert data into d[key=(rx_desc)+(rx_filter_type)]
    for app.py to pass to results.html to plot.
    Inputs:
        d = {} 
        mask = list of string = list of conditions to SELECT
    '''
    with get_db() as db:
        cursor = db.cursor()

        for row in select_from(cursor, 'SD_ALL_RESULTS', mask):
            raw_filter = row['rx_filter_type']
            # Strip the enum-style prefix; avoids key collisions when both
            # filter types are "None".
            filter_type = raw_filter.split('.')[1] if '.' in raw_filter else raw_filter
            unit_key = str(row['rx_desc']) + ':' + filter_type

            if unit_key not in d:
                d[unit_key] = {'text': [], 'x': [], 'filter_type': [], 'marker': []}
            unit = d[unit_key]

            unit['x'].append(row['run_date'])
            unit['text'].append(f"RX{row['rx_num']}-{row['run_num']}")
            unit['filter_type'].append(filter_type)

            # Marker shape encodes the firmware flavour.
            unit['marker'].append('cross' if row['custom_fw'] else 'circle')

            series = unit.setdefault('data', OrderedDict())
            cno = series.setdefault('cno', OrderedDict())
            cno.setdefault('mean', []).append(float(row['sd_cno_mean']))
            cno.setdefault('std', []).append(float(row['sd_cno_std']))
            series.setdefault('code', []).append(float(row['sd_code_mav']))
            series.setdefault('carrier', []).append(float(row['sd_carrier_mav']))


def read_rows_from_db_compare(d, mask):
    """Select rows from SD_ALL_RESULTS filtered by *mask* and group them into
    d['RX<rx_num>'] with parallel lists of metadata and metric series, for the
    comparison view."""
    # The nine percentile columns share a naming scheme; generate the keys.
    percentile_keys = [f'{prefix}_percentile_{suffix}'
                       for prefix in ('fixtype', 'err_3d', 'sigma_3d')
                       for suffix in ('95', '99', '999')]

    with get_db() as db:
        cursor = db.cursor()

        for row in select_from(cursor, 'SD_ALL_RESULTS', mask):
            raw_filter = row['rx_filter_type']
            filter_type = raw_filter.split('.')[1] if '.' in raw_filter else raw_filter
            rx_key = f"RX{row['rx_num']}"

            if rx_key not in d:
                entry = {name: [] for name in
                         ('scenario', 'text', 'filter_type', 'sat', 'band', 'track')}
                entry['data'] = {'cno': {'mean': [], 'std': []},
                                 'code': [],
                                 'carrier': [],
                                 'cycle_slips': [],
                                 'cycle_slip_rate': []}
                for key in percentile_keys:
                    entry['data'][key] = []
                d[rx_key] = entry

            grouped = d[rx_key]
            grouped['scenario'].append(row['scenario_ID'])
            grouped['text'].append(f"{rx_key}-{row['run_num']}")
            grouped['filter_type'].append(filter_type)
            grouped['sat'].append(row['sat_type'])
            grouped['track'].append(row['track'])
            grouped['band'].append(row['band'])
            grouped['data']['cno']['mean'].append(float(row['sd_cno_mean']))
            grouped['data']['cno']['std'].append(float(row['sd_cno_std']))
            grouped['data']['code'].append(float(row['sd_code_mav']))
            grouped['data']['carrier'].append(float(row['sd_carrier_mav']))
            grouped['data']['cycle_slips'].append(int(row['sd_cycle_slips']))
            grouped['data']['cycle_slip_rate'].append(float(row['sd_cycle_slip_rate']))
            for key in percentile_keys:
                grouped['data'][key].append(float(row[key]))



###############
# For testing #
###############

def load_output_json(fname):
    '''
    Load json file as dictionary and return it.

    Returns {} when the file does not exist or does not contain valid JSON.
    Input:
        fname = string, json file name
    '''
    d = {}
    if os.path.isfile(fname):
        with open(fname, "r") as jsonFile:
            try:
                d = json.load(jsonFile)
            except ValueError:
                # json.JSONDecodeError subclasses ValueError.  (Narrowed from
                # a bare `except:` that also swallowed KeyboardInterrupt and
                # SystemExit.)
                print("Failed to load file "+fname)
    return d

    
def init_db_with_rx_run(rx, run_list):
    '''
    Load the RX<rx>-<run_num>.json files and insert their data into the db
    (tables are created on demand by add_data_to_db's writable connection).
    Inputs:
        rx = receiver number
        run_list = list, list of run_num
    '''
    # FIX: the original opened a second writable connection and cursor here
    # that were never used, and held them open while add_data_to_db opened
    # its own connection to the same file.
    ds = []
    for run_num in run_list:
        fname = 'OutputResults/RX'+str(rx)+'-'+str(run_num)+'.json'
        print('Load file', fname)
        ds.append(load_output_json(fname))

    add_data_to_db(ds)


def get_rx_run_num(s):
    '''
    Parse input string eg. RX3-200_202 and return 3, 200, 202.
    A single run number (eg. RX3-200) yields (rx, 200, 201) so callers can
    feed the pair straight into range().
    '''
    prefix, runs = s.split('-')[0], s.split('-')[1]
    rx = int(prefix.replace('RX', ''))
    bounds = runs.split('_')
    if len(bounds) == 1:
        rnum_start = int(bounds[0])
        rnum_end = rnum_start + 1
    elif len(bounds) == 2:
        rnum_start, rnum_end = int(bounds[0]), int(bounds[1])
    else:
        raise RuntimeError('Wrong input arg. Error.')
    return rx, rnum_start, rnum_end


if __name__ == "__main__":
    import requests
    import argparse

    parser = argparse.ArgumentParser(formatter_class=argparse.RawTextHelpFormatter,
                                     description=usage)
    parser.add_argument('-l','--load', help='Load db with arg=RX?-? from ./OutputResults/RX?-?.json',)
    parser.add_argument('-p','--print', help='Print rows from db with arg=RX?-?',)
    args = parser.parse_args()


    # 1. Create Tables
    print('Create tables')
    init_table()
    
    if args.load:
        # eg. args.load = 'RX3-120_121'
        print('Input:', args.load)
        rx, rnum_start, rnum_end = get_rx_run_num(args.load)
        init_db_with_rx_run(rx, range(rnum_start,rnum_end))
    else:
        print('Do nothing.')

    if args.print:
        print('Input:', args.print)

        rx, rnum_start, rnum_end = get_rx_run_num(args.print)

        want_scn_num = None
        # want_scn_num = 7
        want_st_type = 'GPS'
        want_band = 'L1'
        want_track = 'CA'
        want_run_para = 'All'
        want_filters = []
        #  want_filters = ['None', 'FIR + FFT']
        # want_filters = ['FIR + FFT']
        
        mask = []
        mask.append( 'rx_num='+str(rx))
        mask.append( '(run_num>='+str(rnum_start)+' AND '+'run_num<'+str(rnum_end)+')')
        mask.append( 'version=1.1')
        if want_scn_num is not None: mask.append( 'scenario_ID='+'\''+str(want_scn_num)+'\'' )
        mask.append( 'sat_type='+'\''+str(want_st_type)+'\'')
        mask.append( 'band='+'\''+str(want_band)+'\'')
        mask.append( 'track='+'\''+str(want_track)+'\'')
        if want_run_para == 'Default':
            mask.append( 'default_para=\'1\'' )
        elif want_run_para == 'Modified':
            mask.append( 'default_para=\'0\'' )
        if len(want_filters) > 0:
            mask_by_filter_type = '(' + ' OR '.join(['rx_filter_type='+'\''+x+'\'' for x in want_filters]) + ')' 
            mask.append( mask_by_filter_type )

        d = {}
        read_rows_from_db(d, mask)
        
        print('')
        print(json.dumps(d, indent=2))

        print('List of results:')
        for k,v in d.items():
            print(k,v['text'])
        
