#
# A set of utility functions that can read a RINEX navigation file
#
# The functions in this file will download 3rd party RINEX NAV (ephemeris) from 
# the internet. Three sources of ephemeris are used:
#
# 1. DLR (the most complete)
# 2. ESA (seems to only cover Europe)
# 3. UCSD (GPS only)
#
# Initially, only one source of ephemeris was used. However, we now merge the RINEX. This
# helps if a station is down, or if there isn't full global coverage. We were very 
# dependent on the DLR data. At least once, this was published late and we had an issue.
# Therefore, another script generates RINEX for a series of Trimble receivers. This
# gets published in this directory tree:
#
# /net/meson/mnt/data_drive/RINEXNAV/TrimbleRINEX/
#
# This script was updated to read all 3 third party RINEX sources and merge them. It then
# tests for Trimble RINEX and merges that data. 
#
# ToDo:
#   * The physical parameters are slightly different for each system - need to check the ICDs and update
#   * Need to update the ephemeris calculation to handle BDS GEOs
#   * GLONASS uses a different ephemeris format, need to implement (including the Runge-kutta integration)
#   * Improve the RINEX merge - can we better use xarray and avoid lots of duplicates?
#

# Utilities for reading RINEX 2 & 3
# git clone https://github.com/geospace-code/georinex
# cd georinex 
# python3 -m pip install -e .
#
# Note: georinex needs python >= 3.7
#
# May need this:
# sudo python3 -m pip install netCDF4

import georinex as gr
import numpy as np
import math
from scipy import optimize
import mutils as m
import os
from ftplib import FTP
import datetime
import xarray

def getEph(nav, svn, t):
  """Find the ephemeris for satellite svn whose Toe is closest to time t.

  nav is an xarray Dataset of merged RINEX navigation data (see
  getRINEXNav).  Because several RINEX sources are merged, a satellite
  may appear under several names: the plain name plus duplicates with
  an '_X' suffix (where X is an integer).

  Returns (eph, MinTime):
    eph     -- m.dotdict of Keplerian ephemeris fields; empty if no
               matching record was found.
    MinTime -- |t - Toe| (same time units as t) of the chosen record,
               or the 99999999 sentinel if nothing matched.
  """
  eph = m.dotdict({})

  ####################################################
  # Get the closest ephemeris record for this SV
  ####################################################
  # find all the different names (due to duplicates)
  # for this satellite
  svList = []
  for thisSV in nav.sv:
    svID = thisSV.item()
    if(svn in svID):
      svList.append(svID)

  # Sort it - DLR should output the regular satellite name
  # (with no _X) unless there are duplicates in the file.
  # To make this more independent of Trimble, favor the DLR
  # data (due to the sort)
  svList = list(set(svList))
  svList.sort()

  t_ind = []
  bestSV = svn
  # Now find the satellite name / time index with the min time error
  MinTime = 99999999
  for thisSV in svList:
    deltaT = t - nav['Toe'].sel(sv=thisSV)
    deltaTMin = np.nanmin(np.abs(deltaT))

    if(deltaTMin < MinTime):
      MinTime = deltaTMin
      # BUGFIX: remember which name variant produced the best match so
      # the data lookup below uses the same variant as the index.
      bestSV = thisSV
      [t_ind] = np.where(deltaTMin == np.abs(deltaT))

  if(len(t_ind)>0):
    index = t_ind[0]

    # BUGFIX: select with bestSV (was svn).  Selecting sv=svn could
    # raise if only an '_X' variant exists in the merged data, and
    # could pair 'index' with a different duplicate's time series than
    # the one that won the search above.
    eph['toe']   = nav['Toe'].sel(sv=bestSV).data[index]
    # Semi-major axis: RINEX stores sqrt(A)
    eph['axis']  = math.pow(nav['sqrtA'].sel(sv=bestSV).data[index],2)
    eph['e']     = nav['Eccentricity'].sel(sv=bestSV).data[index]
    eph['m0']    = nav['M0'].sel(sv=bestSV).data[index]
    eph['omega'] = nav['omega'].sel(sv=bestSV).data[index]
    eph['i0']    = nav['Io'].sel(sv=bestSV).data[index]
    eph['omg0']  = nav['Omega0'].sel(sv=bestSV).data[index]
    eph['dn']    = nav['DeltaN'].sel(sv=bestSV).data[index]
    eph['idot']  = nav['IDOT'].sel(sv=bestSV).data[index]
    eph['odot']  = nav['OmegaDot'].sel(sv=bestSV).data[index]
    eph['cuc']   = nav['Cuc'].sel(sv=bestSV).data[index]
    eph['cus']   = nav['Cus'].sel(sv=bestSV).data[index]
    eph['cic']   = nav['Cic'].sel(sv=bestSV).data[index]
    eph['cis']   = nav['Cis'].sel(sv=bestSV).data[index]
    eph['crc']   = nav['Crc'].sel(sv=bestSV).data[index]
    eph['crs']   = nav['Crs'].sel(sv=bestSV).data[index]

  return(eph,MinTime)

def getSVpos(eph, svn, t):
  """Compute the ECEF position (x, y, z) in meters of a satellite at time t.

  Standard GPS-style Keplerian ephemeris evaluation (IS-GPS-200,
  Table 20-IV style).  eph is the dotdict returned by getEph.  svn is
  currently unused but kept for interface symmetry with the other
  helpers.  BDS GEO and GLONASS need different handling (see ToDo at
  top of file).
  """
  WGS84_SQRT_U = 1.99649818432174e7  # sqrt(mu) for WGS-84 [m^1.5/s]
  omeg_E = 7.2921151467e-5 #Earth's rotation rate [rad/s]

  # Time from ephemeris reference epoch
  # ToDo: Week roll over
  tk = t - eph.toe

  # mean angular motion: n0 = sqrt(mu / axis^3), corrected by DeltaN
  n0 = WGS84_SQRT_U / math.sqrt(math.pow(eph.axis,3))
  n = n0+eph.dn
  #Mean anomaly
  Mk = eph.m0 + tk*n

  ####################################################
  # Keplers equation for eccentric anomaly estimate
  # (solved numerically; the equation is transcendental)
  ####################################################
  def f(x):
    return x-eph.e*math.sin(x)-Mk
  Ek=optimize.newton(f,1)

  a   = math.cos(Ek)
  b   = 1.0 - (eph.e * a)
  sek = math.sin(Ek)

  ####################################################
  # True anomaly (plus argument of perigee => argument of latitude)
  ####################################################
  r1me2     = math.sqrt(1-math.pow(eph.e,2))
  phik      = math.atan2(r1me2 * sek, a - eph.e) + eph.omega

  # Second-harmonic perturbation corrections use 2*phik
  phik2     = phik+phik
  sin_phik2 = math.sin(phik2)
  cos_phik2 = math.cos(phik2)

  del_uk = (eph.cuc * cos_phik2) + (eph.cus * sin_phik2)  # argument of latitude correction
  del_rk = (eph.crc * cos_phik2) + (eph.crs * sin_phik2)  # radius correction
  del_ik = (eph.cic * cos_phik2) + (eph.cis * sin_phik2)  # inclination correction

  # Corrected argument of latitude
  uk    = phik + del_uk
  sinuk = math.sin(uk)
  cosuk = math.cos(uk)

  # Corrected radius and position in the orbital plane
  rk  = eph.axis * b + del_rk
  xkp = rk * cosuk
  ykp = rk * sinuk

  # Corrected inclination
  ik    = eph.i0 + del_ik + eph.idot*tk
  sinik = math.sin(ik)
  cosik = math.cos(ik)

  # Corrected longitude of ascending node, accounting for Earth rotation
  ok = eph.omg0 + (eph.odot - omeg_E)*tk - omeg_E*eph.toe
  sinok = math.sin(ok)
  cosok = math.cos(ok)

  # Rotate orbital-plane coordinates into ECEF
  x = xkp * cosok - ykp * (cosik * sinok)
  y = xkp * sinok + ykp * (cosik * cosok)
  z = ykp * sinik

  return (x,y,z)


def downloadEph(year,dayofYear,thisType):
  """Download (or locate in the shared cache) the day's RINEX NAV files.

  Tries three archives in priority order (DLR via BKG, UCSD, ESA) and
  returns a list of local file paths (priority order) for every source
  that was downloaded or already cached.  thisType is used only to
  pre-create the per-system cache subdirectory.
  """
  # Check for a shared cache
  if(os.path.exists( '/net/meson/mnt/data_drive/RINEXNAV/')):
    CacheDir = '/net/meson/mnt/data_drive/RINEXNAV/'
  else:
    CacheDir = ''

  path = CacheDir + str(year)

  # Deal with year rollover - we store RINEX nav data in this directory
  if not os.path.exists( path ):
    os.makedirs( path )
    # We want to share the cache across uses, so change the permissions
    os.chmod(path, 0o777)

  # We save a version we can read much faster in a per system directory
  # make sure we have the directory
  if not os.path.exists( path +  '/' + thisType):
    os.makedirs( path +  '/' + thisType)
    # We want to share the cache across uses, so change the permissions
    os.chmod(path + '/' + thisType, 0o777)

  # see:
  # https://cddis.nasa.gov/Data_and_Derived_Products/GNSS/broadcast_ephemeris_data.html
  #
  # We may want to eventually check out this tool and see if we can
  # merge additional sources of RINEX if we find that the ESA data is not complete.
  # https://gnss.gfz-potsdam.de/services/gfzrnx
  #
  # Here's another source of the DLR data - needs a login:
  # https://cddis.nasa.gov/archive/gnss/data/daily/2022/brdc/
  #
  archives = [ # BKG archive - DLR - most complete, includes IRNSS - Preferred file
              {'server':'igs.bkg.bund.de',
              'filename':'BRDM00DLR_S_' + str(year) + str(dayofYear).zfill(3) + '0000_01D_MN.rnx.gz',
              'directory':'/IGS/BRDC/' + str(year) + '/' + str(dayofYear).zfill(3),
              'gotData':False},
              # UCSD - GPS only - likely US centric
              {'server':'garner.ucsd.edu',
              'filename':'auto' + str(dayofYear).zfill(3) + '0.' + str(year-2000).zfill(2) + 'n.Z',
              'directory':'/pub/rinex/' + str(year) + '/' + str(dayofYear).zfill(3),
              'gotData':False},
              # ESA - seems to not have good global coverage - no IRNSS
              {'server':'gssc.esa.int',
              'filename':'BRDM00DLR_S_' + str(year) + str(dayofYear).zfill(3) + '0000_01D_MN.rnx.gz',
              'directory':'/cddis/gnss/data/daily/' + str(year) + '/' + str(dayofYear).zfill(3),
              'gotData':False}]
            #    'filename':'BRDC00IGS_R_' + str(year) + str(dayofYear).zfill(3) + '0000_01D_MN.rnx.gz',
            #   'directory':'/gnss/data/daily/' + str(year) + '/brdc',
            #   'gotData':False}]

  # Attempt to download them all.
  for elem in archives:
    if not os.path.exists( path + '/' + elem['filename']):
      print('get: ',elem['filename'])
      # BUGFIX: narrow the bare 'except:' (which also swallowed
      # KeyboardInterrupt/SystemExit) and use with/finally so the
      # local file and FTP connection are always closed.
      try:
        ftp = FTP(elem['server'], timeout = 15)
        try:
          ftp.login('anonymous','Trimble')
          ftp.sendcmd('CWD ' + elem['directory'])
          with open(path + '/' + elem['filename'], 'wb') as ftpfile:
            status = ftp.retrbinary('RETR ' + elem['filename'], ftpfile.write) # retrieve file
          # As this is a shared cache
          os.chmod(path + '/' + elem['filename'], 0o777)
          if str(status).startswith('226'): # comes from ftp status: '226 Transfer complete.'
            print('OK') # print 'OK' if transfer was successful
            elem['gotData'] = True
          else:
            print(status) # if error, print retrbinary's return
        finally:
          ftp.close()
      except Exception:
        print('problem with: ',elem['server'],elem['filename'])
    else:
      # Cached already - accept it only if the file is non-empty
      if(os.path.getsize(path + '/' + elem['filename']) > 0):
        print('Ephemeris file from cache: ',elem['server'],elem['filename'])
        elem['gotData'] = True

  # Get the files we obtained - the archive list is in priority order
  filename = []
  for elem in archives:
    if(elem['gotData'] == True):
      filename.append(path + '/' + elem['filename'])

  return(filename)

# Get the RINEX nav file:
# * we'll download data if we don't have the data
# * we'll read the RINEX file and strip out just the system we want to process
# * reading the text RINEX file is very slow. Georinex supports storing the
#   data in a much faster file format. Once we've read the RINEX data for
#   the first time, we save (per satellite system) and will use that file
#   next time through if the same day / satellite system is parsed. The
#   difference is orders of magnitude (from several seconds to not noticeable).
def getRINEXNav(year,dayofYear,thisType):
  """Load the day's navigation data for one satellite system.

  Downloads third-party RINEX if needed, reads it (caching a much
  faster-to-read netCDF '.nc' copy per satellite system), then merges
  in archived Trimble receiver RINEX when that share is mounted.
  thisType is the single-letter system code ('G', 'E', 'C', ...).

  Returns an xarray Dataset of the merged data, or an empty list if no
  data of the requested type was found.
  """
  print('Download / Check - 3rd party RINEX cache ...')
  filename = downloadEph(year,dayofYear,thisType)
  print('Loading ephemeris - 3rd party RINEX...')

  # ESA and IGS both provide the DLR data - uniquify the name so we
  # only load once.
  filename = list(set(filename))

  print('Available 3rd party',filename)

  navData = []
  # As of 2022-03-28 both the ESA and DLR data is down. The "Auto" data from UCSD is
  # only GPS and has some problems, so throw away the RINEX here if it is only from Auto
  # Should we even bother downloading the UCSD data?
  # BUGFIX: also guard against an empty download list - the original
  # indexed filename[0] unconditionally and crashed with IndexError
  # when every source was down and nothing was cached.
  if( (len(filename) > 1) or ((len(filename) == 1) and (('auto' in filename[0]) == False))):
    # Process the 3rd party ephemeris
    for thisfile in filename:
      try:

        # Having issues with this ephemeris source, skip for now
        if('auto' in thisfile):
          continue

        if(len(thisfile ) > 0):
          # If we have read the data before, a faster to read version will be
          # available with an "nc" file extension. If it exists, read it.
          # Build the fast-cache name by inserting the per-system
          # directory just before the file name.
          tokens = thisfile.split('/')
          fileRoot = ''
          fastFilename = ''
          for i in range(len(tokens)):
            fastFilename += tokens[i]
            if(i==(len(tokens)-1)):
              continue
            elif(i==(len(tokens)-2)):
              fastFilename += '/' + thisType  + '/'
            else:
              fastFilename += '/'

            if(i<=(len(tokens)-1)):
              fileRoot += tokens[i] + '/'

          fastFilename += '.nc'

          nav = []
          if(os.path.exists(fastFilename)):
            print('Loading fast RINEX',fastFilename)
            nav = gr.load(fastFilename)
          else:
            # If the .nc file exists, gr.load() will fail, so erase it
            if(os.path.exists(thisfile + '.nc')):
              os.remove(thisfile + '.nc')
            # Read the RINEX file ... but also save in format
            # that is much faster to read, will have a .nc extension
            print('Loading regular RINEX',thisfile)
            nav = gr.load(thisfile,use=thisType,out=fileRoot)
            if(os.path.exists(thisfile + '.nc')):
              # The file exists - tweak the file name as we are breaking
              # out the data on a per satellite system basis
              os.rename(thisfile + '.nc',fastFilename)
              # As this is a shared cache
              os.chmod(fastFilename, 0o777)

          # Only merge data that is purely the requested system
          if( (len(nav) > 0)  and (len(nav.svtype) == 1) and (nav.svtype[0] == thisType)):
            print('Concating data from',thisfile)
            if(len(navData) > 0):
              navData = xarray.concat( (navData,nav) ,dim='time')
            else:
              navData = nav
            print('Total num Eph',len(navData['Toe']))
          else:
            print('No data of type',thisType,'from',thisfile)

      # BUGFIX: was a bare 'except:' which also hid KeyboardInterrupt;
      # keep best-effort behavior but report what went wrong.
      except Exception as err:
        print('Problem with 3rd party RINEX',err)

  if(os.path.exists( '/net/meson/mnt/data_drive/RINEXNAV/TrimbleRINEX/' + str(year))):
    found = False
    # We are operating on a system that has Trimble RINEX archived
    # We've parsed the 3rd party RINEX, now append with Trimble data.
    rx = [ {'station':'Singapore'},
           {'station':'Australia'},
           {'station':'Moscow'},
           {'station':'Brazil'},
           {'station':'Peru'},
           {'station':'Kearl'},
           {'station':'Stanford'},
           {'station':'Bulgaria'},
           {'station':'Spain'},
           {'station':'Canada'},
           {'station':'Florida'},
           {'station':'Louisiana'},
           {'station':'Maine'},
           {'station':'California'}]

    for thisRX in rx:
      thisStation = thisRX['station']
      filename = '/net/meson/mnt/data_drive/RINEXNAV/TrimbleRINEX/' + str(year) + '/' + thisType + '/' + thisStation + str(dayofYear).zfill(3) + '.' + str(year-2000) + 'p.gz.nc'
      if(os.path.exists(filename)):
        if(found == False):
          print('Getting RINEX Data from Trimble receiver Cache:')
          found = True
        print(filename)
        thisNav = gr.load(filename)
        try:
          if( (len(thisNav.svtype) == 1) and (thisNav.svtype[0] == thisType)):
            if(len(navData) > 0):
              navData = xarray.concat( (navData,thisNav) ,dim='time')
            else:
              navData = thisNav
            print('Total num Eph',len(navData['Toe']))
        except Exception:
          print('Concat issue with Trimble RINEX',filename,thisType)

  return(navData)


def getElev(eph,thisSV,thisTime,xUser,yUser,zUser):
  # Elevation angle from the user's ECEF position to the satellite
  # at thisTime (azimuth from compute_elev_azi is discarded).
  svX, svY, svZ = getSVpos(eph, thisSV, thisTime)
  elev, _ = m.compute_elev_azi(xUser, yUser, zUser, svX, svY, svZ)
  return(elev)

if __name__ == "__main__":
  # Grab RINEX from a couple days ago.
  thisDay = datetime.datetime.now()
  # Backup a couple of days to make sure the data is available
  testDay = thisDay - datetime.timedelta(days=32)

  while(testDay < thisDay):
    DOY = m.date_to_doy(testDay.year,testDay.month,testDay.day)
    print('Getting RINEX for DOY:',DOY)

    #t = 345000
    t = 160000
    #svSys = 'E'
    svSys = 'C'
    # Get GPS data from a RINEX NAV file - downloading if needed
    nav = getRINEXNav(testDay.year,DOY,svSys)
    #for sv in range(1,37):
    for sv in range(1,64):
      (eph,deltaT) = getEph(nav, svSys + str(sv).zfill(2), t)
      print(sv,deltaT,eph)

    testDay += datetime.timedelta(days=1)


