@@ -1,5 +1,4 @@
 #!/usr/bin/env python3
-import json
 import math
 import os
 import time
@@ -17,7 +16,7 @@ from common.params import Params, put_nonblocking
 from laika import AstroDog
 from laika.constants import SECS_IN_HR, SECS_IN_MIN
 from laika.downloader import DownloadFailed
-from laika.ephemeris import Ephemeris, EphemerisType, convert_ublox_gps_ephem, convert_ublox_glonass_ephem, parse_qcom_ephem
+from laika.ephemeris import EphemerisType, GPSEphemeris, GLONASSEphemeris, ephemeris_structs, parse_qcom_ephem
 from laika.gps_time import GPSTime
 from laika.helpers import ConstellationId
 from laika.raw_gnss import GNSSMeasurement, correct_measurements, process_measurements, read_raw_ublox, read_raw_qcom
@@ -51,10 +50,11 @@ class Laikad:
     self.auto_fetch_navs = auto_fetch_navs
     self.orbit_fetch_executor: Optional[ProcessPoolExecutor] = None
     self.orbit_fetch_future: Optional[Future] = None
 
-    self.last_fetch_navs_t = None
-    self.got_first_gnss_msg = False
-    self.last_cached_t = None
+
+    self.last_report_time = GPSTime(0, 0)
+    self.last_fetch_navs_t = GPSTime(0, 0)
+    self.last_cached_t = GPSTime(0, 0)
     self.save_ephemeris = save_ephemeris
     self.load_cache()
 
@@ -69,31 +69,34 @@ class Laikad:
     if not self.save_ephemeris:
       return
 
-    cache = Params().get(EPHEMERIS_CACHE)
-    if not cache:
+    cache_bytes = Params().get(EPHEMERIS_CACHE)
+    if not cache_bytes:
       return
 
+    nav_dict = {}
     try:
-      cache = json.loads(cache, object_hook=deserialize_hook)
-      if cache['version'] == CACHE_VERSION:
-        self.astro_dog.add_navs(cache['navs'])
-        self.last_fetch_navs_t = cache['last_fetch_navs_t']
-      else:
-        cache['navs'] = {}
-    except json.decoder.JSONDecodeError:
+      ephem_cache = ephemeris_structs.EphemerisCache.from_bytes(cache_bytes)
+      glonass_navs = [GLONASSEphemeris(data_struct) for data_struct in ephem_cache.glonassEphemerides]
+      gps_navs = [GPSEphemeris(data_struct) for data_struct in ephem_cache.gpsEphemerides]
+      for e in sum([glonass_navs, gps_navs], []):
+        if e.prn not in nav_dict:
+          nav_dict[e.prn] = []
+        nav_dict[e.prn].append(e)
+      self.astro_dog.add_navs(nav_dict)
+    except Exception:
       cloudlog.exception("Error parsing cache")
-    timestamp = self.last_fetch_navs_t.as_datetime() if self.last_fetch_navs_t is not None else 'Nan'
     cloudlog.debug(
-      f"Loaded navs ({sum([len(v) for v in cache['navs']])}) cache with timestamp: {timestamp}. Unique orbit and nav sats: {list(cache['navs'].keys())} " +
-      f"With time range: {[f'{start.as_datetime()}, {end.as_datetime()}' for (start,end) in self.astro_dog.navs_fetched_times._ranges]}")
-
-  def cache_ephemeris(self, t: GPSTime):
-    if self.save_ephemeris and (self.last_cached_t is None or t - self.last_cached_t > SECS_IN_MIN):
-      put_nonblocking(EPHEMERIS_CACHE, json.dumps(
-        {'version': CACHE_VERSION, 'last_fetch_navs_t': self.last_fetch_navs_t, 'navs': self.astro_dog.navs},
-        cls=CacheSerializer))
+      f"Loaded navs ({sum([len(nav_dict[prn]) for prn in nav_dict.keys()])}). Unique orbit and nav sats: {list(nav_dict.keys())} ")
+
+  def cache_ephemeris(self):
+
+    if self.save_ephemeris and (self.last_report_time - self.last_cached_t > SECS_IN_MIN):
+      nav_list: List = sum([v for k,v in self.astro_dog.navs.items()], [])
+      ephem_cache = ephemeris_structs.EphemerisCache(**{'glonassEphemerides': [e.data for e in nav_list if e.prn[0]=='R'],
+                                                        'gpsEphemerides': [e.data for e in nav_list if e.prn[0]=='G']})
+      put_nonblocking(EPHEMERIS_CACHE, ephem_cache.to_bytes())
       cloudlog.debug("Cache saved")
-      self.last_cached_t = t
+      self.last_cached_t = self.last_report_time
 
   def get_lsq_fix(self, t, measurements):
     if self.last_fix_t is None or abs(self.last_fix_t - t) > 0:
@@ -139,6 +142,7 @@ class Laikad:
       week = report.gpsWeek
       tow = report.rcvTow
       new_meas = read_raw_ublox(report)
+    self.last_report_time = GPSTime(week, tow)
     return week, tow, new_meas
 
   def is_ephemeris(self, gnss_msg):
@@ -155,14 +159,16 @@ class Laikad:
       ephem = parse_qcom_ephem(gnss_msg.drSvPoly, self.gps_week)
     else:
       if gnss_msg.which() == 'ephemeris':
-        ephem = convert_ublox_gps_ephem(gnss_msg.ephemeris)
+        data_struct = ephemeris_structs.Ephemeris.new_message(**gnss_msg.ephemeris.to_dict())
+        ephem = GPSEphemeris(data_struct)
       elif gnss_msg.which() == 'glonassEphemeris':
-        ephem = convert_ublox_glonass_ephem(gnss_msg.glonassEphemeris)
+        data_struct = ephemeris_structs.GlonassEphemeris.new_message(**gnss_msg.glonassEphemeris.to_dict())
+        ephem = GLONASSEphemeris(data_struct)
       else:
         cloudlog.error(f"Unsupported ephemeris type: {gnss_msg.which()}")
         return
     self.astro_dog.add_navs({ephem.prn: [ephem]})
-    self.cache_ephemeris(t=ephem.epoch)
+    self.cache_ephemeris()
 
   def process_report(self, new_meas, t):
     # Filter measurements with unexpected pseudoranges for GPS and GLONASS satellites
@@ -272,7 +278,7 @@ class Laikad:
 
   def fetch_navs(self, t: GPSTime, block):
     # Download new navs if 1 hour of navs data left
-    if t + SECS_IN_HR not in self.astro_dog.navs_fetched_times and (self.last_fetch_navs_t is None or abs(t - self.last_fetch_navs_t) > SECS_IN_MIN):
+    if t + SECS_IN_HR not in self.astro_dog.navs_fetched_times and (abs(t - self.last_fetch_navs_t) > SECS_IN_MIN):
       astro_dog_vars = self.astro_dog.valid_const, self.astro_dog.auto_update, self.astro_dog.valid_ephem_types, self.astro_dog.cache_dir
       ret = None
 
@@ -290,7 +296,7 @@ class Laikad:
           self.last_fetch_navs_t = ret[2]
         else:
           self.astro_dog.navs, self.astro_dog.navs_fetched_times, self.last_fetch_navs_t = ret
-          self.cache_ephemeris(t=t)
+          self.cache_ephemeris()
 
 
 def get_orbit_data(t: GPSTime, valid_const, auto_update, valid_ephem_types, cache_dir):
@@ -360,26 +366,6 @@ def kf_add_observations(gnss_kf: GNSSKalman, t: float, measurements: List[GNSSMe
       gnss_kf.predict_and_observe(t, kind, data)
 
 
-class CacheSerializer(json.JSONEncoder):
-
-  def default(self, o):
-    if isinstance(o, Ephemeris):
-      return o.to_json()
-    if isinstance(o, GPSTime):
-      return o.__dict__
-    if isinstance(o, np.ndarray):
-      return o.tolist()
-    return json.JSONEncoder.default(self, o)
-
-
-def deserialize_hook(dct):
-  if 'ephemeris' in dct:
-    return Ephemeris.from_json(dct)
-  if 'week' in dct:
-    return GPSTime(dct['week'], dct['tow'])
-  return dct
-
-
 class EphemerisSourceType(IntEnum):
   nav = 0
   nasaUltraRapid = 1
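Appendix (not part of the patch): a minimal round-trip sketch of the capnp-backed ephemeris cache this diff switches to, using only the calls that appear above (ephemeris_structs.EphemerisCache, to_bytes/from_bytes, GPSEphemeris, GLONASSEphemeris). The helper names serialize_navs and deserialize_navs are illustrative and do not exist in laikad.py.

# Illustrative sketch only; mirrors cache_ephemeris() and load_cache() from the diff.
from laika.ephemeris import GPSEphemeris, GLONASSEphemeris, ephemeris_structs

def serialize_navs(navs: dict) -> bytes:
  # Flatten {prn: [ephemeris, ...]} and split by constellation, as cache_ephemeris() does.
  nav_list = sum(navs.values(), [])
  ephem_cache = ephemeris_structs.EphemerisCache(**{
    'glonassEphemerides': [e.data for e in nav_list if e.prn[0] == 'R'],
    'gpsEphemerides': [e.data for e in nav_list if e.prn[0] == 'G'],
  })
  return ephem_cache.to_bytes()

def deserialize_navs(cache_bytes: bytes) -> dict:
  # Rebuild {prn: [ephemeris, ...]} from the serialized cache, as load_cache() does.
  ephem_cache = ephemeris_structs.EphemerisCache.from_bytes(cache_bytes)
  navs = [GLONASSEphemeris(d) for d in ephem_cache.glonassEphemerides]
  navs += [GPSEphemeris(d) for d in ephem_cache.gpsEphemerides]
  nav_dict: dict = {}
  for e in navs:
    nav_dict.setdefault(e.prn, []).append(e)
  return nav_dict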