|
|
@ -1,107 +1,34 @@ |
|
|
|
import threading |
|
|
|
import threading |
|
|
|
import numpy as np |
|
|
|
import numpy as np |
|
|
|
from abc import ABC, abstractmethod |
|
|
|
from collections.abc import Callable |
|
|
|
from typing import Any |
|
|
|
|
|
|
|
from openpilot.common.swaglog import cloudlog |
|
|
|
from openpilot.common.swaglog import cloudlog |
|
|
|
from openpilot.tools.lib.logreader import LogReader |
|
|
|
from openpilot.tools.lib.logreader import LogReader |
|
|
|
from openpilot.tools.lib.log_time_series import msgs_to_time_series |
|
|
|
from openpilot.tools.lib.log_time_series import msgs_to_time_series |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# TODO: support cereal/ZMQ streaming |
|
|
|
|
|
|
|
class DataSource(ABC):
  """Abstract source of time-series log data.

  Implementations load a route's messages and expose them as a dict of
  per-message-type time series plus timing metadata.
  """

  @abstractmethod
  def load_data(self) -> dict[str, Any]:
    """Load the data and return a dict with (at least) the time series payload."""

  @abstractmethod
  def get_duration(self) -> float:
    """Return the duration of the loaded data in seconds."""
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class LogReaderSource(DataSource):
  """DataSource that reads an openpilot route with LogReader."""

  def __init__(self, route_name: str):
    self.route_name = route_name
    self._duration = 0.0         # seconds covered by the loaded route
    self._start_time_mono = 0.0  # earliest monotonic timestamp in the data

  def load_data(self) -> dict[str, Any]:
    """Load the route, expand list fields, and compute timing metadata.

    Returns a dict with keys 'time_series_data', 'route_start_time_mono'
    and 'duration'.
    """
    lr = LogReader(self.route_name)
    raw_time_series = msgs_to_time_series(lr)
    processed_data = self._expand_list_fields(raw_time_series)

    min_time = float('inf')
    max_time = float('-inf')
    for data in processed_data.values():
      # guard: an empty series would raise IndexError on [0]/[-1]
      if len(data['t']) > 0:
        min_time = min(min_time, data['t'][0])
        max_time = max(max_time, data['t'][-1])

    # fall back to 0.0 when no series contained any samples
    self._start_time_mono = min_time if min_time != float('inf') else 0.0
    self._duration = max_time - min_time if max_time != float('-inf') else 0.0

    return {'time_series_data': processed_data, 'route_start_time_mono': self._start_time_mono, 'duration': self._duration}

  def get_duration(self) -> float:
    """Duration in seconds of the most recently loaded data (0.0 before load)."""
    return self._duration

  # TODO: lists are expanded, but lists of structs are not
  def _expand_list_fields(self, time_series_data):
    """Expand array-valued fields into scalar series keyed "field/i".

    The 't' field is passed through untouched; ragged object arrays are
    padded with None to the longest row before being split by column.
    """
    expanded_data = {}
    for msg_type, data in time_series_data.items():
      expanded_data[msg_type] = {}
      for field, values in data.items():
        if field == 't':
          expanded_data[msg_type]['t'] = values
          continue

        if values.dtype == object:  # ragged array
          lens = np.fromiter((len(v) for v in values), dtype=int, count=len(values))
          max_len = lens.max() if lens.size else 0
          if max_len > 0:
            arr = np.full((len(values), max_len), None, dtype=object)
            for i, v in enumerate(values):
              arr[i, :lens[i]] = v
            for i in range(max_len):
              expanded_data[msg_type][f"{field}/{i}"] = arr[:, i]
        elif values.ndim > 1:  # regular multidimensional array
          for i in range(values.shape[1]):
            expanded_data[msg_type][f"{field}/{i}"] = values[:, i]
        else:
          expanded_data[msg_type][field] = values
    return expanded_data
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class DataLoadedEvent:
  """Event payload delivered to observers when route data finishes loading."""

  def __init__(self, data: dict[str, Any]):
    # the loaded-data dict (time series, start time, duration)
    self.data = data
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class Observer(ABC):
  """Interface for consumers that want to be told when data has loaded."""

  @abstractmethod
  def on_data_loaded(self, event: "DataLoadedEvent"):
    """Handle a completed data load; *event.data* carries the payload."""
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
class DataManager: |
|
|
|
class DataManager: |
|
|
|
def __init__(self):
  """Initialize an empty, idle DataManager."""
  self.time_series_data = {}        # {msg_type: {field: np.ndarray}}
  self.loading = False              # True while a background load is running
  self.route_start_time_mono = 0.0  # earliest monotonic timestamp in the route
  self.duration = 0.0               # route length in seconds
  self._callbacks: list[Callable[[dict], None]] = []
|
|
|
|
|
|
|
|
|
|
|
def add_callback(self, callback: Callable[[dict], None]):
  """Register *callback* to be invoked with the loaded-data dict."""
  self._callbacks.append(callback)
|
|
|
|
|
|
|
|
|
|
|
def remove_callback(self, callback: Callable[[dict], None]):
  """Deregister *callback*; silently ignores callbacks never registered."""
  if callback in self._callbacks:
    self._callbacks.remove(callback)
|
|
|
|
|
|
|
|
|
|
|
def _notify_callbacks(self, data: dict):
  """Invoke every registered callback with *data*.

  Exceptions from individual callbacks are logged and swallowed so one
  faulty consumer cannot prevent the others from being notified.
  """
  for callback in self._callbacks:
    try:
      callback(data)
    except Exception as e:
      cloudlog.exception(f"Error in data callback: {e}")
|
|
|
|
|
|
|
|
|
|
|
def get_current_value_for_path(self, path: str, time_s: float, last_index: int | None = None): |
|
|
|
def get_current_value(self, path: str, time_s: float, last_index: int | None = None): |
|
|
|
try: |
|
|
|
try: |
|
|
|
abs_time_s = self.route_start_time_mono + time_s |
|
|
|
abs_time_s = self.route_start_time_mono + time_s |
|
|
|
msg_type, field_path = path.split('/', 1) |
|
|
|
msg_type, field_path = path.split('/', 1) |
|
|
@ -136,24 +63,24 @@ class DataManager: |
|
|
|
try: |
|
|
|
try: |
|
|
|
msg_type, field_path = path.split('/', 1) |
|
|
|
msg_type, field_path = path.split('/', 1) |
|
|
|
value_array = self.time_series_data.get(msg_type, {}).get(field_path) |
|
|
|
value_array = self.time_series_data.get(msg_type, {}).get(field_path) |
|
|
|
if value_array is not None: # only numbers and bools are plottable |
|
|
|
if value_array is not None: |
|
|
|
return np.issubdtype(value_array.dtype, np.number) or np.issubdtype(value_array.dtype, np.bool_) |
|
|
|
return np.issubdtype(value_array.dtype, np.number) or np.issubdtype(value_array.dtype, np.bool_) |
|
|
|
except (ValueError, KeyError): |
|
|
|
except (ValueError, KeyError): |
|
|
|
pass |
|
|
|
pass |
|
|
|
return False |
|
|
|
return False |
|
|
|
|
|
|
|
|
|
|
|
def get_time_series(self, path: str):
  """Return (relative_times, values) for a "msgType/field" path.

  Times are shifted so the route start is 0. Returns None when the path
  is malformed, the series is unknown, or the time array is empty.
  """
  try:
    msg_type, field_path = path.split('/', 1)
    ts_data = self.time_series_data[msg_type]
    time_array = ts_data['t']
    values = ts_data[field_path]

    if len(time_array) == 0:
      return None

    rel_time = time_array - self.route_start_time_mono
    return rel_time, values
  except (KeyError, ValueError):
    return None
|
|
@ -163,19 +90,55 @@ class DataManager: |
|
|
|
return |
|
|
|
return |
|
|
|
|
|
|
|
|
|
|
|
self.loading = True |
|
|
|
self.loading = True |
|
|
|
data_source = LogReaderSource(route_name) |
|
|
|
threading.Thread(target=self._load_route_background, args=(route_name,), daemon=True).start() |
|
|
|
threading.Thread(target=self._load_in_background, args=(data_source,), daemon=True).start() |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _load_route_background(self, route_name: str):
  """Background worker: load *route_name*, update state, notify callbacks.

  Runs in a daemon thread; always clears self.loading on exit, even when
  loading fails.
  """
  try:
    lr = LogReader(route_name)
    raw_data = msgs_to_time_series(lr)
    processed_data = self._expand_list_fields(raw_data)

    min_time = float('inf')
    max_time = float('-inf')
    for data in processed_data.values():
      # skip empty series: indexing [0]/[-1] would raise IndexError
      if len(data['t']) > 0:
        min_time = min(min_time, data['t'][0])
        max_time = max(max_time, data['t'][-1])

    self.time_series_data = processed_data
    # fall back to 0.0 when no series contained any samples
    self.route_start_time_mono = min_time if min_time != float('inf') else 0.0
    self.duration = max_time - min_time if max_time != float('-inf') else 0.0

    self._notify_callbacks({'time_series_data': processed_data, 'route_start_time_mono': self.route_start_time_mono, 'duration': self.duration})
  except Exception as e:
    cloudlog.exception(f"Error loading route {route_name}: {e}")
  finally:
    self.loading = False
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
def _expand_list_fields(self, time_series_data):
  """Expand array-valued fields into scalar series keyed "field/i".

  The 't' field is passed through untouched; ragged object arrays are
  padded with None to the longest row before being split by column.
  """
  expanded_data = {}
  for msg_type, data in time_series_data.items():
    expanded_data[msg_type] = {}
    for field, values in data.items():
      if field == 't':
        expanded_data[msg_type]['t'] = values
        continue

      if values.dtype == object:  # ragged array
        lens = np.fromiter((len(v) for v in values), dtype=int, count=len(values))
        max_len = lens.max() if lens.size else 0
        if max_len > 0:
          arr = np.full((len(values), max_len), None, dtype=object)
          for i, v in enumerate(values):
            arr[i, :lens[i]] = v
          for i in range(max_len):
            expanded_data[msg_type][f"{field}/{i}"] = arr[:, i]
      elif values.ndim > 1:  # regular multidimensional array
        for i in range(values.shape[1]):
          expanded_data[msg_type][f"{field}/{i}"] = values[:, i]
      else:
        expanded_data[msg_type][field] = values
  return expanded_data
|
|
|