Merge remote-tracking branch 'origin/master' into gwm-driving

gwm-driving
Yassine Yousfi 3 weeks ago
commit 8114dc369d
  1. 1
      .gitignore
  2. 41
      .vscode/launch.json
  3. 5
      SConstruct
  4. 30
      docs/DEBUGGING_SAFETY.md
  5. 2
      launch_env.sh
  6. 6
      pyproject.toml
  7. 11
      selfdrive/controls/lib/desire_helper.py
  8. 2
      selfdrive/selfdrived/events.py
  9. 6
      selfdrive/test/test_onroad.py
  10. 14
      selfdrive/ui/translations/main_ar.ts
  11. 14
      selfdrive/ui/translations/main_de.ts
  12. 14
      selfdrive/ui/translations/main_es.ts
  13. 14
      selfdrive/ui/translations/main_fr.ts
  14. 14
      selfdrive/ui/translations/main_ja.ts
  15. 14
      selfdrive/ui/translations/main_ko.ts
  16. 16
      selfdrive/ui/translations/main_pt-BR.ts
  17. 14
      selfdrive/ui/translations/main_th.ts
  18. 14
      selfdrive/ui/translations/main_tr.ts
  19. 16
      selfdrive/ui/translations/main_zh-CHS.ts
  20. 16
      selfdrive/ui/translations/main_zh-CHT.ts
  21. 26
      system/hardware/tici/agnos.json
  22. 50
      system/hardware/tici/all-partitions.json
  23. 43
      system/journald.py
  24. 1
      system/logcatd/.gitignore
  25. 3
      system/logcatd/SConscript
  26. 75
      system/logcatd/logcatd_systemd.cc
  27. 6
      system/manager/process_config.py
  28. 227
      system/proclogd.py
  29. 6
      system/proclogd/SConscript
  30. 25
      system/proclogd/main.cc
  31. 239
      system/proclogd/proclog.cc
  32. 40
      system/proclogd/proclog.h
  33. 1
      system/proclogd/tests/.gitignore
  34. 142
      system/proclogd/tests/test_proclog.cc
  35. 2
      system/ubloxd/.gitignore
  36. 25
      system/ubloxd/SConscript
  37. 353
      system/ubloxd/generated/glonass.cpp
  38. 375
      system/ubloxd/generated/glonass.h
  39. 247
      system/ubloxd/generated/glonass.py
  40. 325
      system/ubloxd/generated/gps.cpp
  41. 359
      system/ubloxd/generated/gps.h
  42. 193
      system/ubloxd/generated/gps.py
  43. 424
      system/ubloxd/generated/ubx.cpp
  44. 484
      system/ubloxd/generated/ubx.h
  45. 273
      system/ubloxd/generated/ubx.py
  46. 20
      system/ubloxd/tests/print_gps_stats.py
  47. 360
      system/ubloxd/tests/test_glonass_kaitai.cc
  48. 2
      system/ubloxd/tests/test_glonass_runner.cc
  49. 530
      system/ubloxd/ublox_msg.cc
  50. 131
      system/ubloxd/ublox_msg.h
  51. 62
      system/ubloxd/ubloxd.cc
  52. 519
      system/ubloxd/ubloxd.py
  53. 1
      third_party/SConscript
  54. 16
      third_party/kaitai/custom_decoder.h
  55. 189
      third_party/kaitai/exceptions.h
  56. 689
      third_party/kaitai/kaitaistream.cpp
  57. 268
      third_party/kaitai/kaitaistream.h
  58. 20
      third_party/kaitai/kaitaistruct.h
  59. 311
      tools/jotpluggler/data.py
  60. 266
      tools/jotpluggler/datatree.py
  61. 262
      tools/jotpluggler/layout.py
  62. 247
      tools/jotpluggler/pluggle.py
  63. 195
      tools/jotpluggler/views.py
  64. 2
      tools/replay/SConscript
  65. 65
      tools/replay/framereader.cc
  66. 27
      tools/replay/framereader.h
  67. 346
      tools/replay/qcom_decoder.cc
  68. 88
      tools/replay/qcom_decoder.h
  69. 94
      uv.lock

1
.gitignore vendored

@ -50,7 +50,6 @@ cereal/services.h
cereal/gen
cereal/messaging/bridge
selfdrive/mapd/default_speeds_by_region.json
system/proclogd/proclogd
selfdrive/ui/translations/tmp
selfdrive/test/longitudinal_maneuvers/out
selfdrive/car/tests/cars_dump

@ -23,6 +23,11 @@
"id": "args",
"description": "Arguments to pass to the process",
"type": "promptString"
},
{
"id": "replayArg",
"type": "promptString",
"description": "Enter route or segment to replay."
}
],
"configurations": [
@ -40,7 +45,41 @@
"type": "cppdbg",
"request": "launch",
"program": "${workspaceFolder}/${input:cpp_process}",
"cwd": "${workspaceFolder}",
"cwd": "${workspaceFolder}"
},
{
"name": "Attach LLDB to Replay drive",
"type": "lldb",
"request": "attach",
"pid": "${command:pickMyProcess}",
"initCommands": [
"script import time; time.sleep(3)"
]
},
{
"name": "Replay drive",
"type": "debugpy",
"request": "launch",
"program": "${workspaceFolder}/opendbc/safety/tests/safety_replay/replay_drive.py",
"args": [
"${input:replayArg}"
],
"console": "integratedTerminal",
"justMyCode": false,
"env": {
"PYTHONPATH": "${workspaceFolder}"
},
"subProcess": true,
"stopOnEntry": false
}
],
"compounds": [
{
"name": "Replay drive + Safety LLDB",
"configurations": [
"Replay drive",
"Attach LLDB to Replay drive"
]
}
]
}

@ -338,11 +338,6 @@ SConscript([
'system/ubloxd/SConscript',
'system/loggerd/SConscript',
])
if arch != "Darwin":
SConscript([
'system/logcatd/SConscript',
'system/proclogd/SConscript',
])
if arch == "larch64":
SConscript(['system/camerad/SConscript'])

@ -0,0 +1,30 @@
# Debugging Panda Safety with Replay Drive + LLDB
## 1. Start the debugger in VS Code
* Select **Replay drive + Safety LLDB**.
* Enter the route or segment when prompted.
[<img src="https://github.com/user-attachments/assets/b0cc320a-083e-46a7-a9f8-ca775bbe5604">](https://github.com/user-attachments/assets/b0cc320a-083e-46a7-a9f8-ca775bbe5604)
## 2. Attach LLDB
* When prompted, pick the running **`replay_drive` process**.
* ⚠ Attach quickly, or `replay_drive` will start consuming messages.
> [!TIP]
> Add a Python breakpoint at the start of `replay_drive.py` to pause execution and give yourself time to attach LLDB.
## 3. Set breakpoints in VS Code
Breakpoints can be set directly in `modes/xxx.h` (or any C file).
No extra LLDB commands are required — just place breakpoints in the editor.
## 4. Resume execution
Once attached, you can step through both Python (on the replay) and C safety code as CAN logs are replayed.
> [!NOTE]
> * Use short routes for quicker iteration.
> * Pause `replay_drive` early to avoid wasting log messages.
## Video
View a demo of this workflow on the PR that added it: https://github.com/commaai/openpilot/pull/36055#issue-3352911578

@ -7,7 +7,7 @@ export OPENBLAS_NUM_THREADS=1
export VECLIB_MAXIMUM_THREADS=1
if [ -z "$AGNOS_VERSION" ]; then
export AGNOS_VERSION="13"
export AGNOS_VERSION="13.1"
fi
export STAGING_ROOT="/data/safe_staging"

@ -36,6 +36,9 @@ dependencies = [
"pyopenssl < 24.3.0",
"pyaudio",
# ubloxd (TODO: just use struct)
"kaitaistruct",
# panda
"libusb1",
"spidev; platform_system == 'Linux'",
@ -121,6 +124,7 @@ dev = [
tools = [
"metadrive-simulator @ https://github.com/commaai/metadrive/releases/download/MetaDrive-minimal-0.4.2.4/metadrive_simulator-0.4.2.4-py3-none-any.whl ; (platform_machine != 'aarch64')",
"dearpygui>=2.1.0",
]
[project.urls]
@ -158,7 +162,6 @@ testpaths = [
"system/camerad",
"system/hardware",
"system/loggerd",
"system/proclogd",
"system/tests",
"system/ubloxd",
"system/webrtc",
@ -247,6 +250,7 @@ exclude = [
"teleoprtc_repo",
"third_party",
"*.ipynb",
"generated",
]
lint.flake8-implicit-str-concat.allow-multiline = false

@ -40,6 +40,10 @@ class DesireHelper:
self.prev_one_blinker = False
self.desire = log.Desire.none
@staticmethod
def get_lane_change_direction(CS):
return LaneChangeDirection.left if CS.leftBlinker else LaneChangeDirection.right
def update(self, carstate, lateral_active, lane_change_prob):
v_ego = carstate.vEgo
one_blinker = carstate.leftBlinker != carstate.rightBlinker
@ -53,12 +57,13 @@ class DesireHelper:
if self.lane_change_state == LaneChangeState.off and one_blinker and not self.prev_one_blinker and not below_lane_change_speed:
self.lane_change_state = LaneChangeState.preLaneChange
self.lane_change_ll_prob = 1.0
# Initialize lane change direction to prevent UI alert flicker
self.lane_change_direction = self.get_lane_change_direction(carstate)
# LaneChangeState.preLaneChange
elif self.lane_change_state == LaneChangeState.preLaneChange:
# Set lane change direction
self.lane_change_direction = LaneChangeDirection.left if \
carstate.leftBlinker else LaneChangeDirection.right
# Update lane change direction
self.lane_change_direction = self.get_lane_change_direction(carstate)
torque_applied = carstate.steeringPressed and \
((carstate.steeringTorque > 0 and self.lane_change_direction == LaneChangeDirection.left) or

@ -243,7 +243,7 @@ def below_steer_speed_alert(CP: car.CarParams, CS: car.CarState, sm: messaging.S
f"Steer Unavailable Below {get_display_speed(CP.minSteerSpeed, metric)}",
"",
AlertStatus.userPrompt, AlertSize.small,
Priority.LOW, VisualAlert.steerRequired, AudibleAlert.prompt, 0.4)
Priority.LOW, VisualAlert.none, AudibleAlert.prompt, 0.4)
def calibration_incomplete_alert(CP: car.CarParams, CS: car.CarState, sm: messaging.SubMaster, metric: bool, soft_disable_time: int, personality) -> Alert:

@ -32,7 +32,7 @@ CPU usage budget
TEST_DURATION = 25
LOG_OFFSET = 8
MAX_TOTAL_CPU = 287. # total for all 8 cores
MAX_TOTAL_CPU = 300. # total for all 8 cores
PROCS = {
# Baseline CPU usage by process
"selfdrive.controls.controlsd": 16.0,
@ -56,10 +56,10 @@ PROCS = {
"selfdrive.ui.soundd": 3.0,
"selfdrive.ui.feedback.feedbackd": 1.0,
"selfdrive.monitoring.dmonitoringd": 4.0,
"./proclogd": 2.0,
"system.proclogd": 3.0,
"system.logmessaged": 1.0,
"system.tombstoned": 0,
"./logcatd": 1.0,
"system.journald": 1.0,
"system.micd": 5.0,
"system.timed": 0,
"selfdrive.pandad.pandad": 0,

@ -482,10 +482,6 @@ Firehose Mode allows you to maximize your training data uploads to improve openp
<source>An update to your device&apos;s operating system is downloading in the background. You will be prompted to update when it&apos;s ready to install.</source>
<translation>يتم تنزيل تحديث لنظام تشغيل جهازك في الخلفية. سيطلَب منك التحديث عندما يصبح جاهزاً للتثبيت.</translation>
</message>
<message>
<source>NVMe drive not mounted.</source>
<translation>محرك NVMe غير مثبَّت.</translation>
</message>
<message>
<source>openpilot was unable to identify your car. Your car is either unsupported or its ECUs are not recognized. Please submit a pull request to add the firmware versions to the proper vehicle. Need help? Join discord.comma.ai.</source>
<translation>لم يكن openpilot قادراً على تحديد سيارتك. إما أن تكون سيارتك غير مدعومة أو أنه لم يتم التعرف على وحدة التحكم الإلكتروني (ECUs) فيها. يرجى تقديم طلب سحب من أجل إضافة نسخ برمجيات ثابتة إلى السيارة المناسبة. هل تحتاج إلى أي مساعدة؟ لا تتردد في التواصل مع doscord.comma.ai.</translation>
@ -1052,16 +1048,6 @@ Firehose Mode allows you to maximize your training data uploads to improve openp
<source>Record and store microphone audio while driving. The audio will be included in the dashcam video in comma connect.</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Record Audio Feedback with LKAS button</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Press the LKAS button to record and share driving feedback with the openpilot team. When this toggle is disabled, the button acts as a bookmark button. The event will be highlighted in comma connect and the segment will be preserved on your device&apos;s storage.
Note that this feature is only compatible with select cars.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>WiFiPromptWidget</name>

@ -470,10 +470,6 @@ Der Firehose-Modus ermöglicht es dir, deine Trainingsdaten-Uploads zu maximiere
<source>An update to your device&apos;s operating system is downloading in the background. You will be prompted to update when it&apos;s ready to install.</source>
<translation>Ein Update für das Betriebssystem deines Geräts wird im Hintergrund heruntergeladen. Du wirst aufgefordert, das Update zu installieren, sobald es bereit ist.</translation>
</message>
<message>
<source>NVMe drive not mounted.</source>
<translation>NVMe-Laufwerk nicht gemounted.</translation>
</message>
<message>
<source>openpilot was unable to identify your car. Your car is either unsupported or its ECUs are not recognized. Please submit a pull request to add the firmware versions to the proper vehicle. Need help? Join discord.comma.ai.</source>
<translation>openpilot konnte dein Auto nicht identifizieren. Dein Auto wird entweder nicht unterstützt oder die Steuergeräte (ECUs) werden nicht erkannt. Bitte reiche einen Pull Request ein, um die Firmware-Versionen für das richtige Fahrzeug hinzuzufügen. Hilfe findest du auf discord.comma.ai.</translation>
@ -1034,16 +1030,6 @@ Der Firehose-Modus ermöglicht es dir, deine Trainingsdaten-Uploads zu maximiere
<source>Record and store microphone audio while driving. The audio will be included in the dashcam video in comma connect.</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Record Audio Feedback with LKAS button</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Press the LKAS button to record and share driving feedback with the openpilot team. When this toggle is disabled, the button acts as a bookmark button. The event will be highlighted in comma connect and the segment will be preserved on your device&apos;s storage.
Note that this feature is only compatible with select cars.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>WiFiPromptWidget</name>

@ -478,10 +478,6 @@ El Modo Firehose te permite maximizar las subidas de datos de entrenamiento para
<source>An update to your device&apos;s operating system is downloading in the background. You will be prompted to update when it&apos;s ready to install.</source>
<translation>Se está descargando una actualización del sistema operativo de su dispositivo en segundo plano. Se le pedirá que actualice cuando esté listo para instalarse.</translation>
</message>
<message>
<source>NVMe drive not mounted.</source>
<translation>Unidad NVMe no montada.</translation>
</message>
<message>
<source>openpilot was unable to identify your car. Your car is either unsupported or its ECUs are not recognized. Please submit a pull request to add the firmware versions to the proper vehicle. Need help? Join discord.comma.ai.</source>
<translation>openpilot no pudo identificar su automóvil. Su automóvil no es compatible o no se reconocen sus ECU. Por favor haga un pull request para agregar las versiones de firmware del vehículo adecuado. ¿Necesita ayuda? Únase a discord.comma.ai.</translation>
@ -1036,16 +1032,6 @@ El Modo Firehose te permite maximizar las subidas de datos de entrenamiento para
<source>Record and store microphone audio while driving. The audio will be included in the dashcam video in comma connect.</source>
<translation>Graba y almacena el audio del micrófono mientras conduces. El audio se incluirá en el video de la cámara del tablero en comma connect.</translation>
</message>
<message>
<source>Record Audio Feedback with LKAS button</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Press the LKAS button to record and share driving feedback with the openpilot team. When this toggle is disabled, the button acts as a bookmark button. The event will be highlighted in comma connect and the segment will be preserved on your device&apos;s storage.
Note that this feature is only compatible with select cars.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>WiFiPromptWidget</name>

@ -472,10 +472,6 @@ Firehose Mode allows you to maximize your training data uploads to improve openp
<source>An update to your device&apos;s operating system is downloading in the background. You will be prompted to update when it&apos;s ready to install.</source>
<translation>Une mise à jour du système d&apos;exploitation de votre appareil est en cours de téléchargement en arrière-plan. Vous serez invité à effectuer la mise à jour lorsqu&apos;elle sera prête à être installée.</translation>
</message>
<message>
<source>NVMe drive not mounted.</source>
<translation>Le disque NVMe n&apos;est pas monté.</translation>
</message>
<message>
<source>openpilot was unable to identify your car. Your car is either unsupported or its ECUs are not recognized. Please submit a pull request to add the firmware versions to the proper vehicle. Need help? Join discord.comma.ai.</source>
<translation>openpilot n&apos;a pas pu identifier votre voiture. Votre voiture n&apos;est pas supportée ou ses ECUs ne sont pas reconnues. Veuillez soumettre un pull request pour ajouter les versions de firmware au véhicule approprié. Besoin d&apos;aide ? Rejoignez discord.comma.ai.</translation>
@ -1030,16 +1026,6 @@ Firehose Mode allows you to maximize your training data uploads to improve openp
<source>Record and store microphone audio while driving. The audio will be included in the dashcam video in comma connect.</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Record Audio Feedback with LKAS button</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Press the LKAS button to record and share driving feedback with the openpilot team. When this toggle is disabled, the button acts as a bookmark button. The event will be highlighted in comma connect and the segment will be preserved on your device&apos;s storage.
Note that this feature is only compatible with select cars.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>WiFiPromptWidget</name>

@ -472,10 +472,6 @@ Firehoseモードを有効にすると学習データを最大限アップロー
<source>An update to your device&apos;s operating system is downloading in the background. You will be prompted to update when it&apos;s ready to install.</source>
<translation></translation>
</message>
<message>
<source>NVMe drive not mounted.</source>
<translation>SSDドライブ(NVMe)</translation>
</message>
<message>
<source>openpilot was unable to identify your car. Your car is either unsupported or its ECUs are not recognized. Please submit a pull request to add the firmware versions to the proper vehicle. Need help? Join discord.comma.ai.</source>
<translation>openpilotが車両を識別できませんでしたECUが認識されていない可能性があります discord.comma.ai </translation>
@ -1031,16 +1027,6 @@ Firehoseモードを有効にすると学習データを最大限アップロー
<source>Record and store microphone audio while driving. The audio will be included in the dashcam video in comma connect.</source>
<translation> comma connect </translation>
</message>
<message>
<source>Record Audio Feedback with LKAS button</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Press the LKAS button to record and share driving feedback with the openpilot team. When this toggle is disabled, the button acts as a bookmark button. The event will be highlighted in comma connect and the segment will be preserved on your device&apos;s storage.
Note that this feature is only compatible with select cars.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>WiFiPromptWidget</name>

@ -472,10 +472,6 @@ Firehose Mode allows you to maximize your training data uploads to improve openp
<source>An update to your device&apos;s operating system is downloading in the background. You will be prompted to update when it&apos;s ready to install.</source>
<translation> . .</translation>
</message>
<message>
<source>NVMe drive not mounted.</source>
<translation>NVMe .</translation>
</message>
<message>
<source>openpilot was unable to identify your car. Your car is either unsupported or its ECUs are not recognized. Please submit a pull request to add the firmware versions to the proper vehicle. Need help? Join discord.comma.ai.</source>
<translation> . ECU가 . PR을 . discord.comma.ai에 .</translation>
@ -1031,16 +1027,6 @@ Firehose Mode allows you to maximize your training data uploads to improve openp
<source>Record and store microphone audio while driving. The audio will be included in the dashcam video in comma connect.</source>
<translation> . comma connect의 .</translation>
</message>
<message>
<source>Record Audio Feedback with LKAS button</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Press the LKAS button to record and share driving feedback with the openpilot team. When this toggle is disabled, the button acts as a bookmark button. The event will be highlighted in comma connect and the segment will be preserved on your device&apos;s storage.
Note that this feature is only compatible with select cars.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>WiFiPromptWidget</name>

@ -474,10 +474,6 @@ O Modo Firehose permite maximizar o envio de dados de treinamento para melhorar
<source>An update to your device&apos;s operating system is downloading in the background. You will be prompted to update when it&apos;s ready to install.</source>
<translation>Uma atualização para o sistema operacional do seu dispositivo está sendo baixada em segundo plano. Você será solicitado a atualizar quando estiver pronto para instalar.</translation>
</message>
<message>
<source>NVMe drive not mounted.</source>
<translation>Unidade NVMe não montada.</translation>
</message>
<message>
<source>openpilot was unable to identify your car. Your car is either unsupported or its ECUs are not recognized. Please submit a pull request to add the firmware versions to the proper vehicle. Need help? Join discord.comma.ai.</source>
<translation>O openpilot não conseguiu identificar o seu carro. Seu carro não é suportado ou seus ECUs não são reconhecidos. Envie um pull request para adicionar as versões de firmware ao veículo adequado. Precisa de ajuda? Junte-se discord.comma.ai.</translation>
@ -1036,18 +1032,6 @@ O Modo Firehose permite maximizar o envio de dados de treinamento para melhorar
<source>Record and store microphone audio while driving. The audio will be included in the dashcam video in comma connect.</source>
<translation>Grave e armazene o áudio do microfone enquanto estiver dirigindo. O áudio será incluído ao vídeo dashcam no comma connect.</translation>
</message>
<message>
<source>Record Audio Feedback with LKAS button</source>
<translation>Gravar feedback de áudio com o botão LKAS</translation>
</message>
<message>
<source>Press the LKAS button to record and share driving feedback with the openpilot team. When this toggle is disabled, the button acts as a bookmark button. The event will be highlighted in comma connect and the segment will be preserved on your device&apos;s storage.
Note that this feature is only compatible with select cars.</source>
<translation>Pressione o botão LKAS para gravar e compartilhar feedback de direção com a equipe do openpilot. Quando esta opção estiver desativada, o botão funcionará como um botão de marcador. O evento será destacado no comma connect e o segmento será preservado no armazenamento do seu dispositivo.
Observe que este recurso é compatível apenas com alguns modelos de carros.</translation>
</message>
</context>
<context>
<name>WiFiPromptWidget</name>

@ -472,10 +472,6 @@ Firehose Mode allows you to maximize your training data uploads to improve openp
<source>An update to your device&apos;s operating system is downloading in the background. You will be prompted to update when it&apos;s ready to install.</source>
<translation> </translation>
</message>
<message>
<source>NVMe drive not mounted.</source>
<translation> NVMe</translation>
</message>
<message>
<source>openpilot was unable to identify your car. Your car is either unsupported or its ECUs are not recognized. Please submit a pull request to add the firmware versions to the proper vehicle. Need help? Join discord.comma.ai.</source>
<translation>openpilot ECU pull request discord.comma.ai</translation>
@ -1027,16 +1023,6 @@ Firehose Mode allows you to maximize your training data uploads to improve openp
<source>Record and store microphone audio while driving. The audio will be included in the dashcam video in comma connect.</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Record Audio Feedback with LKAS button</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Press the LKAS button to record and share driving feedback with the openpilot team. When this toggle is disabled, the button acts as a bookmark button. The event will be highlighted in comma connect and the segment will be preserved on your device&apos;s storage.
Note that this feature is only compatible with select cars.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>WiFiPromptWidget</name>

@ -469,10 +469,6 @@ Firehose Mode allows you to maximize your training data uploads to improve openp
<source>An update to your device&apos;s operating system is downloading in the background. You will be prompted to update when it&apos;s ready to install.</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>NVMe drive not mounted.</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>openpilot was unable to identify your car. Your car is either unsupported or its ECUs are not recognized. Please submit a pull request to add the firmware versions to the proper vehicle. Need help? Join discord.comma.ai.</source>
<translation type="unfinished"></translation>
@ -1024,16 +1020,6 @@ Firehose Mode allows you to maximize your training data uploads to improve openp
<source>Record and store microphone audio while driving. The audio will be included in the dashcam video in comma connect.</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Record Audio Feedback with LKAS button</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Press the LKAS button to record and share driving feedback with the openpilot team. When this toggle is disabled, the button acts as a bookmark button. The event will be highlighted in comma connect and the segment will be preserved on your device&apos;s storage.
Note that this feature is only compatible with select cars.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>WiFiPromptWidget</name>

@ -472,10 +472,6 @@ Firehose Mode allows you to maximize your training data uploads to improve openp
<source>An update to your device&apos;s operating system is downloading in the background. You will be prompted to update when it&apos;s ready to install.</source>
<translation></translation>
</message>
<message>
<source>NVMe drive not mounted.</source>
<translation>NVMe固态硬盘未被挂载</translation>
</message>
<message>
<source>openpilot was unable to identify your car. Your car is either unsupported or its ECUs are not recognized. Please submit a pull request to add the firmware versions to the proper vehicle. Need help? Join discord.comma.ai.</source>
<translation>openpilot (ECU) Pull Request discord.comma.ai</translation>
@ -1031,18 +1027,6 @@ Firehose Mode allows you to maximize your training data uploads to improve openp
<source>Record and store microphone audio while driving. The audio will be included in the dashcam video in comma connect.</source>
<translation> comma connect </translation>
</message>
<message>
<source>Record Audio Feedback with LKAS button</source>
<translation>使</translation>
</message>
<message>
<source>Press the LKAS button to record and share driving feedback with the openpilot team. When this toggle is disabled, the button acts as a bookmark button. The event will be highlighted in comma connect and the segment will be preserved on your device&apos;s storage.
Note that this feature is only compatible with select cars.</source>
<translation> openpilot comma connect
</translation>
</message>
</context>
<context>
<name>WiFiPromptWidget</name>

@ -472,10 +472,6 @@ Firehose Mode allows you to maximize your training data uploads to improve openp
<source>An update to your device&apos;s operating system is downloading in the background. You will be prompted to update when it&apos;s ready to install.</source>
<translation></translation>
</message>
<message>
<source>NVMe drive not mounted.</source>
<translation>NVMe </translation>
</message>
<message>
<source>openpilot was unable to identify your car. Your car is either unsupported or its ECUs are not recognized. Please submit a pull request to add the firmware versions to the proper vehicle. Need help? Join discord.comma.ai.</source>
<translation>openpilot (ECU) Pull Request discord.comma.ai </translation>
@ -1031,18 +1027,6 @@ Firehose Mode allows you to maximize your training data uploads to improve openp
<source>Record and store microphone audio while driving. The audio will be included in the dashcam video in comma connect.</source>
<translation> comma connect </translation>
</message>
<message>
<source>Record Audio Feedback with LKAS button</source>
<translation>使</translation>
</message>
<message>
<source>Press the LKAS button to record and share driving feedback with the openpilot team. When this toggle is disabled, the button acts as a bookmark button. The event will be highlighted in comma connect and the segment will be preserved on your device&apos;s storage.
Note that this feature is only compatible with select cars.</source>
<translation> openpilot comma connect
</translation>
</message>
</context>
<context>
<name>WiFiPromptWidget</name>

@ -56,29 +56,29 @@
},
{
"name": "boot",
"url": "https://commadist.azureedge.net/agnosupdate/boot-3e26a0d330a1be1614a5c25cae238ca5d01c1be90802ad296c805c3bcbad0e7a.img.xz",
"hash": "3e26a0d330a1be1614a5c25cae238ca5d01c1be90802ad296c805c3bcbad0e7a",
"hash_raw": "3e26a0d330a1be1614a5c25cae238ca5d01c1be90802ad296c805c3bcbad0e7a",
"size": 18515968,
"url": "https://commadist.azureedge.net/agnosupdate/boot-b96882012ab6cddda04f440009c798a6cff65977f984b12072e89afa592d86cb.img.xz",
"hash": "b96882012ab6cddda04f440009c798a6cff65977f984b12072e89afa592d86cb",
"hash_raw": "b96882012ab6cddda04f440009c798a6cff65977f984b12072e89afa592d86cb",
"size": 17442816,
"sparse": false,
"full_check": true,
"has_ab": true,
"ondevice_hash": "41d693d7e752c04210b4f8d68015d2367ee83e1cd54cc7b0aca3b79b4855e6b1"
"ondevice_hash": "8ed6c2796be5c5b29d64e6413b8e878d5bd1a3981d15216d2b5e84140cc4ea2a"
},
{
"name": "system",
"url": "https://commadist.azureedge.net/agnosupdate/system-4d0c7005e242fc757adbefd43b44ab2e78be53ca5145fceb160cc32ecf8d6cc3.img.xz",
"hash": "3596cd5d8a51dabcdd75c29f9317ca3dad9036b1083630ad719eaf584fdb1ce9",
"hash_raw": "4d0c7005e242fc757adbefd43b44ab2e78be53ca5145fceb160cc32ecf8d6cc3",
"size": 5368709120,
"url": "https://commadist.azureedge.net/agnosupdate/system-2b1bb223bf2100376ad5d543bfa4a483f33327b3478ec20ab36048388472c4bc.img.xz",
"hash": "325414e5c9f7516b2bf0fedb6abe6682f717897a6d84ab70d5afe91a59f244e9",
"hash_raw": "2b1bb223bf2100376ad5d543bfa4a483f33327b3478ec20ab36048388472c4bc",
"size": 4718592000,
"sparse": true,
"full_check": false,
"has_ab": true,
"ondevice_hash": "32cdbc0ce176e0ea92944e53be875c12374512fa09b6041e42e683519d36591e",
"ondevice_hash": "79f4f6d0b5b4a416f0f31261b430943a78e37c26d0e226e0ef412fe0eae3c727",
"alt": {
"hash": "4d0c7005e242fc757adbefd43b44ab2e78be53ca5145fceb160cc32ecf8d6cc3",
"url": "https://commadist.azureedge.net/agnosupdate/system-4d0c7005e242fc757adbefd43b44ab2e78be53ca5145fceb160cc32ecf8d6cc3.img",
"size": 5368709120
"hash": "2b1bb223bf2100376ad5d543bfa4a483f33327b3478ec20ab36048388472c4bc",
"url": "https://commadist.azureedge.net/agnosupdate/system-2b1bb223bf2100376ad5d543bfa4a483f33327b3478ec20ab36048388472c4bc.img",
"size": 4718592000
}
}
]

@ -339,62 +339,62 @@
},
{
"name": "boot",
"url": "https://commadist.azureedge.net/agnosupdate/boot-3e26a0d330a1be1614a5c25cae238ca5d01c1be90802ad296c805c3bcbad0e7a.img.xz",
"hash": "3e26a0d330a1be1614a5c25cae238ca5d01c1be90802ad296c805c3bcbad0e7a",
"hash_raw": "3e26a0d330a1be1614a5c25cae238ca5d01c1be90802ad296c805c3bcbad0e7a",
"size": 18515968,
"url": "https://commadist.azureedge.net/agnosupdate/boot-b96882012ab6cddda04f440009c798a6cff65977f984b12072e89afa592d86cb.img.xz",
"hash": "b96882012ab6cddda04f440009c798a6cff65977f984b12072e89afa592d86cb",
"hash_raw": "b96882012ab6cddda04f440009c798a6cff65977f984b12072e89afa592d86cb",
"size": 17442816,
"sparse": false,
"full_check": true,
"has_ab": true,
"ondevice_hash": "41d693d7e752c04210b4f8d68015d2367ee83e1cd54cc7b0aca3b79b4855e6b1"
"ondevice_hash": "8ed6c2796be5c5b29d64e6413b8e878d5bd1a3981d15216d2b5e84140cc4ea2a"
},
{
"name": "system",
"url": "https://commadist.azureedge.net/agnosupdate/system-4d0c7005e242fc757adbefd43b44ab2e78be53ca5145fceb160cc32ecf8d6cc3.img.xz",
"hash": "3596cd5d8a51dabcdd75c29f9317ca3dad9036b1083630ad719eaf584fdb1ce9",
"hash_raw": "4d0c7005e242fc757adbefd43b44ab2e78be53ca5145fceb160cc32ecf8d6cc3",
"size": 5368709120,
"url": "https://commadist.azureedge.net/agnosupdate/system-2b1bb223bf2100376ad5d543bfa4a483f33327b3478ec20ab36048388472c4bc.img.xz",
"hash": "325414e5c9f7516b2bf0fedb6abe6682f717897a6d84ab70d5afe91a59f244e9",
"hash_raw": "2b1bb223bf2100376ad5d543bfa4a483f33327b3478ec20ab36048388472c4bc",
"size": 4718592000,
"sparse": true,
"full_check": false,
"has_ab": true,
"ondevice_hash": "32cdbc0ce176e0ea92944e53be875c12374512fa09b6041e42e683519d36591e",
"ondevice_hash": "79f4f6d0b5b4a416f0f31261b430943a78e37c26d0e226e0ef412fe0eae3c727",
"alt": {
"hash": "4d0c7005e242fc757adbefd43b44ab2e78be53ca5145fceb160cc32ecf8d6cc3",
"url": "https://commadist.azureedge.net/agnosupdate/system-4d0c7005e242fc757adbefd43b44ab2e78be53ca5145fceb160cc32ecf8d6cc3.img",
"size": 5368709120
"hash": "2b1bb223bf2100376ad5d543bfa4a483f33327b3478ec20ab36048388472c4bc",
"url": "https://commadist.azureedge.net/agnosupdate/system-2b1bb223bf2100376ad5d543bfa4a483f33327b3478ec20ab36048388472c4bc.img",
"size": 4718592000
}
},
{
"name": "userdata_90",
"url": "https://commadist.azureedge.net/agnosupdate/userdata_90-a3695e6b4bade3dd9c2711cd92e93e9ac7744207c2af03b78f0b9a17e89d357f.img.xz",
"hash": "eeb50afb13973d7e54013fdb3ce0f4f396b8608c8325442966cad6b67e39a8d9",
"hash_raw": "a3695e6b4bade3dd9c2711cd92e93e9ac7744207c2af03b78f0b9a17e89d357f",
"url": "https://commadist.azureedge.net/agnosupdate/userdata_90-b3112984d2a8534a83d2ce43d35efdd10c7d163d9699f611f0f72ad9e9cb5af9.img.xz",
"hash": "bea163e6fb6ac6224c7f32619affb5afb834cd859971b0cab6d8297dd0098f0a",
"hash_raw": "b3112984d2a8534a83d2ce43d35efdd10c7d163d9699f611f0f72ad9e9cb5af9",
"size": 96636764160,
"sparse": true,
"full_check": true,
"has_ab": false,
"ondevice_hash": "537088b516805b32b1b4ad176e7f3fc6bc828e296398ce65cbf5f6150fb1a26f"
"ondevice_hash": "f4841c6ae3207197886e5efbd50f44cc24822680d7b785fa2d2743c657f23287"
},
{
"name": "userdata_89",
"url": "https://commadist.azureedge.net/agnosupdate/userdata_89-cbe9979b42b265c9e25a50e876faf5b592fe175aeb5936f2a97b345a6d4e53f5.img.xz",
"hash": "9456a8b117736e6f8eb35cc97fc62ddc8255f38a1be5959a6911498d6aaee08d",
"hash_raw": "cbe9979b42b265c9e25a50e876faf5b592fe175aeb5936f2a97b345a6d4e53f5",
"url": "https://commadist.azureedge.net/agnosupdate/userdata_89-3e63f670e4270474cec96f4da9250ee4e87e3106b0b043b7e82371e1c761e167.img.xz",
"hash": "b5458a29dd7d4a4c9b7ad77b8baa5f804142ac78d97c6668839bf2a650e32518",
"hash_raw": "3e63f670e4270474cec96f4da9250ee4e87e3106b0b043b7e82371e1c761e167",
"size": 95563022336,
"sparse": true,
"full_check": true,
"has_ab": false,
"ondevice_hash": "9e7293cf9a377cb2f3477698e7143e6085a42f7355d7eace5bf9e590992941a8"
"ondevice_hash": "1dc10c542d3b019258fc08dc7dfdb49d9abad065e46d030b89bc1a2e0197f526"
},
{
"name": "userdata_30",
"url": "https://commadist.azureedge.net/agnosupdate/userdata_30-0b25bb660f1c0c4475fc22a32a51cd32bb980f55b95069e6ab56dd8e47f00c31.img.xz",
"hash": "6c5c98c0fec64355ead5dfc9c1902653b4ea9a071e7b968d1ccd36565082f6b7",
"hash_raw": "0b25bb660f1c0c4475fc22a32a51cd32bb980f55b95069e6ab56dd8e47f00c31",
"url": "https://commadist.azureedge.net/agnosupdate/userdata_30-1d3885d4370974e55f0c6f567fd0344fc5ee10db067aa5810fbaf402eadb032c.img.xz",
"hash": "687d178cfc91be5d7e8aa1333405b610fdce01775b8333bd0985b81642b94eea",
"hash_raw": "1d3885d4370974e55f0c6f567fd0344fc5ee10db067aa5810fbaf402eadb032c",
"size": 32212254720,
"sparse": true,
"full_check": true,
"has_ab": false,
"ondevice_hash": "42b5c09a36866d9a52e78b038901669d5bebb02176c498ce11618f200bdfe6b5"
"ondevice_hash": "9ddbd1dae6ee7dc919f018364cf2f29dad138c9203c5a49aea0cbb9bf2e137e5"
}
]

@ -0,0 +1,43 @@
#!/usr/bin/env python3
import json
import subprocess
import cereal.messaging as messaging
from openpilot.common.swaglog import cloudlog
def main():
  # Python replacement for the deleted C++ logcatd: follow the systemd journal
  # via `journalctl -f -o json` (one JSON object per line) and republish each
  # entry on the 'androidLog' messaging socket.
  pm = messaging.PubMaster(['androidLog'])

  cmd = ['journalctl', '-f', '-o', 'json']
  proc = subprocess.Popen(cmd, stdout=subprocess.PIPE, text=True)
  assert proc.stdout is not None  # guaranteed by stdout=subprocess.PIPE; keeps type-checkers happy

  try:
    for line in proc.stdout:
      line = line.strip()
      if not line:
        continue
      try:
        kv = json.loads(line)
      except json.JSONDecodeError:
        # journalctl occasionally emits unparsable lines; log and keep streaming
        cloudlog.exception("failed to parse journalctl output")
        continue
      msg = messaging.new_message('androidLog')
      entry = msg.androidLog
      # __REALTIME_TIMESTAMP is the journal realtime clock in microseconds,
      # matching the usec timestamp the old C++ logcatd published via
      # sd_journal_get_realtime_usec()
      entry.ts = int(kv.get('__REALTIME_TIMESTAMP', 0))
      # full entry is forwarded as a JSON string, like the C++ version did
      entry.message = json.dumps(kv)
      if '_PID' in kv:
        entry.pid = int(kv['_PID'])
      if 'PRIORITY' in kv:
        entry.priority = int(kv['PRIORITY'])
      if 'SYSLOG_IDENTIFIER' in kv:
        entry.tag = kv['SYSLOG_IDENTIFIER']
      pm.send('androidLog', msg)
  finally:
    # make sure the child journalctl does not outlive us
    proc.terminate()
    proc.wait()


if __name__ == '__main__':
  main()

@ -1 +0,0 @@
logcatd

@ -1,3 +0,0 @@
Import('env', 'messaging', 'common')
env.Program('logcatd', 'logcatd_systemd.cc', LIBS=[messaging, common, 'systemd'])

@ -1,75 +0,0 @@
#include <systemd/sd-journal.h>
#include <cassert>
#include <csignal>
#include <map>
#include <string>
#include "third_party/json11/json11.hpp"
#include "cereal/messaging/messaging.h"
#include "common/timing.h"
#include "common/util.h"
ExitHandler do_exit;
int main(int argc, char *argv[]) {
PubMaster pm({"androidLog"});
sd_journal *journal;
int err = sd_journal_open(&journal, 0);
assert(err >= 0);
err = sd_journal_get_fd(journal); // needed so sd_journal_wait() works properly if files rotate
assert(err >= 0);
err = sd_journal_seek_tail(journal);
assert(err >= 0);
// workaround for bug https://github.com/systemd/systemd/issues/9934
// call sd_journal_previous_skip after sd_journal_seek_tail (like journalctl -f does) to makes things work.
sd_journal_previous_skip(journal, 1);
while (!do_exit) {
err = sd_journal_next(journal);
assert(err >= 0);
// Wait for new message if we didn't receive anything
if (err == 0) {
err = sd_journal_wait(journal, 1000 * 1000);
assert(err >= 0);
continue; // Try again
}
uint64_t timestamp = 0;
err = sd_journal_get_realtime_usec(journal, &timestamp);
assert(err >= 0);
const void *data;
size_t length;
std::map<std::string, std::string> kv;
SD_JOURNAL_FOREACH_DATA(journal, data, length) {
std::string str((char*)data, length);
// Split "KEY=VALUE"" on "=" and put in map
std::size_t found = str.find("=");
if (found != std::string::npos) {
kv[str.substr(0, found)] = str.substr(found + 1, std::string::npos);
}
}
MessageBuilder msg;
// Build message
auto androidEntry = msg.initEvent().initAndroidLog();
androidEntry.setTs(timestamp);
androidEntry.setMessage(json11::Json(kv).dump());
if (kv.count("_PID")) androidEntry.setPid(std::atoi(kv["_PID"].c_str()));
if (kv.count("PRIORITY")) androidEntry.setPriority(std::atoi(kv["PRIORITY"].c_str()));
if (kv.count("SYSLOG_IDENTIFIER")) androidEntry.setTag(kv["SYSLOG_IDENTIFIER"]);
pm.send("androidLog", msg);
}
sd_journal_close(journal);
return 0;
}

@ -71,8 +71,8 @@ procs = [
NativeProcess("camerad", "system/camerad", ["./camerad"], driverview, enabled=not WEBCAM),
PythonProcess("webcamerad", "tools.webcam.camerad", driverview, enabled=WEBCAM),
NativeProcess("logcatd", "system/logcatd", ["./logcatd"], only_onroad, platform.system() != "Darwin"),
NativeProcess("proclogd", "system/proclogd", ["./proclogd"], only_onroad, platform.system() != "Darwin"),
PythonProcess("proclogd", "system.proclogd", only_onroad, enabled=platform.system() != "Darwin"),
PythonProcess("journald", "system.journald", only_onroad, platform.system() != "Darwin"),
PythonProcess("micd", "system.micd", iscar),
PythonProcess("timed", "system.timed", always_run, enabled=not PC),
@ -97,7 +97,7 @@ procs = [
PythonProcess("pandad", "selfdrive.pandad.pandad", always_run),
PythonProcess("paramsd", "selfdrive.locationd.paramsd", only_onroad),
PythonProcess("lagd", "selfdrive.locationd.lagd", only_onroad),
NativeProcess("ubloxd", "system/ubloxd", ["./ubloxd"], ublox, enabled=TICI),
PythonProcess("ubloxd", "system.ubloxd.ubloxd", ublox, enabled=TICI),
PythonProcess("pigeond", "system.ubloxd.pigeond", ublox, enabled=TICI),
PythonProcess("plannerd", "selfdrive.controls.plannerd", not_long_maneuver),
PythonProcess("maneuversd", "tools.longitudinal_maneuvers.maneuversd", long_maneuver),

@ -0,0 +1,227 @@
#!/usr/bin/env python3
import os
from typing import NoReturn, TypedDict
from cereal import messaging
from openpilot.common.realtime import Ratekeeper
from openpilot.common.swaglog import cloudlog
JIFFY = os.sysconf(os.sysconf_names['SC_CLK_TCK'])
PAGE_SIZE = os.sysconf(os.sysconf_names['SC_PAGE_SIZE'])
def _cpu_times() -> list[dict[str, float]]:
cpu_times: list[dict[str, float]] = []
try:
with open('/proc/stat') as f:
lines = f.readlines()[1:]
for line in lines:
if not line.startswith('cpu') or len(line) < 4 or not line[3].isdigit():
break
parts = line.split()
cpu_times.append({
'cpuNum': int(parts[0][3:]),
'user': float(parts[1]) / JIFFY,
'nice': float(parts[2]) / JIFFY,
'system': float(parts[3]) / JIFFY,
'idle': float(parts[4]) / JIFFY,
'iowait': float(parts[5]) / JIFFY,
'irq': float(parts[6]) / JIFFY,
'softirq': float(parts[7]) / JIFFY,
})
except Exception:
cloudlog.exception("failed to read /proc/stat")
return cpu_times
def _mem_info() -> dict[str, int]:
keys = ["MemTotal:", "MemFree:", "MemAvailable:", "Buffers:", "Cached:", "Active:", "Inactive:", "Shmem:"]
info: dict[str, int] = dict.fromkeys(keys, 0)
try:
with open('/proc/meminfo') as f:
for line in f:
parts = line.split()
if parts and parts[0] in info:
info[parts[0]] = int(parts[1]) * 1024
except Exception:
cloudlog.exception("failed to read /proc/meminfo")
return info
_STAT_POS = {
'pid': 1,
'state': 3,
'ppid': 4,
'utime': 14,
'stime': 15,
'cutime': 16,
'cstime': 17,
'priority': 18,
'nice': 19,
'num_threads': 20,
'starttime': 22,
'vsize': 23,
'rss': 24,
'processor': 39,
}
class ProcStat(TypedDict):
name: str
pid: int
state: str
ppid: int
utime: int
stime: int
cutime: int
cstime: int
priority: int
nice: int
num_threads: int
starttime: int
vms: int
rss: int
processor: int
def _parse_proc_stat(stat: str) -> ProcStat | None:
open_paren = stat.find('(')
close_paren = stat.rfind(')')
if open_paren == -1 or close_paren == -1 or open_paren > close_paren:
return None
name = stat[open_paren + 1:close_paren]
stat = stat[:open_paren] + stat[open_paren:close_paren].replace(' ', '_') + stat[close_paren:]
parts = stat.split()
if len(parts) < 52:
return None
try:
return {
'name': name,
'pid': int(parts[_STAT_POS['pid'] - 1]),
'state': parts[_STAT_POS['state'] - 1][0],
'ppid': int(parts[_STAT_POS['ppid'] - 1]),
'utime': int(parts[_STAT_POS['utime'] - 1]),
'stime': int(parts[_STAT_POS['stime'] - 1]),
'cutime': int(parts[_STAT_POS['cutime'] - 1]),
'cstime': int(parts[_STAT_POS['cstime'] - 1]),
'priority': int(parts[_STAT_POS['priority'] - 1]),
'nice': int(parts[_STAT_POS['nice'] - 1]),
'num_threads': int(parts[_STAT_POS['num_threads'] - 1]),
'starttime': int(parts[_STAT_POS['starttime'] - 1]),
'vms': int(parts[_STAT_POS['vsize'] - 1]),
'rss': int(parts[_STAT_POS['rss'] - 1]),
'processor': int(parts[_STAT_POS['processor'] - 1]),
}
except Exception:
cloudlog.exception("failed to parse /proc/<pid>/stat")
return None
class ProcExtra(TypedDict):
pid: int
name: str
exe: str
cmdline: list[str]
_proc_cache: dict[int, ProcExtra] = {}
def _get_proc_extra(pid: int, name: str) -> ProcExtra:
cache: ProcExtra | None = _proc_cache.get(pid)
if cache is None or cache.get('name') != name:
exe = ''
cmdline: list[str] = []
try:
exe = os.readlink(f'/proc/{pid}/exe')
except OSError:
pass
try:
with open(f'/proc/{pid}/cmdline', 'rb') as f:
cmdline = [c.decode('utf-8', errors='replace') for c in f.read().split(b'\0') if c]
except OSError:
pass
cache = {'pid': pid, 'name': name, 'exe': exe, 'cmdline': cmdline}
_proc_cache[pid] = cache
return cache
def _procs() -> list[ProcStat]:
  """Snapshot every parseable /proc/<pid>/stat entry.

  Processes that exit between listdir and open are silently skipped.
  """
  snapshot: list[ProcStat] = []
  for entry in os.listdir('/proc'):
    if not entry.isdigit():  # only numeric dirs are processes
      continue
    try:
      with open(f'/proc/{entry}/stat') as f:
        contents = f.read()
    except OSError:
      continue  # process vanished; ignore
    parsed = _parse_proc_stat(contents)
    if parsed is not None:
      snapshot.append(parsed)
  return snapshot
def build_proc_log_message(msg) -> None:
  """Fill msg.procLog with per-process stats, per-core CPU times and meminfo.

  msg is a capnp 'procLog' message builder; fields are written in place.
  """
  pl = msg.procLog

  # per-process stats from /proc/<pid>/stat
  procs = _procs()
  l = pl.init('procs', len(procs))
  for i, r in enumerate(procs):
    proc = l[i]
    proc.pid = r['pid']
    proc.state = ord(r['state'][0])  # single-char state code sent as its ASCII value
    proc.ppid = r['ppid']
    proc.cpuUser = r['utime'] / JIFFY  # jiffies -> seconds
    proc.cpuSystem = r['stime'] / JIFFY
    proc.cpuChildrenUser = r['cutime'] / JIFFY
    proc.cpuChildrenSystem = r['cstime'] / JIFFY
    proc.priority = r['priority']
    proc.nice = r['nice']
    proc.numThreads = r['num_threads']
    proc.startTime = r['starttime'] / JIFFY  # boot-relative start time in seconds
    proc.memVms = r['vms']  # already in bytes
    proc.memRss = r['rss'] * PAGE_SIZE  # rss is in pages; convert to bytes
    proc.processor = r['processor']
    proc.name = r['name']
    # exe path and argv are cached per pid since they rarely change
    extra = _get_proc_extra(r['pid'], r['name'])
    proc.exe = extra['exe']
    cmdline = proc.init('cmdline', len(extra['cmdline']))
    for j, arg in enumerate(extra['cmdline']):
      cmdline[j] = arg

  # per-core CPU times from /proc/stat (already converted to seconds)
  cpu_times = _cpu_times()
  cpu_list = pl.init('cpuTimes', len(cpu_times))
  for i, ct in enumerate(cpu_times):
    cpu = cpu_list[i]
    cpu.cpuNum = ct['cpuNum']
    cpu.user = ct['user']
    cpu.nice = ct['nice']
    cpu.system = ct['system']
    cpu.idle = ct['idle']
    cpu.iowait = ct['iowait']
    cpu.irq = ct['irq']
    cpu.softirq = ct['softirq']

  # memory totals from /proc/meminfo (bytes; 0 for any field that was unreadable)
  mem_info = _mem_info()
  pl.mem.total = mem_info["MemTotal:"]
  pl.mem.free = mem_info["MemFree:"]
  pl.mem.available = mem_info["MemAvailable:"]
  pl.mem.buffers = mem_info["Buffers:"]
  pl.mem.cached = mem_info["Cached:"]
  pl.mem.active = mem_info["Active:"]
  pl.mem.inactive = mem_info["Inactive:"]
  pl.mem.shared = mem_info["Shmem:"]
def main() -> NoReturn:
  # Python replacement for the deleted C++ proclogd: periodically snapshot
  # /proc into a procLog message and publish it.
  pm = messaging.PubMaster(['procLog'])
  rk = Ratekeeper(0.5)  # 0.5 Hz -> one procLog message every two seconds
  while True:
    msg = messaging.new_message('procLog', valid=True)
    build_proc_log_message(msg)
    pm.send('procLog', msg)
    rk.keep_time()


if __name__ == '__main__':
  main()

@ -1,6 +0,0 @@
Import('env', 'messaging', 'common')
libs = [messaging, 'pthread', common]
env.Program('proclogd', ['main.cc', 'proclog.cc'], LIBS=libs)
if GetOption('extras'):
env.Program('tests/test_proclog', ['tests/test_proclog.cc', 'proclog.cc'], LIBS=libs)

@ -1,25 +0,0 @@
#include <sys/resource.h>
#include "common/ratekeeper.h"
#include "common/util.h"
#include "system/proclogd/proclog.h"
ExitHandler do_exit;
int main(int argc, char **argv) {
setpriority(PRIO_PROCESS, 0, -15);
RateKeeper rk("proclogd", 0.5);
PubMaster publisher({"procLog"});
while (!do_exit) {
MessageBuilder msg;
buildProcLogMessage(msg);
publisher.send("procLog", msg);
rk.keepTime();
}
return 0;
}

@ -1,239 +0,0 @@
#include "system/proclogd/proclog.h"
#include <dirent.h>
#include <cassert>
#include <fstream>
#include <iterator>
#include <sstream>
#include "common/swaglog.h"
#include "common/util.h"
namespace Parser {
// parse /proc/stat
std::vector<CPUTime> cpuTimes(std::istream &stream) {
std::vector<CPUTime> cpu_times;
std::string line;
// skip the first line for cpu total
std::getline(stream, line);
while (std::getline(stream, line)) {
if (line.compare(0, 3, "cpu") != 0) break;
CPUTime t = {};
std::istringstream iss(line);
if (iss.ignore(3) >> t.id >> t.utime >> t.ntime >> t.stime >> t.itime >> t.iowtime >> t.irqtime >> t.sirqtime)
cpu_times.push_back(t);
}
return cpu_times;
}
// parse /proc/meminfo
std::unordered_map<std::string, uint64_t> memInfo(std::istream &stream) {
std::unordered_map<std::string, uint64_t> mem_info;
std::string line, key;
while (std::getline(stream, line)) {
uint64_t val = 0;
std::istringstream iss(line);
if (iss >> key >> val) {
mem_info[key] = val * 1024;
}
}
return mem_info;
}
// field position (https://man7.org/linux/man-pages/man5/proc.5.html)
enum StatPos {
pid = 1,
state = 3,
ppid = 4,
utime = 14,
stime = 15,
cutime = 16,
cstime = 17,
priority = 18,
nice = 19,
num_threads = 20,
starttime = 22,
vsize = 23,
rss = 24,
processor = 39,
MAX_FIELD = 52,
};
// parse /proc/pid/stat
std::optional<ProcStat> procStat(std::string stat) {
// To avoid being fooled by names containing a closing paren, scan backwards.
auto open_paren = stat.find('(');
auto close_paren = stat.rfind(')');
if (open_paren == std::string::npos || close_paren == std::string::npos || open_paren > close_paren) {
return std::nullopt;
}
std::string name = stat.substr(open_paren + 1, close_paren - open_paren - 1);
// replace space in name with _
std::replace(&stat[open_paren], &stat[close_paren], ' ', '_');
std::istringstream iss(stat);
std::vector<std::string> v{std::istream_iterator<std::string>(iss),
std::istream_iterator<std::string>()};
try {
if (v.size() != StatPos::MAX_FIELD) {
throw std::invalid_argument("stat");
}
ProcStat p = {
.name = name,
.pid = stoi(v[StatPos::pid - 1]),
.state = v[StatPos::state - 1][0],
.ppid = stoi(v[StatPos::ppid - 1]),
.utime = stoul(v[StatPos::utime - 1]),
.stime = stoul(v[StatPos::stime - 1]),
.cutime = stol(v[StatPos::cutime - 1]),
.cstime = stol(v[StatPos::cstime - 1]),
.priority = stol(v[StatPos::priority - 1]),
.nice = stol(v[StatPos::nice - 1]),
.num_threads = stol(v[StatPos::num_threads - 1]),
.starttime = stoull(v[StatPos::starttime - 1]),
.vms = stoul(v[StatPos::vsize - 1]),
.rss = stol(v[StatPos::rss - 1]),
.processor = stoi(v[StatPos::processor - 1]),
};
return p;
} catch (const std::invalid_argument &e) {
LOGE("failed to parse procStat (%s) :%s", e.what(), stat.c_str());
} catch (const std::out_of_range &e) {
LOGE("failed to parse procStat (%s) :%s", e.what(), stat.c_str());
}
return std::nullopt;
}
// return list of PIDs from /proc
std::vector<int> pids() {
std::vector<int> ids;
DIR *d = opendir("/proc");
assert(d);
char *p_end;
struct dirent *de = NULL;
while ((de = readdir(d))) {
if (de->d_type == DT_DIR) {
int pid = strtol(de->d_name, &p_end, 10);
if (p_end == (de->d_name + strlen(de->d_name))) {
ids.push_back(pid);
}
}
}
closedir(d);
return ids;
}
// null-delimited cmdline arguments to vector
std::vector<std::string> cmdline(std::istream &stream) {
std::vector<std::string> ret;
std::string line;
while (std::getline(stream, line, '\0')) {
if (!line.empty()) {
ret.push_back(line);
}
}
return ret;
}
const ProcCache &getProcExtraInfo(int pid, const std::string &name) {
static std::unordered_map<pid_t, ProcCache> proc_cache;
ProcCache &cache = proc_cache[pid];
if (cache.pid != pid || cache.name != name) {
cache.pid = pid;
cache.name = name;
std::string proc_path = "/proc/" + std::to_string(pid);
cache.exe = util::readlink(proc_path + "/exe");
std::ifstream stream(proc_path + "/cmdline");
cache.cmdline = cmdline(stream);
}
return cache;
}
} // namespace Parser
const double jiffy = sysconf(_SC_CLK_TCK);
const size_t page_size = sysconf(_SC_PAGE_SIZE);
void buildCPUTimes(cereal::ProcLog::Builder &builder) {
std::ifstream stream("/proc/stat");
std::vector<CPUTime> stats = Parser::cpuTimes(stream);
auto log_cpu_times = builder.initCpuTimes(stats.size());
for (int i = 0; i < stats.size(); ++i) {
auto l = log_cpu_times[i];
const CPUTime &r = stats[i];
l.setCpuNum(r.id);
l.setUser(r.utime / jiffy);
l.setNice(r.ntime / jiffy);
l.setSystem(r.stime / jiffy);
l.setIdle(r.itime / jiffy);
l.setIowait(r.iowtime / jiffy);
l.setIrq(r.irqtime / jiffy);
l.setSoftirq(r.sirqtime / jiffy);
}
}
void buildMemInfo(cereal::ProcLog::Builder &builder) {
std::ifstream stream("/proc/meminfo");
auto mem_info = Parser::memInfo(stream);
auto mem = builder.initMem();
mem.setTotal(mem_info["MemTotal:"]);
mem.setFree(mem_info["MemFree:"]);
mem.setAvailable(mem_info["MemAvailable:"]);
mem.setBuffers(mem_info["Buffers:"]);
mem.setCached(mem_info["Cached:"]);
mem.setActive(mem_info["Active:"]);
mem.setInactive(mem_info["Inactive:"]);
mem.setShared(mem_info["Shmem:"]);
}
void buildProcs(cereal::ProcLog::Builder &builder) {
auto pids = Parser::pids();
std::vector<ProcStat> proc_stats;
proc_stats.reserve(pids.size());
for (int pid : pids) {
std::string path = "/proc/" + std::to_string(pid) + "/stat";
if (auto stat = Parser::procStat(util::read_file(path))) {
proc_stats.push_back(*stat);
}
}
auto procs = builder.initProcs(proc_stats.size());
for (size_t i = 0; i < proc_stats.size(); i++) {
auto l = procs[i];
const ProcStat &r = proc_stats[i];
l.setPid(r.pid);
l.setState(r.state);
l.setPpid(r.ppid);
l.setCpuUser(r.utime / jiffy);
l.setCpuSystem(r.stime / jiffy);
l.setCpuChildrenUser(r.cutime / jiffy);
l.setCpuChildrenSystem(r.cstime / jiffy);
l.setPriority(r.priority);
l.setNice(r.nice);
l.setNumThreads(r.num_threads);
l.setStartTime(r.starttime / jiffy);
l.setMemVms(r.vms);
l.setMemRss((uint64_t)r.rss * page_size);
l.setProcessor(r.processor);
l.setName(r.name);
const ProcCache &extra_info = Parser::getProcExtraInfo(r.pid, r.name);
l.setExe(extra_info.exe);
auto lcmdline = l.initCmdline(extra_info.cmdline.size());
for (size_t j = 0; j < lcmdline.size(); j++) {
lcmdline.set(j, extra_info.cmdline[j]);
}
}
}
void buildProcLogMessage(MessageBuilder &msg) {
auto procLog = msg.initEvent().initProcLog();
buildProcs(procLog);
buildCPUTimes(procLog);
buildMemInfo(procLog);
}

@ -1,40 +0,0 @@
#include <optional>
#include <string>
#include <unordered_map>
#include <vector>
#include "cereal/messaging/messaging.h"
struct CPUTime {
int id;
unsigned long utime, ntime, stime, itime;
unsigned long iowtime, irqtime, sirqtime;
};
struct ProcCache {
int pid;
std::string name, exe;
std::vector<std::string> cmdline;
};
struct ProcStat {
int pid, ppid, processor;
char state;
long cutime, cstime, priority, nice, num_threads, rss;
unsigned long utime, stime, vms;
unsigned long long starttime;
std::string name;
};
namespace Parser {
std::vector<int> pids();
std::optional<ProcStat> procStat(std::string stat);
std::vector<std::string> cmdline(std::istream &stream);
std::vector<CPUTime> cpuTimes(std::istream &stream);
std::unordered_map<std::string, uint64_t> memInfo(std::istream &stream);
const ProcCache &getProcExtraInfo(int pid, const std::string &name);
}; // namespace Parser
void buildProcLogMessage(MessageBuilder &msg);

@ -1 +0,0 @@
test_proclog

@ -1,142 +0,0 @@
#define CATCH_CONFIG_MAIN
#include "catch2/catch.hpp"
#include "common/util.h"
#include "system/proclogd/proclog.h"
const std::string allowed_states = "RSDTZtWXxKWPI";
TEST_CASE("Parser::procStat") {
SECTION("from string") {
const std::string stat_str =
"33012 (code )) S 32978 6620 6620 0 -1 4194368 2042377 0 144 0 24510 11627 0 "
"0 20 0 39 0 53077 830029824 62214 18446744073709551615 94257242783744 94257366235808 "
"140735738643248 0 0 0 0 4098 1073808632 0 0 0 17 2 0 0 2 0 0 94257370858656 94257371248232 "
"94257404952576 140735738648768 140735738648823 140735738648823 140735738650595 0";
auto stat = Parser::procStat(stat_str);
REQUIRE(stat);
REQUIRE(stat->pid == 33012);
REQUIRE(stat->name == "code )");
REQUIRE(stat->state == 'S');
REQUIRE(stat->ppid == 32978);
REQUIRE(stat->utime == 24510);
REQUIRE(stat->stime == 11627);
REQUIRE(stat->cutime == 0);
REQUIRE(stat->cstime == 0);
REQUIRE(stat->priority == 20);
REQUIRE(stat->nice == 0);
REQUIRE(stat->num_threads == 39);
REQUIRE(stat->starttime == 53077);
REQUIRE(stat->vms == 830029824);
REQUIRE(stat->rss == 62214);
REQUIRE(stat->processor == 2);
}
SECTION("all processes") {
std::vector<int> pids = Parser::pids();
REQUIRE(pids.size() > 1);
for (int pid : pids) {
std::string stat_path = "/proc/" + std::to_string(pid) + "/stat";
INFO(stat_path);
if (auto stat = Parser::procStat(util::read_file(stat_path))) {
REQUIRE(stat->pid == pid);
REQUIRE(allowed_states.find(stat->state) != std::string::npos);
} else {
REQUIRE(util::file_exists(stat_path) == false);
}
}
}
}
TEST_CASE("Parser::cpuTimes") {
SECTION("from string") {
std::string stat =
"cpu 0 0 0 0 0 0 0 0 0 0\n"
"cpu0 1 2 3 4 5 6 7 8 9 10\n"
"cpu1 1 2 3 4 5 6 7 8 9 10\n";
std::istringstream stream(stat);
auto stats = Parser::cpuTimes(stream);
REQUIRE(stats.size() == 2);
for (int i = 0; i < stats.size(); ++i) {
REQUIRE(stats[i].id == i);
REQUIRE(stats[i].utime == 1);
REQUIRE(stats[i].ntime ==2);
REQUIRE(stats[i].stime == 3);
REQUIRE(stats[i].itime == 4);
REQUIRE(stats[i].iowtime == 5);
REQUIRE(stats[i].irqtime == 6);
REQUIRE(stats[i].sirqtime == 7);
}
}
SECTION("all cpus") {
std::istringstream stream(util::read_file("/proc/stat"));
auto stats = Parser::cpuTimes(stream);
REQUIRE(stats.size() == sysconf(_SC_NPROCESSORS_ONLN));
for (int i = 0; i < stats.size(); ++i) {
REQUIRE(stats[i].id == i);
}
}
}
TEST_CASE("Parser::memInfo") {
SECTION("from string") {
std::istringstream stream("MemTotal: 1024 kb\nMemFree: 2048 kb\n");
auto meminfo = Parser::memInfo(stream);
REQUIRE(meminfo["MemTotal:"] == 1024 * 1024);
REQUIRE(meminfo["MemFree:"] == 2048 * 1024);
}
SECTION("from /proc/meminfo") {
std::string require_keys[] = {"MemTotal:", "MemFree:", "MemAvailable:", "Buffers:", "Cached:", "Active:", "Inactive:", "Shmem:"};
std::istringstream stream(util::read_file("/proc/meminfo"));
auto meminfo = Parser::memInfo(stream);
for (auto &key : require_keys) {
REQUIRE(meminfo.find(key) != meminfo.end());
REQUIRE(meminfo[key] > 0);
}
}
}
void test_cmdline(std::string cmdline, const std::vector<std::string> requires) {
std::stringstream ss;
ss.write(&cmdline[0], cmdline.size());
auto cmds = Parser::cmdline(ss);
REQUIRE(cmds.size() == requires.size());
for (int i = 0; i < requires.size(); ++i) {
REQUIRE(cmds[i] == requires[i]);
}
}
TEST_CASE("Parser::cmdline") {
test_cmdline(std::string("a\0b\0c\0", 7), {"a", "b", "c"});
test_cmdline(std::string("a\0\0c\0", 6), {"a", "c"});
test_cmdline(std::string("a\0b\0c\0\0\0", 9), {"a", "b", "c"});
}
TEST_CASE("buildProcLoggerMessage") {
MessageBuilder msg;
buildProcLogMessage(msg);
kj::Array<capnp::word> buf = capnp::messageToFlatArray(msg);
capnp::FlatArrayMessageReader reader(buf);
auto log = reader.getRoot<cereal::Event>().getProcLog();
REQUIRE(log.totalSize().wordCount > 0);
// test cereal::ProcLog::CPUTimes
auto cpu_times = log.getCpuTimes();
REQUIRE(cpu_times.size() == sysconf(_SC_NPROCESSORS_ONLN));
REQUIRE(cpu_times[cpu_times.size() - 1].getCpuNum() == cpu_times.size() - 1);
// test cereal::ProcLog::Mem
auto mem = log.getMem();
REQUIRE(mem.getTotal() > 0);
REQUIRE(mem.getShared() > 0);
// test cereal::ProcLog::Process
auto procs = log.getProcs();
for (auto p : procs) {
REQUIRE(allowed_states.find(p.getState()) != std::string::npos);
if (p.getPid() == ::getpid()) {
REQUIRE(p.getName() == "test_proclog");
REQUIRE(p.getState() == 'R');
REQUIRE_THAT(p.getExe().cStr(), Catch::Matchers::Contains("test_proclog"));
REQUIRE_THAT(p.getCmdline()[0], Catch::Matchers::Contains("test_proclog"));
}
}
}

@ -1,2 +0,0 @@
ubloxd
tests/test_glonass_runner

@ -1,20 +1,11 @@
Import('env', 'common', 'messaging')
loc_libs = [messaging, common, 'kaitai', 'pthread']
Import('env')
if GetOption('kaitai'):
generated = Dir('generated').srcnode().abspath
cmd = f"kaitai-struct-compiler --target cpp_stl --outdir {generated} $SOURCES"
env.Command(['generated/ubx.cpp', 'generated/ubx.h'], 'ubx.ksy', cmd)
env.Command(['generated/gps.cpp', 'generated/gps.h'], 'gps.ksy', cmd)
glonass = env.Command(['generated/glonass.cpp', 'generated/glonass.h'], 'glonass.ksy', cmd)
current_dir = Dir('./generated/').srcnode().abspath
python_cmd = f"kaitai-struct-compiler --target python --outdir {current_dir} $SOURCES"
env.Command(File('./generated/ubx.py'), 'ubx.ksy', python_cmd)
env.Command(File('./generated/gps.py'), 'gps.ksy', python_cmd)
env.Command(File('./generated/glonass.py'), 'glonass.ksy', python_cmd)
# kaitai issue: https://github.com/kaitai-io/kaitai_struct/issues/910
patch = env.Command(None, 'glonass_fix.patch', 'git apply $SOURCES')
env.Depends(patch, glonass)
glonass_obj = env.Object('generated/glonass.cpp')
env.Program("ubloxd", ["ubloxd.cc", "ublox_msg.cc", "generated/ubx.cpp", "generated/gps.cpp", glonass_obj], LIBS=loc_libs)
if GetOption('extras'):
env.Program("tests/test_glonass_runner", ['tests/test_glonass_runner.cc', 'tests/test_glonass_kaitai.cc', glonass_obj], LIBS=[loc_libs])
py_glonass_fix = env.Command(None, File('./generated/glonass.py'), "sed -i 's/self._io.align_to_byte()/# self._io.align_to_byte()/' $SOURCES")
env.Depends(py_glonass_fix, File('./generated/glonass.py'))

@ -1,353 +0,0 @@
// This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
#include "glonass.h"
glonass_t::glonass_t(kaitai::kstream* p__io, kaitai::kstruct* p__parent, glonass_t* p__root) : kaitai::kstruct(p__io) {
m__parent = p__parent;
m__root = this;
try {
_read();
} catch(...) {
_clean_up();
throw;
}
}
void glonass_t::_read() {
m_idle_chip = m__io->read_bits_int_be(1);
m_string_number = m__io->read_bits_int_be(4);
//m__io->align_to_byte();
switch (string_number()) {
case 4: {
m_data = new string_4_t(m__io, this, m__root);
break;
}
case 1: {
m_data = new string_1_t(m__io, this, m__root);
break;
}
case 3: {
m_data = new string_3_t(m__io, this, m__root);
break;
}
case 5: {
m_data = new string_5_t(m__io, this, m__root);
break;
}
case 2: {
m_data = new string_2_t(m__io, this, m__root);
break;
}
default: {
m_data = new string_non_immediate_t(m__io, this, m__root);
break;
}
}
m_hamming_code = m__io->read_bits_int_be(8);
m_pad_1 = m__io->read_bits_int_be(11);
m_superframe_number = m__io->read_bits_int_be(16);
m_pad_2 = m__io->read_bits_int_be(8);
m_frame_number = m__io->read_bits_int_be(8);
}
glonass_t::~glonass_t() {
_clean_up();
}
void glonass_t::_clean_up() {
if (m_data) {
delete m_data; m_data = 0;
}
}
glonass_t::string_4_t::string_4_t(kaitai::kstream* p__io, glonass_t* p__parent, glonass_t* p__root) : kaitai::kstruct(p__io) {
m__parent = p__parent;
m__root = p__root;
f_tau_n = false;
f_delta_tau_n = false;
try {
_read();
} catch(...) {
_clean_up();
throw;
}
}
void glonass_t::string_4_t::_read() {
m_tau_n_sign = m__io->read_bits_int_be(1);
m_tau_n_value = m__io->read_bits_int_be(21);
m_delta_tau_n_sign = m__io->read_bits_int_be(1);
m_delta_tau_n_value = m__io->read_bits_int_be(4);
m_e_n = m__io->read_bits_int_be(5);
m_not_used_1 = m__io->read_bits_int_be(14);
m_p4 = m__io->read_bits_int_be(1);
m_f_t = m__io->read_bits_int_be(4);
m_not_used_2 = m__io->read_bits_int_be(3);
m_n_t = m__io->read_bits_int_be(11);
m_n = m__io->read_bits_int_be(5);
m_m = m__io->read_bits_int_be(2);
}
glonass_t::string_4_t::~string_4_t() {
_clean_up();
}
void glonass_t::string_4_t::_clean_up() {
}
int32_t glonass_t::string_4_t::tau_n() {
if (f_tau_n)
return m_tau_n;
m_tau_n = ((tau_n_sign()) ? ((tau_n_value() * -1)) : (tau_n_value()));
f_tau_n = true;
return m_tau_n;
}
int32_t glonass_t::string_4_t::delta_tau_n() {
if (f_delta_tau_n)
return m_delta_tau_n;
m_delta_tau_n = ((delta_tau_n_sign()) ? ((delta_tau_n_value() * -1)) : (delta_tau_n_value()));
f_delta_tau_n = true;
return m_delta_tau_n;
}
// Fallback body used for string numbers with no dedicated decoder
// (see glonass_t::_read's switch): the 72 payload bits are captured
// as two opaque integers without interpretation.
glonass_t::string_non_immediate_t::string_non_immediate_t(kaitai::kstream* p__io, glonass_t* p__parent, glonass_t* p__root) : kaitai::kstruct(p__io) {
m__parent = p__parent;
m__root = p__root;
try {
_read();
} catch(...) {
_clean_up();
throw;
}
}
// 64 + 8 raw bits; read_bits_int_be is capped at 64 bits per call,
// hence the split.
void glonass_t::string_non_immediate_t::_read() {
m_data_1 = m__io->read_bits_int_be(64);
m_data_2 = m__io->read_bits_int_be(8);
}
glonass_t::string_non_immediate_t::~string_non_immediate_t() {
_clean_up();
}
// No owned resources.
void glonass_t::string_non_immediate_t::_clean_up() {
}
// Parser for GLONASS navigation message string no. 5 (field names follow
// the GLONASS ICD: n_a, tau_c, n_4, tau_gps, l_n).
glonass_t::string_5_t::string_5_t(kaitai::kstream* p__io, glonass_t* p__parent, glonass_t* p__root) : kaitai::kstruct(p__io) {
m__parent = p__parent;
m__root = p__root;
try {
_read();
} catch(...) {
_clean_up();
throw;
}
}
// Fixed big-endian bit layout (72 bits); wire order, do not reorder.
void glonass_t::string_5_t::_read() {
m_n_a = m__io->read_bits_int_be(11);
m_tau_c = m__io->read_bits_int_be(32);
m_not_used = m__io->read_bits_int_be(1);
m_n_4 = m__io->read_bits_int_be(5);
m_tau_gps = m__io->read_bits_int_be(22);
m_l_n = m__io->read_bits_int_be(1);
}
glonass_t::string_5_t::~string_5_t() {
_clean_up();
}
// No owned resources.
void glonass_t::string_5_t::_clean_up() {
}
// Parser for GLONASS navigation message string no. 1: X component of the
// SV state vector, each value transmitted as sign + magnitude bits.
glonass_t::string_1_t::string_1_t(kaitai::kstream* p__io, glonass_t* p__parent, glonass_t* p__root) : kaitai::kstruct(p__io) {
m__parent = p__parent;
m__root = p__root;
// Lazy value-instance caches for the signed conversions below.
f_x_vel = false;
f_x_accel = false;
f_x = false;
try {
_read();
} catch(...) {
_clean_up();
throw;
}
}
// Fixed big-endian bit layout (72 bits); wire order, do not reorder.
void glonass_t::string_1_t::_read() {
m_not_used = m__io->read_bits_int_be(2);
m_p1 = m__io->read_bits_int_be(2);
m_t_k = m__io->read_bits_int_be(12);
m_x_vel_sign = m__io->read_bits_int_be(1);
m_x_vel_value = m__io->read_bits_int_be(23);
m_x_accel_sign = m__io->read_bits_int_be(1);
m_x_accel_value = m__io->read_bits_int_be(4);
m_x_sign = m__io->read_bits_int_be(1);
m_x_value = m__io->read_bits_int_be(26);
}
glonass_t::string_1_t::~string_1_t() {
_clean_up();
}
// No owned resources.
void glonass_t::string_1_t::_clean_up() {
}
// x_vel: sign/magnitude-coded (1 + 23 bits); lazily cached via f_x_vel.
int32_t glonass_t::string_1_t::x_vel() {
    if (!f_x_vel) {
        const int32_t magnitude = static_cast<int32_t>(x_vel_value());
        m_x_vel = x_vel_sign() ? -magnitude : magnitude;
        f_x_vel = true;
    }
    return m_x_vel;
}
// x_accel: sign/magnitude-coded (1 + 4 bits); lazily cached via f_x_accel.
int32_t glonass_t::string_1_t::x_accel() {
    if (!f_x_accel) {
        const int32_t magnitude = static_cast<int32_t>(x_accel_value());
        m_x_accel = x_accel_sign() ? -magnitude : magnitude;
        f_x_accel = true;
    }
    return m_x_accel;
}
// x: sign/magnitude-coded (1 + 26 bits); lazily cached via f_x.
int32_t glonass_t::string_1_t::x() {
    if (!f_x) {
        const int32_t magnitude = static_cast<int32_t>(x_value());
        m_x = x_sign() ? -magnitude : magnitude;
        f_x = true;
    }
    return m_x;
}
// Parser for GLONASS navigation message string no. 2: Y component of the
// SV state vector, each value transmitted as sign + magnitude bits.
glonass_t::string_2_t::string_2_t(kaitai::kstream* p__io, glonass_t* p__parent, glonass_t* p__root) : kaitai::kstruct(p__io) {
m__parent = p__parent;
m__root = p__root;
// Lazy value-instance caches for the signed conversions below.
f_y_vel = false;
f_y_accel = false;
f_y = false;
try {
_read();
} catch(...) {
_clean_up();
throw;
}
}
// Fixed big-endian bit layout (72 bits); wire order, do not reorder.
void glonass_t::string_2_t::_read() {
m_b_n = m__io->read_bits_int_be(3);
m_p2 = m__io->read_bits_int_be(1);
m_t_b = m__io->read_bits_int_be(7);
m_not_used = m__io->read_bits_int_be(5);
m_y_vel_sign = m__io->read_bits_int_be(1);
m_y_vel_value = m__io->read_bits_int_be(23);
m_y_accel_sign = m__io->read_bits_int_be(1);
m_y_accel_value = m__io->read_bits_int_be(4);
m_y_sign = m__io->read_bits_int_be(1);
m_y_value = m__io->read_bits_int_be(26);
}
glonass_t::string_2_t::~string_2_t() {
_clean_up();
}
// No owned resources.
void glonass_t::string_2_t::_clean_up() {
}
// y_vel: sign/magnitude-coded (1 + 23 bits); lazily cached via f_y_vel.
int32_t glonass_t::string_2_t::y_vel() {
    if (!f_y_vel) {
        const int32_t magnitude = static_cast<int32_t>(y_vel_value());
        m_y_vel = y_vel_sign() ? -magnitude : magnitude;
        f_y_vel = true;
    }
    return m_y_vel;
}
// y_accel: sign/magnitude-coded (1 + 4 bits); lazily cached via f_y_accel.
int32_t glonass_t::string_2_t::y_accel() {
    if (!f_y_accel) {
        const int32_t magnitude = static_cast<int32_t>(y_accel_value());
        m_y_accel = y_accel_sign() ? -magnitude : magnitude;
        f_y_accel = true;
    }
    return m_y_accel;
}
// y: sign/magnitude-coded (1 + 26 bits); lazily cached via f_y.
int32_t glonass_t::string_2_t::y() {
    if (!f_y) {
        const int32_t magnitude = static_cast<int32_t>(y_value());
        m_y = y_sign() ? -magnitude : magnitude;
        f_y = true;
    }
    return m_y;
}
// Parser for GLONASS navigation message string no. 3: Z component of the
// SV state vector plus gamma_n, each transmitted as sign + magnitude bits.
glonass_t::string_3_t::string_3_t(kaitai::kstream* p__io, glonass_t* p__parent, glonass_t* p__root) : kaitai::kstruct(p__io) {
m__parent = p__parent;
m__root = p__root;
// Lazy value-instance caches for the signed conversions below.
f_gamma_n = false;
f_z_vel = false;
f_z_accel = false;
f_z = false;
try {
_read();
} catch(...) {
_clean_up();
throw;
}
}
// Fixed big-endian bit layout (72 bits); wire order, do not reorder.
void glonass_t::string_3_t::_read() {
m_p3 = m__io->read_bits_int_be(1);
m_gamma_n_sign = m__io->read_bits_int_be(1);
m_gamma_n_value = m__io->read_bits_int_be(10);
m_not_used = m__io->read_bits_int_be(1);
m_p = m__io->read_bits_int_be(2);
m_l_n = m__io->read_bits_int_be(1);
m_z_vel_sign = m__io->read_bits_int_be(1);
m_z_vel_value = m__io->read_bits_int_be(23);
m_z_accel_sign = m__io->read_bits_int_be(1);
m_z_accel_value = m__io->read_bits_int_be(4);
m_z_sign = m__io->read_bits_int_be(1);
m_z_value = m__io->read_bits_int_be(26);
}
glonass_t::string_3_t::~string_3_t() {
_clean_up();
}
// No owned resources.
void glonass_t::string_3_t::_clean_up() {
}
// gamma_n: sign/magnitude-coded (1 + 10 bits); lazily cached via f_gamma_n.
int32_t glonass_t::string_3_t::gamma_n() {
    if (!f_gamma_n) {
        const int32_t magnitude = static_cast<int32_t>(gamma_n_value());
        m_gamma_n = gamma_n_sign() ? -magnitude : magnitude;
        f_gamma_n = true;
    }
    return m_gamma_n;
}
// z_vel: sign/magnitude-coded (1 + 23 bits); lazily cached via f_z_vel.
int32_t glonass_t::string_3_t::z_vel() {
    if (!f_z_vel) {
        const int32_t magnitude = static_cast<int32_t>(z_vel_value());
        m_z_vel = z_vel_sign() ? -magnitude : magnitude;
        f_z_vel = true;
    }
    return m_z_vel;
}
// z_accel: sign/magnitude-coded (1 + 4 bits); lazily cached via f_z_accel.
int32_t glonass_t::string_3_t::z_accel() {
    if (!f_z_accel) {
        const int32_t magnitude = static_cast<int32_t>(z_accel_value());
        m_z_accel = z_accel_sign() ? -magnitude : magnitude;
        f_z_accel = true;
    }
    return m_z_accel;
}
// z: sign/magnitude-coded (1 + 26 bits); lazily cached via f_z.
int32_t glonass_t::string_3_t::z() {
    if (!f_z) {
        const int32_t magnitude = static_cast<int32_t>(z_value());
        m_z = z_sign() ? -magnitude : magnitude;
        f_z = true;
    }
    return m_z;
}

@ -1,375 +0,0 @@
#ifndef GLONASS_H_
#define GLONASS_H_
// This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
#include "kaitai/kaitaistruct.h"
#include <stdint.h>
#if KAITAI_STRUCT_VERSION < 9000L
#error "Incompatible Kaitai Struct C++/STL API: version 0.9 or later is required"
#endif
// Generated kaitai-struct parser for one GLONASS navigation message record:
// a 5-bit header (idle_chip + string_number), a 72-bit string body whose
// concrete type depends on string_number (strings 1-5 decoded, anything
// else falls back to string_non_immediate_t), then hamming code and
// frame/superframe metadata. Field names follow the GLONASS ICD.
// Edit the source .ksy and regenerate rather than patching this header.
class glonass_t : public kaitai::kstruct {

public:
    class string_4_t;
    class string_non_immediate_t;
    class string_5_t;
    class string_1_t;
    class string_2_t;
    class string_3_t;

    glonass_t(kaitai::kstream* p__io, kaitai::kstruct* p__parent = 0, glonass_t* p__root = 0);

private:
    void _read();
    void _clean_up();

public:
    ~glonass_t();

    // String no. 4 body; tau_n/delta_tau_n are lazily computed signed views
    // over the raw sign/magnitude fields.
    class string_4_t : public kaitai::kstruct {

    public:

        string_4_t(kaitai::kstream* p__io, glonass_t* p__parent = 0, glonass_t* p__root = 0);

    private:
        void _read();
        void _clean_up();

    public:
        ~string_4_t();

    private:
        bool f_tau_n;
        int32_t m_tau_n;

    public:
        int32_t tau_n();

    private:
        bool f_delta_tau_n;
        int32_t m_delta_tau_n;

    public:
        int32_t delta_tau_n();

    private:
        bool m_tau_n_sign;
        uint64_t m_tau_n_value;
        bool m_delta_tau_n_sign;
        uint64_t m_delta_tau_n_value;
        uint64_t m_e_n;
        uint64_t m_not_used_1;
        bool m_p4;
        uint64_t m_f_t;
        uint64_t m_not_used_2;
        uint64_t m_n_t;
        uint64_t m_n;
        uint64_t m_m;
        glonass_t* m__root;
        glonass_t* m__parent;

    public:
        bool tau_n_sign() const { return m_tau_n_sign; }
        uint64_t tau_n_value() const { return m_tau_n_value; }
        bool delta_tau_n_sign() const { return m_delta_tau_n_sign; }
        uint64_t delta_tau_n_value() const { return m_delta_tau_n_value; }
        uint64_t e_n() const { return m_e_n; }
        uint64_t not_used_1() const { return m_not_used_1; }
        bool p4() const { return m_p4; }
        uint64_t f_t() const { return m_f_t; }
        uint64_t not_used_2() const { return m_not_used_2; }
        uint64_t n_t() const { return m_n_t; }
        uint64_t n() const { return m_n; }
        uint64_t m() const { return m_m; }
        glonass_t* _root() const { return m__root; }
        glonass_t* _parent() const { return m__parent; }
    };

    // Fallback body for string numbers without a dedicated decoder: the 72
    // payload bits are kept as two opaque integers (64 + 8 bits).
    class string_non_immediate_t : public kaitai::kstruct {

    public:

        string_non_immediate_t(kaitai::kstream* p__io, glonass_t* p__parent = 0, glonass_t* p__root = 0);

    private:
        void _read();
        void _clean_up();

    public:
        ~string_non_immediate_t();

    private:
        uint64_t m_data_1;
        uint64_t m_data_2;
        glonass_t* m__root;
        glonass_t* m__parent;

    public:
        uint64_t data_1() const { return m_data_1; }
        uint64_t data_2() const { return m_data_2; }
        glonass_t* _root() const { return m__root; }
        glonass_t* _parent() const { return m__parent; }
    };

    // String no. 5 body.
    class string_5_t : public kaitai::kstruct {

    public:

        string_5_t(kaitai::kstream* p__io, glonass_t* p__parent = 0, glonass_t* p__root = 0);

    private:
        void _read();
        void _clean_up();

    public:
        ~string_5_t();

    private:
        uint64_t m_n_a;
        uint64_t m_tau_c;
        bool m_not_used;
        uint64_t m_n_4;
        uint64_t m_tau_gps;
        bool m_l_n;
        glonass_t* m__root;
        glonass_t* m__parent;

    public:
        uint64_t n_a() const { return m_n_a; }
        uint64_t tau_c() const { return m_tau_c; }
        bool not_used() const { return m_not_used; }
        uint64_t n_4() const { return m_n_4; }
        uint64_t tau_gps() const { return m_tau_gps; }
        bool l_n() const { return m_l_n; }
        glonass_t* _root() const { return m__root; }
        glonass_t* _parent() const { return m__parent; }
    };

    // String no. 1 body (X component); x/x_vel/x_accel are lazily computed
    // signed views over raw sign/magnitude fields.
    class string_1_t : public kaitai::kstruct {

    public:

        string_1_t(kaitai::kstream* p__io, glonass_t* p__parent = 0, glonass_t* p__root = 0);

    private:
        void _read();
        void _clean_up();

    public:
        ~string_1_t();

    private:
        bool f_x_vel;
        int32_t m_x_vel;

    public:
        int32_t x_vel();

    private:
        bool f_x_accel;
        int32_t m_x_accel;

    public:
        int32_t x_accel();

    private:
        bool f_x;
        int32_t m_x;

    public:
        int32_t x();

    private:
        uint64_t m_not_used;
        uint64_t m_p1;
        uint64_t m_t_k;
        bool m_x_vel_sign;
        uint64_t m_x_vel_value;
        bool m_x_accel_sign;
        uint64_t m_x_accel_value;
        bool m_x_sign;
        uint64_t m_x_value;
        glonass_t* m__root;
        glonass_t* m__parent;

    public:
        uint64_t not_used() const { return m_not_used; }
        uint64_t p1() const { return m_p1; }
        uint64_t t_k() const { return m_t_k; }
        bool x_vel_sign() const { return m_x_vel_sign; }
        uint64_t x_vel_value() const { return m_x_vel_value; }
        bool x_accel_sign() const { return m_x_accel_sign; }
        uint64_t x_accel_value() const { return m_x_accel_value; }
        bool x_sign() const { return m_x_sign; }
        uint64_t x_value() const { return m_x_value; }
        glonass_t* _root() const { return m__root; }
        glonass_t* _parent() const { return m__parent; }
    };

    // String no. 2 body (Y component); same lazy signed-view pattern.
    class string_2_t : public kaitai::kstruct {

    public:

        string_2_t(kaitai::kstream* p__io, glonass_t* p__parent = 0, glonass_t* p__root = 0);

    private:
        void _read();
        void _clean_up();

    public:
        ~string_2_t();

    private:
        bool f_y_vel;
        int32_t m_y_vel;

    public:
        int32_t y_vel();

    private:
        bool f_y_accel;
        int32_t m_y_accel;

    public:
        int32_t y_accel();

    private:
        bool f_y;
        int32_t m_y;

    public:
        int32_t y();

    private:
        uint64_t m_b_n;
        bool m_p2;
        uint64_t m_t_b;
        uint64_t m_not_used;
        bool m_y_vel_sign;
        uint64_t m_y_vel_value;
        bool m_y_accel_sign;
        uint64_t m_y_accel_value;
        bool m_y_sign;
        uint64_t m_y_value;
        glonass_t* m__root;
        glonass_t* m__parent;

    public:
        uint64_t b_n() const { return m_b_n; }
        bool p2() const { return m_p2; }
        uint64_t t_b() const { return m_t_b; }
        uint64_t not_used() const { return m_not_used; }
        bool y_vel_sign() const { return m_y_vel_sign; }
        uint64_t y_vel_value() const { return m_y_vel_value; }
        bool y_accel_sign() const { return m_y_accel_sign; }
        uint64_t y_accel_value() const { return m_y_accel_value; }
        bool y_sign() const { return m_y_sign; }
        uint64_t y_value() const { return m_y_value; }
        glonass_t* _root() const { return m__root; }
        glonass_t* _parent() const { return m__parent; }
    };

    // String no. 3 body (Z component plus gamma_n); same lazy signed-view
    // pattern.
    class string_3_t : public kaitai::kstruct {

    public:

        string_3_t(kaitai::kstream* p__io, glonass_t* p__parent = 0, glonass_t* p__root = 0);

    private:
        void _read();
        void _clean_up();

    public:
        ~string_3_t();

    private:
        bool f_gamma_n;
        int32_t m_gamma_n;

    public:
        int32_t gamma_n();

    private:
        bool f_z_vel;
        int32_t m_z_vel;

    public:
        int32_t z_vel();

    private:
        bool f_z_accel;
        int32_t m_z_accel;

    public:
        int32_t z_accel();

    private:
        bool f_z;
        int32_t m_z;

    public:
        int32_t z();

    private:
        bool m_p3;
        bool m_gamma_n_sign;
        uint64_t m_gamma_n_value;
        bool m_not_used;
        uint64_t m_p;
        bool m_l_n;
        bool m_z_vel_sign;
        uint64_t m_z_vel_value;
        bool m_z_accel_sign;
        uint64_t m_z_accel_value;
        bool m_z_sign;
        uint64_t m_z_value;
        glonass_t* m__root;
        glonass_t* m__parent;

    public:
        bool p3() const { return m_p3; }
        bool gamma_n_sign() const { return m_gamma_n_sign; }
        uint64_t gamma_n_value() const { return m_gamma_n_value; }
        bool not_used() const { return m_not_used; }
        uint64_t p() const { return m_p; }
        bool l_n() const { return m_l_n; }
        bool z_vel_sign() const { return m_z_vel_sign; }
        uint64_t z_vel_value() const { return m_z_vel_value; }
        bool z_accel_sign() const { return m_z_accel_sign; }
        uint64_t z_accel_value() const { return m_z_accel_value; }
        bool z_sign() const { return m_z_sign; }
        uint64_t z_value() const { return m_z_value; }
        glonass_t* _root() const { return m__root; }
        glonass_t* _parent() const { return m__parent; }
    };

private:
    bool m_idle_chip;
    uint64_t m_string_number;
    kaitai::kstruct* m_data;  // owned; concrete type selected by m_string_number
    uint64_t m_hamming_code;
    uint64_t m_pad_1;
    uint64_t m_superframe_number;
    uint64_t m_pad_2;
    uint64_t m_frame_number;
    glonass_t* m__root;
    kaitai::kstruct* m__parent;

public:
    bool idle_chip() const { return m_idle_chip; }
    uint64_t string_number() const { return m_string_number; }
    kaitai::kstruct* data() const { return m_data; }
    uint64_t hamming_code() const { return m_hamming_code; }
    uint64_t pad_1() const { return m_pad_1; }
    uint64_t superframe_number() const { return m_superframe_number; }
    uint64_t pad_2() const { return m_pad_2; }
    uint64_t frame_number() const { return m_frame_number; }
    glonass_t* _root() const { return m__root; }
    kaitai::kstruct* _parent() const { return m__parent; }
};
#endif // GLONASS_H_

@ -0,0 +1,247 @@
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
import kaitaistruct
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
if getattr(kaitaistruct, 'API_VERSION', (0, 9)) < (0, 9):
raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))
class Glonass(KaitaiStruct):
    """Generated kaitai-struct parser for one GLONASS navigation message record.

    Layout: a 5-bit header (idle_chip + string_number), a 72-bit string body
    whose concrete class depends on string_number (strings 1-5 decoded,
    anything else falls back to StringNonImmediate), then hamming code and
    frame/superframe metadata. Field names follow the GLONASS ICD. Edit the
    source .ksy and regenerate rather than patching this file.
    """
    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self._read()

    def _read(self):
        # Reads are in wire order; do not reorder.
        self.idle_chip = self._io.read_bits_int_be(1) != 0
        self.string_number = self._io.read_bits_int_be(4)
        # workaround for kaitai bit alignment issue (see glonass_fix.patch for C++)
        # self._io.align_to_byte()
        _on = self.string_number
        if _on == 4:
            self.data = Glonass.String4(self._io, self, self._root)
        elif _on == 1:
            self.data = Glonass.String1(self._io, self, self._root)
        elif _on == 3:
            self.data = Glonass.String3(self._io, self, self._root)
        elif _on == 5:
            self.data = Glonass.String5(self._io, self, self._root)
        elif _on == 2:
            self.data = Glonass.String2(self._io, self, self._root)
        else:
            # No dedicated decoder for this string number; keep raw bits.
            self.data = Glonass.StringNonImmediate(self._io, self, self._root)
        self.hamming_code = self._io.read_bits_int_be(8)
        self.pad_1 = self._io.read_bits_int_be(11)
        self.superframe_number = self._io.read_bits_int_be(16)
        self.pad_2 = self._io.read_bits_int_be(8)
        self.frame_number = self._io.read_bits_int_be(8)

    class String4(KaitaiStruct):
        """String no. 4 body; tau_n/delta_tau_n expose signed views over the
        raw sign/magnitude fields, computed lazily and cached."""
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.tau_n_sign = self._io.read_bits_int_be(1) != 0
            self.tau_n_value = self._io.read_bits_int_be(21)
            self.delta_tau_n_sign = self._io.read_bits_int_be(1) != 0
            self.delta_tau_n_value = self._io.read_bits_int_be(4)
            self.e_n = self._io.read_bits_int_be(5)
            self.not_used_1 = self._io.read_bits_int_be(14)
            self.p4 = self._io.read_bits_int_be(1) != 0
            self.f_t = self._io.read_bits_int_be(4)
            self.not_used_2 = self._io.read_bits_int_be(3)
            self.n_t = self._io.read_bits_int_be(11)
            self.n = self._io.read_bits_int_be(5)
            self.m = self._io.read_bits_int_be(2)

        @property
        def tau_n(self):
            # Sign/magnitude -> signed int, cached after first access.
            if hasattr(self, '_m_tau_n'):
                return self._m_tau_n

            self._m_tau_n = ((self.tau_n_value * -1) if self.tau_n_sign else self.tau_n_value)
            return getattr(self, '_m_tau_n', None)

        @property
        def delta_tau_n(self):
            # Sign/magnitude -> signed int, cached after first access.
            if hasattr(self, '_m_delta_tau_n'):
                return self._m_delta_tau_n

            self._m_delta_tau_n = ((self.delta_tau_n_value * -1) if self.delta_tau_n_sign else self.delta_tau_n_value)
            return getattr(self, '_m_delta_tau_n', None)

    class StringNonImmediate(KaitaiStruct):
        """Fallback body: the 72 payload bits kept as two opaque integers
        (64 + 8 bits, split because of the 64-bit read limit)."""
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.data_1 = self._io.read_bits_int_be(64)
            self.data_2 = self._io.read_bits_int_be(8)

    class String5(KaitaiStruct):
        """String no. 5 body."""
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.n_a = self._io.read_bits_int_be(11)
            self.tau_c = self._io.read_bits_int_be(32)
            self.not_used = self._io.read_bits_int_be(1) != 0
            self.n_4 = self._io.read_bits_int_be(5)
            self.tau_gps = self._io.read_bits_int_be(22)
            self.l_n = self._io.read_bits_int_be(1) != 0

    class String1(KaitaiStruct):
        """String no. 1 body (X component); x/x_vel/x_accel are lazy signed
        views over the raw sign/magnitude fields."""
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.not_used = self._io.read_bits_int_be(2)
            self.p1 = self._io.read_bits_int_be(2)
            self.t_k = self._io.read_bits_int_be(12)
            self.x_vel_sign = self._io.read_bits_int_be(1) != 0
            self.x_vel_value = self._io.read_bits_int_be(23)
            self.x_accel_sign = self._io.read_bits_int_be(1) != 0
            self.x_accel_value = self._io.read_bits_int_be(4)
            self.x_sign = self._io.read_bits_int_be(1) != 0
            self.x_value = self._io.read_bits_int_be(26)

        @property
        def x_vel(self):
            if hasattr(self, '_m_x_vel'):
                return self._m_x_vel

            self._m_x_vel = ((self.x_vel_value * -1) if self.x_vel_sign else self.x_vel_value)
            return getattr(self, '_m_x_vel', None)

        @property
        def x_accel(self):
            if hasattr(self, '_m_x_accel'):
                return self._m_x_accel

            self._m_x_accel = ((self.x_accel_value * -1) if self.x_accel_sign else self.x_accel_value)
            return getattr(self, '_m_x_accel', None)

        @property
        def x(self):
            if hasattr(self, '_m_x'):
                return self._m_x

            self._m_x = ((self.x_value * -1) if self.x_sign else self.x_value)
            return getattr(self, '_m_x', None)

    class String2(KaitaiStruct):
        """String no. 2 body (Y component); same lazy signed-view pattern."""
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.b_n = self._io.read_bits_int_be(3)
            self.p2 = self._io.read_bits_int_be(1) != 0
            self.t_b = self._io.read_bits_int_be(7)
            self.not_used = self._io.read_bits_int_be(5)
            self.y_vel_sign = self._io.read_bits_int_be(1) != 0
            self.y_vel_value = self._io.read_bits_int_be(23)
            self.y_accel_sign = self._io.read_bits_int_be(1) != 0
            self.y_accel_value = self._io.read_bits_int_be(4)
            self.y_sign = self._io.read_bits_int_be(1) != 0
            self.y_value = self._io.read_bits_int_be(26)

        @property
        def y_vel(self):
            if hasattr(self, '_m_y_vel'):
                return self._m_y_vel

            self._m_y_vel = ((self.y_vel_value * -1) if self.y_vel_sign else self.y_vel_value)
            return getattr(self, '_m_y_vel', None)

        @property
        def y_accel(self):
            if hasattr(self, '_m_y_accel'):
                return self._m_y_accel

            self._m_y_accel = ((self.y_accel_value * -1) if self.y_accel_sign else self.y_accel_value)
            return getattr(self, '_m_y_accel', None)

        @property
        def y(self):
            if hasattr(self, '_m_y'):
                return self._m_y

            self._m_y = ((self.y_value * -1) if self.y_sign else self.y_value)
            return getattr(self, '_m_y', None)

    class String3(KaitaiStruct):
        """String no. 3 body (Z component plus gamma_n); same lazy
        signed-view pattern."""
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.p3 = self._io.read_bits_int_be(1) != 0
            self.gamma_n_sign = self._io.read_bits_int_be(1) != 0
            self.gamma_n_value = self._io.read_bits_int_be(10)
            self.not_used = self._io.read_bits_int_be(1) != 0
            self.p = self._io.read_bits_int_be(2)
            self.l_n = self._io.read_bits_int_be(1) != 0
            self.z_vel_sign = self._io.read_bits_int_be(1) != 0
            self.z_vel_value = self._io.read_bits_int_be(23)
            self.z_accel_sign = self._io.read_bits_int_be(1) != 0
            self.z_accel_value = self._io.read_bits_int_be(4)
            self.z_sign = self._io.read_bits_int_be(1) != 0
            self.z_value = self._io.read_bits_int_be(26)

        @property
        def gamma_n(self):
            if hasattr(self, '_m_gamma_n'):
                return self._m_gamma_n

            self._m_gamma_n = ((self.gamma_n_value * -1) if self.gamma_n_sign else self.gamma_n_value)
            return getattr(self, '_m_gamma_n', None)

        @property
        def z_vel(self):
            if hasattr(self, '_m_z_vel'):
                return self._m_z_vel

            self._m_z_vel = ((self.z_vel_value * -1) if self.z_vel_sign else self.z_vel_value)
            return getattr(self, '_m_z_vel', None)

        @property
        def z_accel(self):
            if hasattr(self, '_m_z_accel'):
                return self._m_z_accel

            self._m_z_accel = ((self.z_accel_value * -1) if self.z_accel_sign else self.z_accel_value)
            return getattr(self, '_m_z_accel', None)

        @property
        def z(self):
            if hasattr(self, '_m_z'):
                return self._m_z

            self._m_z = ((self.z_value * -1) if self.z_sign else self.z_value)
            return getattr(self, '_m_z', None)

@ -1,325 +0,0 @@
// This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
#include "gps.h"
#include "kaitai/exceptions.h"
// Root parser for one GPS LNAV subframe: TLM word, HOW word, then a body
// whose type depends on HOW's subframe id.
gps_t::gps_t(kaitai::kstream* p__io, kaitai::kstruct* p__parent, gps_t* p__root) : kaitai::kstruct(p__io) {
m__parent = p__parent;
// This type is always its own root (p__root is ignored by the generator).
m__root = this;
// Null owned pointers so _clean_up is safe if _read throws early.
m_tlm = 0;
m_how = 0;
try {
_read();
} catch(...) {
_clean_up();
throw;
}
}
// Reads TLM, HOW, then dispatches on subframe id. Subframe ids other than
// 1-4 (e.g. 5) leave n_body true and m_body unset.
void gps_t::_read() {
m_tlm = new tlm_t(m__io, this, m__root);
m_how = new how_t(m__io, this, m__root);
n_body = true;
switch (how()->subframe_id()) {
case 1: {
n_body = false;
m_body = new subframe_1_t(m__io, this, m__root);
break;
}
case 2: {
n_body = false;
m_body = new subframe_2_t(m__io, this, m__root);
break;
}
case 3: {
n_body = false;
m_body = new subframe_3_t(m__io, this, m__root);
break;
}
case 4: {
n_body = false;
m_body = new subframe_4_t(m__io, this, m__root);
break;
}
}
}
gps_t::~gps_t() {
_clean_up();
}
// Frees owned sub-objects; m_body is only valid when n_body is false.
void gps_t::_clean_up() {
if (m_tlm) {
delete m_tlm; m_tlm = 0;
}
if (m_how) {
delete m_how; m_how = 0;
}
if (!n_body) {
if (m_body) {
delete m_body; m_body = 0;
}
}
}
// Parser for GPS LNAV subframe 1 (clock parameters and SV health; field
// names follow IS-GPS-200).
gps_t::subframe_1_t::subframe_1_t(kaitai::kstream* p__io, gps_t* p__parent, gps_t* p__root) : kaitai::kstruct(p__io) {
m__parent = p__parent;
m__root = p__root;
// Lazy cache for the signed af_0 conversion.
f_af_0 = false;
try {
_read();
} catch(...) {
_clean_up();
throw;
}
}
// Bit-level fields first, then align_to_byte() before the byte-oriented
// reads (s1/u1/u2be). Wire order; do not reorder.
void gps_t::subframe_1_t::_read() {
m_week_no = m__io->read_bits_int_be(10);
m_code = m__io->read_bits_int_be(2);
m_sv_accuracy = m__io->read_bits_int_be(4);
m_sv_health = m__io->read_bits_int_be(6);
m_iodc_msb = m__io->read_bits_int_be(2);
m_l2_p_data_flag = m__io->read_bits_int_be(1);
m_reserved1 = m__io->read_bits_int_be(23);
m_reserved2 = m__io->read_bits_int_be(24);
m_reserved3 = m__io->read_bits_int_be(24);
m_reserved4 = m__io->read_bits_int_be(16);
m__io->align_to_byte();
m_t_gd = m__io->read_s1();
m_iodc_lsb = m__io->read_u1();
m_t_oc = m__io->read_u2be();
m_af_2 = m__io->read_s1();
m_af_1 = m__io->read_s2be();
m_af_0_sign = m__io->read_bits_int_be(1);
m_af_0_value = m__io->read_bits_int_be(21);
m_reserved5 = m__io->read_bits_int_be(2);
}
gps_t::subframe_1_t::~subframe_1_t() {
_clean_up();
}
// No owned resources.
void gps_t::subframe_1_t::_clean_up() {
}
// af_0: 22-bit two's-complement value split into a sign bit plus 21 value
// bits; a set sign bit contributes -2^21. Lazily computed and cached.
int32_t gps_t::subframe_1_t::af_0() {
    if (!f_af_0) {
        int64_t raw = static_cast<int64_t>(af_0_value());
        if (af_0_sign()) {
            raw -= (int64_t(1) << 21);
        }
        m_af_0 = static_cast<int32_t>(raw);
        f_af_0 = true;
    }
    return m_af_0;
}
// Parser for GPS LNAV subframe 3 (ephemeris, part 2; field names follow
// IS-GPS-200).
gps_t::subframe_3_t::subframe_3_t(kaitai::kstream* p__io, gps_t* p__parent, gps_t* p__root) : kaitai::kstruct(p__io) {
m__parent = p__parent;
m__root = p__root;
// Lazy caches for the signed omega_dot/idot conversions.
f_omega_dot = false;
f_idot = false;
try {
_read();
} catch(...) {
_clean_up();
throw;
}
}
// Byte-oriented reads, a bit-level field, align_to_byte(), then the
// remaining bit-level fields. Wire order; do not reorder.
void gps_t::subframe_3_t::_read() {
m_c_ic = m__io->read_s2be();
m_omega_0 = m__io->read_s4be();
m_c_is = m__io->read_s2be();
m_i_0 = m__io->read_s4be();
m_c_rc = m__io->read_s2be();
m_omega = m__io->read_s4be();
m_omega_dot_sign = m__io->read_bits_int_be(1);
m_omega_dot_value = m__io->read_bits_int_be(23);
m__io->align_to_byte();
m_iode = m__io->read_u1();
m_idot_sign = m__io->read_bits_int_be(1);
m_idot_value = m__io->read_bits_int_be(13);
m_reserved = m__io->read_bits_int_be(2);
}
gps_t::subframe_3_t::~subframe_3_t() {
_clean_up();
}
// No owned resources.
void gps_t::subframe_3_t::_clean_up() {
}
// omega_dot: 24-bit two's-complement value (sign bit + 23 value bits);
// a set sign bit contributes -2^23. Lazily computed and cached.
int32_t gps_t::subframe_3_t::omega_dot() {
    if (!f_omega_dot) {
        int64_t raw = static_cast<int64_t>(omega_dot_value());
        if (omega_dot_sign()) {
            raw -= (int64_t(1) << 23);
        }
        m_omega_dot = static_cast<int32_t>(raw);
        f_omega_dot = true;
    }
    return m_omega_dot;
}
// idot: 14-bit two's-complement value (sign bit + 13 value bits);
// a set sign bit contributes -2^13. Lazily computed and cached.
int32_t gps_t::subframe_3_t::idot() {
    if (!f_idot) {
        int64_t raw = static_cast<int64_t>(idot_value());
        if (idot_sign()) {
            raw -= (int64_t(1) << 13);
        }
        m_idot = static_cast<int32_t>(raw);
        f_idot = true;
    }
    return m_idot;
}
// Parser for GPS LNAV subframe 4; only page 56 (ionosphere/UTC data) gets
// a decoded body, all other pages leave body null.
gps_t::subframe_4_t::subframe_4_t(kaitai::kstream* p__io, gps_t* p__parent, gps_t* p__root) : kaitai::kstruct(p__io) {
m__parent = p__parent;
m__root = p__root;
try {
_read();
} catch(...) {
_clean_up();
throw;
}
}
// Reads data/page ids, byte-aligns, then dispatches on page id.
void gps_t::subframe_4_t::_read() {
m_data_id = m__io->read_bits_int_be(2);
m_page_id = m__io->read_bits_int_be(6);
m__io->align_to_byte();
n_body = true;
switch (page_id()) {
case 56: {
n_body = false;
m_body = new ionosphere_data_t(m__io, this, m__root);
break;
}
}
}
gps_t::subframe_4_t::~subframe_4_t() {
_clean_up();
}
// m_body is only valid when n_body is false (page 56).
void gps_t::subframe_4_t::_clean_up() {
if (!n_body) {
if (m_body) {
delete m_body; m_body = 0;
}
}
}
// Ionospheric correction parameters carried on subframe 4 page 56:
// four alpha and four beta coefficients, each one signed byte.
gps_t::subframe_4_t::ionosphere_data_t::ionosphere_data_t(kaitai::kstream* p__io, gps_t::subframe_4_t* p__parent, gps_t* p__root) : kaitai::kstruct(p__io) {
m__parent = p__parent;
m__root = p__root;
try {
_read();
} catch(...) {
_clean_up();
throw;
}
}
// Eight consecutive signed bytes in wire order.
void gps_t::subframe_4_t::ionosphere_data_t::_read() {
m_a0 = m__io->read_s1();
m_a1 = m__io->read_s1();
m_a2 = m__io->read_s1();
m_a3 = m__io->read_s1();
m_b0 = m__io->read_s1();
m_b1 = m__io->read_s1();
m_b2 = m__io->read_s1();
m_b3 = m__io->read_s1();
}
gps_t::subframe_4_t::ionosphere_data_t::~ionosphere_data_t() {
_clean_up();
}
// No owned resources.
void gps_t::subframe_4_t::ionosphere_data_t::_clean_up() {
}
// Parser for the GPS handover word (HOW): time-of-week count, alert and
// anti-spoof flags, and the subframe id used by gps_t::_read to dispatch.
gps_t::how_t::how_t(kaitai::kstream* p__io, gps_t* p__parent, gps_t* p__root) : kaitai::kstruct(p__io) {
m__parent = p__parent;
m__root = p__root;
try {
_read();
} catch(...) {
_clean_up();
throw;
}
}
// 17+1+1+3+2 = 24 bits in wire order.
void gps_t::how_t::_read() {
m_tow_count = m__io->read_bits_int_be(17);
m_alert = m__io->read_bits_int_be(1);
m_anti_spoof = m__io->read_bits_int_be(1);
m_subframe_id = m__io->read_bits_int_be(3);
m_reserved = m__io->read_bits_int_be(2);
}
gps_t::how_t::~how_t() {
_clean_up();
}
// No owned resources.
void gps_t::how_t::_clean_up() {
}
// Parser for the GPS telemetry word (TLM). The first byte is a fixed
// preamble; a mismatch throws kaitai::validation_not_equal_error.
gps_t::tlm_t::tlm_t(kaitai::kstream* p__io, gps_t* p__parent, gps_t* p__root) : kaitai::kstruct(p__io) {
m__parent = p__parent;
m__root = p__root;
try {
_read();
} catch(...) {
_clean_up();
throw;
}
}
// Preamble byte (must be 0x8B), then 14-bit TLM message and flags.
void gps_t::tlm_t::_read() {
m_preamble = m__io->read_bytes(1);
if (!(preamble() == std::string("\x8B", 1))) {
throw kaitai::validation_not_equal_error<std::string>(std::string("\x8B", 1), preamble(), _io(), std::string("/types/tlm/seq/0"));
}
m_tlm = m__io->read_bits_int_be(14);
m_integrity_status = m__io->read_bits_int_be(1);
m_reserved = m__io->read_bits_int_be(1);
}
gps_t::tlm_t::~tlm_t() {
_clean_up();
}
// No owned resources.
void gps_t::tlm_t::_clean_up() {
}
// Parser for GPS LNAV subframe 2 (ephemeris, part 1; field names follow
// IS-GPS-200).
gps_t::subframe_2_t::subframe_2_t(kaitai::kstream* p__io, gps_t* p__parent, gps_t* p__root) : kaitai::kstruct(p__io) {
m__parent = p__parent;
m__root = p__root;
try {
_read();
} catch(...) {
_clean_up();
throw;
}
}
// Byte-oriented reads followed by trailing bit-level fields.
// Wire order; do not reorder.
void gps_t::subframe_2_t::_read() {
m_iode = m__io->read_u1();
m_c_rs = m__io->read_s2be();
m_delta_n = m__io->read_s2be();
m_m_0 = m__io->read_s4be();
m_c_uc = m__io->read_s2be();
m_e = m__io->read_s4be();
m_c_us = m__io->read_s2be();
m_sqrt_a = m__io->read_u4be();
m_t_oe = m__io->read_u2be();
m_fit_interval_flag = m__io->read_bits_int_be(1);
m_aoda = m__io->read_bits_int_be(5);
m_reserved = m__io->read_bits_int_be(2);
}
gps_t::subframe_2_t::~subframe_2_t() {
_clean_up();
}
// No owned resources.
void gps_t::subframe_2_t::_clean_up() {
}

@ -1,359 +0,0 @@
#ifndef GPS_H_
#define GPS_H_
// This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
#include "kaitai/kaitaistruct.h"
#include <stdint.h>
#if KAITAI_STRUCT_VERSION < 9000L
#error "Incompatible Kaitai Struct C++/STL API: version 0.9 or later is required"
#endif
class gps_t : public kaitai::kstruct {
public:
class subframe_1_t;
class subframe_3_t;
class subframe_4_t;
class how_t;
class tlm_t;
class subframe_2_t;
gps_t(kaitai::kstream* p__io, kaitai::kstruct* p__parent = 0, gps_t* p__root = 0);
private:
void _read();
void _clean_up();
public:
~gps_t();
class subframe_1_t : public kaitai::kstruct {
public:
subframe_1_t(kaitai::kstream* p__io, gps_t* p__parent = 0, gps_t* p__root = 0);
private:
void _read();
void _clean_up();
public:
~subframe_1_t();
private:
bool f_af_0;
int32_t m_af_0;
public:
int32_t af_0();
private:
uint64_t m_week_no;
uint64_t m_code;
uint64_t m_sv_accuracy;
uint64_t m_sv_health;
uint64_t m_iodc_msb;
bool m_l2_p_data_flag;
uint64_t m_reserved1;
uint64_t m_reserved2;
uint64_t m_reserved3;
uint64_t m_reserved4;
int8_t m_t_gd;
uint8_t m_iodc_lsb;
uint16_t m_t_oc;
int8_t m_af_2;
int16_t m_af_1;
bool m_af_0_sign;
uint64_t m_af_0_value;
uint64_t m_reserved5;
gps_t* m__root;
gps_t* m__parent;
public:
uint64_t week_no() const { return m_week_no; }
uint64_t code() const { return m_code; }
uint64_t sv_accuracy() const { return m_sv_accuracy; }
uint64_t sv_health() const { return m_sv_health; }
uint64_t iodc_msb() const { return m_iodc_msb; }
bool l2_p_data_flag() const { return m_l2_p_data_flag; }
uint64_t reserved1() const { return m_reserved1; }
uint64_t reserved2() const { return m_reserved2; }
uint64_t reserved3() const { return m_reserved3; }
uint64_t reserved4() const { return m_reserved4; }
int8_t t_gd() const { return m_t_gd; }
uint8_t iodc_lsb() const { return m_iodc_lsb; }
uint16_t t_oc() const { return m_t_oc; }
int8_t af_2() const { return m_af_2; }
int16_t af_1() const { return m_af_1; }
bool af_0_sign() const { return m_af_0_sign; }
uint64_t af_0_value() const { return m_af_0_value; }
uint64_t reserved5() const { return m_reserved5; }
gps_t* _root() const { return m__root; }
gps_t* _parent() const { return m__parent; }
};
class subframe_3_t : public kaitai::kstruct {
public:
subframe_3_t(kaitai::kstream* p__io, gps_t* p__parent = 0, gps_t* p__root = 0);
private:
void _read();
void _clean_up();
public:
~subframe_3_t();
private:
bool f_omega_dot;
int32_t m_omega_dot;
public:
int32_t omega_dot();
private:
bool f_idot;
int32_t m_idot;
public:
int32_t idot();
private:
int16_t m_c_ic;
int32_t m_omega_0;
int16_t m_c_is;
int32_t m_i_0;
int16_t m_c_rc;
int32_t m_omega;
bool m_omega_dot_sign;
uint64_t m_omega_dot_value;
uint8_t m_iode;
bool m_idot_sign;
uint64_t m_idot_value;
uint64_t m_reserved;
gps_t* m__root;
gps_t* m__parent;
public:
int16_t c_ic() const { return m_c_ic; }
int32_t omega_0() const { return m_omega_0; }
int16_t c_is() const { return m_c_is; }
int32_t i_0() const { return m_i_0; }
int16_t c_rc() const { return m_c_rc; }
int32_t omega() const { return m_omega; }
bool omega_dot_sign() const { return m_omega_dot_sign; }
uint64_t omega_dot_value() const { return m_omega_dot_value; }
uint8_t iode() const { return m_iode; }
bool idot_sign() const { return m_idot_sign; }
uint64_t idot_value() const { return m_idot_value; }
uint64_t reserved() const { return m_reserved; }
gps_t* _root() const { return m__root; }
gps_t* _parent() const { return m__parent; }
};
class subframe_4_t : public kaitai::kstruct {
public:
class ionosphere_data_t;
subframe_4_t(kaitai::kstream* p__io, gps_t* p__parent = 0, gps_t* p__root = 0);
private:
void _read();
void _clean_up();
public:
~subframe_4_t();
class ionosphere_data_t : public kaitai::kstruct {
public:
ionosphere_data_t(kaitai::kstream* p__io, gps_t::subframe_4_t* p__parent = 0, gps_t* p__root = 0);
private:
void _read();
void _clean_up();
public:
~ionosphere_data_t();
private:
int8_t m_a0;
int8_t m_a1;
int8_t m_a2;
int8_t m_a3;
int8_t m_b0;
int8_t m_b1;
int8_t m_b2;
int8_t m_b3;
gps_t* m__root;
gps_t::subframe_4_t* m__parent;
public:
int8_t a0() const { return m_a0; }
int8_t a1() const { return m_a1; }
int8_t a2() const { return m_a2; }
int8_t a3() const { return m_a3; }
int8_t b0() const { return m_b0; }
int8_t b1() const { return m_b1; }
int8_t b2() const { return m_b2; }
int8_t b3() const { return m_b3; }
gps_t* _root() const { return m__root; }
gps_t::subframe_4_t* _parent() const { return m__parent; }
};
private:
uint64_t m_data_id;
uint64_t m_page_id;
ionosphere_data_t* m_body;
bool n_body;
public:
bool _is_null_body() { body(); return n_body; };
private:
gps_t* m__root;
gps_t* m__parent;
public:
uint64_t data_id() const { return m_data_id; }
uint64_t page_id() const { return m_page_id; }
ionosphere_data_t* body() const { return m_body; }
gps_t* _root() const { return m__root; }
gps_t* _parent() const { return m__parent; }
};
// Handover word (HOW): time-of-week count, alert/anti-spoof flags, and the
// 3-bit subframe id the root parser dispatches on (ids 1-4 produce typed
// bodies; see gps_t::_read in the generated .cc).
class how_t : public kaitai::kstruct {

public:
    how_t(kaitai::kstream* p__io, gps_t* p__parent = 0, gps_t* p__root = 0);

private:
    void _read();
    void _clean_up();

public:
    ~how_t();

private:
    uint64_t m_tow_count;
    bool m_alert;
    bool m_anti_spoof;
    uint64_t m_subframe_id;
    uint64_t m_reserved;
    gps_t* m__root;
    gps_t* m__parent;

public:
    uint64_t tow_count() const { return m_tow_count; }       // 17-bit TOW count
    bool alert() const { return m_alert; }
    bool anti_spoof() const { return m_anti_spoof; }
    uint64_t subframe_id() const { return m_subframe_id; }   // 3-bit subframe selector
    uint64_t reserved() const { return m_reserved; }         // trailing 2 bits
    gps_t* _root() const { return m__root; }
    gps_t* _parent() const { return m__parent; }
};
// Telemetry word (TLM): a fixed one-byte preamble (0x8B, validated during
// _read), a 14-bit telemetry message, and two flag bits.
class tlm_t : public kaitai::kstruct {

public:
    tlm_t(kaitai::kstream* p__io, gps_t* p__parent = 0, gps_t* p__root = 0);

private:
    void _read();
    void _clean_up();

public:
    ~tlm_t();

private:
    std::string m_preamble;
    uint64_t m_tlm;
    bool m_integrity_status;
    bool m_reserved;
    gps_t* m__root;
    gps_t* m__parent;

public:
    std::string preamble() const { return m_preamble; }      // always "\x8B" after validation
    uint64_t tlm() const { return m_tlm; }                   // 14-bit telemetry message
    bool integrity_status() const { return m_integrity_status; }
    bool reserved() const { return m_reserved; }
    gps_t* _root() const { return m__root; }
    gps_t* _parent() const { return m__parent; }
};
// LNAV subframe 2: ephemeris parameters (IODE, harmonic correction terms,
// mean anomaly, eccentricity, sqrt of semi-major axis, time of ephemeris —
// field names follow the GPS interface spec). Values are raw fixed-point
// integers; scale factors are applied by the consumer.
class subframe_2_t : public kaitai::kstruct {

public:
    subframe_2_t(kaitai::kstream* p__io, gps_t* p__parent = 0, gps_t* p__root = 0);

private:
    void _read();
    void _clean_up();

public:
    ~subframe_2_t();

private:
    uint8_t m_iode;
    int16_t m_c_rs;
    int16_t m_delta_n;
    int32_t m_m_0;
    int16_t m_c_uc;
    int32_t m_e;
    int16_t m_c_us;
    uint32_t m_sqrt_a;
    uint16_t m_t_oe;
    bool m_fit_interval_flag;
    uint64_t m_aoda;
    uint64_t m_reserved;
    gps_t* m__root;
    gps_t* m__parent;

public:
    uint8_t iode() const { return m_iode; }                  // issue of data, ephemeris
    int16_t c_rs() const { return m_c_rs; }
    int16_t delta_n() const { return m_delta_n; }
    int32_t m_0() const { return m_m_0; }
    int16_t c_uc() const { return m_c_uc; }
    int32_t e() const { return m_e; }
    int16_t c_us() const { return m_c_us; }
    uint32_t sqrt_a() const { return m_sqrt_a; }
    uint16_t t_oe() const { return m_t_oe; }
    bool fit_interval_flag() const { return m_fit_interval_flag; }
    uint64_t aoda() const { return m_aoda; }                 // 5-bit age of data offset
    uint64_t reserved() const { return m_reserved; }
    gps_t* _root() const { return m__root; }
    gps_t* _parent() const { return m__parent; }
};
private:
tlm_t* m_tlm;
how_t* m_how;
kaitai::kstruct* m_body;
bool n_body;
public:
bool _is_null_body() { body(); return n_body; };
private:
gps_t* m__root;
kaitai::kstruct* m__parent;
public:
tlm_t* tlm() const { return m_tlm; }
how_t* how() const { return m_how; }
kaitai::kstruct* body() const { return m_body; }
gps_t* _root() const { return m__root; }
kaitai::kstruct* _parent() const { return m__parent; }
};
#endif // GPS_H_

@ -0,0 +1,193 @@
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
import kaitaistruct
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
if getattr(kaitaistruct, 'API_VERSION', (0, 9)) < (0, 9):
raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))
class Gps(KaitaiStruct):
    """GPS LNAV navigation message frame.

    Layout: telemetry word (TLM), handover word (HOW), then a body selected
    by the 3-bit subframe id in the HOW. Only subframes 1-4 are modeled; any
    other id leaves no ``body`` attribute. All multi-bit fields are raw
    fixed-point values — scale factors are applied by the consumer.
    """
    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self._read()

    def _read(self):
        self.tlm = Gps.Tlm(self._io, self, self._root)
        self.how = Gps.How(self._io, self, self._root)
        # Dispatch on the HOW subframe id; ids outside 1-4 parse no body.
        _on = self.how.subframe_id
        if _on == 1:
            self.body = Gps.Subframe1(self._io, self, self._root)
        elif _on == 2:
            self.body = Gps.Subframe2(self._io, self, self._root)
        elif _on == 3:
            self.body = Gps.Subframe3(self._io, self, self._root)
        elif _on == 4:
            self.body = Gps.Subframe4(self._io, self, self._root)

    class Subframe1(KaitaiStruct):
        """Subframe 1: week number, SV accuracy/health and clock correction."""
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            # Bit-packed section (big-endian bit order).
            self.week_no = self._io.read_bits_int_be(10)
            self.code = self._io.read_bits_int_be(2)
            self.sv_accuracy = self._io.read_bits_int_be(4)
            self.sv_health = self._io.read_bits_int_be(6)
            self.iodc_msb = self._io.read_bits_int_be(2)
            self.l2_p_data_flag = self._io.read_bits_int_be(1) != 0
            self.reserved1 = self._io.read_bits_int_be(23)
            self.reserved2 = self._io.read_bits_int_be(24)
            self.reserved3 = self._io.read_bits_int_be(24)
            self.reserved4 = self._io.read_bits_int_be(16)
            self._io.align_to_byte()
            # Byte-aligned clock parameters.
            self.t_gd = self._io.read_s1()
            self.iodc_lsb = self._io.read_u1()
            self.t_oc = self._io.read_u2be()
            self.af_2 = self._io.read_s1()
            self.af_1 = self._io.read_s2be()
            # af_0 is a 22-bit signed value stored as sign bit + 21-bit magnitude.
            self.af_0_sign = self._io.read_bits_int_be(1) != 0
            self.af_0_value = self._io.read_bits_int_be(21)
            self.reserved5 = self._io.read_bits_int_be(2)

        @property
        def af_0(self):
            # Reassemble the two's-complement clock bias from sign + value bits.
            if hasattr(self, '_m_af_0'):
                return self._m_af_0
            self._m_af_0 = ((self.af_0_value - (1 << 21)) if self.af_0_sign else self.af_0_value)
            return getattr(self, '_m_af_0', None)

    class Subframe3(KaitaiStruct):
        """Subframe 3: remaining ephemeris terms (harmonics, angles, rates)."""
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.c_ic = self._io.read_s2be()
            self.omega_0 = self._io.read_s4be()
            self.c_is = self._io.read_s2be()
            self.i_0 = self._io.read_s4be()
            self.c_rc = self._io.read_s2be()
            self.omega = self._io.read_s4be()
            # omega_dot: 24-bit signed, stored as sign bit + 23-bit magnitude.
            self.omega_dot_sign = self._io.read_bits_int_be(1) != 0
            self.omega_dot_value = self._io.read_bits_int_be(23)
            self._io.align_to_byte()
            self.iode = self._io.read_u1()
            # idot: 14-bit signed, stored as sign bit + 13-bit magnitude.
            self.idot_sign = self._io.read_bits_int_be(1) != 0
            self.idot_value = self._io.read_bits_int_be(13)
            self.reserved = self._io.read_bits_int_be(2)

        @property
        def omega_dot(self):
            # Two's-complement reassembly of the 24-bit rate of right ascension.
            if hasattr(self, '_m_omega_dot'):
                return self._m_omega_dot
            self._m_omega_dot = ((self.omega_dot_value - (1 << 23)) if self.omega_dot_sign else self.omega_dot_value)
            return getattr(self, '_m_omega_dot', None)

        @property
        def idot(self):
            # Two's-complement reassembly of the 14-bit inclination rate.
            if hasattr(self, '_m_idot'):
                return self._m_idot
            self._m_idot = ((self.idot_value - (1 << 13)) if self.idot_sign else self.idot_value)
            return getattr(self, '_m_idot', None)

    class Subframe4(KaitaiStruct):
        """Subframe 4: paged data; only page 56 gets a typed ionosphere body."""
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.data_id = self._io.read_bits_int_be(2)
            self.page_id = self._io.read_bits_int_be(6)
            self._io.align_to_byte()
            # Pages other than 56 leave no `body` attribute.
            _on = self.page_id
            if _on == 56:
                self.body = Gps.Subframe4.IonosphereData(self._io, self, self._root)

        class IonosphereData(KaitaiStruct):
            """Ionospheric coefficients a0..a3/b0..b3 as raw signed bytes."""
            def __init__(self, _io, _parent=None, _root=None):
                self._io = _io
                self._parent = _parent
                self._root = _root if _root else self
                self._read()

            def _read(self):
                self.a0 = self._io.read_s1()
                self.a1 = self._io.read_s1()
                self.a2 = self._io.read_s1()
                self.a3 = self._io.read_s1()
                self.b0 = self._io.read_s1()
                self.b1 = self._io.read_s1()
                self.b2 = self._io.read_s1()
                self.b3 = self._io.read_s1()

    class How(KaitaiStruct):
        """Handover word: TOW count, flags, and the subframe selector."""
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.tow_count = self._io.read_bits_int_be(17)
            self.alert = self._io.read_bits_int_be(1) != 0
            self.anti_spoof = self._io.read_bits_int_be(1) != 0
            self.subframe_id = self._io.read_bits_int_be(3)
            self.reserved = self._io.read_bits_int_be(2)

    class Tlm(KaitaiStruct):
        """Telemetry word: fixed 0x8B preamble, 14-bit message, two flags."""
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            # The preamble byte is validated; a mismatch raises immediately.
            self.preamble = self._io.read_bytes(1)
            if not self.preamble == b"\x8B":
                raise kaitaistruct.ValidationNotEqualError(b"\x8B", self.preamble, self._io, u"/types/tlm/seq/0")
            self.tlm = self._io.read_bits_int_be(14)
            self.integrity_status = self._io.read_bits_int_be(1) != 0
            self.reserved = self._io.read_bits_int_be(1) != 0

    class Subframe2(KaitaiStruct):
        """Subframe 2: ephemeris terms (IODE through fit interval / AODA)."""
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.iode = self._io.read_u1()
            self.c_rs = self._io.read_s2be()
            self.delta_n = self._io.read_s2be()
            self.m_0 = self._io.read_s4be()
            self.c_uc = self._io.read_s2be()
            self.e = self._io.read_s4be()
            self.c_us = self._io.read_s2be()
            self.sqrt_a = self._io.read_u4be()
            self.t_oe = self._io.read_u2be()
            self.fit_interval_flag = self._io.read_bits_int_be(1) != 0
            self.aoda = self._io.read_bits_int_be(5)
            self.reserved = self._io.read_bits_int_be(2)

@ -1,424 +0,0 @@
// This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
#include "ubx.h"
#include "kaitai/exceptions.h"
// Root UBX frame parser. The root pointer always refers to this instance;
// p__root is accepted only for generated-API uniformity.
ubx_t::ubx_t(kaitai::kstream* p__io, kaitai::kstruct* p__parent, ubx_t* p__root) : kaitai::kstruct(p__io) {
    m__parent = p__parent;
    m__root = this;
    f_checksum = false;   // checksum() is lazy; not computed yet
    try {
        _read();
    } catch(...) {
        // A partial read must not leak already-allocated children.
        _clean_up();
        throw;
    }
}

// Frame layout: 2-byte sync (0xB5 0x62, validated), 2-byte message type
// (class+id, read big-endian so it packs as class<<8|id), 2-byte little-endian
// payload length, then a body chosen by message type. Unknown types leave
// body unset (n_body stays true).
void ubx_t::_read() {
    m_magic = m__io->read_bytes(2);
    if (!(magic() == std::string("\xB5\x62", 2))) {
        throw kaitai::validation_not_equal_error<std::string>(std::string("\xB5\x62", 2), magic(), _io(), std::string("/seq/0"));
    }
    m_msg_type = m__io->read_u2be();
    m_length = m__io->read_u2le();
    n_body = true;
    switch (msg_type()) {
    case 2569: {   // 0x0a09: MON-HW
        n_body = false;
        m_body = new mon_hw_t(m__io, this, m__root);
        break;
    }
    case 533: {    // 0x0215: RXM-RAWX
        n_body = false;
        m_body = new rxm_rawx_t(m__io, this, m__root);
        break;
    }
    case 531: {    // 0x0213: RXM-SFRBX
        n_body = false;
        m_body = new rxm_sfrbx_t(m__io, this, m__root);
        break;
    }
    case 309: {    // 0x0135: NAV-SAT
        n_body = false;
        m_body = new nav_sat_t(m__io, this, m__root);
        break;
    }
    case 2571: {   // 0x0a0b: MON-HW2
        n_body = false;
        m_body = new mon_hw2_t(m__io, this, m__root);
        break;
    }
    case 263: {    // 0x0107: NAV-PVT
        n_body = false;
        m_body = new nav_pvt_t(m__io, this, m__root);
        break;
    }
    }
}

ubx_t::~ubx_t() {
    _clean_up();
}

// Frees the owned body, if one was parsed for this message type.
void ubx_t::_clean_up() {
    if (!n_body) {
        if (m_body) {
            delete m_body; m_body = 0;
        }
    }
    if (f_checksum) {
    }
}
// RXM-RAWX: raw multi-GNSS measurement data. Owns a vector of per-satellite
// measurement_t objects, each parsed from its own 32-byte sub-stream.
ubx_t::rxm_rawx_t::rxm_rawx_t(kaitai::kstream* p__io, ubx_t* p__parent, ubx_t* p__root) : kaitai::kstruct(p__io) {
    m__parent = p__parent;
    m__root = p__root;
    // Null the owned containers first so _clean_up is safe if _read throws.
    m_meas = 0;
    m__raw_meas = 0;
    m__io__raw_meas = 0;
    try {
        _read();
    } catch(...) {
        _clean_up();
        throw;
    }
}

// Fixed header, then num_meas repetitions of a 32-byte measurement record.
// Each record's bytes are copied into a private kstream so measurement_t
// parses from an isolated sub-stream.
void ubx_t::rxm_rawx_t::_read() {
    m_rcv_tow = m__io->read_f8le();
    m_week = m__io->read_u2le();
    m_leap_s = m__io->read_s1();
    m_num_meas = m__io->read_u1();
    m_rec_stat = m__io->read_u1();
    m_reserved1 = m__io->read_bytes(3);
    m__raw_meas = new std::vector<std::string>();
    m__io__raw_meas = new std::vector<kaitai::kstream*>();
    m_meas = new std::vector<measurement_t*>();
    const int l_meas = num_meas();
    for (int i = 0; i < l_meas; i++) {
        m__raw_meas->push_back(m__io->read_bytes(32));
        kaitai::kstream* io__raw_meas = new kaitai::kstream(m__raw_meas->at(m__raw_meas->size() - 1));
        m__io__raw_meas->push_back(io__raw_meas);
        m_meas->push_back(new measurement_t(io__raw_meas, this, m__root));
    }
}

ubx_t::rxm_rawx_t::~rxm_rawx_t() {
    _clean_up();
}

// Deletes raw byte buffers, their sub-streams, and the parsed measurements.
void ubx_t::rxm_rawx_t::_clean_up() {
    if (m__raw_meas) {
        delete m__raw_meas; m__raw_meas = 0;
    }
    if (m__io__raw_meas) {
        for (std::vector<kaitai::kstream*>::iterator it = m__io__raw_meas->begin(); it != m__io__raw_meas->end(); ++it) {
            delete *it;
        }
        delete m__io__raw_meas; m__io__raw_meas = 0;
    }
    if (m_meas) {
        for (std::vector<measurement_t*>::iterator it = m_meas->begin(); it != m_meas->end(); ++it) {
            delete *it;
        }
        delete m_meas; m_meas = 0;
    }
}
// One RXM-RAWX measurement record (parsed from a 32-byte sub-stream):
// pseudorange/carrier-phase/doppler plus ids, lock time and quality fields.
ubx_t::rxm_rawx_t::measurement_t::measurement_t(kaitai::kstream* p__io, ubx_t::rxm_rawx_t* p__parent, ubx_t* p__root) : kaitai::kstruct(p__io) {
    m__parent = p__parent;
    m__root = p__root;
    try {
        _read();
    } catch(...) {
        _clean_up();
        throw;
    }
}

void ubx_t::rxm_rawx_t::measurement_t::_read() {
    m_pr_mes = m__io->read_f8le();
    m_cp_mes = m__io->read_f8le();
    m_do_mes = m__io->read_f4le();
    m_gnss_id = static_cast<ubx_t::gnss_type_t>(m__io->read_u1());
    m_sv_id = m__io->read_u1();
    m_reserved2 = m__io->read_bytes(1);
    m_freq_id = m__io->read_u1();
    m_lock_time = m__io->read_u2le();
    m_cno = m__io->read_u1();
    m_pr_stdev = m__io->read_u1();
    m_cp_stdev = m__io->read_u1();
    m_do_stdev = m__io->read_u1();
    m_trk_stat = m__io->read_u1();
    m_reserved3 = m__io->read_bytes(1);
}

ubx_t::rxm_rawx_t::measurement_t::~measurement_t() {
    _clean_up();
}

// No owned heap members; nothing to release.
void ubx_t::rxm_rawx_t::measurement_t::_clean_up() {
}
// RXM-SFRBX: broadcast navigation data subframe, delivered as num_words
// raw little-endian 32-bit words; decoding is left to the consumer.
ubx_t::rxm_sfrbx_t::rxm_sfrbx_t(kaitai::kstream* p__io, ubx_t* p__parent, ubx_t* p__root) : kaitai::kstruct(p__io) {
    m__parent = p__parent;
    m__root = p__root;
    m_body = 0;   // nulled first so _clean_up is safe if _read throws
    try {
        _read();
    } catch(...) {
        _clean_up();
        throw;
    }
}

void ubx_t::rxm_sfrbx_t::_read() {
    m_gnss_id = static_cast<ubx_t::gnss_type_t>(m__io->read_u1());
    m_sv_id = m__io->read_u1();
    m_reserved1 = m__io->read_bytes(1);
    m_freq_id = m__io->read_u1();
    m_num_words = m__io->read_u1();
    m_reserved2 = m__io->read_bytes(1);
    m_version = m__io->read_u1();
    m_reserved3 = m__io->read_bytes(1);
    m_body = new std::vector<uint32_t>();
    const int l_body = num_words();
    for (int i = 0; i < l_body; i++) {
        m_body->push_back(m__io->read_u4le());
    }
}

ubx_t::rxm_sfrbx_t::~rxm_sfrbx_t() {
    _clean_up();
}

void ubx_t::rxm_sfrbx_t::_clean_up() {
    if (m_body) {
        delete m_body; m_body = 0;
    }
}
// NAV-SAT: per-satellite status. Owns a vector of nav_t entries, each parsed
// from its own 12-byte sub-stream.
ubx_t::nav_sat_t::nav_sat_t(kaitai::kstream* p__io, ubx_t* p__parent, ubx_t* p__root) : kaitai::kstruct(p__io) {
    m__parent = p__parent;
    m__root = p__root;
    // Null the owned containers first so _clean_up is safe if _read throws.
    m_svs = 0;
    m__raw_svs = 0;
    m__io__raw_svs = 0;
    try {
        _read();
    } catch(...) {
        _clean_up();
        throw;
    }
}

// Fixed header, then num_svs repetitions of a 12-byte satellite record,
// each wrapped in a private kstream for isolated parsing.
void ubx_t::nav_sat_t::_read() {
    m_itow = m__io->read_u4le();
    m_version = m__io->read_u1();
    m_num_svs = m__io->read_u1();
    m_reserved = m__io->read_bytes(2);
    m__raw_svs = new std::vector<std::string>();
    m__io__raw_svs = new std::vector<kaitai::kstream*>();
    m_svs = new std::vector<nav_t*>();
    const int l_svs = num_svs();
    for (int i = 0; i < l_svs; i++) {
        m__raw_svs->push_back(m__io->read_bytes(12));
        kaitai::kstream* io__raw_svs = new kaitai::kstream(m__raw_svs->at(m__raw_svs->size() - 1));
        m__io__raw_svs->push_back(io__raw_svs);
        m_svs->push_back(new nav_t(io__raw_svs, this, m__root));
    }
}

ubx_t::nav_sat_t::~nav_sat_t() {
    _clean_up();
}

// Deletes raw buffers, their sub-streams, and the parsed satellite entries.
void ubx_t::nav_sat_t::_clean_up() {
    if (m__raw_svs) {
        delete m__raw_svs; m__raw_svs = 0;
    }
    if (m__io__raw_svs) {
        for (std::vector<kaitai::kstream*>::iterator it = m__io__raw_svs->begin(); it != m__io__raw_svs->end(); ++it) {
            delete *it;
        }
        delete m__io__raw_svs; m__io__raw_svs = 0;
    }
    if (m_svs) {
        for (std::vector<nav_t*>::iterator it = m_svs->begin(); it != m_svs->end(); ++it) {
            delete *it;
        }
        delete m_svs; m_svs = 0;
    }
}
// One NAV-SAT satellite entry (parsed from a 12-byte sub-stream):
// constellation/SV ids, C/N0, elevation/azimuth, residual and flags.
ubx_t::nav_sat_t::nav_t::nav_t(kaitai::kstream* p__io, ubx_t::nav_sat_t* p__parent, ubx_t* p__root) : kaitai::kstruct(p__io) {
    m__parent = p__parent;
    m__root = p__root;
    try {
        _read();
    } catch(...) {
        _clean_up();
        throw;
    }
}

void ubx_t::nav_sat_t::nav_t::_read() {
    m_gnss_id = static_cast<ubx_t::gnss_type_t>(m__io->read_u1());
    m_sv_id = m__io->read_u1();
    m_cno = m__io->read_u1();
    m_elev = m__io->read_s1();
    m_azim = m__io->read_s2le();
    m_pr_res = m__io->read_s2le();
    m_flags = m__io->read_u4le();
}

ubx_t::nav_sat_t::nav_t::~nav_t() {
    _clean_up();
}

// No owned heap members; nothing to release.
void ubx_t::nav_sat_t::nav_t::_clean_up() {
}
// NAV-PVT: combined position/velocity/time solution. All fields are raw
// little-endian integers; unit scaling is applied by the consumer.
ubx_t::nav_pvt_t::nav_pvt_t(kaitai::kstream* p__io, ubx_t* p__parent, ubx_t* p__root) : kaitai::kstruct(p__io) {
    m__parent = p__parent;
    m__root = p__root;
    try {
        _read();
    } catch(...) {
        _clean_up();
        throw;
    }
}

// Field order mirrors the fixed NAV-PVT payload layout exactly.
void ubx_t::nav_pvt_t::_read() {
    m_i_tow = m__io->read_u4le();
    m_year = m__io->read_u2le();
    m_month = m__io->read_u1();
    m_day = m__io->read_u1();
    m_hour = m__io->read_u1();
    m_min = m__io->read_u1();
    m_sec = m__io->read_u1();
    m_valid = m__io->read_u1();
    m_t_acc = m__io->read_u4le();
    m_nano = m__io->read_s4le();
    m_fix_type = m__io->read_u1();
    m_flags = m__io->read_u1();
    m_flags2 = m__io->read_u1();
    m_num_sv = m__io->read_u1();
    m_lon = m__io->read_s4le();
    m_lat = m__io->read_s4le();
    m_height = m__io->read_s4le();
    m_h_msl = m__io->read_s4le();
    m_h_acc = m__io->read_u4le();
    m_v_acc = m__io->read_u4le();
    m_vel_n = m__io->read_s4le();
    m_vel_e = m__io->read_s4le();
    m_vel_d = m__io->read_s4le();
    m_g_speed = m__io->read_s4le();
    m_head_mot = m__io->read_s4le();
    m_s_acc = m__io->read_s4le();
    m_head_acc = m__io->read_u4le();
    m_p_dop = m__io->read_u2le();
    m_flags3 = m__io->read_u1();
    m_reserved1 = m__io->read_bytes(5);
    m_head_veh = m__io->read_s4le();
    m_mag_dec = m__io->read_s2le();
    m_mag_acc = m__io->read_u2le();
}

ubx_t::nav_pvt_t::~nav_pvt_t() {
    _clean_up();
}

// No owned heap members; nothing to release.
void ubx_t::nav_pvt_t::_clean_up() {
}
// MON-HW2: extended hardware status (I/Q imbalance, config source, POST).
ubx_t::mon_hw2_t::mon_hw2_t(kaitai::kstream* p__io, ubx_t* p__parent, ubx_t* p__root) : kaitai::kstruct(p__io) {
    m__parent = p__parent;
    m__root = p__root;
    try {
        _read();
    } catch(...) {
        _clean_up();
        throw;
    }
}

void ubx_t::mon_hw2_t::_read() {
    m_ofs_i = m__io->read_s1();
    m_mag_i = m__io->read_u1();
    m_ofs_q = m__io->read_s1();
    m_mag_q = m__io->read_u1();
    // Single byte mapped to the config_source_t enum (ASCII-valued codes).
    m_cfg_source = static_cast<ubx_t::mon_hw2_t::config_source_t>(m__io->read_u1());
    m_reserved1 = m__io->read_bytes(3);
    m_low_lev_cfg = m__io->read_u4le();
    m_reserved2 = m__io->read_bytes(8);
    m_post_status = m__io->read_u4le();
    m_reserved3 = m__io->read_bytes(4);
}

ubx_t::mon_hw2_t::~mon_hw2_t() {
    _clean_up();
}

// No owned heap members; nothing to release.
void ubx_t::mon_hw2_t::_clean_up() {
}
// MON-HW: hardware status (pin states, noise/AGC, antenna state, jamming).
ubx_t::mon_hw_t::mon_hw_t(kaitai::kstream* p__io, ubx_t* p__parent, ubx_t* p__root) : kaitai::kstruct(p__io) {
    m__parent = p__parent;
    m__root = p__root;
    try {
        _read();
    } catch(...) {
        _clean_up();
        throw;
    }
}

void ubx_t::mon_hw_t::_read() {
    m_pin_sel = m__io->read_u4le();
    m_pin_bank = m__io->read_u4le();
    m_pin_dir = m__io->read_u4le();
    m_pin_val = m__io->read_u4le();
    m_noise_per_ms = m__io->read_u2le();
    m_agc_cnt = m__io->read_u2le();
    m_a_status = static_cast<ubx_t::mon_hw_t::antenna_status_t>(m__io->read_u1());
    m_a_power = static_cast<ubx_t::mon_hw_t::antenna_power_t>(m__io->read_u1());
    m_flags = m__io->read_u1();
    m_reserved1 = m__io->read_bytes(1);
    m_used_mask = m__io->read_u4le();
    m_vp = m__io->read_bytes(17);   // 17-byte virtual-pin mapping block
    m_jam_ind = m__io->read_u1();
    m_reserved2 = m__io->read_bytes(2);
    m_pin_irq = m__io->read_u4le();
    m_pull_h = m__io->read_u4le();
    m_pull_l = m__io->read_u4le();
}

ubx_t::mon_hw_t::~mon_hw_t() {
    _clean_up();
}

// No owned heap members; nothing to release.
void ubx_t::mon_hw_t::_clean_up() {
}
// Lazily reads the 2-byte checksum that trails the payload. Offset is
// length() + 6: the 6-byte frame header (sync + type + length) plus the
// payload itself. The stream position is saved and restored, so callers
// can invoke this at any point during or after parsing.
uint16_t ubx_t::checksum() {
    if (f_checksum)
        return m_checksum;   // cached from a previous call
    std::streampos _pos = m__io->pos();
    m__io->seek((length() + 6));
    m_checksum = m__io->read_u2le();
    m__io->seek(_pos);
    f_checksum = true;
    return m_checksum;
}

@ -1,484 +0,0 @@
#ifndef UBX_H_
#define UBX_H_
// This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
#include "kaitai/kaitaistruct.h"
#include <stdint.h>
#include <vector>
#if KAITAI_STRUCT_VERSION < 9000L
#error "Incompatible Kaitai Struct C++/STL API: version 0.9 or later is required"
#endif
// Parser for a single u-blox UBX protocol frame. The frame body is selected
// by the 16-bit message type; unknown types leave body null (check
// _is_null_body() before dereferencing). checksum() is computed lazily.
class ubx_t : public kaitai::kstruct {

public:
    class rxm_rawx_t;
    class rxm_sfrbx_t;
    class nav_sat_t;
    class nav_pvt_t;
    class mon_hw2_t;
    class mon_hw_t;

    // GNSS constellation identifiers as used in UBX gnssId fields.
    enum gnss_type_t {
        GNSS_TYPE_GPS = 0,
        GNSS_TYPE_SBAS = 1,
        GNSS_TYPE_GALILEO = 2,
        GNSS_TYPE_BEIDOU = 3,
        GNSS_TYPE_IMES = 4,
        GNSS_TYPE_QZSS = 5,
        GNSS_TYPE_GLONASS = 6
    };

    ubx_t(kaitai::kstream* p__io, kaitai::kstruct* p__parent = 0, ubx_t* p__root = 0);

private:
    void _read();
    void _clean_up();

public:
    ~ubx_t();

    // RXM-RAWX: raw measurement data, one measurement_t per satellite.
    class rxm_rawx_t : public kaitai::kstruct {

    public:
        class measurement_t;

        rxm_rawx_t(kaitai::kstream* p__io, ubx_t* p__parent = 0, ubx_t* p__root = 0);

    private:
        void _read();
        void _clean_up();

    public:
        ~rxm_rawx_t();

        // One 32-byte raw-measurement record.
        class measurement_t : public kaitai::kstruct {

        public:
            measurement_t(kaitai::kstream* p__io, ubx_t::rxm_rawx_t* p__parent = 0, ubx_t* p__root = 0);

        private:
            void _read();
            void _clean_up();

        public:
            ~measurement_t();

        private:
            double m_pr_mes;
            double m_cp_mes;
            float m_do_mes;
            gnss_type_t m_gnss_id;
            uint8_t m_sv_id;
            std::string m_reserved2;
            uint8_t m_freq_id;
            uint16_t m_lock_time;
            uint8_t m_cno;
            uint8_t m_pr_stdev;
            uint8_t m_cp_stdev;
            uint8_t m_do_stdev;
            uint8_t m_trk_stat;
            std::string m_reserved3;
            ubx_t* m__root;
            ubx_t::rxm_rawx_t* m__parent;

        public:
            double pr_mes() const { return m_pr_mes; }       // pseudorange measurement
            double cp_mes() const { return m_cp_mes; }       // carrier-phase measurement
            float do_mes() const { return m_do_mes; }        // doppler measurement
            gnss_type_t gnss_id() const { return m_gnss_id; }
            uint8_t sv_id() const { return m_sv_id; }
            std::string reserved2() const { return m_reserved2; }
            uint8_t freq_id() const { return m_freq_id; }
            uint16_t lock_time() const { return m_lock_time; }
            uint8_t cno() const { return m_cno; }
            uint8_t pr_stdev() const { return m_pr_stdev; }
            uint8_t cp_stdev() const { return m_cp_stdev; }
            uint8_t do_stdev() const { return m_do_stdev; }
            uint8_t trk_stat() const { return m_trk_stat; }
            std::string reserved3() const { return m_reserved3; }
            ubx_t* _root() const { return m__root; }
            ubx_t::rxm_rawx_t* _parent() const { return m__parent; }
        };

    private:
        double m_rcv_tow;
        uint16_t m_week;
        int8_t m_leap_s;
        uint8_t m_num_meas;
        uint8_t m_rec_stat;
        std::string m_reserved1;
        std::vector<measurement_t*>* m_meas;   // owned, one entry per measurement
        ubx_t* m__root;
        ubx_t* m__parent;
        std::vector<std::string>* m__raw_meas;          // owned raw 32-byte records
        std::vector<kaitai::kstream*>* m__io__raw_meas; // owned per-record sub-streams

    public:
        double rcv_tow() const { return m_rcv_tow; }
        uint16_t week() const { return m_week; }
        int8_t leap_s() const { return m_leap_s; }
        uint8_t num_meas() const { return m_num_meas; }
        uint8_t rec_stat() const { return m_rec_stat; }
        std::string reserved1() const { return m_reserved1; }
        std::vector<measurement_t*>* meas() const { return m_meas; }
        ubx_t* _root() const { return m__root; }
        ubx_t* _parent() const { return m__parent; }
        std::vector<std::string>* _raw_meas() const { return m__raw_meas; }
        std::vector<kaitai::kstream*>* _io__raw_meas() const { return m__io__raw_meas; }
    };

    // RXM-SFRBX: broadcast navigation subframe as raw 32-bit words.
    class rxm_sfrbx_t : public kaitai::kstruct {

    public:
        rxm_sfrbx_t(kaitai::kstream* p__io, ubx_t* p__parent = 0, ubx_t* p__root = 0);

    private:
        void _read();
        void _clean_up();

    public:
        ~rxm_sfrbx_t();

    private:
        gnss_type_t m_gnss_id;
        uint8_t m_sv_id;
        std::string m_reserved1;
        uint8_t m_freq_id;
        uint8_t m_num_words;
        std::string m_reserved2;
        uint8_t m_version;
        std::string m_reserved3;
        std::vector<uint32_t>* m_body;   // owned; num_words raw data words
        ubx_t* m__root;
        ubx_t* m__parent;

    public:
        gnss_type_t gnss_id() const { return m_gnss_id; }
        uint8_t sv_id() const { return m_sv_id; }
        std::string reserved1() const { return m_reserved1; }
        uint8_t freq_id() const { return m_freq_id; }
        uint8_t num_words() const { return m_num_words; }
        std::string reserved2() const { return m_reserved2; }
        uint8_t version() const { return m_version; }
        std::string reserved3() const { return m_reserved3; }
        std::vector<uint32_t>* body() const { return m_body; }
        ubx_t* _root() const { return m__root; }
        ubx_t* _parent() const { return m__parent; }
    };

    // NAV-SAT: per-satellite status, one nav_t per tracked satellite.
    class nav_sat_t : public kaitai::kstruct {

    public:
        class nav_t;

        nav_sat_t(kaitai::kstream* p__io, ubx_t* p__parent = 0, ubx_t* p__root = 0);

    private:
        void _read();
        void _clean_up();

    public:
        ~nav_sat_t();

        // One 12-byte satellite status record.
        class nav_t : public kaitai::kstruct {

        public:
            nav_t(kaitai::kstream* p__io, ubx_t::nav_sat_t* p__parent = 0, ubx_t* p__root = 0);

        private:
            void _read();
            void _clean_up();

        public:
            ~nav_t();

        private:
            gnss_type_t m_gnss_id;
            uint8_t m_sv_id;
            uint8_t m_cno;
            int8_t m_elev;
            int16_t m_azim;
            int16_t m_pr_res;
            uint32_t m_flags;
            ubx_t* m__root;
            ubx_t::nav_sat_t* m__parent;

        public:
            gnss_type_t gnss_id() const { return m_gnss_id; }
            uint8_t sv_id() const { return m_sv_id; }
            uint8_t cno() const { return m_cno; }
            int8_t elev() const { return m_elev; }
            int16_t azim() const { return m_azim; }
            int16_t pr_res() const { return m_pr_res; }
            uint32_t flags() const { return m_flags; }
            ubx_t* _root() const { return m__root; }
            ubx_t::nav_sat_t* _parent() const { return m__parent; }
        };

    private:
        uint32_t m_itow;
        uint8_t m_version;
        uint8_t m_num_svs;
        std::string m_reserved;
        std::vector<nav_t*>* m_svs;   // owned, one entry per satellite
        ubx_t* m__root;
        ubx_t* m__parent;
        std::vector<std::string>* m__raw_svs;          // owned raw 12-byte records
        std::vector<kaitai::kstream*>* m__io__raw_svs; // owned per-record sub-streams

    public:
        uint32_t itow() const { return m_itow; }
        uint8_t version() const { return m_version; }
        uint8_t num_svs() const { return m_num_svs; }
        std::string reserved() const { return m_reserved; }
        std::vector<nav_t*>* svs() const { return m_svs; }
        ubx_t* _root() const { return m__root; }
        ubx_t* _parent() const { return m__parent; }
        std::vector<std::string>* _raw_svs() const { return m__raw_svs; }
        std::vector<kaitai::kstream*>* _io__raw_svs() const { return m__io__raw_svs; }
    };

    // NAV-PVT: combined position/velocity/time solution (raw integer fields).
    class nav_pvt_t : public kaitai::kstruct {

    public:
        nav_pvt_t(kaitai::kstream* p__io, ubx_t* p__parent = 0, ubx_t* p__root = 0);

    private:
        void _read();
        void _clean_up();

    public:
        ~nav_pvt_t();

    private:
        uint32_t m_i_tow;
        uint16_t m_year;
        uint8_t m_month;
        uint8_t m_day;
        uint8_t m_hour;
        uint8_t m_min;
        uint8_t m_sec;
        uint8_t m_valid;
        uint32_t m_t_acc;
        int32_t m_nano;
        uint8_t m_fix_type;
        uint8_t m_flags;
        uint8_t m_flags2;
        uint8_t m_num_sv;
        int32_t m_lon;
        int32_t m_lat;
        int32_t m_height;
        int32_t m_h_msl;
        uint32_t m_h_acc;
        uint32_t m_v_acc;
        int32_t m_vel_n;
        int32_t m_vel_e;
        int32_t m_vel_d;
        int32_t m_g_speed;
        int32_t m_head_mot;
        int32_t m_s_acc;
        uint32_t m_head_acc;
        uint16_t m_p_dop;
        uint8_t m_flags3;
        std::string m_reserved1;
        int32_t m_head_veh;
        int16_t m_mag_dec;
        uint16_t m_mag_acc;
        ubx_t* m__root;
        ubx_t* m__parent;

    public:
        uint32_t i_tow() const { return m_i_tow; }
        uint16_t year() const { return m_year; }
        uint8_t month() const { return m_month; }
        uint8_t day() const { return m_day; }
        uint8_t hour() const { return m_hour; }
        uint8_t min() const { return m_min; }
        uint8_t sec() const { return m_sec; }
        uint8_t valid() const { return m_valid; }
        uint32_t t_acc() const { return m_t_acc; }
        int32_t nano() const { return m_nano; }
        uint8_t fix_type() const { return m_fix_type; }
        uint8_t flags() const { return m_flags; }
        uint8_t flags2() const { return m_flags2; }
        uint8_t num_sv() const { return m_num_sv; }
        int32_t lon() const { return m_lon; }
        int32_t lat() const { return m_lat; }
        int32_t height() const { return m_height; }
        int32_t h_msl() const { return m_h_msl; }
        uint32_t h_acc() const { return m_h_acc; }
        uint32_t v_acc() const { return m_v_acc; }
        int32_t vel_n() const { return m_vel_n; }
        int32_t vel_e() const { return m_vel_e; }
        int32_t vel_d() const { return m_vel_d; }
        int32_t g_speed() const { return m_g_speed; }
        int32_t head_mot() const { return m_head_mot; }
        int32_t s_acc() const { return m_s_acc; }
        uint32_t head_acc() const { return m_head_acc; }
        uint16_t p_dop() const { return m_p_dop; }
        uint8_t flags3() const { return m_flags3; }
        std::string reserved1() const { return m_reserved1; }
        int32_t head_veh() const { return m_head_veh; }
        int16_t mag_dec() const { return m_mag_dec; }
        uint16_t mag_acc() const { return m_mag_acc; }
        ubx_t* _root() const { return m__root; }
        ubx_t* _parent() const { return m__parent; }
    };

    // MON-HW2: extended hardware status (I/Q imbalance, config source, POST).
    class mon_hw2_t : public kaitai::kstruct {

    public:
        // Configuration source codes (ASCII-valued byte).
        enum config_source_t {
            CONFIG_SOURCE_FLASH = 102,
            CONFIG_SOURCE_OTP = 111,
            CONFIG_SOURCE_CONFIG_PINS = 112,
            CONFIG_SOURCE_ROM = 113
        };

        mon_hw2_t(kaitai::kstream* p__io, ubx_t* p__parent = 0, ubx_t* p__root = 0);

    private:
        void _read();
        void _clean_up();

    public:
        ~mon_hw2_t();

    private:
        int8_t m_ofs_i;
        uint8_t m_mag_i;
        int8_t m_ofs_q;
        uint8_t m_mag_q;
        config_source_t m_cfg_source;
        std::string m_reserved1;
        uint32_t m_low_lev_cfg;
        std::string m_reserved2;
        uint32_t m_post_status;
        std::string m_reserved3;
        ubx_t* m__root;
        ubx_t* m__parent;

    public:
        int8_t ofs_i() const { return m_ofs_i; }
        uint8_t mag_i() const { return m_mag_i; }
        int8_t ofs_q() const { return m_ofs_q; }
        uint8_t mag_q() const { return m_mag_q; }
        config_source_t cfg_source() const { return m_cfg_source; }
        std::string reserved1() const { return m_reserved1; }
        uint32_t low_lev_cfg() const { return m_low_lev_cfg; }
        std::string reserved2() const { return m_reserved2; }
        uint32_t post_status() const { return m_post_status; }
        std::string reserved3() const { return m_reserved3; }
        ubx_t* _root() const { return m__root; }
        ubx_t* _parent() const { return m__parent; }
    };

    // MON-HW: hardware status (pins, noise/AGC, antenna state, jamming).
    class mon_hw_t : public kaitai::kstruct {

    public:
        enum antenna_status_t {
            ANTENNA_STATUS_INIT = 0,
            ANTENNA_STATUS_DONTKNOW = 1,
            ANTENNA_STATUS_OK = 2,
            ANTENNA_STATUS_SHORT = 3,
            ANTENNA_STATUS_OPEN = 4
        };

        enum antenna_power_t {
            ANTENNA_POWER_FALSE = 0,
            ANTENNA_POWER_TRUE = 1,
            ANTENNA_POWER_DONTKNOW = 2
        };

        mon_hw_t(kaitai::kstream* p__io, ubx_t* p__parent = 0, ubx_t* p__root = 0);

    private:
        void _read();
        void _clean_up();

    public:
        ~mon_hw_t();

    private:
        uint32_t m_pin_sel;
        uint32_t m_pin_bank;
        uint32_t m_pin_dir;
        uint32_t m_pin_val;
        uint16_t m_noise_per_ms;
        uint16_t m_agc_cnt;
        antenna_status_t m_a_status;
        antenna_power_t m_a_power;
        uint8_t m_flags;
        std::string m_reserved1;
        uint32_t m_used_mask;
        std::string m_vp;   // 17-byte virtual-pin mapping block
        uint8_t m_jam_ind;
        std::string m_reserved2;
        uint32_t m_pin_irq;
        uint32_t m_pull_h;
        uint32_t m_pull_l;
        ubx_t* m__root;
        ubx_t* m__parent;

    public:
        uint32_t pin_sel() const { return m_pin_sel; }
        uint32_t pin_bank() const { return m_pin_bank; }
        uint32_t pin_dir() const { return m_pin_dir; }
        uint32_t pin_val() const { return m_pin_val; }
        uint16_t noise_per_ms() const { return m_noise_per_ms; }
        uint16_t agc_cnt() const { return m_agc_cnt; }
        antenna_status_t a_status() const { return m_a_status; }
        antenna_power_t a_power() const { return m_a_power; }
        uint8_t flags() const { return m_flags; }
        std::string reserved1() const { return m_reserved1; }
        uint32_t used_mask() const { return m_used_mask; }
        std::string vp() const { return m_vp; }
        uint8_t jam_ind() const { return m_jam_ind; }
        std::string reserved2() const { return m_reserved2; }
        uint32_t pin_irq() const { return m_pin_irq; }
        uint32_t pull_h() const { return m_pull_h; }
        uint32_t pull_l() const { return m_pull_l; }
        ubx_t* _root() const { return m__root; }
        ubx_t* _parent() const { return m__parent; }
    };

private:
    bool f_checksum;       // lazily-computed-value guard for checksum()
    uint16_t m_checksum;

public:
    // Trailing 2-byte checksum, read lazily from offset length() + 6.
    uint16_t checksum();

private:
    std::string m_magic;
    uint16_t m_msg_type;
    uint16_t m_length;
    kaitai::kstruct* m_body;   // owned; null for unrecognized message types
    bool n_body;

public:
    // Null check for the optional body (also forces lazy access of body()).
    bool _is_null_body() { body(); return n_body; };

private:
    ubx_t* m__root;
    kaitai::kstruct* m__parent;

public:
    std::string magic() const { return m_magic; }       // sync bytes 0xB5 0x62
    uint16_t msg_type() const { return m_msg_type; }    // class<<8 | id
    uint16_t length() const { return m_length; }        // payload byte length
    kaitai::kstruct* body() const { return m_body; }
    ubx_t* _root() const { return m__root; }
    kaitai::kstruct* _parent() const { return m__parent; }
};
#endif // UBX_H_

@ -0,0 +1,273 @@
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
import kaitaistruct
from kaitaistruct import KaitaiStruct, KaitaiStream, BytesIO
from enum import Enum
if getattr(kaitaistruct, 'API_VERSION', (0, 9)) < (0, 9):
raise Exception("Incompatible Kaitai Struct Python API: 0.9 or later is required, but you have %s" % (kaitaistruct.__version__))
class Ubx(KaitaiStruct):
    class GnssType(Enum):
        # GNSS constellation identifiers as used in UBX gnssId fields.
        gps = 0
        sbas = 1
        galileo = 2
        beidou = 3
        imes = 4
        qzss = 5
        glonass = 6
    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self._read()

    def _read(self):
        """Parse one UBX frame: 2-byte sync (0xB5 0x62, validated), 2-byte
        message type read big-endian (class<<8 | id), 2-byte little-endian
        payload length, then a body chosen by message type. Unrecognized
        types leave no ``body`` attribute."""
        self.magic = self._io.read_bytes(2)
        if not self.magic == b"\xB5\x62":
            raise kaitaistruct.ValidationNotEqualError(b"\xB5\x62", self.magic, self._io, u"/seq/0")
        self.msg_type = self._io.read_u2be()
        self.length = self._io.read_u2le()
        _on = self.msg_type
        if _on == 2569:     # 0x0a09: MON-HW
            self.body = Ubx.MonHw(self._io, self, self._root)
        elif _on == 533:    # 0x0215: RXM-RAWX
            self.body = Ubx.RxmRawx(self._io, self, self._root)
        elif _on == 531:    # 0x0213: RXM-SFRBX
            self.body = Ubx.RxmSfrbx(self._io, self, self._root)
        elif _on == 309:    # 0x0135: NAV-SAT
            self.body = Ubx.NavSat(self._io, self, self._root)
        elif _on == 2571:   # 0x0a0b: MON-HW2
            self.body = Ubx.MonHw2(self._io, self, self._root)
        elif _on == 263:    # 0x0107: NAV-PVT
            self.body = Ubx.NavPvt(self._io, self, self._root)
    class RxmRawx(KaitaiStruct):
        """RXM-RAWX: raw multi-GNSS measurement data — a fixed header followed
        by ``num_meas`` 32-byte measurement records, each parsed from its own
        isolated sub-stream."""
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()

        def _read(self):
            self.rcv_tow = self._io.read_f8le()
            self.week = self._io.read_u2le()
            self.leap_s = self._io.read_s1()
            self.num_meas = self._io.read_u1()
            self.rec_stat = self._io.read_u1()
            self.reserved1 = self._io.read_bytes(3)
            # Slice each 32-byte record into its own stream before parsing.
            self._raw_meas = []
            self.meas = []
            for i in range(self.num_meas):
                self._raw_meas.append(self._io.read_bytes(32))
                _io__raw_meas = KaitaiStream(BytesIO(self._raw_meas[i]))
                self.meas.append(Ubx.RxmRawx.Measurement(_io__raw_meas, self, self._root))

        class Measurement(KaitaiStruct):
            """One raw-measurement record: pseudorange / carrier phase /
            doppler plus ids, lock time and quality indicators."""
            def __init__(self, _io, _parent=None, _root=None):
                self._io = _io
                self._parent = _parent
                self._root = _root if _root else self
                self._read()

            def _read(self):
                self.pr_mes = self._io.read_f8le()
                self.cp_mes = self._io.read_f8le()
                self.do_mes = self._io.read_f4le()
                self.gnss_id = KaitaiStream.resolve_enum(Ubx.GnssType, self._io.read_u1())
                self.sv_id = self._io.read_u1()
                self.reserved2 = self._io.read_bytes(1)
                self.freq_id = self._io.read_u1()
                self.lock_time = self._io.read_u2le()
                self.cno = self._io.read_u1()
                self.pr_stdev = self._io.read_u1()
                self.cp_stdev = self._io.read_u1()
                self.do_stdev = self._io.read_u1()
                self.trk_stat = self._io.read_u1()
                self.reserved3 = self._io.read_bytes(1)
class RxmSfrbx(KaitaiStruct):
    """UBX-RXM-SFRBX: broadcast navigation data subframe.

    Kaitai-generated parser; fields are read strictly in wire order.
    """
    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self._read()
    def _read(self):
        self.gnss_id = KaitaiStream.resolve_enum(Ubx.GnssType, self._io.read_u1())
        self.sv_id = self._io.read_u1()
        self.reserved1 = self._io.read_bytes(1)
        self.freq_id = self._io.read_u1()     # GLONASS frequency slot
        self.num_words = self._io.read_u1()   # number of 32-bit data words that follow
        self.reserved2 = self._io.read_bytes(1)
        self.version = self._io.read_u1()
        self.reserved3 = self._io.read_bytes(1)
        # Raw subframe payload: num_words little-endian 32-bit words.
        self.body = []
        for i in range(self.num_words):
            self.body.append(self._io.read_u4le())
class NavSat(KaitaiStruct):
    """UBX-NAV-SAT: per-satellite tracking information.

    Kaitai-generated parser; fields are read strictly in wire order.
    """
    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self._read()
    def _read(self):
        self.itow = self._io.read_u4le()     # GPS time of week of this epoch
        self.version = self._io.read_u1()
        self.num_svs = self._io.read_u1()    # number of satellite records that follow
        self.reserved = self._io.read_bytes(2)
        # Each satellite entry is a fixed 12-byte record parsed from its own substream.
        self._raw_svs = []
        self.svs = []
        for i in range(self.num_svs):
            self._raw_svs.append(self._io.read_bytes(12))
            _io__raw_svs = KaitaiStream(BytesIO(self._raw_svs[i]))
            self.svs.append(Ubx.NavSat.Nav(_io__raw_svs, self, self._root))
    class Nav(KaitaiStruct):
        """One 12-byte satellite record of NAV-SAT."""
        def __init__(self, _io, _parent=None, _root=None):
            self._io = _io
            self._parent = _parent
            self._root = _root if _root else self
            self._read()
        def _read(self):
            self.gnss_id = KaitaiStream.resolve_enum(Ubx.GnssType, self._io.read_u1())
            self.sv_id = self._io.read_u1()
            self.cno = self._io.read_u1()       # carrier-to-noise density
            self.elev = self._io.read_s1()      # elevation (signed)
            self.azim = self._io.read_s2le()    # azimuth (signed)
            self.pr_res = self._io.read_s2le()  # pseudorange residual
            self.flags = self._io.read_u4le()   # status bitfield
class NavPvt(KaitaiStruct):
    """UBX-NAV-PVT: navigation position/velocity/time solution.

    Kaitai-generated parser; fields are read strictly in wire order. Raw
    integer fields are scaled by the consumer (see gen_nav_pvt in
    ublox_msg.cc: lat/lon * 1e-7, heights/velocities * 1e-3, headings * 1e-5).
    """
    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self._read()
    def _read(self):
        self.i_tow = self._io.read_u4le()    # GPS time of week
        # UTC calendar date/time of the fix.
        self.year = self._io.read_u2le()
        self.month = self._io.read_u1()
        self.day = self._io.read_u1()
        self.hour = self._io.read_u1()
        self.min = self._io.read_u1()
        self.sec = self._io.read_u1()
        self.valid = self._io.read_u1()      # validity flags bitfield
        self.t_acc = self._io.read_u4le()    # time accuracy estimate
        self.nano = self._io.read_s4le()     # sub-second part, signed nanoseconds
        self.fix_type = self._io.read_u1()
        self.flags = self._io.read_u1()      # bit 0 = fix OK (see gen_nav_pvt)
        self.flags2 = self._io.read_u1()
        self.num_sv = self._io.read_u1()     # satellites used in solution
        self.lon = self._io.read_s4le()
        self.lat = self._io.read_s4le()
        self.height = self._io.read_s4le()   # height above ellipsoid
        self.h_msl = self._io.read_s4le()    # height above mean sea level
        self.h_acc = self._io.read_u4le()    # horizontal accuracy estimate
        self.v_acc = self._io.read_u4le()    # vertical accuracy estimate
        # NED velocity components and ground speed/heading.
        self.vel_n = self._io.read_s4le()
        self.vel_e = self._io.read_s4le()
        self.vel_d = self._io.read_s4le()
        self.g_speed = self._io.read_s4le()
        self.head_mot = self._io.read_s4le()  # heading of motion
        self.s_acc = self._io.read_s4le()     # speed accuracy estimate
        self.head_acc = self._io.read_u4le()  # heading accuracy estimate
        self.p_dop = self._io.read_u2le()
        self.flags3 = self._io.read_u1()
        self.reserved1 = self._io.read_bytes(5)
        self.head_veh = self._io.read_s4le()  # heading of vehicle
        self.mag_dec = self._io.read_s2le()   # magnetic declination
        self.mag_acc = self._io.read_u2le()   # magnetic declination accuracy
class MonHw2(KaitaiStruct):
    """UBX-MON-HW2: extended hardware/RF status (I/Q imbalance, POST result).

    Kaitai-generated parser; fields are read strictly in wire order.
    """
    class ConfigSource(Enum):
        # Enum values match the generated spec; presumably ASCII codes
        # ('f', 'o', 'p', 'q') — TODO confirm against the u-blox protocol doc.
        flash = 102
        otp = 111
        config_pins = 112
        rom = 113
    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self._read()
    def _read(self):
        # I/Q imbalance of the RF front end: signed offsets, unsigned magnitudes.
        self.ofs_i = self._io.read_s1()
        self.mag_i = self._io.read_u1()
        self.ofs_q = self._io.read_s1()
        self.mag_q = self._io.read_u1()
        self.cfg_source = KaitaiStream.resolve_enum(Ubx.MonHw2.ConfigSource, self._io.read_u1())
        self.reserved1 = self._io.read_bytes(3)
        self.low_lev_cfg = self._io.read_u4le()
        self.reserved2 = self._io.read_bytes(8)
        self.post_status = self._io.read_u4le()  # power-on self test status
        self.reserved3 = self._io.read_bytes(4)
class MonHw(KaitaiStruct):
    """UBX-MON-HW: hardware status (pins, antenna, noise, jamming indicator).

    Kaitai-generated parser; fields are read strictly in wire order.
    """
    class AntennaStatus(Enum):
        init = 0
        dontknow = 1
        ok = 2
        short = 3
        open = 4
    class AntennaPower(Enum):
        false = 0
        true = 1
        dontknow = 2
    def __init__(self, _io, _parent=None, _root=None):
        self._io = _io
        self._parent = _parent
        self._root = _root if _root else self
        self._read()
    def _read(self):
        # GPIO pin state snapshots.
        self.pin_sel = self._io.read_u4le()
        self.pin_bank = self._io.read_u4le()
        self.pin_dir = self._io.read_u4le()
        self.pin_val = self._io.read_u4le()
        self.noise_per_ms = self._io.read_u2le()  # noise level measurement
        self.agc_cnt = self._io.read_u2le()       # AGC monitor count
        self.a_status = KaitaiStream.resolve_enum(Ubx.MonHw.AntennaStatus, self._io.read_u1())
        self.a_power = KaitaiStream.resolve_enum(Ubx.MonHw.AntennaPower, self._io.read_u1())
        self.flags = self._io.read_u1()
        self.reserved1 = self._io.read_bytes(1)
        self.used_mask = self._io.read_u4le()
        self.vp = self._io.read_bytes(17)         # virtual pin mapping
        self.jam_ind = self._io.read_u1()         # CW jamming indicator
        self.reserved2 = self._io.read_bytes(2)
        self.pin_irq = self._io.read_u4le()
        self.pull_h = self._io.read_u4le()
        self.pull_l = self._io.read_u4le()
@property
def checksum(self):
    # Lazily read the 2-byte trailing checksum, caching it in _m_checksum.
    if hasattr(self, '_m_checksum'):
        return self._m_checksum
    # The checksum sits immediately after the payload: 6 header bytes
    # (2 sync + 2 msg_type + 2 length) plus `length` payload bytes.
    _pos = self._io.pos()
    self._io.seek((self.length + 6))
    self._m_checksum = self._io.read_u2le()
    self._io.seek(_pos)  # restore the stream position for subsequent reads
    return getattr(self, '_m_checksum', None)

@ -1,20 +0,0 @@
#!/usr/bin/env python3
"""Debug helper: print satellite count, horizontal accuracy, and sorted CNOs
from the ublox messages at ~10 Hz."""
import time
import cereal.messaging as messaging

if __name__ == "__main__":
  sm = messaging.SubMaster(['ubloxGnss', 'gpsLocationExternal'])
  while True:
    # Fetch fresh messages *before* reading. The original updated at the end
    # of the loop, so the first iteration printed default/stale data.
    sm.update()
    ug = sm['ubloxGnss']
    gle = sm['gpsLocationExternal']
    try:
      cnos = sorted(m.cno for m in ug.measurementReport.measurements)
      print(f"Sats: {ug.measurementReport.numMeas} Accuracy: {gle.horizontalAccuracy:.2f} m cnos", cnos)
    except Exception:
      # ubloxGnss is a union; messages without a measurementReport raise here.
      pass
    time.sleep(0.1)

@ -1,360 +0,0 @@
#include <iostream>
#include <vector>
#include <bitset>
#include <cassert>
#include <cstdlib>
#include <ctime>
#include "catch2/catch.hpp"
#include "system/ubloxd/generated/glonass.h"
typedef std::vector<std::pair<int, int64_t>> string_data;
#define IDLE_CHIP_IDX 0
#define STRING_NUMBER_IDX 1
// string data 1-5
#define HC_IDX 0
#define PAD1_IDX 1
#define SUPERFRAME_IDX 2
#define PAD2_IDX 3
#define FRAME_IDX 4
// Indexes for string number 1
#define ST1_NU_IDX 2
#define ST1_P1_IDX 3
#define ST1_T_K_IDX 4
#define ST1_X_VEL_S_IDX 5
#define ST1_X_VEL_V_IDX 6
#define ST1_X_ACCEL_S_IDX 7
#define ST1_X_ACCEL_V_IDX 8
#define ST1_X_S_IDX 9
#define ST1_X_V_IDX 10
#define ST1_HC_OFF 11
// Indexes for string number 2
#define ST2_BN_IDX 2
#define ST2_P2_IDX 3
#define ST2_TB_IDX 4
#define ST2_NU_IDX 5
#define ST2_Y_VEL_S_IDX 6
#define ST2_Y_VEL_V_IDX 7
#define ST2_Y_ACCEL_S_IDX 8
#define ST2_Y_ACCEL_V_IDX 9
#define ST2_Y_S_IDX 10
#define ST2_Y_V_IDX 11
#define ST2_HC_OFF 12
// Indexes for string number 3
#define ST3_P3_IDX 2
#define ST3_GAMMA_N_S_IDX 3
#define ST3_GAMMA_N_V_IDX 4
#define ST3_NU_1_IDX 5
#define ST3_P_IDX 6
#define ST3_L_N_IDX 7
#define ST3_Z_VEL_S_IDX 8
#define ST3_Z_VEL_V_IDX 9
#define ST3_Z_ACCEL_S_IDX 10
#define ST3_Z_ACCEL_V_IDX 11
#define ST3_Z_S_IDX 12
#define ST3_Z_V_IDX 13
#define ST3_HC_OFF 14
// Indexes for string number 4
#define ST4_TAU_N_S_IDX 2
#define ST4_TAU_N_V_IDX 3
#define ST4_DELTA_TAU_N_S_IDX 4
#define ST4_DELTA_TAU_N_V_IDX 5
#define ST4_E_N_IDX 6
#define ST4_NU_1_IDX 7
#define ST4_P4_IDX 8
#define ST4_F_T_IDX 9
#define ST4_NU_2_IDX 10
#define ST4_N_T_IDX 11
#define ST4_N_IDX 12
#define ST4_M_IDX 13
#define ST4_HC_OFF 14
// Indexes for string number 5
#define ST5_N_A_IDX 2
#define ST5_TAU_C_IDX 3
#define ST5_NU_IDX 4
#define ST5_N_4_IDX 5
#define ST5_TAU_GPS_IDX 6
#define ST5_L_N_IDX 7
#define ST5_HC_OFF 8
// Indexes for non immediate
#define ST6_DATA_1_IDX 2
#define ST6_DATA_2_IDX 3
#define ST6_HC_OFF 4
// Concatenate each (bit_width, value) pair into a 128-bit binary string and
// pack it into 16 bytes, MSB first. Asserts the widths sum to exactly 128.
std::string generate_inp_data(string_data& data) {
  std::string bits = "";
  for (auto& [b, v] : data) {
    // Take the low `b` bits of the value, as text.
    std::string tmp = std::bitset<64>(v).to_string();
    bits += tmp.substr(64 - b, b);
  }
  assert(bits.size() == 128);
  // Renamed from `string_data`, which shadowed the typedef of the same name.
  std::string packed;
  packed.reserve(16);
  for (int i = 0; i < 128; i += 8) {
    // std::stoi takes the std::string directly; the old c_str() round-trip
    // just constructed a redundant temporary.
    packed.push_back((uint8_t)std::stoi(bits.substr(i, 8), nullptr, 2));
  }
  return packed;
}
// Build a synthetic GLONASS navigation string as a list of
// <bit width, value> pairs, for string numbers 1-5 (non-immediate otherwise).
// Sign bits are randomized so tests cover both signed decodings.
// NOTE(review): srand() is reseeded with time(0) on every call, so calls
// within the same second produce identical "random" bits — harmless for
// these tests, but worth knowing.
string_data generate_string_data(uint8_t string_number) {
  srand((unsigned)time(0));
  string_data data; //<bit length, value>
  data.push_back({1, 0}); // idle chip
  data.push_back({4, string_number}); // string number
  if (string_number == 1) {
    data.push_back({2, 3}); // not_used
    data.push_back({2, 1}); // p1
    data.push_back({12, 113}); // t_k
    data.push_back({1, rand() & 1}); // x_vel_sign
    data.push_back({23, 7122}); // x_vel_value
    data.push_back({1, rand() & 1}); // x_accel_sign
    data.push_back({4, 3}); // x_accel_value
    data.push_back({1, rand() & 1}); // x_sign
    data.push_back({26, 33554431}); // x_value
  } else if (string_number == 2) {
    data.push_back({3, 3}); // b_n
    data.push_back({1, 1}); // p2
    data.push_back({7, 123}); // t_b
    data.push_back({5, 31}); // not_used
    data.push_back({1, rand() & 1}); // y_vel_sign
    data.push_back({23, 7422}); // y_vel_value
    data.push_back({1, rand() & 1}); // y_accel_sign
    data.push_back({4, 3}); // y_accel_value
    data.push_back({1, rand() & 1}); // y_sign
    data.push_back({26, 67108863}); // y_value
  } else if (string_number == 3) {
    data.push_back({1, 0}); // p3
    data.push_back({1, 1}); // gamma_n_sign
    data.push_back({10, 123}); // gamma_n_value
    data.push_back({1, 0}); // not_used
    data.push_back({2, 2}); // p
    data.push_back({1, 1}); // l_n
    data.push_back({1, rand() & 1}); // z_vel_sign
    data.push_back({23, 1337}); // z_vel_value
    data.push_back({1, rand() & 1}); // z_accel_sign
    data.push_back({4, 9}); // z_accel_value
    data.push_back({1, rand() & 1}); // z_sign
    data.push_back({26, 100023}); // z_value
  } else if (string_number == 4) {
    data.push_back({1, rand() & 1}); // tau_n_sign
    data.push_back({21, 197152}); // tau_n_value
    data.push_back({1, rand() & 1}); // delta_tau_n_sign
    data.push_back({4, 4}); // delta_tau_n_value
    data.push_back({5, 0}); // e_n
    data.push_back({14, 2}); // not_used_1
    data.push_back({1, 1}); // p4
    data.push_back({4, 9}); // f_t
    data.push_back({3, 3}); // not_used_2
    data.push_back({11, 2047}); // n_t
    data.push_back({5, 2}); // n
    data.push_back({2, 1}); // m
  } else if (string_number == 5) {
    data.push_back({11, 2047}); // n_a
    data.push_back({32, 4294767295}); // tau_c
    data.push_back({1, 0}); // not_used_1
    data.push_back({5, 2}); // n_4
    data.push_back({22, 4114304}); // tau_gps
    data.push_back({1, 0}); // l_n
  } else { // non-immediate data is not parsed
    data.push_back({64, rand()}); // data_1
    data.push_back({8, 6}); // data_2
  }
  // Trailer common to every string; values are irrelevant to body parsing.
  data.push_back({8, rand() & 0xFF}); // hamming code
  data.push_back({11, rand() & 0x7FF}); // pad
  data.push_back({16, rand() & 0xFFFF}); // superframe
  data.push_back({8, rand() & 0xFF}); // pad
  data.push_back({8, rand() & 0xFF}); // frame
  return data;
}
// Round-trip test for GLONASS string 1: encode synthetic data, parse it, and
// check the common header/trailer fields plus every string-1 body field
// (signed quantities are reconstructed from their separate sign bit).
TEST_CASE("parse_string_number_1"){
  string_data data = generate_string_data(1);
  std::string inp_data = generate_inp_data(data);
  kaitai::kstream stream(inp_data);
  glonass_t gl_string(&stream);
  REQUIRE(gl_string.idle_chip() == data[IDLE_CHIP_IDX].second);
  REQUIRE(gl_string.string_number() == data[STRING_NUMBER_IDX].second);
  REQUIRE(gl_string.hamming_code() == data[ST1_HC_OFF + HC_IDX].second);
  REQUIRE(gl_string.pad_1() == data[ST1_HC_OFF + PAD1_IDX].second);
  REQUIRE(gl_string.superframe_number() == data[ST1_HC_OFF + SUPERFRAME_IDX].second);
  REQUIRE(gl_string.pad_2() == data[ST1_HC_OFF + PAD2_IDX].second);
  REQUIRE(gl_string.frame_number() == data[ST1_HC_OFF + FRAME_IDX].second);
  // Re-parse from a fresh stream to exercise the typed body accessor.
  kaitai::kstream str1(inp_data);
  glonass_t str1_data(&str1);
  glonass_t::string_1_t* s1 = static_cast<glonass_t::string_1_t*>(str1_data.data());
  REQUIRE(s1->not_used() == data[ST1_NU_IDX].second);
  REQUIRE(s1->p1() == data[ST1_P1_IDX].second);
  REQUIRE(s1->t_k() == data[ST1_T_K_IDX].second);
  int mul = s1->x_vel_sign() ? (-1) : 1;
  REQUIRE(s1->x_vel() == (data[ST1_X_VEL_V_IDX].second * mul));
  mul = s1->x_accel_sign() ? (-1) : 1;
  REQUIRE(s1->x_accel() == (data[ST1_X_ACCEL_V_IDX].second * mul));
  mul = s1->x_sign() ? (-1) : 1;
  REQUIRE(s1->x() == (data[ST1_X_V_IDX].second * mul));
}
// Round-trip test for GLONASS string 2 (Y components + b_n/p2/t_b).
TEST_CASE("parse_string_number_2"){
  string_data data = generate_string_data(2);
  std::string inp_data = generate_inp_data(data);
  kaitai::kstream stream(inp_data);
  glonass_t gl_string(&stream);
  REQUIRE(gl_string.idle_chip() == data[IDLE_CHIP_IDX].second);
  REQUIRE(gl_string.string_number() == data[STRING_NUMBER_IDX].second);
  REQUIRE(gl_string.hamming_code() == data[ST2_HC_OFF + HC_IDX].second);
  REQUIRE(gl_string.pad_1() == data[ST2_HC_OFF + PAD1_IDX].second);
  REQUIRE(gl_string.superframe_number() == data[ST2_HC_OFF + SUPERFRAME_IDX].second);
  REQUIRE(gl_string.pad_2() == data[ST2_HC_OFF + PAD2_IDX].second);
  REQUIRE(gl_string.frame_number() == data[ST2_HC_OFF + FRAME_IDX].second);
  // Re-parse from a fresh stream to exercise the typed body accessor.
  kaitai::kstream str2(inp_data);
  glonass_t str2_data(&str2);
  glonass_t::string_2_t* s2 = static_cast<glonass_t::string_2_t*>(str2_data.data());
  REQUIRE(s2->b_n() == data[ST2_BN_IDX].second);
  REQUIRE(s2->not_used() == data[ST2_NU_IDX].second);
  REQUIRE(s2->p2() == data[ST2_P2_IDX].second);
  REQUIRE(s2->t_b() == data[ST2_TB_IDX].second);
  int mul = s2->y_vel_sign() ? (-1) : 1;
  REQUIRE(s2->y_vel() == (data[ST2_Y_VEL_V_IDX].second * mul));
  mul = s2->y_accel_sign() ? (-1) : 1;
  REQUIRE(s2->y_accel() == (data[ST2_Y_ACCEL_V_IDX].second * mul));
  mul = s2->y_sign() ? (-1) : 1;
  REQUIRE(s2->y() == (data[ST2_Y_V_IDX].second * mul));
}
// Round-trip test for GLONASS string 3 (Z components + gamma_n/p3/p/l_n).
TEST_CASE("parse_string_number_3"){
  string_data data = generate_string_data(3);
  std::string inp_data = generate_inp_data(data);
  kaitai::kstream stream(inp_data);
  glonass_t gl_string(&stream);
  REQUIRE(gl_string.idle_chip() == data[IDLE_CHIP_IDX].second);
  REQUIRE(gl_string.string_number() == data[STRING_NUMBER_IDX].second);
  REQUIRE(gl_string.hamming_code() == data[ST3_HC_OFF + HC_IDX].second);
  REQUIRE(gl_string.pad_1() == data[ST3_HC_OFF + PAD1_IDX].second);
  REQUIRE(gl_string.superframe_number() == data[ST3_HC_OFF + SUPERFRAME_IDX].second);
  REQUIRE(gl_string.pad_2() == data[ST3_HC_OFF + PAD2_IDX].second);
  REQUIRE(gl_string.frame_number() == data[ST3_HC_OFF + FRAME_IDX].second);
  // Re-parse from a fresh stream to exercise the typed body accessor.
  kaitai::kstream str3(inp_data);
  glonass_t str3_data(&str3);
  glonass_t::string_3_t* s3 = static_cast<glonass_t::string_3_t*>(str3_data.data());
  REQUIRE(s3->p3() == data[ST3_P3_IDX].second);
  int mul = s3->gamma_n_sign() ? (-1) : 1;
  REQUIRE(s3->gamma_n() == (data[ST3_GAMMA_N_V_IDX].second * mul));
  REQUIRE(s3->not_used() == data[ST3_NU_1_IDX].second);
  REQUIRE(s3->p() == data[ST3_P_IDX].second);
  REQUIRE(s3->l_n() == data[ST3_L_N_IDX].second);
  mul = s3->z_vel_sign() ? (-1) : 1;
  REQUIRE(s3->z_vel() == (data[ST3_Z_VEL_V_IDX].second * mul));
  mul = s3->z_accel_sign() ? (-1) : 1;
  REQUIRE(s3->z_accel() == (data[ST3_Z_ACCEL_V_IDX].second * mul));
  mul = s3->z_sign() ? (-1) : 1;
  REQUIRE(s3->z() == (data[ST3_Z_V_IDX].second * mul));
}
// Round-trip test for GLONASS string 4 (clock corrections, day number, slot).
TEST_CASE("parse_string_number_4"){
  string_data data = generate_string_data(4);
  std::string inp_data = generate_inp_data(data);
  kaitai::kstream stream(inp_data);
  glonass_t gl_string(&stream);
  REQUIRE(gl_string.idle_chip() == data[IDLE_CHIP_IDX].second);
  REQUIRE(gl_string.string_number() == data[STRING_NUMBER_IDX].second);
  REQUIRE(gl_string.hamming_code() == data[ST4_HC_OFF + HC_IDX].second);
  REQUIRE(gl_string.pad_1() == data[ST4_HC_OFF + PAD1_IDX].second);
  REQUIRE(gl_string.superframe_number() == data[ST4_HC_OFF + SUPERFRAME_IDX].second);
  REQUIRE(gl_string.pad_2() == data[ST4_HC_OFF + PAD2_IDX].second);
  REQUIRE(gl_string.frame_number() == data[ST4_HC_OFF + FRAME_IDX].second);
  // Re-parse from a fresh stream to exercise the typed body accessor.
  kaitai::kstream str4(inp_data);
  glonass_t str4_data(&str4);
  glonass_t::string_4_t* s4 = static_cast<glonass_t::string_4_t*>(str4_data.data());
  int mul = s4->tau_n_sign() ? (-1) : 1;
  REQUIRE(s4->tau_n() == (data[ST4_TAU_N_V_IDX].second * mul));
  mul = s4->delta_tau_n_sign() ? (-1) : 1;
  REQUIRE(s4->delta_tau_n() == (data[ST4_DELTA_TAU_N_V_IDX].second * mul));
  REQUIRE(s4->e_n() == data[ST4_E_N_IDX].second);
  REQUIRE(s4->not_used_1() == data[ST4_NU_1_IDX].second);
  REQUIRE(s4->p4() == data[ST4_P4_IDX].second);
  REQUIRE(s4->f_t() == data[ST4_F_T_IDX].second);
  REQUIRE(s4->not_used_2() == data[ST4_NU_2_IDX].second);
  REQUIRE(s4->n_t() == data[ST4_N_T_IDX].second);
  REQUIRE(s4->n() == data[ST4_N_IDX].second);
  REQUIRE(s4->m() == data[ST4_M_IDX].second);
}
// Round-trip test for GLONASS string 5 (almanac day, tau_c, four-year n_4).
TEST_CASE("parse_string_number_5"){
  string_data data = generate_string_data(5);
  std::string inp_data = generate_inp_data(data);
  kaitai::kstream stream(inp_data);
  glonass_t gl_string(&stream);
  REQUIRE(gl_string.idle_chip() == data[IDLE_CHIP_IDX].second);
  REQUIRE(gl_string.string_number() == data[STRING_NUMBER_IDX].second);
  REQUIRE(gl_string.hamming_code() == data[ST5_HC_OFF + HC_IDX].second);
  REQUIRE(gl_string.pad_1() == data[ST5_HC_OFF + PAD1_IDX].second);
  REQUIRE(gl_string.superframe_number() == data[ST5_HC_OFF + SUPERFRAME_IDX].second);
  REQUIRE(gl_string.pad_2() == data[ST5_HC_OFF + PAD2_IDX].second);
  REQUIRE(gl_string.frame_number() == data[ST5_HC_OFF + FRAME_IDX].second);
  // Re-parse from a fresh stream to exercise the typed body accessor.
  kaitai::kstream str5(inp_data);
  glonass_t str5_data(&str5);
  glonass_t::string_5_t* s5 = static_cast<glonass_t::string_5_t*>(str5_data.data());
  REQUIRE(s5->n_a() == data[ST5_N_A_IDX].second);
  REQUIRE(s5->tau_c() == data[ST5_TAU_C_IDX].second);
  REQUIRE(s5->not_used() == data[ST5_NU_IDX].second);
  REQUIRE(s5->n_4() == data[ST5_N_4_IDX].second);
  REQUIRE(s5->tau_gps() == data[ST5_TAU_GPS_IDX].second);
  REQUIRE(s5->l_n() == data[ST5_L_N_IDX].second);
}
// Round-trip test for non-immediate strings (numbers 6-15): only the raw
// data_1/data_2 fields are exposed, the body is otherwise unparsed.
TEST_CASE("parse_string_number_NI"){
  string_data data = generate_string_data((rand() % 10) + 6);
  std::string inp_data = generate_inp_data(data);
  kaitai::kstream stream(inp_data);
  glonass_t gl_string(&stream);
  REQUIRE(gl_string.idle_chip() == data[IDLE_CHIP_IDX].second);
  REQUIRE(gl_string.string_number() == data[STRING_NUMBER_IDX].second);
  REQUIRE(gl_string.hamming_code() == data[ST6_HC_OFF + HC_IDX].second);
  REQUIRE(gl_string.pad_1() == data[ST6_HC_OFF + PAD1_IDX].second);
  REQUIRE(gl_string.superframe_number() == data[ST6_HC_OFF + SUPERFRAME_IDX].second);
  REQUIRE(gl_string.pad_2() == data[ST6_HC_OFF + PAD2_IDX].second);
  REQUIRE(gl_string.frame_number() == data[ST6_HC_OFF + FRAME_IDX].second);
  // Re-parse from a fresh stream to exercise the typed body accessor.
  kaitai::kstream strni(inp_data);
  glonass_t strni_data(&strni);
  glonass_t::string_non_immediate_t* sni = static_cast<glonass_t::string_non_immediate_t*>(strni_data.data());
  REQUIRE(sni->data_1() == data[ST6_DATA_1_IDX].second);
  REQUIRE(sni->data_2() == data[ST6_DATA_2_IDX].second);
}

@ -1,2 +0,0 @@
// Catch2 test-runner translation unit: CATCH_CONFIG_MAIN makes the header
// emit main(); this file must contain nothing else.
#define CATCH_CONFIG_MAIN
#include "catch2/catch.hpp"

@ -1,530 +0,0 @@
#include "system/ubloxd/ublox_msg.h"
#include <unistd.h>
#include <algorithm>
#include <cassert>
#include <chrono>
#include <cmath>
#include <cstdio>
#include <cstdlib>
#include <ctime>
#include <unordered_map>
#include <utility>
#include "common/swaglog.h"
const double gpsPi = 3.1415926535898;
#define UBLOX_MSG_SIZE(hdr) (*(uint16_t *)&hdr[4])
// True iff bit `shifts` (0-based, LSB first) of val is set.
inline static bool bit_to_bool(uint8_t val, int shifts) {
  return ((val >> shifts) & 1u) != 0;
}
// Number of bytes still required to complete the UBX frame currently in
// msg_parse_buf. Returns -1 when the buffer already holds more bytes than
// one complete frame (caller resets the buffer in that case).
inline int UbloxMsgParser::needed_bytes() {
  // Msg header incomplete? Ask for at least a full header + checksum, since
  // the payload length field cannot be read yet.
  if (bytes_in_parse_buf < ublox::UBLOX_HEADER_SIZE)
    return ublox::UBLOX_HEADER_SIZE + ublox::UBLOX_CHECKSUM_SIZE - bytes_in_parse_buf;
  // Full frame size = header + payload length (from header bytes 4-5) + checksum.
  uint16_t needed = UBLOX_MSG_SIZE(msg_parse_buf) + ublox::UBLOX_HEADER_SIZE + ublox::UBLOX_CHECKSUM_SIZE;
  // too much data
  if (needed < (uint16_t)bytes_in_parse_buf)
    return -1;
  return needed - (uint16_t)bytes_in_parse_buf;
}
// Verify the UBX 8-bit Fletcher checksum, computed over class/id/length/payload
// (bytes 2 .. end-of-payload) and compared against the trailing two bytes.
// (Name keeps the historical "cheksum" spelling for existing callers.)
inline bool UbloxMsgParser::valid_cheksum() {
  uint8_t ck_a = 0, ck_b = 0;
  for (int i = 2; i < bytes_in_parse_buf - ublox::UBLOX_CHECKSUM_SIZE; i++) {
    ck_a = (ck_a + msg_parse_buf[i]) & 0xFF;
    ck_b = (ck_b + ck_a) & 0xFF;
  }
  // Bug fix: log the received byte actually being compared. The old code
  // printed msg_parse_buf[6]/[7], which is the checksum location only for
  // zero-length payloads.
  if (ck_a != msg_parse_buf[bytes_in_parse_buf - 2]) {
    LOGD("Checksum a mismatch: %02X, %02X", ck_a, msg_parse_buf[bytes_in_parse_buf - 2]);
    return false;
  }
  if (ck_b != msg_parse_buf[bytes_in_parse_buf - 1]) {
    LOGD("Checksum b mismatch: %02X, %02X", ck_b, msg_parse_buf[bytes_in_parse_buf - 1]);
    return false;
  }
  return true;
}
// A frame is valid once the buffer holds at least header + checksum, no more
// bytes are needed, and the checksum matches.
inline bool UbloxMsgParser::valid() {
  if (bytes_in_parse_buf < ublox::UBLOX_HEADER_SIZE + ublox::UBLOX_CHECKSUM_SIZE) {
    return false;
  }
  if (needed_bytes() != 0) {
    return false;
  }
  return valid_cheksum();
}
// Cheap incremental sanity check used while bytes trickle in: the first two
// bytes must be the UBX sync chars, and once a full frame is buffered it must
// pass full validation.
inline bool UbloxMsgParser::valid_so_far() {
  const bool bad_sync1 = bytes_in_parse_buf > 0 && msg_parse_buf[0] != ublox::PREAMBLE1;
  const bool bad_sync2 = bytes_in_parse_buf > 1 && msg_parse_buf[1] != ublox::PREAMBLE2;
  if (bad_sync1 || bad_sync2) {
    return false;
  }
  // Complete frame that fails validation (e.g. bad checksum) is also invalid.
  return !(needed_bytes() == 0 && !valid());
}
// Feed raw serial bytes into the frame buffer.
// Sets bytes_consumed to how many of incoming_data were taken this call and
// returns true when msg_parse_buf holds one complete, checksum-valid frame.
bool UbloxMsgParser::add_data(float log_time, const uint8_t *incoming_data, uint32_t incoming_data_len, size_t &bytes_consumed) {
  last_log_time = log_time;
  int needed = needed_bytes();
  if (needed > 0) {
    // Take only as many bytes as the current frame still needs.
    bytes_consumed = std::min((uint32_t)needed, incoming_data_len);
    // Add data to buffer
    memcpy(msg_parse_buf + bytes_in_parse_buf, incoming_data, bytes_consumed);
    bytes_in_parse_buf += bytes_consumed;
  } else {
    bytes_consumed = incoming_data_len;
  }
  // Validate msg format, detect invalid header and invalid checksum.
  // Resync by dropping one byte at a time from the front until the buffer
  // starts with a plausible frame (or becomes empty).
  while (!valid_so_far() && bytes_in_parse_buf != 0) {
    // Corrupted msg, drop a byte.
    bytes_in_parse_buf -= 1;
    if (bytes_in_parse_buf > 0)
      memmove(&msg_parse_buf[0], &msg_parse_buf[1], bytes_in_parse_buf);
  }
  // There is redundant data at the end of buffer, reset the buffer.
  if (needed_bytes() == -1) {
    bytes_in_parse_buf = 0;
  }
  return valid();
}
// Decode the buffered UBX frame with the generated Kaitai parser and dispatch
// to the matching capnp event generator. Returns {service name, serialized
// event}; the event array is empty for unknown message types.
std::pair<std::string, kj::Array<capnp::word>> UbloxMsgParser::gen_msg() {
  std::string dat = data();
  kaitai::kstream stream(dat);

  ubx_t ubx_message(&stream);
  auto body = ubx_message.body();

  switch (ubx_message.msg_type()) {
  case 0x0107: // UBX-NAV-PVT (navigation position/velocity/time solution)
    return {"gpsLocationExternal", gen_nav_pvt(static_cast<ubx_t::nav_pvt_t*>(body))};
  case 0x0213: // UBX-RXM-SFRBX (broadcast navigation data subframe)
    return {"ubloxGnss", gen_rxm_sfrbx(static_cast<ubx_t::rxm_sfrbx_t*>(body))};
  case 0x0215: // UBX-RXM-RAWX (multi-GNSS raw measurement data)
    return {"ubloxGnss", gen_rxm_rawx(static_cast<ubx_t::rxm_rawx_t*>(body))};
  case 0x0a09: // UBX-MON-HW (hardware status)
    return {"ubloxGnss", gen_mon_hw(static_cast<ubx_t::mon_hw_t*>(body))};
  case 0x0a0b: // UBX-MON-HW2 (extended hardware status)
    return {"ubloxGnss", gen_mon_hw2(static_cast<ubx_t::mon_hw2_t*>(body))};
  case 0x0135: // UBX-NAV-SAT (satellite information)
    return {"ubloxGnss", gen_nav_sat(static_cast<ubx_t::nav_sat_t*>(body))};
  default:
    LOGE("Unknown message type %x", ubx_message.msg_type());
    return {"ubloxGnss", kj::Array<capnp::word>()};
  }
}
// Convert a UBX-NAV-PVT body into a gpsLocationExternal event, applying the
// fixed scale factors of the UBX wire format (1e-7 for lat/lon, 1e-3 for
// mm-based lengths/velocities, 1e-5 for headings).
kj::Array<capnp::word> UbloxMsgParser::gen_nav_pvt(ubx_t::nav_pvt_t *msg) {
  MessageBuilder msg_builder;
  auto gpsLoc = msg_builder.initEvent().initGpsLocationExternal();
  gpsLoc.setSource(cereal::GpsLocationData::SensorSource::UBLOX);
  gpsLoc.setFlags(msg->flags());
  gpsLoc.setHasFix((msg->flags() % 2) == 1);  // bit 0 of flags = gnssFixOK
  gpsLoc.setLatitude(msg->lat() * 1e-07);
  gpsLoc.setLongitude(msg->lon() * 1e-07);
  gpsLoc.setAltitude(msg->height() * 1e-03);
  gpsLoc.setSpeed(msg->g_speed() * 1e-03);
  gpsLoc.setBearingDeg(msg->head_mot() * 1e-5);
  gpsLoc.setHorizontalAccuracy(msg->h_acc() * 1e-03);
  gpsLoc.setSatelliteCount(msg->num_sv());
  // Reassemble the UTC calendar fields into a unix timestamp; timegm avoids
  // the local-timezone conversion mktime would apply.
  std::tm timeinfo = std::tm();
  timeinfo.tm_year = msg->year() - 1900;
  timeinfo.tm_mon = msg->month() - 1;
  timeinfo.tm_mday = msg->day();
  timeinfo.tm_hour = msg->hour();
  timeinfo.tm_min = msg->min();
  timeinfo.tm_sec = msg->sec();
  std::time_t utc_tt = timegm(&timeinfo);
  // nano is the signed sub-second remainder in nanoseconds.
  gpsLoc.setUnixTimestampMillis(utc_tt * 1e+03 + msg->nano() * 1e-06);
  float f[] = { msg->vel_n() * 1e-03f, msg->vel_e() * 1e-03f, msg->vel_d() * 1e-03f };
  gpsLoc.setVNED(f);
  gpsLoc.setVerticalAccuracy(msg->v_acc() * 1e-03);
  gpsLoc.setSpeedAccuracy(msg->s_acc() * 1e-03);
  gpsLoc.setBearingAccuracyDeg(msg->head_acc() * 1e-05);
  return capnp::messageToFlatArray(msg_builder);
}
// Accumulate GPS LNAV subframes 1-3 per satellite and emit an ephemeris event
// once all three are present. Scale factors (pow(2, -n)) follow the GPS
// interface spec (IS-GPS-200). Returns an empty array until a complete,
// consistent set is available.
kj::Array<capnp::word> UbloxMsgParser::parse_gps_ephemeris(ubx_t::rxm_sfrbx_t *msg) {
  // GPS subframes are packed into 10x 4 bytes, each containing 3 actual bytes
  // We will first need to separate the data from the padding and parity
  auto body = *msg->body();
  assert(body.size() == 10);
  std::string subframe_data;
  subframe_data.reserve(30);
  for (uint32_t word : body) {
    word = word >> 6; // TODO: Verify parity
    subframe_data.push_back(word >> 16);
    subframe_data.push_back(word >> 8);
    subframe_data.push_back(word >> 0);
  }

  // Collect subframes in map and parse when we have all the parts
  {
    kaitai::kstream stream(subframe_data);

    gps_t subframe(&stream);
    int subframe_id = subframe.how()->subframe_id();
    if (subframe_id > 3 || subframe_id < 1) {
      // don't parse almanac subframes
      return kj::Array<capnp::word>();
    }
    gps_subframes[msg->sv_id()][subframe_id] = subframe_data;
  }

  // publish if subframes 1-3 have been collected
  if (gps_subframes[msg->sv_id()].size() == 3) {
    MessageBuilder msg_builder;
    auto eph = msg_builder.initEvent().initUbloxGnss().initEphemeris();
    eph.setSvId(msg->sv_id());
    int iode_s2 = 0;
    int iode_s3 = 0;
    int iodc_lsb = 0;
    int week;

    // Subframe 1
    {
      kaitai::kstream stream(gps_subframes[msg->sv_id()][1]);
      gps_t subframe(&stream);
      gps_t::subframe_1_t* subframe_1 = static_cast<gps_t::subframe_1_t*>(subframe.body());

      // Each message is incremented to be greater or equal than week 1877 (2015-12-27).
      // To skip this use the current_time argument
      week = subframe_1->week_no();
      week += 1024;
      if (week < 1877) {
        week += 1024;
      }

      //eph.setGpsWeek(subframe_1->week_no());
      eph.setTgd(subframe_1->t_gd() * pow(2, -31));
      eph.setToc(subframe_1->t_oc() * pow(2, 4));
      eph.setAf2(subframe_1->af_2() * pow(2, -55));
      eph.setAf1(subframe_1->af_1() * pow(2, -43));
      eph.setAf0(subframe_1->af_0() * pow(2, -31));
      eph.setSvHealth(subframe_1->sv_health());
      eph.setTowCount(subframe.how()->tow_count());
      iodc_lsb = subframe_1->iodc_lsb();
    }

    // Subframe 2
    {
      kaitai::kstream stream(gps_subframes[msg->sv_id()][2]);
      gps_t subframe(&stream);
      gps_t::subframe_2_t* subframe_2 = static_cast<gps_t::subframe_2_t*>(subframe.body());

      // GPS week refers to current week, the ephemeris can be valid for the next
      // if toe equals 0, this can be verified by the TOW count if it is within the
      // last 2 hours of the week (gps ephemeris valid for 4hours)
      if (subframe_2->t_oe() == 0 and subframe.how()->tow_count()*6 >= (SECS_IN_WEEK - 2*SECS_IN_HR)){
        week += 1;
      }
      eph.setCrs(subframe_2->c_rs() * pow(2, -5));
      eph.setDeltaN(subframe_2->delta_n() * pow(2, -43) * gpsPi);
      eph.setM0(subframe_2->m_0() * pow(2, -31) * gpsPi);
      eph.setCuc(subframe_2->c_uc() * pow(2, -29));
      eph.setEcc(subframe_2->e() * pow(2, -33));
      eph.setCus(subframe_2->c_us() * pow(2, -29));
      eph.setA(pow(subframe_2->sqrt_a() * pow(2, -19), 2.0));
      eph.setToe(subframe_2->t_oe() * pow(2, 4));
      iode_s2 = subframe_2->iode();
    }

    // Subframe 3
    {
      kaitai::kstream stream(gps_subframes[msg->sv_id()][3]);
      gps_t subframe(&stream);
      gps_t::subframe_3_t* subframe_3 = static_cast<gps_t::subframe_3_t*>(subframe.body());

      eph.setCic(subframe_3->c_ic() * pow(2, -29));
      eph.setOmega0(subframe_3->omega_0() * pow(2, -31) * gpsPi);
      eph.setCis(subframe_3->c_is() * pow(2, -29));
      eph.setI0(subframe_3->i_0() * pow(2, -31) * gpsPi);
      eph.setCrc(subframe_3->c_rc() * pow(2, -5));
      eph.setOmega(subframe_3->omega() * pow(2, -31) * gpsPi);
      eph.setOmegaDot(subframe_3->omega_dot() * pow(2, -43) * gpsPi);
      eph.setIode(subframe_3->iode());
      eph.setIDot(subframe_3->idot() * pow(2, -43) * gpsPi);
      iode_s3 = subframe_3->iode();
    }

    eph.setToeWeek(week);
    eph.setTocWeek(week);

    gps_subframes[msg->sv_id()].clear();
    // All three subframes must belong to the same data set: IODC LSBs must
    // match both IODEs, otherwise a cutover happened mid-collection.
    if (iodc_lsb != iode_s2 || iodc_lsb != iode_s3) {
      // data set cutover, reject ephemeris
      return kj::Array<capnp::word>();
    }
    return capnp::messageToFlatArray(msg_builder);
  }
  return kj::Array<capnp::word>();
}
// Accumulate GLONASS immediate strings 1-5 per frequency slot and emit a
// glonassEphemeris event once all five from the same frame are collected.
// Scale factors (pow(2, -n)) follow the GLONASS ICD. Returns an empty array
// until a complete set is available.
kj::Array<capnp::word> UbloxMsgParser::parse_glonass_ephemeris(ubx_t::rxm_sfrbx_t *msg) {
  // This parser assumes that no 2 satellites of the same frequency
  // can be in view at the same time
  auto body = *msg->body();
  assert(body.size() == 4);
  {
    // Repack the 4 little-endian words into a 16-byte big-endian bit string.
    std::string string_data;
    string_data.reserve(16);
    for (uint32_t word : body) {
      for (int i = 3; i >= 0; i--)
        string_data.push_back(word >> 8*i);
    }

    kaitai::kstream stream(string_data);
    glonass_t gl_string(&stream);

    int string_number = gl_string.string_number();
    if (string_number < 1 || string_number > 5 || gl_string.idle_chip()) {
      // don't parse non immediate data, idle_chip == 0
      return kj::Array<capnp::word>();
    }

    // Check if new string either has same superframe_id or log transmission times make sense
    bool superframe_unknown = false;
    bool needs_clear = false;
    for (int i = 1; i <= 5; i++) {
      if (glonass_strings[msg->freq_id()].find(i) == glonass_strings[msg->freq_id()].end())
        continue;
      if (glonass_string_superframes[msg->freq_id()][i] == 0 || gl_string.superframe_number() == 0) {
        superframe_unknown = true;
      } else if (glonass_string_superframes[msg->freq_id()][i] != gl_string.superframe_number()) {
        needs_clear = true;
      }
      // Check if string times add up to being from the same frame
      // If superframe is known this is redundant
      // Strings are sent 2s apart and frames are 30s apart
      if (superframe_unknown &&
          std::abs((glonass_string_times[msg->freq_id()][i] - 2.0 * i) - (last_log_time - 2.0 * string_number)) > 10)
        needs_clear = true;
    }
    if (needs_clear) {
      // Stale strings from an earlier frame: restart collection for this slot.
      glonass_strings[msg->freq_id()].clear();
      glonass_string_superframes[msg->freq_id()].clear();
      glonass_string_times[msg->freq_id()].clear();
    }
    glonass_strings[msg->freq_id()][string_number] = string_data;
    glonass_string_superframes[msg->freq_id()][string_number] = gl_string.superframe_number();
    glonass_string_times[msg->freq_id()][string_number] = last_log_time;
  }
  if (msg->sv_id() == 255) {
    // data can be decoded before identifying the SV number, in this case 255
    // is returned, which means "unknown" (ublox p32)
    return kj::Array<capnp::word>();
  }

  // publish if strings 1-5 have been collected
  if (glonass_strings[msg->freq_id()].size() != 5) {
    return kj::Array<capnp::word>();
  }

  MessageBuilder msg_builder;
  auto eph = msg_builder.initEvent().initUbloxGnss().initGlonassEphemeris();
  eph.setSvId(msg->sv_id());
  eph.setFreqNum(msg->freq_id() - 7);  // ublox slot 0..13 -> GLONASS -7..+6

  uint16_t current_day = 0;
  uint16_t tk = 0;

  // string number 1
  {
    kaitai::kstream stream(glonass_strings[msg->freq_id()][1]);
    glonass_t gl_stream(&stream);
    glonass_t::string_1_t* data = static_cast<glonass_t::string_1_t*>(gl_stream.data());

    eph.setP1(data->p1());
    tk = data->t_k();
    eph.setTkDEPRECATED(tk);
    eph.setXVel(data->x_vel() * pow(2, -20));
    eph.setXAccel(data->x_accel() * pow(2, -30));
    eph.setX(data->x() * pow(2, -11));
  }

  // string number 2
  {
    kaitai::kstream stream(glonass_strings[msg->freq_id()][2]);
    glonass_t gl_stream(&stream);
    glonass_t::string_2_t* data = static_cast<glonass_t::string_2_t*>(gl_stream.data());

    eph.setSvHealth(data->b_n()>>2); // MSB indicates health
    eph.setP2(data->p2());
    eph.setTb(data->t_b());
    eph.setYVel(data->y_vel() * pow(2, -20));
    eph.setYAccel(data->y_accel() * pow(2, -30));
    eph.setY(data->y() * pow(2, -11));
  }

  // string number 3
  {
    kaitai::kstream stream(glonass_strings[msg->freq_id()][3]);
    glonass_t gl_stream(&stream);
    glonass_t::string_3_t* data = static_cast<glonass_t::string_3_t*>(gl_stream.data());

    eph.setP3(data->p3());
    eph.setGammaN(data->gamma_n() * pow(2, -40));
    eph.setSvHealth(eph.getSvHealth() | data->l_n());
    eph.setZVel(data->z_vel() * pow(2, -20));
    eph.setZAccel(data->z_accel() * pow(2, -30));
    eph.setZ(data->z() * pow(2, -11));
  }

  // string number 4
  {
    kaitai::kstream stream(glonass_strings[msg->freq_id()][4]);
    glonass_t gl_stream(&stream);
    glonass_t::string_4_t* data = static_cast<glonass_t::string_4_t*>(gl_stream.data());

    current_day = data->n_t();
    eph.setNt(current_day);
    eph.setTauN(data->tau_n() * pow(2, -30));
    eph.setDeltaTauN(data->delta_tau_n() * pow(2, -30));
    eph.setAge(data->e_n());
    eph.setP4(data->p4());
    eph.setSvURA(glonass_URA_lookup.at(data->f_t()));
    if (msg->sv_id() != data->n()) {
      LOGE("SV_ID != SLOT_NUMBER: %d %" PRIu64, msg->sv_id(), data->n());
    }
    eph.setSvType(data->m());
  }

  // string number 5
  {
    kaitai::kstream stream(glonass_strings[msg->freq_id()][5]);
    glonass_t gl_stream(&stream);
    glonass_t::string_5_t* data = static_cast<glonass_t::string_5_t*>(gl_stream.data());

    // string5 parsing is only needed to get the year, this can be removed and
    // the year can be fetched later in laika (note rollovers and leap year)
    eph.setN4(data->n_4());
    // t_k packs hours (5 bits), minutes (6 bits) and a 30-second flag (1 bit).
    int tk_seconds = SECS_IN_HR * ((tk>>7) & 0x1F) + SECS_IN_MIN * ((tk>>1) & 0x3F) + (tk & 0x1) * 30;
    eph.setTkSeconds(tk_seconds);
  }

  glonass_strings[msg->freq_id()].clear();
  return capnp::messageToFlatArray(msg_builder);
}
// Dispatch a broadcast navigation subframe to the parser for its
// constellation; subframes from other systems are dropped.
kj::Array<capnp::word> UbloxMsgParser::gen_rxm_sfrbx(ubx_t::rxm_sfrbx_t *msg) {
  const auto constellation = msg->gnss_id();
  if (constellation == ubx_t::gnss_type_t::GNSS_TYPE_GPS) {
    return parse_gps_ephemeris(msg);
  }
  if (constellation == ubx_t::gnss_type_t::GNSS_TYPE_GLONASS) {
    return parse_glonass_ephemeris(msg);
  }
  return kj::Array<capnp::word>();
}
// Convert a UBX-RXM-RAWX body into a ubloxGnss.measurementReport event.
kj::Array<capnp::word> UbloxMsgParser::gen_rxm_rawx(ubx_t::rxm_rawx_t *msg) {
  MessageBuilder msg_builder;
  auto mr = msg_builder.initEvent().initUbloxGnss().initMeasurementReport();
  mr.setRcvTow(msg->rcv_tow());
  mr.setGpsWeek(msg->week());  // was set twice; once is enough
  mr.setLeapSeconds(msg->leap_s());

  auto mb = mr.initMeasurements(msg->num_meas());
  auto measurements = *msg->meas();
  // num_meas is an unsigned byte (0-255); use int for the index — the old
  // int8_t counter overflowed (undefined behavior) for counts above 127.
  for (int i = 0; i < msg->num_meas(); i++) {
    mb[i].setSvId(measurements[i]->sv_id());
    mb[i].setPseudorange(measurements[i]->pr_mes());
    mb[i].setCarrierCycles(measurements[i]->cp_mes());
    mb[i].setDoppler(measurements[i]->do_mes());
    mb[i].setGnssId(measurements[i]->gnss_id());
    mb[i].setGlonassFrequencyIndex(measurements[i]->freq_id());
    mb[i].setLocktime(measurements[i]->lock_time());
    mb[i].setCno(measurements[i]->cno());
    mb[i].setPseudorangeStdev(0.01 * (pow(2, (measurements[i]->pr_stdev() & 15)))); // weird scaling, might be wrong
    mb[i].setCarrierPhaseStdev(0.004 * (measurements[i]->cp_stdev() & 15));
    mb[i].setDopplerStdev(0.002 * (pow(2, (measurements[i]->do_stdev() & 15)))); // weird scaling, might be wrong

    // Low bits of trk_stat describe the validity of this measurement.
    auto ts = mb[i].initTrackingStatus();
    auto trk_stat = measurements[i]->trk_stat();
    ts.setPseudorangeValid(bit_to_bool(trk_stat, 0));
    ts.setCarrierPhaseValid(bit_to_bool(trk_stat, 1));
    ts.setHalfCycleValid(bit_to_bool(trk_stat, 2));
    ts.setHalfCycleSubtracted(bit_to_bool(trk_stat, 3));
  }

  mr.setNumMeas(msg->num_meas());
  auto rs = mr.initReceiverStatus();
  rs.setLeapSecValid(bit_to_bool(msg->rec_stat(), 0));
  rs.setClkReset(bit_to_bool(msg->rec_stat(), 2));
  return capnp::messageToFlatArray(msg_builder);
}
// Convert a UBX NAV-SAT satellite-information message into a
// ubloxGnss.satReport event.
kj::Array<capnp::word> UbloxMsgParser::gen_nav_sat(ubx_t::nav_sat_t *msg) {
  MessageBuilder msg_builder;
  auto report = msg_builder.initEvent().initUbloxGnss().initSatReport();
  report.setITow(msg->itow());

  auto out = report.initSvs(msg->num_svs());
  auto in_svs = *msg->svs();
  for (int8_t i = 0; i < msg->num_svs(); i++) {
    auto sv = in_svs[i];
    out[i].setSvId(sv->sv_id());
    out[i].setGnssId(sv->gnss_id());
    out[i].setFlagsBitfield(sv->flags());
    out[i].setCno(sv->cno());
    out[i].setElevationDeg(sv->elev());
    out[i].setAzimuthDeg(sv->azim());
    out[i].setPseudorangeResidual(sv->pr_res() * 0.1);
  }
  return capnp::messageToFlatArray(msg_builder);
}
// Convert a UBX MON-HW hardware status message into a ubloxGnss.hwStatus event.
kj::Array<capnp::word> UbloxMsgParser::gen_mon_hw(ubx_t::mon_hw_t *msg) {
  MessageBuilder msg_builder;
  auto hw = msg_builder.initEvent().initUbloxGnss().initHwStatus();
  hw.setNoisePerMS(msg->noise_per_ms());
  hw.setFlags(msg->flags());
  hw.setAgcCnt(msg->agc_cnt());
  // antenna supervisor enums share numeric values with the cereal schema
  hw.setAStatus(static_cast<cereal::UbloxGnss::HwStatus::AntennaSupervisorState>(msg->a_status()));
  hw.setAPower(static_cast<cereal::UbloxGnss::HwStatus::AntennaPowerStatus>(msg->a_power()));
  hw.setJamInd(msg->jam_ind());
  return capnp::messageToFlatArray(msg_builder);
}
// Convert a UBX MON-HW2 extended hardware status message into a
// ubloxGnss.hwStatus2 event.
kj::Array<capnp::word> UbloxMsgParser::gen_mon_hw2(ubx_t::mon_hw2_t *msg) {
  MessageBuilder msg_builder;
  auto hw = msg_builder.initEvent().initUbloxGnss().initHwStatus2();
  hw.setOfsI(msg->ofs_i());
  hw.setMagI(msg->mag_i());
  hw.setOfsQ(msg->ofs_q());
  hw.setMagQ(msg->mag_q());

  // map the receiver's config source onto the cereal enum; anything
  // unrecognized is reported as UNDEFINED
  const auto src = msg->cfg_source();
  auto out_src = cereal::UbloxGnss::HwStatus2::ConfigSource::UNDEFINED;
  if (src == ubx_t::mon_hw2_t::config_source_t::CONFIG_SOURCE_ROM) {
    out_src = cereal::UbloxGnss::HwStatus2::ConfigSource::ROM;
  } else if (src == ubx_t::mon_hw2_t::config_source_t::CONFIG_SOURCE_OTP) {
    out_src = cereal::UbloxGnss::HwStatus2::ConfigSource::OTP;
  } else if (src == ubx_t::mon_hw2_t::config_source_t::CONFIG_SOURCE_CONFIG_PINS) {
    out_src = cereal::UbloxGnss::HwStatus2::ConfigSource::CONFIGPINS;
  } else if (src == ubx_t::mon_hw2_t::config_source_t::CONFIG_SOURCE_FLASH) {
    out_src = cereal::UbloxGnss::HwStatus2::ConfigSource::FLASH;
  }
  hw.setCfgSource(out_src);

  hw.setLowLevCfg(msg->low_lev_cfg());
  hw.setPostStatus(msg->post_status());
  return capnp::messageToFlatArray(msg_builder);
}

@ -1,131 +0,0 @@
#pragma once
#include <cassert>
#include <cstdint>
#include <ctime>
#include <memory>
#include <string>
#include <unordered_map>
#include <utility>
#include "cereal/messaging/messaging.h"
#include "common/util.h"
#include "system/ubloxd/generated/gps.h"
#include "system/ubloxd/generated/glonass.h"
#include "system/ubloxd/generated/ubx.h"
using namespace std::string_literals;

const int SECS_IN_MIN = 60;
const int SECS_IN_HR = 60 * SECS_IN_MIN;
const int SECS_IN_DAY = 24 * SECS_IN_HR;
const int SECS_IN_WEEK = 7 * SECS_IN_DAY;

// protocol constants
namespace ublox {
  const uint8_t PREAMBLE1 = 0xb5;
  const uint8_t PREAMBLE2 = 0x62;

  const int UBLOX_HEADER_SIZE = 6;
  const int UBLOX_CHECKSUM_SIZE = 2;
  const int UBLOX_MAX_MSG_SIZE = 65536;

  // Wire layout of the UBX-MGA-INI-TIME_UTC payload (24 bytes, packed).
  struct ubx_mga_ini_time_utc_t {
    uint8_t type;
    uint8_t version;
    uint8_t ref;
    int8_t leapSecs;
    uint16_t year;
    uint8_t month;
    uint8_t day;
    uint8_t hour;
    uint8_t minute;
    uint8_t second;
    uint8_t reserved1;
    uint32_t ns;
    uint16_t tAccS;
    uint16_t reserved2;
    uint32_t tAccNs;
  } __attribute__((packed));

  // Append the two-byte 8-bit Fletcher checksum to a UBX message.
  // The checksum covers everything after the two sync bytes.
  inline std::string ubx_add_checksum(const std::string &msg) {
    assert(msg.size() > 2);

    uint8_t ck_a = 0, ck_b = 0;
    // size_t index: msg.size() is unsigned, avoid signed/unsigned comparison
    for (size_t i = 2; i < msg.size(); i++) {
      ck_a = (ck_a + msg[i]) & 0xFF;
      ck_b = (ck_b + ck_a) & 0xFF;
    }

    std::string r = msg;
    r.push_back(ck_a);
    r.push_back(ck_b);
    return r;
  }

  // Build a complete UBX-MGA-INI-TIME_UTC frame (header + payload + checksum)
  // used to seed the receiver with approximate UTC time.
  inline std::string build_ubx_mga_ini_time_utc(struct tm time) {
    ublox::ubx_mga_ini_time_utc_t payload = {
      .type = 0x10,
      .version = 0x0,
      .ref = 0x0,
      .leapSecs = -128, // Unknown
      .year = (uint16_t)(1900 + time.tm_year),
      .month = (uint8_t)(1 + time.tm_mon),
      .day = (uint8_t)time.tm_mday,
      .hour = (uint8_t)time.tm_hour,
      .minute = (uint8_t)time.tm_min,
      .second = (uint8_t)time.tm_sec,
      .reserved1 = 0x0,
      .ns = 0,
      .tAccS = 30,
      .reserved2 = 0x0,
      .tAccNs = 0,
    };
    assert(sizeof(payload) == 24);

    // header: sync (b5 62), class/id (13 40), little-endian length (0x0018)
    std::string msg = "\xb5\x62\x13\x40\x18\x00"s;
    msg += std::string((char*)&payload, sizeof(payload));
    return ubx_add_checksum(msg);
  }
}
// Incremental UBX protocol parser. Feed raw receiver bytes via add_data();
// once a complete frame is buffered, gen_msg() converts it into a cereal
// event. Ephemeris assembly is stateful: GPS subframes and GLONASS strings
// are cached per satellite / frequency channel until a full set arrives.
class UbloxMsgParser {
  public:
    // Consume bytes from incoming_data; returns true when a full valid frame
    // is buffered. bytes_consumed reports how much input was used.
    bool add_data(float log_time, const uint8_t *incoming_data, uint32_t incoming_data_len, size_t &bytes_consumed);
    inline void reset() {bytes_in_parse_buf = 0;}
    inline int needed_bytes();
    inline std::string data() {return std::string((const char*)msg_parse_buf, bytes_in_parse_buf);}

    // Convert the buffered frame to (service name, serialized cereal event).
    std::pair<std::string, kj::Array<capnp::word>> gen_msg();
    kj::Array<capnp::word> gen_nav_pvt(ubx_t::nav_pvt_t *msg);
    kj::Array<capnp::word> gen_rxm_sfrbx(ubx_t::rxm_sfrbx_t *msg);
    kj::Array<capnp::word> gen_rxm_rawx(ubx_t::rxm_rawx_t *msg);
    kj::Array<capnp::word> gen_mon_hw(ubx_t::mon_hw_t *msg);
    kj::Array<capnp::word> gen_mon_hw2(ubx_t::mon_hw2_t *msg);
    kj::Array<capnp::word> gen_nav_sat(ubx_t::nav_sat_t *msg);

  private:
    inline bool valid_cheksum();  // (sic: name typo is part of the existing interface)
    inline bool valid();
    inline bool valid_so_far();

    kj::Array<capnp::word> parse_gps_ephemeris(ubx_t::rxm_sfrbx_t *msg);
    kj::Array<capnp::word> parse_glonass_ephemeris(ubx_t::rxm_sfrbx_t *msg);

    // sv_id -> {subframe_id: raw subframe}; cleared once a set is emitted
    std::unordered_map<int, std::unordered_map<int, std::string>> gps_subframes;

    float last_log_time = 0.0;
    size_t bytes_in_parse_buf = 0;
    uint8_t msg_parse_buf[ublox::UBLOX_HEADER_SIZE + ublox::UBLOX_MAX_MSG_SIZE];

    // user range accuracy in meters
    const std::unordered_map<uint8_t, float> glonass_URA_lookup =
      {{ 0, 1}, { 1, 2}, { 2, 2.5}, { 3, 4}, { 4, 5}, {5, 7},
       { 6, 10}, { 7, 12}, { 8, 14}, { 9, 16}, {10, 32},
       {11, 64}, {12, 128}, {13, 256}, {14, 512}, {15, 1024}};

    // freq_id -> {string_number: ...}: raw strings, receive times and
    // superframe numbers used to correlate a consistent GLONASS string set
    std::unordered_map<int, std::unordered_map<int, std::string>> glonass_strings;
    std::unordered_map<int, std::unordered_map<int, long>> glonass_string_times;
    std::unordered_map<int, std::unordered_map<int, int>> glonass_string_superframes;
};

@ -1,62 +0,0 @@
#include <cassert>
#include <kaitai/kaitaistream.h>
#include "cereal/messaging/messaging.h"
#include "common/swaglog.h"
#include "common/util.h"
#include "system/ubloxd/ublox_msg.h"
ExitHandler do_exit;
using namespace ublox;

// ubloxd: reads raw receiver bytes from the "ubloxRaw" socket, incrementally
// frames/parses UBX messages, and republishes them as cereal events on
// "ubloxGnss" / "gpsLocationExternal".
int main() {
  LOGW("starting ubloxd");
  AlignedBuffer aligned_buf;
  UbloxMsgParser parser;

  PubMaster pm({"ubloxGnss", "gpsLocationExternal"});

  std::unique_ptr<Context> context(Context::create());
  std::unique_ptr<SubSocket> subscriber(SubSocket::create(context.get(), "ubloxRaw"));
  assert(subscriber != NULL);
  subscriber->setTimeout(100);  // ms; lets the loop re-check do_exit regularly

  while (!do_exit) {
    std::unique_ptr<Message> msg(subscriber->receive());
    if (!msg) {
      continue;
    }

    capnp::FlatArrayMessageReader cmsg(aligned_buf.align(msg.get()));
    cereal::Event::Reader event = cmsg.getRoot<cereal::Event>();
    auto ubloxRaw = event.getUbloxRaw();
    float log_time = 1e-9 * event.getLogMonoTime();  // mono time in seconds

    const uint8_t *data = ubloxRaw.begin();
    size_t len = ubloxRaw.size();
    size_t bytes_consumed = 0;
    // a single ubloxRaw event may contain several (or partial) UBX frames
    while (bytes_consumed < len && !do_exit) {
      size_t bytes_consumed_this_time = 0U;
      if (parser.add_data(log_time, data + bytes_consumed, (uint32_t)(len - bytes_consumed), bytes_consumed_this_time)) {
        try {
          auto ublox_msg = parser.gen_msg();
          if (ublox_msg.second.size() > 0) {
            auto bytes = ublox_msg.second.asBytes();
            pm.send(ublox_msg.first.c_str(), bytes.begin(), bytes.size());
          }
        } catch (const std::exception& e) {
          // a malformed frame shouldn't kill the daemon; log and move on
          LOGE("Error parsing ublox message %s", e.what());
        }
        parser.reset();
      }
      bytes_consumed += bytes_consumed_this_time;
    }
  }

  return 0;
}

@ -0,0 +1,519 @@
#!/usr/bin/env python3
import math
import capnp
import calendar
import numpy as np
from collections import defaultdict
from dataclasses import dataclass
from cereal import log
from cereal import messaging
from openpilot.system.ubloxd.generated.ubx import Ubx
from openpilot.system.ubloxd.generated.gps import Gps
from openpilot.system.ubloxd.generated.glonass import Glonass
SECS_IN_MIN = 60
SECS_IN_HR = 60 * SECS_IN_MIN
SECS_IN_DAY = 24 * SECS_IN_HR
SECS_IN_WEEK = 7 * SECS_IN_DAY
class UbxFramer:
  """Accumulates raw receiver bytes and extracts complete, checksum-valid UBX frames.

  A UBX frame is: 2 sync bytes (0xB5 0x62), class/id (2 bytes),
  little-endian payload length (2 bytes), payload, 2 checksum bytes.
  """
  PREAMBLE1 = 0xB5
  PREAMBLE2 = 0x62
  HEADER_SIZE = 6
  CHECKSUM_SIZE = 2

  def __init__(self) -> None:
    self.buf = bytearray()
    self.last_log_time = 0.0

  def reset(self) -> None:
    """Drop any partially-buffered data."""
    self.buf.clear()

  @staticmethod
  def _checksum_ok(frame: bytes) -> bool:
    """8-bit Fletcher checksum over everything between sync bytes and checksum."""
    ck_a = 0
    ck_b = 0
    for b in frame[2:-2]:
      ck_a = (ck_a + b) & 0xFF
      ck_b = (ck_b + ck_a) & 0xFF
    return ck_a == frame[-2] and ck_b == frame[-1]

  def add_data(self, log_time: float, incoming: bytes) -> list[bytes]:
    """Feed raw bytes; return all complete valid frames extracted so far."""
    self.last_log_time = log_time
    out: list[bytes] = []
    if not incoming:
      return out
    self.buf += incoming
    while True:
      # find preamble
      if len(self.buf) < 2:
        break
      start = self.buf.find(b"\xB5\x62")
      if start < 0:
        # No full preamble in buffer. Keep a trailing 0xB5: it may be the
        # first byte of a preamble split across two reads; clearing it would
        # drop the frame that follows.
        if self.buf and self.buf[-1] == self.PREAMBLE1:
          del self.buf[:-1]
        else:
          self.buf.clear()
        break
      if start > 0:
        # drop garbage before preamble
        self.buf = self.buf[start:]
      if len(self.buf) < self.HEADER_SIZE:
        break
      length_le = int.from_bytes(self.buf[4:6], 'little', signed=False)
      total_len = self.HEADER_SIZE + length_le + self.CHECKSUM_SIZE
      if len(self.buf) < total_len:
        break
      candidate = bytes(self.buf[:total_len])
      if self._checksum_ok(candidate):
        out.append(candidate)
        # consume this frame
        self.buf = self.buf[total_len:]
      else:
        # bad checksum: drop first byte and resynchronize
        self.buf = self.buf[1:]
    return out
def _bit(b: int, shift: int) -> bool:
return (b & (1 << shift)) != 0
@dataclass
class EphemerisCaches:
  # Partially-received navigation data. Ephemeris is only emitted once a
  # complete set (3 GPS subframes / 5 GLONASS strings) has been collected.
  gps_subframes: defaultdict[int, dict[int, bytes]]             # sv_id -> {subframe_id: 30-byte subframe}
  glonass_strings: defaultdict[int, dict[int, bytes]]           # freq_id -> {string_number: 16-byte string}
  glonass_string_times: defaultdict[int, dict[int, float]]      # freq_id -> {string_number: receive log time (s)}
  glonass_string_superframes: defaultdict[int, dict[int, int]]  # freq_id -> {string_number: superframe number}
class UbloxMsgParser:
  """Converts framed UBX messages into cereal events.

  Python port of the former C++ ubloxd parser. Ephemeris assembly is
  stateful: GPS subframes and GLONASS strings are cached (per SV /
  frequency channel) in `self.caches` until a complete set is available.
  """
  gpsPi = 3.1415926535898
  # user range accuracy in meters
  glonass_URA_lookup: dict[int, float] = {
    0: 1, 1: 2, 2: 2.5, 3: 4, 4: 5, 5: 7,
    6: 10, 7: 12, 8: 14, 9: 16, 10: 32,
    11: 64, 12: 128, 13: 256, 14: 512, 15: 1024,
  }

  def __init__(self) -> None:
    self.framer = UbxFramer()
    self.caches = EphemerisCaches(
      gps_subframes=defaultdict(dict),
      glonass_strings=defaultdict(dict),
      glonass_string_times=defaultdict(dict),
      glonass_string_superframes=defaultdict(dict),
    )

  # Message generation entry point
  def parse_frame(self, frame: bytes) -> tuple[str, capnp.lib.capnp._DynamicStructBuilder] | None:
    """Parse one complete UBX frame; returns (service, event) or None."""
    # Quick header parse: big-endian (class, id) pair
    msg_type = int.from_bytes(frame[2:4], 'big')
    payload = frame[6:-2]
    if msg_type == 0x0107:  # NAV-PVT
      body = Ubx.NavPvt.from_bytes(payload)
      return self._gen_nav_pvt(body)
    if msg_type == 0x0213:  # RXM-SFRBX
      # Manually parse RXM-SFRBX to avoid Kaitai EOF on some frames
      if len(payload) < 8:
        return None
      gnss_id = payload[0]
      sv_id = payload[1]
      freq_id = payload[3]
      num_words = payload[4]
      exp = 8 + 4 * num_words
      if exp != len(payload):
        return None
      words: list[int] = []
      off = 8
      for _ in range(num_words):
        words.append(int.from_bytes(payload[off:off+4], 'little'))
        off += 4

      # minimal stand-in matching the kaitai RxmSfrbx attribute interface
      class _SfrbxView:
        def __init__(self, gid: int, sid: int, fid: int, body: list[int]):
          self.gnss_id = Ubx.GnssType(gid)
          self.sv_id = sid
          self.freq_id = fid
          self.body = body

      view = _SfrbxView(gnss_id, sv_id, freq_id, words)
      return self._gen_rxm_sfrbx(view)
    if msg_type == 0x0215:  # RXM-RAWX
      body = Ubx.RxmRawx.from_bytes(payload)
      return self._gen_rxm_rawx(body)
    if msg_type == 0x0A09:  # MON-HW
      body = Ubx.MonHw.from_bytes(payload)
      return self._gen_mon_hw(body)
    if msg_type == 0x0A0B:  # MON-HW2
      body = Ubx.MonHw2.from_bytes(payload)
      return self._gen_mon_hw2(body)
    if msg_type == 0x0135:  # NAV-SAT
      body = Ubx.NavSat.from_bytes(payload)
      return self._gen_nav_sat(body)
    return None

  # NAV-PVT -> gpsLocationExternal
  def _gen_nav_pvt(self, msg: Ubx.NavPvt) -> tuple[str, capnp.lib.capnp._DynamicStructBuilder]:
    """Convert a NAV-PVT position/velocity/time solution."""
    dat = messaging.new_message('gpsLocationExternal', valid=True)
    gps = dat.gpsLocationExternal
    gps.source = log.GpsLocationData.SensorSource.ublox
    gps.flags = msg.flags
    gps.hasFix = (msg.flags % 2) == 1  # bit 0 = gnssFixOK
    gps.latitude = msg.lat * 1e-07
    gps.longitude = msg.lon * 1e-07
    gps.altitude = msg.height * 1e-03
    gps.speed = msg.g_speed * 1e-03
    gps.bearingDeg = msg.head_mot * 1e-5
    gps.horizontalAccuracy = msg.h_acc * 1e-03
    gps.satelliteCount = msg.num_sv
    # build UTC timestamp millis (NAV-PVT is in UTC)
    # tolerate invalid or unset date values like C++ timegm
    try:
      utc_tt = calendar.timegm((msg.year, msg.month, msg.day, msg.hour, msg.min, msg.sec, 0, 0, 0))
    except Exception:
      utc_tt = 0
    gps.unixTimestampMillis = int(utc_tt * 1e3 + (msg.nano * 1e-6))
    # match C++ float32 rounding semantics exactly
    gps.vNED = [
      float(np.float32(msg.vel_n) * np.float32(1e-03)),
      float(np.float32(msg.vel_e) * np.float32(1e-03)),
      float(np.float32(msg.vel_d) * np.float32(1e-03)),
    ]
    gps.verticalAccuracy = msg.v_acc * 1e-03
    gps.speedAccuracy = msg.s_acc * 1e-03
    gps.bearingAccuracyDeg = msg.head_acc * 1e-05
    return ('gpsLocationExternal', dat)

  # RXM-SFRBX dispatch to GPS or GLONASS ephemeris
  def _gen_rxm_sfrbx(self, msg) -> tuple[str, capnp.lib.capnp._DynamicStructBuilder] | None:
    """Route a navigation-data frame to the per-constellation parser."""
    if msg.gnss_id == Ubx.GnssType.gps:
      return self._parse_gps_ephemeris(msg)
    if msg.gnss_id == Ubx.GnssType.glonass:
      return self._parse_glonass_ephemeris(msg)
    return None

  def _parse_gps_ephemeris(self, msg: Ubx.RxmSfrbx) -> tuple[str, capnp.lib.capnp._DynamicStructBuilder] | None:
    """Cache GPS subframes 1-3 per SV; emit ephemeris once all three agree."""
    # body is list of 10 words; convert to 30-byte subframe (strip parity/padding)
    body = msg.body
    if len(body) != 10:
      return None
    subframe_data = bytearray()
    for word in body:
      word >>= 6
      subframe_data.append((word >> 16) & 0xFF)
      subframe_data.append((word >> 8) & 0xFF)
      subframe_data.append(word & 0xFF)
    sf = Gps.from_bytes(bytes(subframe_data))
    subframe_id = sf.how.subframe_id
    if subframe_id < 1 or subframe_id > 3:
      # only subframes 1-3 carry ephemeris
      return None
    self.caches.gps_subframes[msg.sv_id][subframe_id] = bytes(subframe_data)
    if len(self.caches.gps_subframes[msg.sv_id]) != 3:
      return None
    dat = messaging.new_message('ubloxGnss', valid=True)
    eph = dat.ubloxGnss.init('ephemeris')
    eph.svId = msg.sv_id
    iode_s2 = 0
    iode_s3 = 0
    iodc_lsb = 0
    week = 0
    # Subframe 1
    sf1 = Gps.from_bytes(self.caches.gps_subframes[msg.sv_id][1])
    s1 = sf1.body
    assert isinstance(s1, Gps.Subframe1)
    week = s1.week_no
    # 10-bit week number rollover handling
    week += 1024
    if week < 1877:
      week += 1024
    eph.tgd = s1.t_gd * math.pow(2, -31)
    eph.toc = s1.t_oc * math.pow(2, 4)
    eph.af2 = s1.af_2 * math.pow(2, -55)
    eph.af1 = s1.af_1 * math.pow(2, -43)
    eph.af0 = s1.af_0 * math.pow(2, -31)
    eph.svHealth = s1.sv_health
    eph.towCount = sf1.how.tow_count
    iodc_lsb = s1.iodc_lsb
    # Subframe 2
    sf2 = Gps.from_bytes(self.caches.gps_subframes[msg.sv_id][2])
    s2 = sf2.body
    assert isinstance(s2, Gps.Subframe2)
    if s2.t_oe == 0 and sf2.how.tow_count * 6 >= (SECS_IN_WEEK - 2 * SECS_IN_HR):
      # toe at week boundary: ephemeris belongs to the next week
      week += 1
    eph.crs = s2.c_rs * math.pow(2, -5)
    eph.deltaN = s2.delta_n * math.pow(2, -43) * self.gpsPi
    eph.m0 = s2.m_0 * math.pow(2, -31) * self.gpsPi
    eph.cuc = s2.c_uc * math.pow(2, -29)
    eph.ecc = s2.e * math.pow(2, -33)
    eph.cus = s2.c_us * math.pow(2, -29)
    eph.a = math.pow(s2.sqrt_a * math.pow(2, -19), 2.0)
    eph.toe = s2.t_oe * math.pow(2, 4)
    iode_s2 = s2.iode
    # Subframe 3
    sf3 = Gps.from_bytes(self.caches.gps_subframes[msg.sv_id][3])
    s3 = sf3.body
    assert isinstance(s3, Gps.Subframe3)
    eph.cic = s3.c_ic * math.pow(2, -29)
    eph.omega0 = s3.omega_0 * math.pow(2, -31) * self.gpsPi
    eph.cis = s3.c_is * math.pow(2, -29)
    eph.i0 = s3.i_0 * math.pow(2, -31) * self.gpsPi
    eph.crc = s3.c_rc * math.pow(2, -5)
    eph.omega = s3.omega * math.pow(2, -31) * self.gpsPi
    eph.omegaDot = s3.omega_dot * math.pow(2, -43) * self.gpsPi
    eph.iode = s3.iode
    eph.iDot = s3.idot * math.pow(2, -43) * self.gpsPi
    iode_s3 = s3.iode
    eph.toeWeek = week
    eph.tocWeek = week
    # clear cache for this SV
    self.caches.gps_subframes[msg.sv_id].clear()
    # all three subframes must come from the same issue of data
    if not (iodc_lsb == iode_s2 == iode_s3):
      return None
    return ('ubloxGnss', dat)

  def _parse_glonass_ephemeris(self, msg: Ubx.RxmSfrbx) -> tuple[str, capnp.lib.capnp._DynamicStructBuilder] | None:
    """Cache GLONASS strings 1-5 per frequency channel; emit once consistent."""
    # words are 4 bytes each; Glonass parser expects 16 bytes (string)
    body = msg.body
    if len(body) != 4:
      return None
    string_bytes = bytearray()
    for word in body:
      for i in (3, 2, 1, 0):
        string_bytes.append((word >> (8 * i)) & 0xFF)
    gl = Glonass.from_bytes(bytes(string_bytes))
    string_number = gl.string_number
    if string_number < 1 or string_number > 5 or gl.idle_chip:
      return None
    # correlate by superframe and timing, similar to C++ logic
    freq_id = msg.freq_id
    superframe_unknown = False
    needs_clear = False
    for i in range(1, 6):
      if i not in self.caches.glonass_strings[freq_id]:
        continue
      sf_prev = self.caches.glonass_string_superframes[freq_id].get(i, 0)
      if sf_prev == 0 or gl.superframe_number == 0:
        superframe_unknown = True
      elif sf_prev != gl.superframe_number:
        needs_clear = True
      if superframe_unknown:
        # fall back to timing: strings are broadcast 2 s apart in sequence
        prev_time = self.caches.glonass_string_times[freq_id].get(i, 0.0)
        if abs((prev_time - 2.0 * i) - (self.framer.last_log_time - 2.0 * string_number)) > 10:
          needs_clear = True
    if needs_clear:
      self.caches.glonass_strings[freq_id].clear()
      self.caches.glonass_string_superframes[freq_id].clear()
      self.caches.glonass_string_times[freq_id].clear()
    self.caches.glonass_strings[freq_id][string_number] = bytes(string_bytes)
    self.caches.glonass_string_superframes[freq_id][string_number] = gl.superframe_number
    self.caches.glonass_string_times[freq_id][string_number] = self.framer.last_log_time
    if msg.sv_id == 255:
      # unknown SV id
      return None
    if len(self.caches.glonass_strings[freq_id]) != 5:
      return None
    dat = messaging.new_message('ubloxGnss', valid=True)
    eph = dat.ubloxGnss.init('glonassEphemeris')
    eph.svId = msg.sv_id
    eph.freqNum = msg.freq_id - 7  # channel index is transmitted offset by 7
    current_day = 0
    tk = 0
    # string 1
    try:
      s1 = Glonass.from_bytes(self.caches.glonass_strings[freq_id][1]).data
    except Exception:
      return None
    assert isinstance(s1, Glonass.String1)
    eph.p1 = int(s1.p1)
    tk = int(s1.t_k)
    eph.tkDEPRECATED = tk
    eph.xVel = float(s1.x_vel) * math.pow(2, -20)
    eph.xAccel = float(s1.x_accel) * math.pow(2, -30)
    eph.x = float(s1.x) * math.pow(2, -11)
    # string 2
    try:
      s2 = Glonass.from_bytes(self.caches.glonass_strings[freq_id][2]).data
    except Exception:
      return None
    assert isinstance(s2, Glonass.String2)
    eph.svHealth = int(s2.b_n >> 2)
    eph.p2 = int(s2.p2)
    eph.tb = int(s2.t_b)
    eph.yVel = float(s2.y_vel) * math.pow(2, -20)
    eph.yAccel = float(s2.y_accel) * math.pow(2, -30)
    eph.y = float(s2.y) * math.pow(2, -11)
    # string 3
    try:
      s3 = Glonass.from_bytes(self.caches.glonass_strings[freq_id][3]).data
    except Exception:
      return None
    assert isinstance(s3, Glonass.String3)
    eph.p3 = int(s3.p3)
    eph.gammaN = float(s3.gamma_n) * math.pow(2, -40)
    eph.svHealth = int(eph.svHealth | (1 if s3.l_n else 0))
    eph.zVel = float(s3.z_vel) * math.pow(2, -20)
    eph.zAccel = float(s3.z_accel) * math.pow(2, -30)
    eph.z = float(s3.z) * math.pow(2, -11)
    # string 4
    try:
      s4 = Glonass.from_bytes(self.caches.glonass_strings[freq_id][4]).data
    except Exception:
      return None
    assert isinstance(s4, Glonass.String4)
    current_day = int(s4.n_t)
    eph.nt = current_day
    eph.tauN = float(s4.tau_n) * math.pow(2, -30)
    eph.deltaTauN = float(s4.delta_tau_n) * math.pow(2, -30)
    eph.age = int(s4.e_n)
    eph.p4 = int(s4.p4)
    eph.svURA = float(self.glonass_URA_lookup.get(int(s4.f_t), 0.0))
    # consistency check: SV slot number
    # if it doesn't match, keep going but note mismatch (no logging here)
    eph.svType = int(s4.m)
    # string 5
    try:
      s5 = Glonass.from_bytes(self.caches.glonass_strings[freq_id][5]).data
    except Exception:
      return None
    assert isinstance(s5, Glonass.String5)
    # string5 parsing is only needed to get the year (n_4 = four-year interval number)
    eph.n4 = int(s5.n_4)
    # t_k fields: hours (5 bit), minutes (6 bit), half-minute flag (1 bit)
    tk_seconds = int(SECS_IN_HR * ((tk >> 7) & 0x1F) + SECS_IN_MIN * ((tk >> 1) & 0x3F) + (tk & 0x1) * 30)
    eph.tkSeconds = tk_seconds
    self.caches.glonass_strings[freq_id].clear()
    return ('ubloxGnss', dat)

  def _gen_rxm_rawx(self, msg: Ubx.RxmRawx) -> tuple[str, capnp.lib.capnp._DynamicStructBuilder]:
    """Convert a RXM-RAWX raw measurement message into a measurementReport."""
    dat = messaging.new_message('ubloxGnss', valid=True)
    mr = dat.ubloxGnss.init('measurementReport')
    mr.rcvTow = msg.rcv_tow
    mr.gpsWeek = msg.week
    mr.leapSeconds = msg.leap_s
    mb = mr.init('measurements', msg.num_meas)
    for i, m in enumerate(msg.meas):
      mb[i].svId = m.sv_id
      mb[i].pseudorange = m.pr_mes
      mb[i].carrierCycles = m.cp_mes
      mb[i].doppler = m.do_mes
      mb[i].gnssId = int(m.gnss_id.value)
      mb[i].glonassFrequencyIndex = m.freq_id
      mb[i].locktime = m.lock_time
      mb[i].cno = m.cno
      # stdev fields: low nibble is a scale index (matches the C++ port)
      mb[i].pseudorangeStdev = 0.01 * (math.pow(2, (m.pr_stdev & 15)))
      mb[i].carrierPhaseStdev = 0.004 * (m.cp_stdev & 15)
      mb[i].dopplerStdev = 0.002 * (math.pow(2, (m.do_stdev & 15)))
      ts = mb[i].init('trackingStatus')
      trk = m.trk_stat
      ts.pseudorangeValid = _bit(trk, 0)
      ts.carrierPhaseValid = _bit(trk, 1)
      ts.halfCycleValid = _bit(trk, 2)
      ts.halfCycleSubtracted = _bit(trk, 3)
    mr.numMeas = msg.num_meas
    rs = mr.init('receiverStatus')
    rs.leapSecValid = _bit(msg.rec_stat, 0)
    rs.clkReset = _bit(msg.rec_stat, 2)
    return ('ubloxGnss', dat)

  def _gen_nav_sat(self, msg: Ubx.NavSat) -> tuple[str, capnp.lib.capnp._DynamicStructBuilder]:
    """Convert a NAV-SAT satellite-information message into a satReport."""
    dat = messaging.new_message('ubloxGnss', valid=True)
    sr = dat.ubloxGnss.init('satReport')
    sr.iTow = msg.itow
    svs = sr.init('svs', msg.num_svs)
    for i, s in enumerate(msg.svs):
      svs[i].svId = s.sv_id
      svs[i].gnssId = int(s.gnss_id.value)
      svs[i].flagsBitfield = s.flags
      svs[i].cno = s.cno
      svs[i].elevationDeg = s.elev
      svs[i].azimuthDeg = s.azim
      svs[i].pseudorangeResidual = s.pr_res * 0.1
    return ('ubloxGnss', dat)

  def _gen_mon_hw(self, msg: Ubx.MonHw) -> tuple[str, capnp.lib.capnp._DynamicStructBuilder]:
    """Convert a MON-HW hardware status message into an hwStatus."""
    dat = messaging.new_message('ubloxGnss', valid=True)
    hw = dat.ubloxGnss.init('hwStatus')
    hw.noisePerMS = msg.noise_per_ms
    hw.flags = msg.flags
    hw.agcCnt = msg.agc_cnt
    hw.aStatus = int(msg.a_status.value)
    hw.aPower = int(msg.a_power.value)
    hw.jamInd = msg.jam_ind
    return ('ubloxGnss', dat)

  def _gen_mon_hw2(self, msg: Ubx.MonHw2) -> tuple[str, capnp.lib.capnp._DynamicStructBuilder]:
    """Convert a MON-HW2 extended hardware status message into an hwStatus2."""
    dat = messaging.new_message('ubloxGnss', valid=True)
    hw = dat.ubloxGnss.init('hwStatus2')
    hw.ofsI = msg.ofs_i
    hw.magI = msg.mag_i
    hw.ofsQ = msg.ofs_q
    hw.magQ = msg.mag_q
    # Map Ubx enum to cereal enum {undefined=0, rom=1, otp=2, configpins=3, flash=4}
    cfg_map = {
      Ubx.MonHw2.ConfigSource.rom: 1,
      Ubx.MonHw2.ConfigSource.otp: 2,
      Ubx.MonHw2.ConfigSource.config_pins: 3,
      Ubx.MonHw2.ConfigSource.flash: 4,
    }
    hw.cfgSource = cfg_map.get(msg.cfg_source, 0)
    hw.lowLevCfg = msg.low_lev_cfg
    hw.postStatus = msg.post_status
    return ('ubloxGnss', dat)
def main():
  """Read raw receiver bytes from ubloxRaw, parse and republish as cereal events."""
  ublox_parser = UbloxMsgParser()
  pm = messaging.PubMaster(['ubloxGnss', 'gpsLocationExternal'])
  raw_sock = messaging.sub_sock('ubloxRaw', timeout=100, conflate=False)

  while True:
    raw_msg = messaging.recv_one_or_none(raw_sock)
    if raw_msg is None:
      continue
    log_time = raw_msg.logMonoTime * 1e-9
    data = bytes(raw_msg.ubloxRaw)
    for frame in ublox_parser.framer.add_data(log_time, data):
      try:
        parsed = ublox_parser.parse_frame(frame)
      except Exception:
        continue
      if parsed:
        service, dat = parsed
        pm.send(service, dat)


if __name__ == '__main__':
  main()

@ -1,4 +1,3 @@
Import('env')

# vendored json11; -Wno-unqualified-std-cast-call silences a clang warning in its source
env.Library('json11', ['json11/json11.cpp'], CCFLAGS=env['CCFLAGS'] + ['-Wno-unqualified-std-cast-call'])
# kaitai struct runtime; KS_STR_ENCODING_NONE presumably builds it without
# string encoding conversion support -- see the kaitai runtime docs
env.Library('kaitai', ['kaitai/kaitaistream.cpp'], CPPDEFINES=['KS_STR_ENCODING_NONE'])

@ -1,16 +0,0 @@
#ifndef KAITAI_CUSTOM_DECODER_H
#define KAITAI_CUSTOM_DECODER_H

#include <string>

namespace kaitai {

// Interface for user-supplied "process" routines: an implementation
// transforms a raw byte string (src) and returns the decoded bytes.
class custom_decoder {
public:
    virtual ~custom_decoder() {};
    virtual std::string decode(std::string src) = 0;
};

}

#endif

@ -1,189 +0,0 @@
#ifndef KAITAI_EXCEPTIONS_H
#define KAITAI_EXCEPTIONS_H
#include <kaitai/kaitaistream.h>
#include <string>
#include <stdexcept>
// We need to use "noexcept" in virtual destructor of our exceptions
// subclasses. Different compilers have different ideas on how to
// achieve that: C++98 compilers prefer `throw()`, C++11 and later
// use `noexcept`. We define KS_NOEXCEPT macro for that.
#if __cplusplus >= 201103L || (defined(_MSC_VER) && _MSC_VER >= 1900)
#define KS_NOEXCEPT noexcept
#else
#define KS_NOEXCEPT throw()
#endif
namespace kaitai {

/**
 * Common ancestor for all error originating from Kaitai Struct usage.
 * Stores KSY source path, pointing to an element supposedly guilty of
 * an error.
 */
class kstruct_error: public std::runtime_error {
public:
    kstruct_error(const std::string what, const std::string src_path):
        std::runtime_error(src_path + ": " + what),
        m_src_path(src_path)
    {
    }

    virtual ~kstruct_error() KS_NOEXCEPT {};

protected:
    const std::string m_src_path;
};

/**
 * Error that occurs when default endianness should be decided with
 * a switch, but nothing matches (although using endianness expression
 * implies that there should be some positive result).
 */
class undecided_endianness_error: public kstruct_error {
public:
    undecided_endianness_error(const std::string src_path):
        kstruct_error("unable to decide on endianness for a type", src_path)
    {
    }

    virtual ~undecided_endianness_error() KS_NOEXCEPT {};
};

/**
 * Common ancestor for all validation failures. Stores pointer to
 * KaitaiStream IO object which was involved in an error.
 */
class validation_failed_error: public kstruct_error {
public:
    validation_failed_error(const std::string what, kstream* io, const std::string src_path):
        kstruct_error("at pos " + kstream::to_string(static_cast<int>(io->pos())) + ": validation failed: " + what, src_path),
        m_io(io)
    {
    }

    // "at pos #{io.pos}: validation failed: #{msg}"

    virtual ~validation_failed_error() KS_NOEXCEPT {};

protected:
    kstream* m_io;
};

// NOTE: the validation error classes below store their operands BY VALUE.
// They used to hold const references, but the constructor arguments are
// typically temporaries that are destroyed while the exception propagates,
// leaving the members dangling.

/**
 * Signals validation failure: we required "actual" value to be equal to
 * "expected", but it turned out that it's not.
 */
template<typename T>
class validation_not_equal_error: public validation_failed_error {
public:
    validation_not_equal_error<T>(const T& expected, const T& actual, kstream* io, const std::string src_path):
        validation_failed_error("not equal", io, src_path),
        m_expected(expected),
        m_actual(actual)
    {
    }

    // "not equal, expected #{expected.inspect}, but got #{actual.inspect}"

    virtual ~validation_not_equal_error<T>() KS_NOEXCEPT {};

protected:
    const T m_expected;
    const T m_actual;
};

/**
 * Signals validation failure: we required "actual" value to be greater
 * than or equal to "min", but it turned out that it's not.
 */
template<typename T>
class validation_less_than_error: public validation_failed_error {
public:
    validation_less_than_error<T>(const T& min, const T& actual, kstream* io, const std::string src_path):
        validation_failed_error("not in range", io, src_path),
        m_min(min),
        m_actual(actual)
    {
    }

    // "not in range, min #{min.inspect}, but got #{actual.inspect}"

    virtual ~validation_less_than_error<T>() KS_NOEXCEPT {};

protected:
    const T m_min;
    const T m_actual;
};

/**
 * Signals validation failure: we required "actual" value to be less
 * than or equal to "max", but it turned out that it's not.
 */
template<typename T>
class validation_greater_than_error: public validation_failed_error {
public:
    validation_greater_than_error<T>(const T& max, const T& actual, kstream* io, const std::string src_path):
        validation_failed_error("not in range", io, src_path),
        m_max(max),
        m_actual(actual)
    {
    }

    // "not in range, max #{max.inspect}, but got #{actual.inspect}"

    virtual ~validation_greater_than_error<T>() KS_NOEXCEPT {};

protected:
    const T m_max;
    const T m_actual;
};

/**
 * Signals validation failure: we required "actual" value to be from
 * the list, but it turned out that it's not.
 */
template<typename T>
class validation_not_any_of_error: public validation_failed_error {
public:
    validation_not_any_of_error<T>(const T& actual, kstream* io, const std::string src_path):
        validation_failed_error("not any of the list", io, src_path),
        m_actual(actual)
    {
    }

    // "not any of the list, got #{actual.inspect}"

    virtual ~validation_not_any_of_error<T>() KS_NOEXCEPT {};

protected:
    const T m_actual;
};

/**
 * Signals validation failure: we required "actual" value to match
 * the expression, but it turned out that it doesn't.
 */
template<typename T>
class validation_expr_error: public validation_failed_error {
public:
    validation_expr_error<T>(const T& actual, kstream* io, const std::string src_path):
        validation_failed_error("not matching the expression", io, src_path),
        m_actual(actual)
    {
    }

    // "not matching the expression, got #{actual.inspect}"

    virtual ~validation_expr_error<T>() KS_NOEXCEPT {};

protected:
    const T m_actual;
};

}
#endif

@ -1,689 +0,0 @@
#include <kaitai/kaitaistream.h>
#if defined(__APPLE__)
#include <machine/endian.h>
#include <libkern/OSByteOrder.h>
#define bswap_16(x) OSSwapInt16(x)
#define bswap_32(x) OSSwapInt32(x)
#define bswap_64(x) OSSwapInt64(x)
#define __BYTE_ORDER BYTE_ORDER
#define __BIG_ENDIAN BIG_ENDIAN
#define __LITTLE_ENDIAN LITTLE_ENDIAN
#elif defined(_MSC_VER) // !__APPLE__
#include <stdlib.h>
#define __LITTLE_ENDIAN 1234
#define __BIG_ENDIAN 4321
#define __BYTE_ORDER __LITTLE_ENDIAN
#define bswap_16(x) _byteswap_ushort(x)
#define bswap_32(x) _byteswap_ulong(x)
#define bswap_64(x) _byteswap_uint64(x)
#else // !__APPLE__ or !_MSC_VER
#include <endian.h>
#include <byteswap.h>
#endif
#include <iostream>
#include <vector>
#include <stdexcept>
// Wrap an existing std::istream; the stream must outlive this kstream.
kaitai::kstream::kstream(std::istream* io) {
    m_io = io;
    init();
}

// Own an in-memory copy of `data` via the internal istringstream member.
kaitai::kstream::kstream(std::string& data): m_io_str(data) {
    m_io = &m_io_str;
    init();
}

void kaitai::kstream::init() {
    exceptions_enable();
    align_to_byte();
}

void kaitai::kstream::close() {
    // m_io->close();
}

// Configure the underlying stream to throw on EOF / failed reads / bad
// state instead of silently setting state flags.
void kaitai::kstream::exceptions_enable() const {
    m_io->exceptions(
        std::istream::eofbit |
        std::istream::failbit |
        std::istream::badbit
    );
}
// ========================================================================
// Stream positioning
// ========================================================================

// Probe for EOF by attempting to read one byte and ungetting it.
// Exceptions are temporarily narrowed to badbit so the probe itself
// doesn't throw at end of stream.
bool kaitai::kstream::is_eof() const {
    if (m_bits_left > 0) {
        // unread bits remain from a bit-level read, so we're not at EOF
        return false;
    }
    char t;
    m_io->exceptions(
        std::istream::badbit
    );
    m_io->get(t);
    if (m_io->eof()) {
        m_io->clear();
        exceptions_enable();
        return true;
    } else {
        m_io->unget();
        exceptions_enable();
        return false;
    }
}

void kaitai::kstream::seek(uint64_t pos) {
    m_io->seekg(pos);
}

uint64_t kaitai::kstream::pos() {
    return m_io->tellg();
}

// Total stream length, found by seeking to the end and back.
uint64_t kaitai::kstream::size() {
    std::iostream::pos_type cur_pos = m_io->tellg();
    m_io->seekg(0, std::ios::end);
    std::iostream::pos_type len = m_io->tellg();
    m_io->seekg(cur_pos);
    return len;
}
// ========================================================================
// Integer numbers
// ========================================================================
// ------------------------------------------------------------------------
// Signed
// ------------------------------------------------------------------------
int8_t kaitai::kstream::read_s1() {
char t;
m_io->get(t);
return t;
}
// ........................................................................
// Big-endian
// ........................................................................

// Reads a 2-byte signed big-endian integer, byte-swapping on LE hosts.
int16_t kaitai::kstream::read_s2be() {
    int16_t t;
    m_io->read(reinterpret_cast<char *>(&t), 2);
#if __BYTE_ORDER == __LITTLE_ENDIAN
    t = bswap_16(t);
#endif
    return t;
}

// Reads a 4-byte signed big-endian integer, byte-swapping on LE hosts.
int32_t kaitai::kstream::read_s4be() {
    int32_t t;
    m_io->read(reinterpret_cast<char *>(&t), 4);
#if __BYTE_ORDER == __LITTLE_ENDIAN
    t = bswap_32(t);
#endif
    return t;
}

// Reads an 8-byte signed big-endian integer, byte-swapping on LE hosts.
int64_t kaitai::kstream::read_s8be() {
    int64_t t;
    m_io->read(reinterpret_cast<char *>(&t), 8);
#if __BYTE_ORDER == __LITTLE_ENDIAN
    t = bswap_64(t);
#endif
    return t;
}
// ........................................................................
// Little-endian
// ........................................................................

// Reads a 2-byte signed little-endian integer, byte-swapping on BE hosts.
int16_t kaitai::kstream::read_s2le() {
    int16_t t;
    m_io->read(reinterpret_cast<char *>(&t), 2);
#if __BYTE_ORDER == __BIG_ENDIAN
    t = bswap_16(t);
#endif
    return t;
}

// Reads a 4-byte signed little-endian integer, byte-swapping on BE hosts.
int32_t kaitai::kstream::read_s4le() {
    int32_t t;
    m_io->read(reinterpret_cast<char *>(&t), 4);
#if __BYTE_ORDER == __BIG_ENDIAN
    t = bswap_32(t);
#endif
    return t;
}

// Reads an 8-byte signed little-endian integer, byte-swapping on BE hosts.
int64_t kaitai::kstream::read_s8le() {
    int64_t t;
    m_io->read(reinterpret_cast<char *>(&t), 8);
#if __BYTE_ORDER == __BIG_ENDIAN
    t = bswap_64(t);
#endif
    return t;
}
// ------------------------------------------------------------------------
// Unsigned
// ------------------------------------------------------------------------
// Reads a single byte and returns it as an unsigned 8-bit integer.
uint8_t kaitai::kstream::read_u1() {
    char t;
    m_io->get(t);
    return t;
}
// ........................................................................
// Big-endian
// ........................................................................

// Reads a 2-byte unsigned big-endian integer, byte-swapping on LE hosts.
uint16_t kaitai::kstream::read_u2be() {
    uint16_t t;
    m_io->read(reinterpret_cast<char *>(&t), 2);
#if __BYTE_ORDER == __LITTLE_ENDIAN
    t = bswap_16(t);
#endif
    return t;
}

// Reads a 4-byte unsigned big-endian integer, byte-swapping on LE hosts.
uint32_t kaitai::kstream::read_u4be() {
    uint32_t t;
    m_io->read(reinterpret_cast<char *>(&t), 4);
#if __BYTE_ORDER == __LITTLE_ENDIAN
    t = bswap_32(t);
#endif
    return t;
}

// Reads an 8-byte unsigned big-endian integer, byte-swapping on LE hosts.
uint64_t kaitai::kstream::read_u8be() {
    uint64_t t;
    m_io->read(reinterpret_cast<char *>(&t), 8);
#if __BYTE_ORDER == __LITTLE_ENDIAN
    t = bswap_64(t);
#endif
    return t;
}
// ........................................................................
// Little-endian
// ........................................................................

// Reads a 2-byte unsigned little-endian integer, byte-swapping on BE hosts.
uint16_t kaitai::kstream::read_u2le() {
    uint16_t t;
    m_io->read(reinterpret_cast<char *>(&t), 2);
#if __BYTE_ORDER == __BIG_ENDIAN
    t = bswap_16(t);
#endif
    return t;
}

// Reads a 4-byte unsigned little-endian integer, byte-swapping on BE hosts.
uint32_t kaitai::kstream::read_u4le() {
    uint32_t t;
    m_io->read(reinterpret_cast<char *>(&t), 4);
#if __BYTE_ORDER == __BIG_ENDIAN
    t = bswap_32(t);
#endif
    return t;
}

// Reads an 8-byte unsigned little-endian integer, byte-swapping on BE hosts.
uint64_t kaitai::kstream::read_u8le() {
    uint64_t t;
    m_io->read(reinterpret_cast<char *>(&t), 8);
#if __BYTE_ORDER == __BIG_ENDIAN
    t = bswap_64(t);
#endif
    return t;
}
// ========================================================================
// Floating point numbers
// ========================================================================
// ........................................................................
// Big-endian
// ........................................................................
#include <cstring>

// Reads a 4-byte big-endian IEEE-754 float, byte-swapping on LE hosts.
float kaitai::kstream::read_f4be() {
    uint32_t t;
    m_io->read(reinterpret_cast<char *>(&t), 4);
#if __BYTE_ORDER == __LITTLE_ENDIAN
    t = bswap_32(t);
#endif
    // memcpy is the defined way to reinterpret the bit pattern; the previous
    // reinterpret_cast<float&> violated strict aliasing (undefined behavior).
    float f;
    std::memcpy(&f, &t, sizeof(f));
    return f;
}

// Reads an 8-byte big-endian IEEE-754 double, byte-swapping on LE hosts.
double kaitai::kstream::read_f8be() {
    uint64_t t;
    m_io->read(reinterpret_cast<char *>(&t), 8);
#if __BYTE_ORDER == __LITTLE_ENDIAN
    t = bswap_64(t);
#endif
    // See read_f4be(): bit-cast via memcpy instead of an aliasing cast.
    double d;
    std::memcpy(&d, &t, sizeof(d));
    return d;
}
// ........................................................................
// Little-endian
// ........................................................................
#include <cstring>

// Reads a 4-byte little-endian IEEE-754 float, byte-swapping on BE hosts.
float kaitai::kstream::read_f4le() {
    uint32_t t;
    m_io->read(reinterpret_cast<char *>(&t), 4);
#if __BYTE_ORDER == __BIG_ENDIAN
    t = bswap_32(t);
#endif
    // memcpy is the defined way to reinterpret the bit pattern; the previous
    // reinterpret_cast<float&> violated strict aliasing (undefined behavior).
    float f;
    std::memcpy(&f, &t, sizeof(f));
    return f;
}

// Reads an 8-byte little-endian IEEE-754 double, byte-swapping on BE hosts.
double kaitai::kstream::read_f8le() {
    uint64_t t;
    m_io->read(reinterpret_cast<char *>(&t), 8);
#if __BYTE_ORDER == __BIG_ENDIAN
    t = bswap_64(t);
#endif
    // See read_f4le(): bit-cast via memcpy instead of an aliasing cast.
    double d;
    std::memcpy(&d, &t, sizeof(d));
    return d;
}
// ========================================================================
// Unaligned bit values
// ========================================================================
// Discards any buffered unaligned bits so the next read starts on a byte
// boundary.
void kaitai::kstream::align_to_byte() {
    m_bits_left = 0;
    m_bits = 0;
}
// Reads `n` bits in big-endian bit order and returns them as an unsigned
// integer. Bits are buffered most-significant-first in m_bits; whole bytes
// are pulled from the stream only when the buffer lacks enough bits.
uint64_t kaitai::kstream::read_bits_int_be(int n) {
    int bits_needed = n - m_bits_left;
    if (bits_needed > 0) {
        // 1 bit  => 1 byte
        // 8 bits => 1 byte
        // 9 bits => 2 bytes
        int bytes_needed = ((bits_needed - 1) / 8) + 1;
        if (bytes_needed > 8)
            throw std::runtime_error("read_bits_int: more than 8 bytes requested");
        char buf[8];
        m_io->read(buf, bytes_needed);
        for (int i = 0; i < bytes_needed; i++) {
            uint8_t b = buf[i];
            m_bits <<= 8;
            m_bits |= b;
            m_bits_left += 8;
        }
    }
    // raw mask with required number of 1s, starting from lowest bit
    uint64_t mask = get_mask_ones(n);
    // shift mask to align with highest bits available in @bits
    int shift_bits = m_bits_left - n;
    mask <<= shift_bits;
    // derive reading result
    uint64_t res = (m_bits & mask) >> shift_bits;
    // clear top bits that we've just read => AND with 1s
    m_bits_left -= n;
    mask = get_mask_ones(m_bits_left);
    m_bits &= mask;
    return res;
}

// Deprecated, use read_bits_int_be() instead.
uint64_t kaitai::kstream::read_bits_int(int n) {
    return read_bits_int_be(n);
}
// Reads `n` bits in little-endian bit order: newly read bytes are appended
// above the bits already buffered, and results are taken from the low end.
uint64_t kaitai::kstream::read_bits_int_le(int n) {
    int bits_needed = n - m_bits_left;
    if (bits_needed > 0) {
        // 1 bit  => 1 byte
        // 8 bits => 1 byte
        // 9 bits => 2 bytes
        int bytes_needed = ((bits_needed - 1) / 8) + 1;
        if (bytes_needed > 8)
            throw std::runtime_error("read_bits_int_le: more than 8 bytes requested");
        char buf[8];
        m_io->read(buf, bytes_needed);
        for (int i = 0; i < bytes_needed; i++) {
            uint8_t b = buf[i];
            m_bits |= (static_cast<uint64_t>(b) << m_bits_left);
            m_bits_left += 8;
        }
    }
    // raw mask with required number of 1s, starting from lowest bit
    uint64_t mask = get_mask_ones(n);
    // derive reading result
    uint64_t res = m_bits & mask;
    // remove bottom bits that we've just read by shifting
    m_bits >>= n;
    m_bits_left -= n;
    return res;
}
// Returns a mask with the lowest `n` bits set. A 64-bit shift by 64 is
// undefined in C++, so the all-ones case is handled explicitly.
uint64_t kaitai::kstream::get_mask_ones(int n) {
    if (n == 64)
        return 0xFFFFFFFFFFFFFFFF;
    return ((uint64_t) 1 << n) - 1;
}
// ========================================================================
// Byte arrays
// ========================================================================
// Reads exactly `len` bytes from the stream and returns them as a string.
// Throws std::runtime_error for a negative length request.
std::string kaitai::kstream::read_bytes(std::streamsize len) {
    // NOTE: streamsize type is signed, negative values are only *supposed* to not be used.
    // http://en.cppreference.com/w/cpp/io/streamsize
    // Validate BEFORE sizing the buffer: constructing std::vector with a
    // negative count converts it to a huge size_t (bad_alloc / UB) and the
    // intended runtime_error below would never be reached.
    if (len < 0) {
        throw std::runtime_error("read_bytes: requested a negative amount");
    }
    std::vector<char> result(len);
    if (len > 0) {
        m_io->read(&result[0], len);
    }
    return std::string(result.begin(), result.end());
}
// Reads everything from the current position to the end of the stream.
// The stream is left positioned at the end afterwards.
std::string kaitai::kstream::read_bytes_full() {
    std::iostream::pos_type p1 = m_io->tellg();
    m_io->seekg(0, std::ios::end);
    std::iostream::pos_type p2 = m_io->tellg();
    size_t len = p2 - p1;
    // Note: this requires a std::string to be backed with a
    // contiguous buffer. Officially, it's only a requirement since
    // C++11 (C++98 and C++03 didn't have this requirement), but all
    // major implementations had contiguous buffers anyway.
    std::string result(len, ' ');
    m_io->seekg(p1);
    m_io->read(&result[0], len);
    return result;
}
// Reads bytes up to the terminator byte `term`.
// include:   keep the terminator in the returned string (if one was found);
// consume:   leave the stream positioned after the terminator;
// eos_error: throw if EOF is reached before the terminator appears.
std::string kaitai::kstream::read_bytes_term(char term, bool include, bool consume, bool eos_error) {
    std::string result;
    std::getline(*m_io, result, term);  // getline consumes `term` and does not store it
    if (m_io->eof()) {
        // encountered EOF
        if (eos_error) {
            throw std::runtime_error("read_bytes_term: encountered EOF");
        }
    } else {
        // encountered terminator
        if (include)
            result.push_back(term);
        if (!consume)
            m_io->unget();  // push the terminator back for the next read
    }
    return result;
}
// Reads expected.length() bytes and verifies they equal `expected`;
// throws on mismatch, returns the bytes read on success.
std::string kaitai::kstream::ensure_fixed_contents(std::string expected) {
    std::string actual = read_bytes(expected.length());
    if (actual == expected)
        return actual;
    // NOTE: a hexlified dump to stderr would be friendlier than embedding
    // raw (possibly non-printable) bytes in the message.
    throw std::runtime_error("ensure_fixed_contents: actual data does not match expected data");
}
// Returns `src` with every trailing `pad_byte` removed.
std::string kaitai::kstream::bytes_strip_right(std::string src, char pad_byte) {
    const std::size_t last = src.find_last_not_of(pad_byte);
    if (last == std::string::npos)
        return std::string();  // the whole string was padding
    return src.substr(0, last + 1);
}
// Truncates `src` at the first occurrence of `term`; with include=true the
// terminator itself is kept. If no terminator exists, `src` is returned
// unchanged.
std::string kaitai::kstream::bytes_terminate(std::string src, char term, bool include) {
    const std::size_t term_pos = src.find(term);
    if (term_pos == std::string::npos)
        return src;
    return src.substr(0, include ? term_pos + 1 : term_pos);
}
// ========================================================================
// Byte array processing
// ========================================================================
// XORs every byte of the input with a single-byte key.
std::string kaitai::kstream::process_xor_one(std::string data, uint8_t key) {
    std::string result = data;
    for (std::size_t i = 0; i < result.size(); ++i)
        result[i] ^= key;
    return result;
}
// XORs the input with `key` repeated cyclically.
std::string kaitai::kstream::process_xor_many(std::string data, std::string key) {
    const std::size_t klen = key.length();
    // An empty key XORs with NUL in the original index-cycling code,
    // i.e. leaves the data unchanged; short-circuit to avoid i % 0.
    if (klen == 0)
        return data;
    std::string result = data;
    for (std::size_t i = 0; i < result.size(); ++i)
        result[i] ^= key[i % klen];
    return result;
}
// Rotates every byte of `data` left by `amount` bits (byte-wise circular
// shift). NOTE(review): assumes 0 <= amount <= 7; out-of-range amounts are
// not normalized here -- presumably callers pre-apply mod(). Verify.
std::string kaitai::kstream::process_rotate_left(std::string data, int amount) {
    size_t len = data.length();
    std::string result(len, ' ');
    for (size_t i = 0; i < len; i++) {
        uint8_t bits = data[i];
        // uint8_t promotes to int, so shifting by up to 8 is well-defined;
        // the assignment back to char truncates to a single byte.
        result[i] = (bits << amount) | (bits >> (8 - amount));
    }
    return result;
}
#ifdef KS_ZLIB
#include <zlib.h>
// Inflates zlib-compressed data (with standard zlib headers) and returns
// the decompressed bytes. Throws std::runtime_error on any zlib failure.
// NOTE(review): if a throw happens after inflateInit succeeds, inflateEnd
// is never called and the z_stream state leaks -- consider a guard.
std::string kaitai::kstream::process_zlib(std::string data) {
    int ret;

    unsigned char *src_ptr = reinterpret_cast<unsigned char*>(&data[0]);
    std::stringstream dst_strm;

    z_stream strm;
    strm.zalloc = Z_NULL;
    strm.zfree = Z_NULL;
    strm.opaque = Z_NULL;

    ret = inflateInit(&strm);
    if (ret != Z_OK)
        throw std::runtime_error("process_zlib: inflateInit error");

    strm.next_in = src_ptr;
    strm.avail_in = data.length();

    unsigned char outbuffer[ZLIB_BUF_SIZE];
    std::string outstring;

    // get the decompressed bytes blockwise using repeated calls to inflate
    do {
        strm.next_out = reinterpret_cast<Bytef*>(outbuffer);
        strm.avail_out = sizeof(outbuffer);

        ret = inflate(&strm, 0);

        // total_out is cumulative; append only the part produced this pass
        if (outstring.size() < strm.total_out)
            outstring.append(reinterpret_cast<char*>(outbuffer), strm.total_out - outstring.size());
    } while (ret == Z_OK);

    if (ret != Z_STREAM_END) { // an error occurred that was not EOF
        std::ostringstream exc_msg;
        exc_msg << "process_zlib: error #" << ret << "): " << strm.msg;
        throw std::runtime_error(exc_msg.str());
    }

    if (inflateEnd(&strm) != Z_OK)
        throw std::runtime_error("process_zlib: inflateEnd error");

    return outstring;
}
#endif
// ========================================================================
// Misc utility methods
// ========================================================================
// Mathematical modulo: the result is always in [0, b-1]. The divisor `b`
// must be positive; non-positive divisors throw std::invalid_argument.
int kaitai::kstream::mod(int a, int b) {
    if (b <= 0)
        throw std::invalid_argument("mod: divisor b <= 0");
    const int r = a % b;
    return (r < 0) ? r + b : r;
}
#include <stdio.h>
// Converts `val` to its decimal string representation. Exists as a portable
// stand-in for std::to_string() on pre-C++11 toolchains.
std::string kaitai::kstream::to_string(int val) {
    // if int is 32 bits, "-2147483648" is the longest string representation
    //   => 11 chars + zero => 12 chars
    // if int is 64 bits, "-9223372036854775808" is the longest
    //   => 20 chars + zero => 21 chars
    char buf[25];
    int got_len = snprintf(buf, sizeof(buf), "%d", val);

    // snprintf returns the length the untruncated result *would* have, or a
    // negative value on encoding errors. The previous check compared signed
    // against unsigned and used `>`, missing the got_len == sizeof(buf)
    // truncation case entirely. Should never trigger with buf[25], but
    // check nonetheless.
    if (got_len < 0 || got_len >= static_cast<int>(sizeof(buf)))
        throw std::invalid_argument("to_string: integer is longer than string buffer");

    return std::string(buf);
}
#include <algorithm>
// Returns a copy of `val` with its characters in reverse order; offered so
// generated code can avoid a local temporary at the call site.
std::string kaitai::kstream::reverse(std::string val) {
    std::reverse(val.begin(), val.end());

    return val;
}
// Returns the smallest byte in `val` interpreted as unsigned values.
// An empty input yields 0xff (UINT8_MAX), matching the original contract.
uint8_t kaitai::kstream::byte_array_min(const std::string val) {
    uint8_t lowest = 0xff; // UINT8_MAX
    for (std::size_t i = 0; i < val.length(); i++) {
        const uint8_t cur = static_cast<uint8_t>(val[i]);
        if (cur < lowest)
            lowest = cur;
    }
    return lowest;
}
// Returns the largest byte in `val` interpreted as unsigned values.
// An empty input yields 0, matching the original contract.
uint8_t kaitai::kstream::byte_array_max(const std::string val) {
    uint8_t highest = 0; // UINT8_MIN
    for (std::size_t i = 0; i < val.length(); i++) {
        const uint8_t cur = static_cast<uint8_t>(val[i]);
        if (cur > highest)
            highest = cur;
    }
    return highest;
}
// ========================================================================
// Other internal methods
// ========================================================================
#ifndef KS_STR_DEFAULT_ENCODING
#define KS_STR_DEFAULT_ENCODING "UTF-8"
#endif

#ifdef KS_STR_ENCODING_ICONV

#include <iconv.h>
#include <cerrno>
#include <stdexcept>

// Converts `src` from encoding `src_enc` to KS_STR_DEFAULT_ENCODING (UTF-8
// unless overridden) using iconv. Grows the output buffer geometrically
// until the whole input fits. Throws std::runtime_error on iconv failures.
std::string kaitai::kstream::bytes_to_str(std::string src, std::string src_enc) {
    iconv_t cd = iconv_open(KS_STR_DEFAULT_ENCODING, src_enc.c_str());

    if (cd == (iconv_t) -1) {
        if (errno == EINVAL) {
            throw std::runtime_error("bytes_to_str: invalid encoding pair conversion requested");
        } else {
            throw std::runtime_error("bytes_to_str: error opening iconv");
        }
    }

    size_t src_len = src.length();
    size_t src_left = src_len;

    // Start with a buffer length of double the source length.
    size_t dst_len = src_len * 2;
    std::string dst(dst_len, ' ');
    size_t dst_left = dst_len;

    char *src_ptr = &src[0];
    char *dst_ptr = &dst[0];

    while (true) {
        size_t res = iconv(cd, &src_ptr, &src_left, &dst_ptr, &dst_left);

        if (res == (size_t) -1) {
            if (errno == E2BIG) {
                // dst buffer is not enough to accommodate whole string
                // enlarge the buffer and try again
                size_t dst_used = dst_len - dst_left;
                dst_left += dst_len;
                dst_len += dst_len;
                dst.resize(dst_len);

                // dst.resize might have allocated destination buffer in another area
                // of memory, thus our previous pointer "dst" will be invalid; re-point
                // it using "dst_used".
                dst_ptr = &dst[dst_used];
            } else {
                throw std::runtime_error("bytes_to_str: iconv error");
            }
        } else {
            // conversion successful
            dst.resize(dst_len - dst_left);
            break;
        }
    }

    if (iconv_close(cd) != 0) {
        throw std::runtime_error("bytes_to_str: iconv close error");
    }

    return dst;
}
#elif defined(KS_STR_ENCODING_NONE)
// Encoding support compiled out: return the raw bytes untouched.
std::string kaitai::kstream::bytes_to_str(std::string src, std::string src_enc) {
    return src;
}
#else
#error Need to decide how to handle strings: please define one of: KS_STR_ENCODING_ICONV, KS_STR_ENCODING_NONE
#endif

@ -1,268 +0,0 @@
#ifndef KAITAI_STREAM_H
#define KAITAI_STREAM_H
// Kaitai Struct runtime API version: x.y.z = 'xxxyyyzzz' decimal
#define KAITAI_STRUCT_VERSION 9000L
#include <istream>
#include <sstream>
#include <stdint.h>
#include <sys/types.h>
namespace kaitai {
/**
* Kaitai Stream class (kaitai::kstream) is an implementation of
* <a href="https://doc.kaitai.io/stream_api.html">Kaitai Struct stream API</a>
* for C++/STL. It's implemented as a wrapper over generic STL std::istream.
*
* It provides a wide variety of simple methods to read (parse) binary
* representations of primitive types, such as integer and floating
* point numbers, byte arrays and strings, and also provides stream
* positioning / navigation methods with unified cross-language and
* cross-toolkit semantics.
*
* Typically, end users won't access Kaitai Stream class manually, but would
* describe a binary structure format using .ksy language and then would use
* Kaitai Struct compiler to generate source code in desired target language.
* That code, in turn, would use this class and API to do the actual parsing
* job.
*/
class kstream {
public:
    /**
     * Constructs new Kaitai Stream object, wrapping a given std::istream.
     * \param io istream object to use for this Kaitai Stream
     */
    kstream(std::istream* io);

    /**
     * Constructs new Kaitai Stream object, wrapping a given in-memory data
     * buffer.
     * \param data data buffer to use for this Kaitai Stream
     */
    kstream(std::string& data);

    void close();

    /** @name Stream positioning */
    //@{
    /**
     * Check if stream pointer is at the end of stream. Note that the semantics
     * are different from traditional STL semantics: one does *not* need to do a
     * read (which will fail) after the actual end of the stream to trigger EOF
     * flag, which can be accessed after that read. It is sufficient to just be
     * at the end of the stream for this method to return true.
     * \return "true" if we are located at the end of the stream.
     */
    bool is_eof() const;

    /**
     * Set stream pointer to designated position.
     * \param pos new position (offset in bytes from the beginning of the stream)
     */
    void seek(uint64_t pos);

    /**
     * Get current position of a stream pointer.
     * \return pointer position, number of bytes from the beginning of the stream
     */
    uint64_t pos();

    /**
     * Get total size of the stream in bytes.
     * \return size of the stream in bytes
     */
    uint64_t size();
    //@}

    /** @name Integer numbers */
    //@{

    // ------------------------------------------------------------------------
    // Signed
    // ------------------------------------------------------------------------

    int8_t read_s1();

    // ........................................................................
    // Big-endian
    // ........................................................................

    int16_t read_s2be();
    int32_t read_s4be();
    int64_t read_s8be();

    // ........................................................................
    // Little-endian
    // ........................................................................

    int16_t read_s2le();
    int32_t read_s4le();
    int64_t read_s8le();

    // ------------------------------------------------------------------------
    // Unsigned
    // ------------------------------------------------------------------------

    uint8_t read_u1();

    // ........................................................................
    // Big-endian
    // ........................................................................

    uint16_t read_u2be();
    uint32_t read_u4be();
    uint64_t read_u8be();

    // ........................................................................
    // Little-endian
    // ........................................................................

    uint16_t read_u2le();
    uint32_t read_u4le();
    uint64_t read_u8le();
    //@}

    /** @name Floating point numbers */
    //@{

    // ........................................................................
    // Big-endian
    // ........................................................................

    float read_f4be();
    double read_f8be();

    // ........................................................................
    // Little-endian
    // ........................................................................

    float read_f4le();
    double read_f8le();
    //@}

    /** @name Unaligned bit values */
    //@{
    // Drops any buffered bits so the next read starts on a byte boundary.
    void align_to_byte();
    uint64_t read_bits_int_be(int n);
    // Deprecated alias of read_bits_int_be().
    uint64_t read_bits_int(int n);
    uint64_t read_bits_int_le(int n);
    //@}

    /** @name Byte arrays */
    //@{

    std::string read_bytes(std::streamsize len);
    std::string read_bytes_full();
    std::string read_bytes_term(char term, bool include, bool consume, bool eos_error);
    std::string ensure_fixed_contents(std::string expected);

    static std::string bytes_strip_right(std::string src, char pad_byte);
    static std::string bytes_terminate(std::string src, char term, bool include);
    static std::string bytes_to_str(std::string src, std::string src_enc);
    //@}

    /** @name Byte array processing */
    //@{

    /**
     * Performs a XOR processing with given data, XORing every byte of input with a single
     * given value.
     * @param data data to process
     * @param key value to XOR with
     * @return processed data
     */
    static std::string process_xor_one(std::string data, uint8_t key);

    /**
     * Performs a XOR processing with given data, XORing every byte of input with a key
     * array, repeating key array many times, if necessary (i.e. if data array is longer
     * than key array).
     * @param data data to process
     * @param key array of bytes to XOR with
     * @return processed data
     */
    static std::string process_xor_many(std::string data, std::string key);

    /**
     * Performs a circular left rotation shift for a given buffer by a given amount of bits,
     * using groups of 1 bytes each time. Right circular rotation should be performed
     * using this procedure with corrected amount.
     * @param data source data to process
     * @param amount number of bits to shift by
     * @return copy of source array with requested shift applied
     */
    static std::string process_rotate_left(std::string data, int amount);
#ifdef KS_ZLIB
    /**
     * Performs an unpacking ("inflation") of zlib-compressed data with usual zlib headers.
     * @param data data to unpack
     * @return unpacked data
     * @throws IOException
     */
    static std::string process_zlib(std::string data);
#endif
    //@}

    /**
     * Performs modulo operation between two integers: dividend `a`
     * and divisor `b`. Divisor `b` is expected to be positive. The
     * result is always 0 <= x <= b - 1.
     */
    static int mod(int a, int b);

    /**
     * Converts given integer `val` to a decimal string representation.
     * Should be used in place of std::to_string() (which is available only
     * since C++11) in older C++ implementations.
     */
    static std::string to_string(int val);

    /**
     * Reverses given string `val`, so that the first character becomes the
     * last and the last one becomes the first. This should be used to avoid
     * the need of local variables at the caller.
     */
    static std::string reverse(std::string val);

    /**
     * Finds the minimal byte in a byte array, treating bytes as
     * unsigned values.
     * @param val byte array to scan
     * @return minimal byte in byte array as integer
     */
    static uint8_t byte_array_min(const std::string val);

    /**
     * Finds the maximal byte in a byte array, treating bytes as
     * unsigned values.
     * @param val byte array to scan
     * @return maximal byte in byte array as integer
     */
    static uint8_t byte_array_max(const std::string val);

private:
    std::istream* m_io;        // the stream all reads go through
    std::istringstream m_io_str; // backing buffer for the in-memory constructor
    int m_bits_left;           // number of buffered unaligned bits in m_bits
    uint64_t m_bits;           // bit buffer for read_bits_int_{be,le}

    void init();
    void exceptions_enable() const;

    static uint64_t get_mask_ones(int n);

    static const int ZLIB_BUF_SIZE = 128 * 1024;
};
}
#endif

@ -1,20 +0,0 @@
#ifndef KAITAI_STRUCT_H
#define KAITAI_STRUCT_H
#include <kaitai/kaitaistream.h>
namespace kaitai {
// Common base for all Kaitai-generated structs: remembers the stream the
// struct was parsed from and exposes it via _io().
class kstruct {
public:
    kstruct(kstream *_io) : m__io(_io) {}
    virtual ~kstruct() {}

    /// Stream this struct was parsed from.
    kstream *_io() { return m__io; }

protected:
    kstream *m__io;
};
}
#endif

@ -0,0 +1,311 @@
import numpy as np
import threading
import multiprocessing
import bisect
from collections import defaultdict
import tqdm
from openpilot.common.swaglog import cloudlog
from openpilot.tools.lib.logreader import _LogFileReader, LogReader
def flatten_dict(d: dict, sep: str = "/", prefix: str = None) -> dict:
result = {}
stack: list[tuple] = [(d, prefix)]
while stack:
obj, current_prefix = stack.pop()
if isinstance(obj, dict):
for key, val in obj.items():
new_prefix = key if current_prefix is None else f"{current_prefix}{sep}{key}"
if isinstance(val, (dict, list)):
stack.append((val, new_prefix))
else:
result[new_prefix] = val
elif isinstance(obj, list):
for i, item in enumerate(obj):
new_prefix = f"{current_prefix}{sep}{i}"
if isinstance(item, (dict, list)):
stack.append((item, new_prefix))
else:
result[new_prefix] = item
else:
if current_prefix is not None:
result[current_prefix] = obj
return result
def extract_field_types(schema, prefix, field_types_dict):
  """Walk a capnp struct schema and record the capnp type name of every field.

  Keys written into field_types_dict are '/'-joined paths rooted at `prefix`;
  list-typed fields additionally get a '<path>/*' wildcard entry holding the
  element type, and struct-valued fields/groups are traversed recursively.
  NOTE(review): relies on pycapnp's schema API (fields_list, proto.which(),
  slot.type, field.schema.elementType) -- verify against the capnp version in use.
  """
  stack = [(schema, prefix)]
  while stack:
    current_schema, current_prefix = stack.pop()
    for field in current_schema.fields_list:
      field_name = field.proto.name
      field_path = f"{current_prefix}/{field_name}"
      field_proto = field.proto
      field_which = field_proto.which()
      # 'slot' fields carry plain data; other kinds (e.g. 'group') nest.
      field_type = field_proto.slot.type.which() if field_which == 'slot' else field_which
      field_types_dict[field_path] = field_type
      if field_which == 'slot':
        slot_type = field_proto.slot.type
        type_which = slot_type.which()
        if type_which == 'list':
          element_type = slot_type.list.elementType.which()
          # wildcard entry matched later against numeric list indices
          list_path = f"{field_path}/*"
          field_types_dict[list_path] = element_type
          if element_type == 'struct':
            stack.append((field.schema.elementType, list_path))
        elif type_which == 'struct':
          stack.append((field.schema, field_path))
      elif field_which == 'group':
        stack.append((field.schema, field_path))
def _convert_to_optimal_dtype(values_list, capnp_type):
if not values_list:
return np.array([])
dtype_mapping = {
'bool': np.bool_, 'int8': np.int8, 'int16': np.int16, 'int32': np.int32, 'int64': np.int64,
'uint8': np.uint8, 'uint16': np.uint16, 'uint32': np.uint32, 'uint64': np.uint64,
'float32': np.float32, 'float64': np.float64, 'text': object, 'data': object,
'enum': object, 'anyPointer': object,
}
target_dtype = dtype_mapping.get(capnp_type)
return np.array(values_list, dtype=target_dtype) if target_dtype else np.array(values_list)
def _match_field_type(field_path, field_types):
if field_path in field_types:
return field_types[field_path]
path_parts = field_path.split('/')
template_parts = [p if not p.isdigit() else '*' for p in path_parts]
template_path = '/'.join(template_parts)
return field_types.get(template_path)
def msgs_to_time_series(msgs):
  """Extract scalar fields and return (time_series_data, start_time, end_time)."""
  # Per message type: a 'timestamps' list plus per-field value columns.
  # Fields that are ever missing or None are tracked in 'sparse_fields' and
  # stored as object arrays instead of a typed dtype.
  collected_data = defaultdict(lambda: {'timestamps': [], 'columns': defaultdict(list), 'sparse_fields': set()})
  field_types = {}
  extracted_schemas = set()
  min_time = max_time = None
  for msg in msgs:
    typ = msg.which()
    timestamp = msg.logMonoTime * 1e-9  # ns -> s
    if typ != 'initData':
      if min_time is None:
        min_time = timestamp
      max_time = timestamp
    sub_msg = getattr(msg, typ)
    # Skip messages with no dict conversion; qcomGnss/ubloxGnss are excluded
    # explicitly -- presumably too deep/bulky to be useful here (verify).
    if not hasattr(sub_msg, 'to_dict') or typ in ('qcomGnss', 'ubloxGnss'):
      continue
    # Capture capnp field types once per message type for dtype selection.
    if hasattr(sub_msg, 'schema') and typ not in extracted_schemas:
      extract_field_types(sub_msg.schema, typ, field_types)
      extracted_schemas.add(typ)
    msg_dict = sub_msg.to_dict(verbose=True)
    flat_dict = flatten_dict(msg_dict)
    flat_dict['_valid'] = msg.valid
    type_data = collected_data[typ]
    columns, sparse_fields = type_data['columns'], type_data['sparse_fields']
    known_fields = set(columns.keys())
    missing_fields = known_fields - flat_dict.keys()
    for field, value in flat_dict.items():
      # A field first seen after rows already exist is sparse: earlier rows lack it.
      if field not in known_fields and type_data['timestamps']:
        sparse_fields.add(field)
      columns[field].append(value)
      if value is None:
        sparse_fields.add(field)
    # Pad fields absent from this particular message to keep columns aligned.
    for field in missing_fields:
      columns[field].append(None)
      sparse_fields.add(field)
    type_data['timestamps'].append(timestamp)
  final_result = {}
  for typ, data in collected_data.items():
    if not data['timestamps']:
      continue
    typ_result = {'t': np.array(data['timestamps'], dtype=np.float64)}
    sparse_fields = data['sparse_fields']
    for field_name, values in data['columns'].items():
      # Left-pad late-appearing columns with None so lengths match timestamps.
      if len(values) < len(data['timestamps']):
        values = [None] * (len(data['timestamps']) - len(values)) + values
        sparse_fields.add(field_name)
      if field_name in sparse_fields:
        typ_result[field_name] = np.array(values, dtype=object)
      else:
        capnp_type = _match_field_type(f"{typ}/{field_name}", field_types)
        typ_result[field_name] = _convert_to_optimal_dtype(values, capnp_type)
    final_result[typ] = typ_result
  return final_result, min_time or 0.0, max_time or 0.0
def _process_segment(segment_identifier: str):
  """Read one log segment and convert it to time series.

  Best-effort worker for the multiprocessing pool: any failure is logged and
  an empty result tuple is returned instead of raising."""
  try:
    reader = _LogFileReader(segment_identifier, sort_by_time=True)
    return msgs_to_time_series(reader)
  except Exception as e:
    cloudlog.warning(f"Warning: Failed to process segment {segment_identifier}: {e}")
    return {}, 0.0, 0.0
class DataManager:
  """Loads a route's log segments in a background process pool and serves the
  resulting per-segment time series. All public accessors take an RLock, so
  they are safe to call from UI threads while loading is in progress."""

  def __init__(self):
    self._segments = []        # per-segment dicts: {msg_type: {'t': ndarray, field: ndarray}}
    self._segment_starts = []  # absolute start time of each segment (parallel to _segments)
    self._start_time = 0.0     # absolute time of first segment; served times are relative to it
    self._duration = 0.0
    self._paths = set()        # every "msgType/field" path seen so far
    self._observers = []       # callbacks receiving event dicts (reset/segment_added/loading_complete)
    self._loading = False
    self._lock = threading.RLock()

  def load_route(self, route: str) -> None:
    """Start loading a route asynchronously; ignored if a load is in flight."""
    if self._loading:
      return
    self._reset()
    threading.Thread(target=self._load_async, args=(route,), daemon=True).start()

  def get_timeseries(self, path: str):
    """Return (times, values) for 'msgType/field' concatenated across segments.

    Times are relative to the route start. Returns ([], []) when the path has
    no data yet."""
    with self._lock:
      msg_type, field = path.split('/', 1)
      times, values = [], []
      for segment in self._segments:
        if msg_type in segment and field in segment[msg_type]:
          times.append(segment[msg_type]['t'])
          values.append(segment[msg_type][field])
      if not times:
        return [], []
      combined_times = np.concatenate(times) - self._start_time
      # Mixed dtypes across segments (sparse vs dense) must be widened to
      # object before concatenation.
      if len(values) > 1 and any(arr.dtype != values[0].dtype for arr in values):
        values = [arr.astype(object) for arr in values]
      return combined_times, np.concatenate(values)

  def get_value_at(self, path: str, time: float):
    """Return the latest sample of `path` at or before route-relative `time`,
    looking back at most 5 seconds; None if nothing qualifies."""
    with self._lock:
      MAX_LOOKBACK = 5.0  # seconds
      absolute_time = self._start_time + time
      message_type, field = path.split('/', 1)
      current_index = bisect.bisect_right(self._segment_starts, absolute_time) - 1
      # Check the containing segment, then the previous one in case `time`
      # falls in a gap right after a segment boundary.
      for index in (current_index, current_index - 1):
        if not 0 <= index < len(self._segments):
          continue
        segment = self._segments[index].get(message_type)
        if not segment or field not in segment:
          continue
        times = segment['t']
        if len(times) == 0 or (index != current_index and absolute_time - times[-1] > MAX_LOOKBACK):
          continue
        position = np.searchsorted(times, absolute_time, 'right') - 1
        if position >= 0 and absolute_time - times[position] <= MAX_LOOKBACK:
          return segment[field][position]
      return None

  def get_all_paths(self):
    """All known 'msgType/field' paths, sorted."""
    with self._lock:
      return sorted(self._paths)

  def get_duration(self):
    with self._lock:
      return self._duration

  def is_plottable(self, path: str):
    """True when the path currently holds numeric or boolean data."""
    _, values = self.get_timeseries(path)
    # get_timeseries returns ([], []) -- a plain list with no .dtype -- for
    # unknown paths; the old `data is None` guard could never trigger, so
    # empty paths raised AttributeError. Guard on emptiness instead.
    if len(values) == 0:
      return False
    return np.issubdtype(values.dtype, np.number) or np.issubdtype(values.dtype, np.bool_)

  def add_observer(self, callback):
    with self._lock:
      self._observers.append(callback)

  def remove_observer(self, callback):
    with self._lock:
      if callback in self._observers:
        self._observers.remove(callback)

  def _reset(self):
    # Clear all loaded state, then tell observers (outside the lock) to drop theirs.
    with self._lock:
      self._loading = True
      self._segments.clear()
      self._segment_starts.clear()
      self._paths.clear()
      self._start_time = self._duration = 0.0
      observers = self._observers.copy()
    for callback in observers:
      callback({'reset': True})

  def _load_async(self, route: str):
    # Background thread: fan segment parsing out to a process pool and feed
    # results back into _add_segment as they arrive (in order).
    try:
      lr = LogReader(route, sort_by_time=True)
      if not lr.logreader_identifiers:
        cloudlog.warning(f"Warning: No log segments found for route: {route}")
        return
      num_processes = max(1, multiprocessing.cpu_count() // 2)
      with multiprocessing.Pool(processes=num_processes) as pool, tqdm.tqdm(total=len(lr.logreader_identifiers), desc="Processing Segments") as pbar:
        for segment_result, start_time, end_time in pool.imap(_process_segment, lr.logreader_identifiers):
          pbar.update(1)
          if segment_result:
            self._add_segment(segment_result, start_time, end_time)
    except Exception:
      cloudlog.exception(f"Error loading route {route}:")
    finally:
      self._finalize_loading()

  def _add_segment(self, segment_data: dict, start_time: float, end_time: float):
    # Register one parsed segment and notify observers of the new duration.
    with self._lock:
      self._segments.append(segment_data)
      self._segment_starts.append(start_time)
      if len(self._segments) == 1:
        self._start_time = start_time
      self._duration = end_time - self._start_time
      for msg_type, data in segment_data.items():
        for field in data.keys():
          if field != 't':
            self._paths.add(f"{msg_type}/{field}")
      observers = self._observers.copy()
    for callback in observers:
      callback({'segment_added': True, 'duration': self._duration, 'segment_count': len(self._segments)})

  def _finalize_loading(self):
    with self._lock:
      self._loading = False
      observers = self._observers.copy()
      duration = self._duration
    for callback in observers:
      callback({'loading_complete': True, 'duration': duration})

@ -0,0 +1,266 @@
import os
import re
import threading
import numpy as np
from collections import deque
import dearpygui.dearpygui as dpg
class DataTreeNode:
  """One node of the data-path tree shown in the sidebar.

  Interior nodes group path segments; a leaf corresponds to a complete
  'msgType/field' path."""

  def __init__(self, name: str, full_path: str = "", parent=None):
    # identity / tree position
    self.name = name
    self.full_path = full_path
    self.parent = parent
    self.children: dict[str, DataTreeNode] = {}
    self.child_count = 0
    self.is_leaf = False
    # None means "not determined yet"
    self.is_plottable: bool | None = None
    # lazy UI construction bookkeeping
    self.ui_tag: str | None = None
    self.ui_created = False
    self.children_ui_created = False
class DataTree:
  """Sidebar widget showing every available data path as a lazily-built dearpygui tree.

  Tree nodes are materialized incrementally (MAX_NODES_PER_FRAME per frame) from
  a build queue, so very large routes do not stall the render loop.  New data
  and search-box changes are flagged from observer/UI callbacks and applied on
  the main thread in update_frame().
  """

  MAX_NODES_PER_FRAME = 50  # cap on widgets created per frame to keep the UI responsive

  def __init__(self, data_manager, playback_manager):
    self.data_manager = data_manager
    self.playback_manager = playback_manager
    self.current_search = ""
    self.data_tree = DataTreeNode(name="root")
    # queue of (node, parent_tag, before_tag) awaiting widget creation
    self._build_queue: deque[tuple[DataTreeNode, str | None, str | int]] = deque()
    self._all_paths_cache: set[str] = set()
    self._item_handlers: set[str] = set()  # handler-registry tags, cleaned up in _clear_ui
    self._avg_char_width = None  # measured once for value-text truncation
    self._queued_search = None   # search text staged by callback, applied in update_frame
    self._new_data = False       # set by observer when segments arrive
    self._ui_lock = threading.RLock()
    self.data_manager.add_observer(self._on_data_loaded)

  def create_ui(self, parent_tag: str):
    """Build the static sidebar chrome (title, search box, tree container)."""
    with dpg.child_window(parent=parent_tag, border=False, width=-1, height=-1):
      dpg.add_text("Available Data")
      dpg.add_separator()
      dpg.add_input_text(tag="search_input", width=-1, hint="Search fields...", callback=self.search_data)
      dpg.add_separator()
      with dpg.group(tag="data_tree_container"):
        pass

  def _on_data_loaded(self, data: dict):
    """Observer callback from DataManager; only sets flags, UI work happens in update_frame."""
    with self._ui_lock:
      if data.get('segment_added'):
        self._new_data = True
      elif data.get('reset'):
        # presumably emitted when a new route starts loading — drop the stale path cache
        self._all_paths_cache = set()
        self._new_data = True

  def _populate_tree(self):
    """Rebuild the whole tree from the cached path set (destroys existing widgets)."""
    self._clear_ui()
    self.data_tree = self._add_paths_to_tree(self._all_paths_cache, incremental=False)
    if self.data_tree:
      self._request_children_build(self.data_tree)

  def _add_paths_to_tree(self, paths, incremental=False):
    """Insert '/'-separated paths into the node tree (filtered by the active search).

    With incremental=True new paths are merged into the existing tree and any
    parent that gained children is queued for a UI refresh; otherwise a fresh
    tree is built and returned.
    """
    search_term = self.current_search.strip().lower()
    filtered_paths = [path for path in paths if self._should_show_path(path, search_term)]
    target_tree = self.data_tree if incremental else DataTreeNode(name="root")
    if not filtered_paths:
      return target_tree
    parent_nodes_to_recheck = set()
    for path in sorted(filtered_paths):
      parts = path.split('/')
      current_node = target_tree
      current_path_prefix = ""
      for i, part in enumerate(parts):
        current_path_prefix = f"{current_path_prefix}/{part}" if current_path_prefix else part
        if i < len(parts) - 1:
          parent_nodes_to_recheck.add(current_node)  # for incremental changes from new data
        if part not in current_node.children:
          current_node.children[part] = DataTreeNode(name=part, full_path=current_path_prefix, parent=current_node)
        current_node = current_node.children[part]
      if not current_node.is_leaf:
        current_node.is_leaf = True
    self._calculate_child_counts(target_tree)
    if incremental:
      for p_node in parent_nodes_to_recheck:
        p_node.children_ui_created = False
        self._request_children_build(p_node)
    return target_tree

  def update_frame(self, font):
    """Main-thread per-frame tick: apply pending data/search changes, then drain the build queue."""
    with self._ui_lock:
      if self._avg_char_width is None and dpg.is_dearpygui_running():
        self._avg_char_width = self.calculate_avg_char_width(font)
      if self._new_data:
        current_paths = set(self.data_manager.get_all_paths())
        new_paths = current_paths - self._all_paths_cache
        all_paths_empty = not self._all_paths_cache
        self._all_paths_cache = current_paths
        if all_paths_empty:
          self._populate_tree()  # first data: build from scratch
        elif new_paths:
          self._add_paths_to_tree(new_paths, incremental=True)
        self._new_data = False
        return
      if self._queued_search is not None:
        self.current_search = self._queued_search
        self._all_paths_cache = set(self.data_manager.get_all_paths())
        self._populate_tree()
        self._queued_search = None
        return
      # steady state: materialize a bounded number of queued widgets
      nodes_processed = 0
      while self._build_queue and nodes_processed < self.MAX_NODES_PER_FRAME:
        child_node, parent_tag, before_tag = self._build_queue.popleft()
        if not child_node.ui_created:
          if child_node.is_leaf:
            self._create_leaf_ui(child_node, parent_tag, before_tag)
          else:
            self._create_tree_node_ui(child_node, parent_tag, before_tag)
          nodes_processed += 1

  def search_data(self):
    """Search-box callback: stage the query; applied on the next update_frame."""
    self._queued_search = dpg.get_value("search_input")

  def _clear_ui(self):
    """Tear down all tree widgets; handler registries are deleted one frame later."""
    for handler_tag in self._item_handlers:
      dpg.configure_item(handler_tag, show=False)
    # deleting a registry while one of its handlers may still fire this frame is
    # unsafe, so defer actual deletion to the next frame
    dpg.set_frame_callback(dpg.get_frame_count() + 1, callback=self._delete_handlers, user_data=list(self._item_handlers))
    self._item_handlers.clear()
    if dpg.does_item_exist("data_tree_container"):
      dpg.delete_item("data_tree_container", children_only=True)
    self._build_queue.clear()

  def _delete_handlers(self, sender, app_data, user_data):
    """Frame callback: delete the handler registries collected by _clear_ui."""
    for handler in user_data:
      dpg.delete_item(handler)

  def _calculate_child_counts(self, node: DataTreeNode):
    """Recursively set child_count (0 for leaves, number of children otherwise)."""
    if node.is_leaf:
      node.child_count = 0
    else:
      node.child_count = len(node.children)
      for child in node.children.values():
        self._calculate_child_counts(child)

  def _create_tree_node_ui(self, node: DataTreeNode, parent_tag: str, before: str | int):
    """Create the collapsible tree widget for an interior node."""
    tag = f"tree_{node.full_path}"
    node.ui_tag = tag
    label = f"{node.name} ({node.child_count} fields)"
    search_term = self.current_search.strip().lower()
    # auto-expand nodes whose descendants match the search (2+ chars)
    expand = bool(search_term) and len(search_term) > 1 and any(search_term in path for path in self._get_descendant_paths(node))
    if expand and node.parent and node.parent.child_count > 100 and node.child_count > 2:  # don't fully autoexpand large lists (only affects procLog rn)
      label += " (+)"
      expand = False
    with dpg.tree_node(
      label=label, parent=parent_tag, tag=tag, default_open=expand, open_on_arrow=True, open_on_double_click=True, before=before, delay_search=True
    ):
      # children are built lazily: either when the node is toggled open or when
      # it becomes visible while already open
      with dpg.item_handler_registry() as handler_tag:
        dpg.add_item_toggled_open_handler(callback=lambda s, a, u: self._request_children_build(node))
        dpg.add_item_visible_handler(callback=lambda s, a, u: self._request_children_build(node))
      dpg.bind_item_handler_registry(tag, handler_tag)
      self._item_handlers.add(handler_tag)
    node.ui_created = True

  def _create_leaf_ui(self, node: DataTreeNode, parent_tag: str, before: str | int):
    """Create a leaf row (name + live value) that can be dragged onto plots."""
    with dpg.group(parent=parent_tag, tag=f"leaf_{node.full_path}", before=before, delay_search=True) as draggable_group:
      with dpg.table(header_row=False, policy=dpg.mvTable_SizingStretchProp, delay_search=True):
        dpg.add_table_column(init_width_or_weight=0.5)
        dpg.add_table_column(init_width_or_weight=0.5)
        with dpg.table_row():
          dpg.add_text(node.name)
          dpg.add_text("N/A", tag=f"value_{node.full_path}")
    if node.is_plottable is None:
      node.is_plottable = self.data_manager.is_plottable(node.full_path)
    if node.is_plottable:
      # drag source consumed by the plot panels' drop_callback
      with dpg.drag_payload(parent=draggable_group, drag_data=node.full_path, payload_type="TIMESERIES_PAYLOAD"):
        dpg.add_text(f"Plot: {node.full_path}")
    with dpg.item_handler_registry() as handler_tag:
      dpg.add_item_visible_handler(callback=self._on_item_visible, user_data=node.full_path)
    dpg.bind_item_handler_registry(draggable_group, handler_tag)
    self._item_handlers.add(handler_tag)
    node.ui_created = True
    node.ui_tag = f"value_{node.full_path}"

  def _on_item_visible(self, sender, app_data, user_data):
    """Refresh a visible leaf's value text to the sample at the current playback time."""
    with self._ui_lock:
      path = user_data
      value_tag = f"value_{path}"
      value_column_width = dpg.get_item_rect_size("sidebar_window")[0] // 2
      value = self.data_manager.get_value_at(path, self.playback_manager.current_time_s)
      if value is not None:
        # NOTE(review): assumes _avg_char_width was measured before any leaf
        # became visible (update_frame runs first each frame) — verify
        formatted_value = self.format_and_truncate(value, value_column_width, self._avg_char_width)
        dpg.set_value(value_tag, formatted_value)
      else:
        dpg.set_value(value_tag, "N/A")

  def _request_children_build(self, node: DataTreeNode):
    """Queue widget creation for a node's children, preserving sorted order."""
    with self._ui_lock:
      if not node.children_ui_created and (node.name == "root" or (node.ui_tag is not None and dpg.get_value(node.ui_tag))):  # check root or node expanded
        parent_tag = "data_tree_container" if node.name == "root" else node.ui_tag
        sorted_children = sorted(node.children.values(), key=self._natural_sort_key)
        next_existing: list[int | str] = [0] * len(sorted_children)
        current_before_tag: int | str = 0
        for i in range(len(sorted_children) - 1, -1, -1):  # calculate "before_tag" for correct ordering when incrementally building tree
          child = sorted_children[i]
          next_existing[i] = current_before_tag
          if child.ui_created:
            candidate_tag = f"leaf_{child.full_path}" if child.is_leaf else f"tree_{child.full_path}"
            if dpg.does_item_exist(candidate_tag):
              current_before_tag = candidate_tag
        for i, child_node in enumerate(sorted_children):
          if not child_node.ui_created:
            before_tag = next_existing[i]
            self._build_queue.append((child_node, parent_tag, before_tag))
        node.children_ui_created = True

  def _should_show_path(self, path: str, search_term: str) -> bool:
    """Filter: hide DEPRECATED fields (unless SHOW_DEPRECATED is set) and apply the search."""
    if 'DEPRECATED' in path and not os.environ.get('SHOW_DEPRECATED'):
      return False
    return not search_term or search_term in path.lower()

  def _natural_sort_key(self, node: DataTreeNode):
    """Sort interior nodes before leaves, then by name with numeric runs compared as ints."""
    node_type_key = node.is_leaf
    parts = [int(p) if p.isdigit() else p.lower() for p in re.split(r'(\d+)', node.name) if p]
    return (node_type_key, parts)

  def _get_descendant_paths(self, node: DataTreeNode):
    """Yield lowercase relative paths of every leaf under node (used for search expansion)."""
    for child_name, child_node in node.children.items():
      child_name_lower = child_name.lower()
      if child_node.is_leaf:
        yield child_name_lower
      else:
        for path in self._get_descendant_paths(child_node):
          yield f"{child_name_lower}/{path}"

  @staticmethod
  def calculate_avg_char_width(font):
    """Estimate average glyph width for the font, or None if text size is unavailable."""
    sample_text = "abcdefghijklmnopqrstuvwxyz0123456789"
    if size := dpg.get_text_size(sample_text, font=font):
      return size[0] / len(sample_text)
    return None

  @staticmethod
  def format_and_truncate(value, available_width: float, avg_char_width: float) -> str:
    """Format value (floats to 5 decimals) and ellipsize it to fit available_width pixels."""
    s = f"{value:.5f}" if np.issubdtype(type(value), np.floating) else str(value)
    max_chars = int(available_width / avg_char_width) - 3
    if len(s) > max_chars:
      return s[: max(0, max_chars)] + "..."
    return s

@ -0,0 +1,262 @@
import dearpygui.dearpygui as dpg
from openpilot.tools.jotpluggler.data import DataManager
from openpilot.tools.jotpluggler.views import TimeSeriesPanel
GRIP_SIZE = 4  # unscaled thickness (px) of the draggable divider between panes
MIN_PANE_SIZE = 60  # unscaled minimum size (px) a pane may shrink to
class PlotLayoutManager:
  """Manages a recursive split layout of plot panels.

  The layout is a tree of dicts: {"type": "panel", "panel": ViewPanel} leaves and
  {"type": "split", "orientation": 0|1, "children": [...], "proportions": [...]}
  interior nodes (orientation 0 = horizontal row, 1 = vertical column).  Nodes
  are addressed by a path of child indices from the root; dearpygui tags are
  derived from those paths, and the UI is rebuilt from the subtree whose dict
  changed.
  """

  def __init__(self, data_manager: DataManager, playback_manager, worker_manager, scale: float = 1.0):
    self.data_manager = data_manager
    self.playback_manager = playback_manager
    self.worker_manager = worker_manager
    self.scale = scale  # display scaling factor applied to all pixel sizes
    self.container_tag = "plot_layout_container"
    self.active_panels: list = []  # panels with live UI, ticked by update_all_panels
    self.grip_size = int(GRIP_SIZE * self.scale)
    self.min_pane_size = int(MIN_PANE_SIZE * self.scale)
    initial_panel = TimeSeriesPanel(data_manager, playback_manager, worker_manager)
    self.layout: dict = {"type": "panel", "panel": initial_panel}

  def create_ui(self, parent_tag: str):
    """(Re)create the root container and build the whole layout inside it."""
    if dpg.does_item_exist(self.container_tag):
      dpg.delete_item(self.container_tag)
    with dpg.child_window(tag=self.container_tag, parent=parent_tag, border=False, width=-1, height=-1, no_scrollbar=True):
      container_width, container_height = dpg.get_item_rect_size(self.container_tag)
      self._create_ui_recursive(self.layout, self.container_tag, [], container_width, container_height)

  def _create_ui_recursive(self, layout: dict, parent_tag: str, path: list[int], width: int, height: int):
    """Dispatch UI creation based on node type."""
    if layout["type"] == "panel":
      self._create_panel_ui(layout, parent_tag, path)
    else:
      self._create_split_ui(layout, parent_tag, path, width, height)

  def _create_panel_ui(self, layout: dict, parent_tag: str, path: list[int]):
    """Build one panel: title bar with controls, then the panel's own content."""
    panel_tag = self._path_to_tag(path, "panel")
    panel = layout["panel"]
    self.active_panels.append(panel)
    with dpg.child_window(tag=panel_tag, parent=parent_tag, border=True, width=-1, height=-1, no_scrollbar=True):
      with dpg.group(horizontal=True):
        dpg.add_input_text(default_value=panel.title, width=int(100 * self.scale), callback=lambda s, v: setattr(panel, "title", v))
        dpg.add_combo(items=["Time Series"], default_value="Time Series", width=int(100 * self.scale))
        dpg.add_button(label="Clear", callback=lambda: self.clear_panel(panel), width=int(40 * self.scale))
        dpg.add_button(label="Delete", callback=lambda: self.delete_panel(path), width=int(40 * self.scale))
        dpg.add_button(label="Split H", callback=lambda: self.split_panel(path, 0), width=int(40 * self.scale))
        dpg.add_button(label="Split V", callback=lambda: self.split_panel(path, 1), width=int(40 * self.scale))
      dpg.add_separator()
      content_tag = self._path_to_tag(path, "content")
      with dpg.child_window(tag=content_tag, border=False, height=-1, width=-1, no_scrollbar=True):
        panel.create_ui(content_tag)

  def _create_split_ui(self, layout: dict, parent_tag: str, path: list[int], width: int, height: int):
    """Build a split node: sized child containers separated by drag grips."""
    split_tag = self._path_to_tag(path, "split")
    orientation, _, pane_sizes = self._get_split_geometry(layout, (width, height))
    with dpg.group(tag=split_tag, parent=parent_tag, horizontal=orientation == 0):
      for i, child_layout in enumerate(layout["children"]):
        child_path = path + [i]
        container_tag = self._path_to_tag(child_path, "container")
        pane_width, pane_height = [(pane_sizes[i], -1), (-1, pane_sizes[i])][orientation]  # fill 2nd dim up to the border
        with dpg.child_window(tag=container_tag, width=pane_width, height=pane_height, border=False, no_scrollbar=True):
          child_width, child_height = [(pane_sizes[i], height), (width, pane_sizes[i])][orientation]
          self._create_ui_recursive(child_layout, container_tag, child_path, child_width, child_height)
        if i < len(layout["children"]) - 1:
          self._create_grip(split_tag, path, i, orientation)

  def clear_panel(self, panel):
    """Delegate to the panel's own clear()."""
    panel.clear()

  def delete_panel(self, panel_path: list[int]):
    """Remove the panel at panel_path, collapsing single-child splits as needed."""
    if not panel_path:  # Root deletion
      old_panel = self.layout["panel"]
      old_panel.destroy_ui()
      self.active_panels.remove(old_panel)
      # the root must always hold something: replace with a fresh empty panel
      new_panel = TimeSeriesPanel(self.data_manager, self.playback_manager, self.worker_manager)
      self.layout = {"type": "panel", "panel": new_panel}
      self._rebuild_ui_at_path([])
      return
    parent, child_index = self._get_parent_and_index(panel_path)
    layout_to_delete = parent["children"][child_index]
    self._cleanup_ui_recursive(layout_to_delete, panel_path)
    parent["children"].pop(child_index)
    parent["proportions"].pop(child_index)
    if len(parent["children"]) == 1:  # remove parent and collapse
      remaining_child = parent["children"][0]
      if len(panel_path) == 1:  # parent is at root level - promote remaining child to root
        self.layout = remaining_child
        self._rebuild_ui_at_path([])
      else:  # replace parent with remaining child in grandparent
        grandparent_path = panel_path[:-2]
        parent_index = panel_path[-2]
        self._replace_layout_at_path(grandparent_path + [parent_index], remaining_child)
        self._rebuild_ui_at_path(grandparent_path + [parent_index])
    else:  # redistribute proportions
      equal_prop = 1.0 / len(parent["children"])
      parent["proportions"] = [equal_prop] * len(parent["children"])
      self._rebuild_ui_at_path(panel_path[:-1])

  def split_panel(self, panel_path: list[int], orientation: int):
    """Split the panel at panel_path, adding a new empty panel beside it.

    orientation: 0 = horizontal (side by side), 1 = vertical (stacked).
    """
    current_layout = self._get_layout_at_path(panel_path)
    existing_panel = current_layout["panel"]
    new_panel = TimeSeriesPanel(self.data_manager, self.playback_manager, self.worker_manager)
    parent, child_index = self._get_parent_and_index(panel_path)
    if parent is None:  # Root split
      self.layout = {
        "type": "split",
        "orientation": orientation,
        "children": [{"type": "panel", "panel": existing_panel}, {"type": "panel", "panel": new_panel}],
        "proportions": [0.5, 0.5],
      }
      self._rebuild_ui_at_path([])
    elif parent["type"] == "split" and parent["orientation"] == orientation:  # Same orientation - insert into existing split
      parent["children"].insert(child_index + 1, {"type": "panel", "panel": new_panel})
      parent["proportions"] = [1.0 / len(parent["children"])] * len(parent["children"])
      self._rebuild_ui_at_path(panel_path[:-1])
    else:  # Different orientation - create new split level
      new_split = {"type": "split", "orientation": orientation, "children": [current_layout, {"type": "panel", "panel": new_panel}], "proportions": [0.5, 0.5]}
      self._replace_layout_at_path(panel_path, new_split)
      self._rebuild_ui_at_path(panel_path)

  def _rebuild_ui_at_path(self, path: list[int]):
    """Destroy and recreate the UI for the subtree rooted at path."""
    layout = self._get_layout_at_path(path)
    if path:
      container_tag = self._path_to_tag(path, "container")
    else:  # Root update
      container_tag = self.container_tag
    self._cleanup_ui_recursive(layout, path)
    dpg.delete_item(container_tag, children_only=True)
    width, height = dpg.get_item_rect_size(container_tag)
    self._create_ui_recursive(layout, container_tag, path, width, height)

  def _cleanup_ui_recursive(self, layout: dict, path: list[int]):
    """Release panel UIs and grip handler registries for the subtree at path."""
    if layout["type"] == "panel":
      panel = layout["panel"]
      panel.destroy_ui()
      if panel in self.active_panels:
        self.active_panels.remove(panel)
    else:
      for i in range(len(layout["children"]) - 1):
        handler_tag = f"{self._path_to_tag(path, f'grip_{i}')}_handler"
        if dpg.does_item_exist(handler_tag):
          dpg.delete_item(handler_tag)
      for i, child in enumerate(layout["children"]):
        self._cleanup_ui_recursive(child, path + [i])

  def update_all_panels(self):
    """Per-frame tick forwarded to every live panel."""
    for panel in self.active_panels:
      panel.update()

  def on_viewport_resize(self):
    """Recompute pane sizes for the whole layout after the window changed size."""
    self._resize_splits_recursive(self.layout, [])

  def _resize_splits_recursive(self, layout: dict, path: list[int], width: int | None = None, height: int | None = None):
    """Resize existing split containers in place (no UI rebuild)."""
    if layout["type"] == "split":
      split_tag = self._path_to_tag(path, "split")
      if dpg.does_item_exist(split_tag):
        # fall back to the parent's measured size when no explicit size is passed
        available_sizes = (width, height) if width and height else dpg.get_item_rect_size(dpg.get_item_parent(split_tag))
        orientation, _, pane_sizes = self._get_split_geometry(layout, available_sizes)
        size_properties = ("width", "height")
        for i, child_layout in enumerate(layout["children"]):
          child_path = path + [i]
          container_tag = self._path_to_tag(child_path, "container")
          if dpg.does_item_exist(container_tag):
            dpg.configure_item(container_tag, **{size_properties[orientation]: pane_sizes[i]})
            child_width, child_height = [(pane_sizes[i], available_sizes[1]), (available_sizes[0], pane_sizes[i])][orientation]
            self._resize_splits_recursive(child_layout, child_path, child_width, child_height)

  def _get_split_geometry(self, layout: dict, available_size: tuple[int, int]) -> tuple[int, int, list[int]]:
    """Return (orientation, usable_size, per-pane pixel sizes) for a split node.

    usable_size is the available extent minus grip space, floored at
    min_pane_size; each pane gets its proportion of it, also floored.
    """
    orientation = layout["orientation"]
    num_grips = len(layout["children"]) - 1
    usable_size = max(self.min_pane_size, available_size[orientation] - (num_grips * self.grip_size))
    pane_sizes = [max(self.min_pane_size, int(usable_size * prop)) for prop in layout["proportions"]]
    return orientation, usable_size, pane_sizes

  def _get_layout_at_path(self, path: list[int]) -> dict:
    """Walk the layout tree by child indices; [] returns the root."""
    current = self.layout
    for index in path:
      current = current["children"][index]
    return current

  def _get_parent_and_index(self, path: list[int]) -> tuple:
    """Return (parent layout dict, child index), or (None, -1) for the root."""
    return (None, -1) if not path else (self._get_layout_at_path(path[:-1]), path[-1])

  def _replace_layout_at_path(self, path: list[int], new_layout: dict):
    """Swap the layout node at path for new_layout (root included)."""
    if not path:
      self.layout = new_layout
    else:
      parent, index = self._get_parent_and_index(path)
      parent["children"][index] = new_layout

  def _path_to_tag(self, path: list[int], prefix: str = "") -> str:
    """Derive a deterministic dearpygui tag from a tree path, e.g. 'container_0_1'."""
    path_str = "_".join(map(str, path)) if path else "root"
    return f"{prefix}_{path_str}" if prefix else path_str

  def _create_grip(self, parent_tag: str, path: list[int], grip_index: int, orientation: int):
    """Create the thin draggable divider after child grip_index of a split."""
    grip_tag = self._path_to_tag(path, f"grip_{grip_index}")
    width, height = [(self.grip_size, -1), (-1, self.grip_size)][orientation]
    with dpg.child_window(tag=grip_tag, parent=parent_tag, width=width, height=height, no_scrollbar=True, border=False):
      button_tag = dpg.add_button(label="", width=-1, height=-1)
      with dpg.item_handler_registry(tag=f"{grip_tag}_handler"):
        user_data = (path, grip_index, orientation)
        # active fires every frame while held; deactivated ends the drag
        dpg.add_item_active_handler(callback=self._on_grip_drag, user_data=user_data)
        dpg.add_item_deactivated_handler(callback=self._on_grip_end, user_data=user_data)
      dpg.bind_item_handler_registry(button_tag, f"{grip_tag}_handler")

  def _on_grip_drag(self, sender, app_data, user_data):
    """While a grip is held: shift proportion between the two adjacent panes.

    The first call of a drag only records the starting proportions and mouse
    position (stashed in the layout dict as "_drag_data"); subsequent calls
    compute the delta from that baseline so the drag is stateless per frame.
    """
    path, grip_index, orientation = user_data
    layout = self._get_layout_at_path(path)
    if "_drag_data" not in layout:
      layout["_drag_data"] = {"initial_proportions": layout["proportions"][:], "start_mouse": dpg.get_mouse_pos(local=False)[orientation]}
      return
    drag_data = layout["_drag_data"]
    split_tag = self._path_to_tag(path, "split")
    if not dpg.does_item_exist(split_tag):
      return
    _, usable_size, _ = self._get_split_geometry(layout, dpg.get_item_rect_size(split_tag))
    current_coord = dpg.get_mouse_pos(local=False)[orientation]
    delta = current_coord - drag_data["start_mouse"]
    delta_prop = delta / usable_size
    left_idx = grip_index
    right_idx = left_idx + 1
    initial = drag_data["initial_proportions"]
    min_prop = self.min_pane_size / usable_size
    new_left = max(min_prop, initial[left_idx] + delta_prop)
    new_right = max(min_prop, initial[right_idx] - delta_prop)
    # clamp so the pair never grows beyond its original combined share
    total_available = initial[left_idx] + initial[right_idx]
    if new_left + new_right > total_available:
      if new_left > new_right:
        new_left = total_available - new_right
      else:
        new_right = total_available - new_left
    layout["proportions"] = initial[:]
    layout["proportions"][left_idx] = new_left
    layout["proportions"][right_idx] = new_right
    self._resize_splits_recursive(layout, path)

  def _on_grip_end(self, sender, app_data, user_data):
    """Drag released: drop the per-drag baseline so the next drag starts fresh."""
    path, _, _ = user_data
    self._get_layout_at_path(path).pop("_drag_data", None)

@ -0,0 +1,247 @@
#!/usr/bin/env python3
import argparse
import os
import pyautogui
import subprocess
import dearpygui.dearpygui as dpg
import multiprocessing
import uuid
import signal
from openpilot.common.basedir import BASEDIR
from openpilot.tools.jotpluggler.data import DataManager
from openpilot.tools.jotpluggler.datatree import DataTree
from openpilot.tools.jotpluggler.layout import PlotLayoutManager
DEMO_ROUTE = "a2a0ccea32023010|2023-07-27--13-01-19"
class WorkerManager:
  """Thin wrapper around a multiprocessing.Pool for background downsampling work.

  Tasks are keyed by task_id; submitting a new task with the same id supersedes
  the previous one's bookkeeping entry.
  """

  def __init__(self, max_workers=None):
    # workers ignore SIGINT so Ctrl-C is handled only by the main process
    self.pool = multiprocessing.Pool(max_workers or min(4, multiprocessing.cpu_count()), initializer=WorkerManager.worker_initializer)
    self.active_tasks = {}  # task_id -> AsyncResult of the most recent submission

  def submit_task(self, func, args_list, callback=None, task_id=None):
    """Run func over args_list via starmap_async; callback gets the list of results.

    Returns the task_id (generated if not provided).
    """
    task_id = task_id or str(uuid.uuid4())
    if task_id in self.active_tasks:
      try:
        # NOTE(review): multiprocessing.pool.AsyncResult has no terminate();
        # this always raises AttributeError and is swallowed below, so an
        # in-flight task is never actually cancelled — confirm intent
        self.active_tasks[task_id].terminate()
      except Exception:
        pass

    def handle_success(result):
      # runs on the pool's result-handler thread, not the main thread
      self.active_tasks.pop(task_id, None)
      if callback:
        try:
          callback(result)
        except Exception as e:
          print(f"Callback for task {task_id} failed: {e}")

    def handle_error(error):
      self.active_tasks.pop(task_id, None)
      print(f"Task {task_id} failed: {error}")

    async_result = self.pool.starmap_async(func, args_list, callback=handle_success, error_callback=handle_error)
    self.active_tasks[task_id] = async_result
    return task_id

  @staticmethod
  def worker_initializer():
    # keep SIGINT from propagating to pool workers
    signal.signal(signal.SIGINT, signal.SIG_IGN)

  def shutdown(self):
    """Stop the pool without waiting for outstanding work."""
    for task in self.active_tasks.values():
      try:
        # NOTE(review): same as above — AsyncResult.terminate() does not exist;
        # pool.terminate() below is what actually stops the workers
        task.terminate()
      except Exception:
        pass
    self.pool.terminate()
    self.pool.join()
class PlaybackManager:
  """Tracks play/pause state and the playhead position for the loaded route."""

  def __init__(self):
    self.is_playing = False    # whether update_time() advances the playhead
    self.current_time_s = 0.0  # playhead position, seconds from route start
    self.duration_s = 0.0      # total route duration in seconds

  def set_route_duration(self, duration: float):
    """Record the route length and clamp the playhead into it (pauses playback)."""
    self.duration_s = duration
    clamped = min(self.current_time_s, duration)
    self.seek(clamped)

  def toggle_play_pause(self):
    """Flip playing/paused; resuming while parked at the end restarts from zero."""
    at_end = self.current_time_s >= self.duration_s
    if at_end and not self.is_playing:
      self.seek(0.0)  # seek() pauses, but we flip to playing right below
    self.is_playing = not self.is_playing

  def seek(self, time_s: float):
    """Pause and move the playhead to time_s, clamped to [0, duration_s]."""
    self.is_playing = False
    self.current_time_s = min(max(time_s, 0.0), self.duration_s)

  def update_time(self, delta_t: float):
    """Advance by delta_t while playing, auto-pausing at the end.

    Returns the (possibly unchanged) playhead time in seconds.
    """
    if self.is_playing:
      advanced = self.current_time_s + delta_t
      self.current_time_s = advanced if advanced < self.duration_s else self.duration_s
      if self.current_time_s >= self.duration_s:
        self.is_playing = False
    return self.current_time_s
class MainController:
  """Top-level glue: owns the managers, builds the main window, drives the frame loop."""

  def __init__(self, scale: float = 1.0):
    self.scale = scale  # display scaling factor for widget sizes
    self.data_manager = DataManager()
    self.playback_manager = PlaybackManager()
    self.worker_manager = WorkerManager()
    self._create_global_themes()
    self.data_tree = DataTree(self.data_manager, self.playback_manager)
    self.plot_layout_manager = PlotLayoutManager(self.data_manager, self.playback_manager, self.worker_manager, scale=self.scale)
    self.data_manager.add_observer(self.on_data_loaded)

  def _create_global_themes(self):
    """Register shared plot themes: scaled line weight, and a red timeline marker."""
    with dpg.theme(tag="global_line_theme"):
      with dpg.theme_component(dpg.mvLineSeries):
        scaled_thickness = max(1.0, self.scale)
        dpg.add_theme_style(dpg.mvPlotStyleVar_LineWeight, scaled_thickness, category=dpg.mvThemeCat_Plots)
    with dpg.theme(tag="global_timeline_theme"):
      with dpg.theme_component(dpg.mvInfLineSeries):
        scaled_thickness = max(1.0, self.scale)
        dpg.add_theme_style(dpg.mvPlotStyleVar_LineWeight, scaled_thickness, category=dpg.mvThemeCat_Plots)
        dpg.add_theme_color(dpg.mvPlotCol_Line, (255, 0, 0, 128), category=dpg.mvThemeCat_Plots)

  def on_data_loaded(self, data: dict):
    """DataManager observer: mirror load progress into the status bar and timeline.

    Handles three event shapes: 'reset' (new route), 'loading_complete', and
    'segment_added' (incremental progress with 'segment_count'/'duration').
    """
    duration = data.get('duration', 0.0)
    self.playback_manager.set_route_duration(duration)
    if data.get('reset'):
      self.playback_manager.current_time_s = 0.0
      self.playback_manager.duration_s = 0.0
      self.playback_manager.is_playing = False
      dpg.set_value("load_status", "Loading...")
      dpg.set_value("timeline_slider", 0.0)
      dpg.configure_item("timeline_slider", max_value=0.0)
      dpg.configure_item("play_pause_button", label="Play")
      dpg.configure_item("load_button", enabled=True)
    elif data.get('loading_complete'):
      num_paths = len(self.data_manager.get_all_paths())
      dpg.set_value("load_status", f"Loaded {num_paths} data paths")
      dpg.configure_item("load_button", enabled=True)
    elif data.get('segment_added'):
      segment_count = data.get('segment_count', 0)
      dpg.set_value("load_status", f"Loading... {segment_count} segments processed")
      dpg.configure_item("timeline_slider", max_value=duration)

  def setup_ui(self):
    """Build the primary window: sidebar (route input + data tree) and plot/timeline area."""
    with dpg.window(tag="Primary Window"):
      with dpg.group(horizontal=True):
        # Left panel - Data tree
        with dpg.child_window(label="Sidebar", width=300 * self.scale, tag="sidebar_window", border=True, resizable_x=True):
          with dpg.group(horizontal=True):
            dpg.add_input_text(tag="route_input", width=-75 * self.scale, hint="Enter route name...")
            dpg.add_button(label="Load", callback=self.load_route, tag="load_button", width=-1)
          dpg.add_text("Ready to load route", tag="load_status")
          dpg.add_separator()
          self.data_tree.create_ui("sidebar_window")
        # Right panel - Plots and timeline
        with dpg.group(tag="right_panel"):
          with dpg.child_window(label="Plot Window", border=True, height=-(30 + 13 * self.scale), tag="main_plot_area"):
            self.plot_layout_manager.create_ui("main_plot_area")
          with dpg.child_window(label="Timeline", border=True):
            with dpg.table(header_row=False, borders_innerH=False, borders_innerV=False, borders_outerH=False, borders_outerV=False):
              dpg.add_table_column(width_fixed=True, init_width_or_weight=int(50 * self.scale))  # Play button
              dpg.add_table_column(width_stretch=True)  # Timeline slider
              dpg.add_table_column(width_fixed=True, init_width_or_weight=int(50 * self.scale))  # FPS counter
              with dpg.table_row():
                dpg.add_button(label="Play", tag="play_pause_button", callback=self.toggle_play_pause, width=int(50 * self.scale))
                dpg.add_slider_float(tag="timeline_slider", default_value=0.0, label="", width=-1, callback=self.timeline_drag)
                dpg.add_text("", tag="fps_counter")
    # re-layout plots whenever the right panel changes size
    with dpg.item_handler_registry(tag="plot_resize_handler"):
      dpg.add_item_resize_handler(callback=self.on_plot_resize)
    dpg.bind_item_handler_registry("right_panel", "plot_resize_handler")
    dpg.set_primary_window("Primary Window", True)

  def on_plot_resize(self, sender, app_data, user_data):
    """Resize handler for the right panel: recompute split pane sizes."""
    self.plot_layout_manager.on_viewport_resize()

  def load_route(self):
    """Load button callback: kick off an async route load if a name was entered."""
    route_name = dpg.get_value("route_input").strip()
    if route_name:
      dpg.set_value("load_status", "Loading route...")
      dpg.configure_item("load_button", enabled=False)
      self.data_manager.load_route(route_name)

  def toggle_play_pause(self, sender):
    """Play/Pause button callback; keeps the button label in sync with state."""
    self.playback_manager.toggle_play_pause()
    label = "Pause" if self.playback_manager.is_playing else "Play"
    dpg.configure_item(sender, label=label)

  def timeline_drag(self, sender, app_data):
    """Timeline slider callback: seek (which pauses) to the dragged time."""
    self.playback_manager.seek(app_data)
    dpg.configure_item("play_pause_button", label="Play")

  def update_frame(self, font):
    """Per-frame tick: data tree, playback clock, slider echo, panels, FPS readout."""
    self.data_tree.update_frame(font)
    new_time = self.playback_manager.update_time(dpg.get_delta_time())
    # don't fight the user while they are dragging the slider
    if not dpg.is_item_active("timeline_slider"):
      dpg.set_value("timeline_slider", new_time)
    self.plot_layout_manager.update_all_panels()
    dpg.set_value("fps_counter", f"{dpg.get_frame_rate():.1f} FPS")

  def shutdown(self):
    """Tear down background workers on exit."""
    self.worker_manager.shutdown()
def main(route_to_load=None):
  """Create the dearpygui context/viewport, run the render loop, clean up on exit.

  route_to_load: optional route name typed into the route box and loaded at startup.
  """
  dpg.create_context()

  # TODO: find better way of calculating display scaling
  try:
    # actual X11 resolution from the active xrandr mode (line containing '*')
    w, h = next(tuple(map(int, l.split()[0].split('x'))) for l in subprocess.check_output(['xrandr']).decode().split('\n') if '*' in l)  # actual resolution
    scale = pyautogui.size()[0] / w  # scaled resolution
  except Exception:
    scale = 1  # headless / no xrandr: assume no scaling

  with dpg.font_registry():
    default_font = dpg.add_font(os.path.join(BASEDIR, "selfdrive/assets/fonts/Inter-Regular.ttf"), int(13 * scale))
    dpg.bind_font(default_font)

  viewport_width, viewport_height = int(1200 * scale), int(800 * scale)
  mouse_x, mouse_y = pyautogui.position()  # TODO: find better way of creating the window where the user is (default dpg behavior annoying on multiple displays)
  # center the viewport on the current mouse position
  dpg.create_viewport(
    title='JotPluggler', width=viewport_width, height=viewport_height, x_pos=mouse_x - viewport_width // 2, y_pos=mouse_y - viewport_height // 2
  )
  dpg.setup_dearpygui()

  controller = MainController(scale=scale)
  controller.setup_ui()
  if route_to_load:
    dpg.set_value("route_input", route_to_load)
    controller.load_route()
  dpg.show_viewport()

  # Main loop
  try:
    while dpg.is_dearpygui_running():
      controller.update_frame(default_font)
      dpg.render_dearpygui_frame()
  finally:
    # always stop worker processes, even if the render loop raises
    controller.shutdown()
    dpg.destroy_context()
if __name__ == "__main__":
  # CLI entry point: either a positional route name or --demo for the built-in route
  parser = argparse.ArgumentParser(description="A tool for visualizing openpilot logs.")
  parser.add_argument("--demo", action="store_true", help="Use the demo route instead of providing one")
  parser.add_argument("route", nargs='?', default=None, help="Optional route name to load on startup.")
  args = parser.parse_args()
  route = DEMO_ROUTE if args.demo else args.route
  main(route_to_load=route)

@ -0,0 +1,195 @@
import uuid
import threading
import numpy as np
from collections import deque
import dearpygui.dearpygui as dpg
from abc import ABC, abstractmethod
class ViewPanel(ABC):
"""Abstract base class for all view panels that can be displayed in a plot container"""
def __init__(self, panel_id: str = None):
self.panel_id = panel_id or str(uuid.uuid4())
self.title = "Untitled Panel"
@abstractmethod
def clear(self):
pass
@abstractmethod
def create_ui(self, parent_tag: str):
pass
@abstractmethod
def destroy_ui(self):
pass
@abstractmethod
def get_panel_type(self) -> str:
pass
@abstractmethod
def update(self):
pass
class TimeSeriesPanel(ViewPanel):
  def __init__(self, data_manager, playback_manager, worker_manager, panel_id: str | None = None):
    """Set up state for a line-plot panel; UI is built later in create_ui()."""
    super().__init__(panel_id)
    self.data_manager = data_manager
    self.playback_manager = playback_manager
    self.worker_manager = worker_manager
    self.title = "Time Series Plot"
    # dearpygui tags, namespaced by panel_id so multiple panels can coexist
    self.plot_tag = f"plot_{self.panel_id}"
    self.x_axis_tag = f"{self.plot_tag}_x_axis"
    self.y_axis_tag = f"{self.plot_tag}_y_axis"
    self.timeline_indicator_tag = f"{self.plot_tag}_timeline"
    self._ui_created = False
    # series path -> (time_array, value_array) at full resolution
    self._series_data: dict[str, tuple[list, list]] = {}
    self._last_plot_duration = 0  # x-axis span at last downsample, for re-trigger checks
    self._update_lock = threading.RLock()
    # downsampled results handed back from worker processes, drained in update()
    self.results_deque: deque[tuple[str, list, list]] = deque()
    self._new_data = False  # set by data observer; series refreshed on next update()
  def create_ui(self, parent_tag: str):
    """Build the plot (axes, legend, timeline marker) and re-add any existing series.

    The plot accepts drag-and-drop payloads of type TIMESERIES_PAYLOAD from the
    data tree sidebar.
    """
    self.data_manager.add_observer(self.on_data_loaded)
    with dpg.plot(height=-1, width=-1, tag=self.plot_tag, parent=parent_tag, drop_callback=self._on_series_drop, payload_type="TIMESERIES_PAYLOAD"):
      dpg.add_plot_legend()
      dpg.add_plot_axis(dpg.mvXAxis, no_label=True, tag=self.x_axis_tag)
      dpg.add_plot_axis(dpg.mvYAxis, no_label=True, tag=self.y_axis_tag)
      # vertical line marking the current playback position
      timeline_series_tag = dpg.add_inf_line_series(x=[0], label="Timeline", parent=self.y_axis_tag, tag=self.timeline_indicator_tag)
      dpg.bind_item_theme(timeline_series_tag, "global_timeline_theme")
    # restore series that survived a destroy/create cycle (e.g. layout rebuild)
    for series_path in list(self._series_data.keys()):
      self.add_series(series_path)
    self._ui_created = True
  def update(self):
    """Per-frame refresh, main thread only: apply new data, drain worker results,
    move the timeline marker, update legend labels, and re-downsample on zoom."""
    with self._update_lock:
      if not self._ui_created:
        return
      if self._new_data:  # handle new data in main thread
        self._new_data = False
        for series_path in list(self._series_data.keys()):
          self.add_series(series_path, update=True)
      while self.results_deque:  # handle downsampled results in main thread
        results = self.results_deque.popleft()
        for series_path, downsampled_time, downsampled_values in results:
          series_tag = f"series_{self.panel_id}_{series_path}"
          if dpg.does_item_exist(series_tag):
            dpg.set_value(series_tag, [downsampled_time, downsampled_values])
      # update timeline
      current_time_s = self.playback_manager.current_time_s
      dpg.set_value(self.timeline_indicator_tag, [[current_time_s], [0]])
      # update timeseries legend label
      for series_path, (time_array, value_array) in self._series_data.items():
        # last sample at or before the playhead, shown only if < 1s stale
        position = np.searchsorted(time_array, current_time_s, side='right') - 1
        if position >= 0 and (current_time_s - time_array[position]) <= 1.0:
          value = value_array[position]
          formatted_value = f"{value:.5f}" if np.issubdtype(type(value), np.floating) else str(value)
          series_tag = f"series_{self.panel_id}_{series_path}"
          if dpg.does_item_exist(series_tag):
            dpg.configure_item(series_tag, label=f"{series_path}: {formatted_value}")
      # downsample if plot zoom changed significantly
      plot_duration = dpg.get_axis_limits(self.x_axis_tag)[1] - dpg.get_axis_limits(self.x_axis_tag)[0]
      if plot_duration > self._last_plot_duration * 2 or plot_duration < self._last_plot_duration * 0.5:
        self._downsample_all_series(plot_duration)
def _downsample_all_series(self, plot_duration):
  """Queue background min/max downsampling for series that are oversampled for
  the current plot width; restore full-resolution data for the rest."""
  plot_width = dpg.get_item_rect_size(self.plot_tag)[0]
  if plot_width <= 0 or plot_duration <= 0:
    return
  self._last_plot_duration = plot_duration
  target_pps = plot_width / plot_duration  # points per second the screen can usefully show
  pending = []
  for path, (times, values) in self._series_data.items():
    if len(times) == 0:
      continue
    duration = times[-1] - times[0] if len(times) > 1 else 1
    actual_pps = len(times) / duration
    if actual_pps > target_pps * 2:
      # oversampled: downsample in a worker, but never below one point per pixel
      n_points = max(int(target_pps * duration), plot_width)
      pending.append((path, times, values, n_points))
    elif dpg.does_item_exist(f"series_{self.panel_id}_{path}"):
      dpg.set_value(f"series_{self.panel_id}_{path}", [times, values])
  if pending:
    self.worker_manager.submit_task(
      TimeSeriesPanel._downsample_worker, pending, callback=lambda results: self.results_deque.append(results), task_id=f"downsample_{self.panel_id}"
    )
def add_series(self, series_path: str, update: bool = False):
  """Show a series on this panel, fetching (or re-fetching when update=True) its data."""
  with self._update_lock:
    # pull fresh data when forced or when the path is new to this panel
    if update or series_path not in self._series_data:
      self._series_data[series_path] = self.data_manager.get_timeseries(series_path)
    time_array, value_array = self._series_data[series_path]
    tag = f"series_{self.panel_id}_{series_path}"
    if dpg.does_item_exist(tag):
      dpg.set_value(tag, [time_array, value_array])
    else:
      # first appearance: create the line series, fit the view, then downsample to it
      created = dpg.add_line_series(x=time_array, y=value_array, label=series_path, parent=self.y_axis_tag, tag=tag)
      dpg.bind_item_theme(created, "global_line_theme")
      dpg.fit_axis_data(self.x_axis_tag)
      dpg.fit_axis_data(self.y_axis_tag)
      limits = dpg.get_axis_limits(self.x_axis_tag)
      self._downsample_all_series(limits[1] - limits[0])
def destroy_ui(self):
  """Tear down this panel's plot widget and stop receiving data notifications."""
  with self._update_lock:
    # unsubscribe first so no callback fires while the widget is being deleted
    self.data_manager.remove_observer(self.on_data_loaded)
    if dpg.does_item_exist(self.plot_tag):
      dpg.delete_item(self.plot_tag)
    self._ui_created = False
def get_panel_type(self) -> str:
  """Stable string identifying this panel implementation."""
  return "timeseries"
def clear(self):
  """Remove every series from this panel."""
  with self._update_lock:
    # snapshot the keys: remove_series() mutates the dict while we iterate.
    # NOTE: remove_series() re-acquires _update_lock — assumes an RLock; confirm.
    for path in list(self._series_data):
      self.remove_series(path)
def remove_series(self, series_path: str):
  """Delete one series from the plot and drop its cached data."""
  with self._update_lock:
    if series_path not in self._series_data:
      return
    tag = f"series_{self.panel_id}_{series_path}"
    if dpg.does_item_exist(tag):
      dpg.delete_item(tag)
    del self._series_data[series_path]
def on_data_loaded(self, data: dict):
  # Observer callback from the data manager (may run off the main thread):
  # only set a flag here; the actual series refresh happens in update(),
  # which explicitly handles new data on the main thread.
  self._new_data = True
def _on_series_drop(self, sender, app_data, user_data):
  # Drag-and-drop handler for "TIMESERIES_PAYLOAD" drops onto the plot;
  # app_data carries the dropped series path.
  self.add_series(app_data)
@staticmethod
def _downsample_worker(series_path, time_array, value_array, target_points):
if len(time_array) <= target_points:
return series_path, time_array, value_array
step = len(time_array) / target_points
indices = []
for i in range(target_points):
start_idx = int(i * step)
end_idx = int((i + 1) * step)
if start_idx == end_idx:
indices.append(start_idx)
else:
bucket_values = value_array[start_idx:end_idx]
min_idx = start_idx + np.argmin(bucket_values)
max_idx = start_idx + np.argmax(bucket_values)
if min_idx != max_idx:
indices.extend([min(min_idx, max_idx), max(min_idx, max_idx)])
else:
indices.append(min_idx)
indices = sorted(set(indices))
return series_path, time_array[indices], value_array[indices]

@ -12,7 +12,7 @@ else:
base_libs.append('OpenCL')
replay_lib_src = ["replay.cc", "consoleui.cc", "camera.cc", "filereader.cc", "logreader.cc", "framereader.cc",
"route.cc", "util.cc", "seg_mgr.cc", "timeline.cc", "api.cc"]
"route.cc", "util.cc", "seg_mgr.cc", "timeline.cc", "api.cc", "qcom_decoder.cc"]
replay_lib = replay_env.Library("replay", replay_lib_src, LIBS=base_libs, FRAMEWORKS=base_frameworks)
Export('replay_lib')
replay_libs = [replay_lib, 'avutil', 'avcodec', 'avformat', 'bz2', 'zstd', 'curl', 'yuv', 'ncurses'] + base_libs

@ -8,6 +8,7 @@
#include "common/util.h"
#include "third_party/libyuv/include/libyuv.h"
#include "tools/replay/util.h"
#include "system/hardware/hw.h"
#ifdef __APPLE__
#define HW_DEVICE_TYPE AV_HWDEVICE_TYPE_VIDEOTOOLBOX
@ -37,7 +38,13 @@ struct DecoderManager {
return it->second.get();
}
auto decoder = std::make_unique<VideoDecoder>();
std::unique_ptr<VideoDecoder> decoder;
if (Hardware::TICI() && hw_decoder) {
decoder = std::make_unique<QcomVideoDecoder>();
} else {
decoder = std::make_unique<FFmpegVideoDecoder>();
}
if (!decoder->open(codecpar, hw_decoder)) {
decoder.reset(nullptr);
}
@ -114,19 +121,19 @@ bool FrameReader::get(int idx, VisionBuf *buf) {
// class VideoDecoder
VideoDecoder::VideoDecoder() {
FFmpegVideoDecoder::FFmpegVideoDecoder() {
av_frame_ = av_frame_alloc();
hw_frame_ = av_frame_alloc();
}
VideoDecoder::~VideoDecoder() {
FFmpegVideoDecoder::~FFmpegVideoDecoder() {
if (hw_device_ctx) av_buffer_unref(&hw_device_ctx);
if (decoder_ctx) avcodec_free_context(&decoder_ctx);
av_frame_free(&av_frame_);
av_frame_free(&hw_frame_);
}
bool VideoDecoder::open(AVCodecParameters *codecpar, bool hw_decoder) {
bool FFmpegVideoDecoder::open(AVCodecParameters *codecpar, bool hw_decoder) {
const AVCodec *decoder = avcodec_find_decoder(codecpar->codec_id);
if (!decoder) return false;
@ -149,7 +156,7 @@ bool VideoDecoder::open(AVCodecParameters *codecpar, bool hw_decoder) {
return true;
}
bool VideoDecoder::initHardwareDecoder(AVHWDeviceType hw_device_type) {
bool FFmpegVideoDecoder::initHardwareDecoder(AVHWDeviceType hw_device_type) {
const AVCodecHWConfig *config = nullptr;
for (int i = 0; (config = avcodec_get_hw_config(decoder_ctx->codec, i)) != nullptr; i++) {
if (config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX && config->device_type == hw_device_type) {
@ -175,7 +182,7 @@ bool VideoDecoder::initHardwareDecoder(AVHWDeviceType hw_device_type) {
return true;
}
bool VideoDecoder::decode(FrameReader *reader, int idx, VisionBuf *buf) {
bool FFmpegVideoDecoder::decode(FrameReader *reader, int idx, VisionBuf *buf) {
int current_idx = idx;
if (idx != reader->prev_idx + 1) {
// seeking to the nearest key frame
@ -219,7 +226,7 @@ bool VideoDecoder::decode(FrameReader *reader, int idx, VisionBuf *buf) {
return false;
}
AVFrame *VideoDecoder::decodeFrame(AVPacket *pkt) {
AVFrame *FFmpegVideoDecoder::decodeFrame(AVPacket *pkt) {
int ret = avcodec_send_packet(decoder_ctx, pkt);
if (ret < 0) {
rError("Error sending a packet for decoding: %d", ret);
@ -239,7 +246,7 @@ AVFrame *VideoDecoder::decodeFrame(AVPacket *pkt) {
return (av_frame_->format == hw_pix_fmt) ? hw_frame_ : av_frame_;
}
bool VideoDecoder::copyBuffer(AVFrame *f, VisionBuf *buf) {
bool FFmpegVideoDecoder::copyBuffer(AVFrame *f, VisionBuf *buf) {
if (hw_pix_fmt == HW_PIX_FMT) {
for (int i = 0; i < height/2; i++) {
memcpy(buf->y + (i*2 + 0)*buf->stride, f->data[0] + (i*2 + 0)*f->linesize[0], width);
@ -256,3 +263,45 @@ bool VideoDecoder::copyBuffer(AVFrame *f, VisionBuf *buf) {
}
return true;
}
// Validate the stream codec and set up the Qualcomm hardware decoder.
// Only HEVC is supported on this path; width/height come from the container.
bool QcomVideoDecoder::open(AVCodecParameters *codecpar, bool hw_decoder) {
  if (codecpar->codec_id != AV_CODEC_ID_HEVC) {
    rError("Hardware decoder only supports HEVC codec");
    return false;
  }
  width = codecpar->width;
  height = codecpar->height;
  // NOTE(review): MsmVidc::init() returns false when the device node cannot be
  // opened, but the result is ignored here — consider propagating it.
  msm_vidc.init(VIDEO_DEVICE, width, height, V4L2_PIX_FMT_HEVC);
  return true;
}
// Decode frame `idx` into `buf`. For non-sequential requests, seeks back to the
// nearest preceding keyframe and feeds every packet from there up to `idx`
// through the hardware decoder; only the final packet's frame counts as success.
bool QcomVideoDecoder::decode(FrameReader *reader, int idx, VisionBuf *buf) {
  int from_idx = idx;
  if (idx != reader->prev_idx + 1) {
    // seeking to the nearest key frame
    for (int i = idx; i >= 0; --i) {
      if (reader->packets_info[i].flags & AV_PKT_FLAG_KEY) {
        from_idx = i;
        break;
      }
    }
    auto pos = reader->packets_info[from_idx].pos;
    int ret = avformat_seek_file(reader->input_ctx, 0, pos, pos, pos, AVSEEK_FLAG_BYTE);
    if (ret < 0) {
      // NOTE(review): ret is already a (negative) AVERROR code; wrapping it in
      // AVERROR() negates it again — logging ret directly looks intended.
      rError("Failed to seek to byte position %lld: %d", pos, AVERROR(ret));
      return false;
    }
  }
  reader->prev_idx = idx;
  bool result = false;
  AVPacket pkt;
  msm_vidc.avctx = reader->input_ctx;
  for (int i = from_idx; i <= idx; ++i) {
    if (av_read_frame(reader->input_ctx, &pkt) == 0) {
      // decode every packet to advance decoder state, but only report success
      // for the requested index
      result = msm_vidc.decodeFrame(&pkt, buf) && (i == idx);
      av_packet_unref(&pkt);
    }
  }
  return result;
}

@ -6,6 +6,7 @@
#include "msgq/visionipc/visionbuf.h"
#include "tools/replay/filereader.h"
#include "tools/replay/util.h"
#include "tools/replay/qcom_decoder.h"
extern "C" {
#include <libavcodec/avcodec.h>
@ -40,11 +41,18 @@ public:
class VideoDecoder {
public:
VideoDecoder();
~VideoDecoder();
bool open(AVCodecParameters *codecpar, bool hw_decoder);
bool decode(FrameReader *reader, int idx, VisionBuf *buf);
virtual ~VideoDecoder() = default;
virtual bool open(AVCodecParameters *codecpar, bool hw_decoder) = 0;
virtual bool decode(FrameReader *reader, int idx, VisionBuf *buf) = 0;
int width = 0, height = 0;
};
class FFmpegVideoDecoder : public VideoDecoder {
public:
FFmpegVideoDecoder();
~FFmpegVideoDecoder() override;
bool open(AVCodecParameters *codecpar, bool hw_decoder) override;
bool decode(FrameReader *reader, int idx, VisionBuf *buf) override;
private:
bool initHardwareDecoder(AVHWDeviceType hw_device_type);
@ -56,3 +64,14 @@ private:
AVPixelFormat hw_pix_fmt = AV_PIX_FMT_NONE;
AVBufferRef *hw_device_ctx = nullptr;
};
// VideoDecoder backed by the Qualcomm msm_vidc hardware decoder (HEVC only).
class QcomVideoDecoder : public VideoDecoder {
public:
  QcomVideoDecoder() {};
  ~QcomVideoDecoder() override {};
  bool open(AVCodecParameters *codecpar, bool hw_decoder) override;
  bool decode(FrameReader *reader, int idx, VisionBuf *buf) override;

private:
  // V4L2 wrapper that owns the device fd and the bitstream/frame buffers.
  MsmVidc msm_vidc = MsmVidc();
};

@ -0,0 +1,346 @@
#include "qcom_decoder.h"
#include <assert.h>
#include "third_party/linux/include/v4l2-controls.h"
#include <linux/videodev2.h>
#include "common/swaglog.h"
#include "common/util.h"
// echo "0xFFFF" > /sys/kernel/debug/msm_vidc/debug_level
// Copy a decoded NV12 frame (Y plane + interleaved UV plane) between VisionBufs.
// Assumes src and dst share the same height/stride — TODO confirm at call sites.
static void copyBuffer(VisionBuf *src_buf, VisionBuf *dst_buf) {
  // Copy Y plane
  memcpy(dst_buf->y, src_buf->y, src_buf->height * src_buf->stride);
  // Copy UV plane
  memcpy(dst_buf->uv, src_buf->uv, src_buf->height / 2 * src_buf->stride);
}
// Ask the driver for `count` USERPTR buffers on the given queue (OUTPUT =
// compressed input, CAPTURE = decoded frames). Failures abort via safe_ioctl.
static void request_buffers(int fd, v4l2_buf_type buf_type, unsigned int count) {
  struct v4l2_requestbuffers reqbuf = {
    .count = count,
    .type = buf_type,
    .memory = V4L2_MEMORY_USERPTR
  };
  util::safe_ioctl(fd, VIDIOC_REQBUFS, &reqbuf, "VIDIOC_REQBUFS failed");
}
// Close the V4L2 device if it was opened; buffer memory is presumably released
// by the VisionBuf members' own destructors — verify.
MsmVidc::~MsmVidc() {
  // NOTE(review): fd is zero-initialized, so `fd > 0` also skips a (valid but
  // unlikely) fd of 0 — confirm this is intentional.
  if (fd > 0) {
    close(fd);
  }
}
// Open and configure the msm_vidc decoder for a width x height stream:
// subscribe to driver events, set the compressed-input format (allocates the
// input slab), start the OUTPUT queue, then build and start the CAPTURE side.
// Returns false only when the device node cannot be opened; ioctl failures
// abort inside safe_ioctl.
// NOTE(review): the `codec` parameter is currently unused — the HEVC fourcc is
// hard-coded below.
bool MsmVidc::init(const char* dev, size_t width, size_t height, uint64_t codec) {
  LOG("Initializing msm_vidc device %s", dev);
  this->w = width;
  this->h = height;
  this->fd = open(dev, O_RDWR, 0);
  if (fd < 0) {
    LOGE("failed to open video device %s", dev);
    return false;
  }
  subscribeEvents();
  v4l2_buf_type out_type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  setPlaneFormat(out_type, V4L2_PIX_FMT_HEVC); // Also allocates the output buffer
  setFPS(FPS);
  request_buffers(fd, out_type, OUTPUT_BUFFER_COUNT);
  util::safe_ioctl(fd, VIDIOC_STREAMON, &out_type, "VIDIOC_STREAMON OUTPUT failed");
  restartCapture();
  setupPolling();
  this->initialized = true;
  return true;
}
// Feed one compressed packet and block until its decoded frame is copied into
// `buf`. The packet is queued as soon as an input slot frees up; meanwhile we
// poll and service capture/output/event activity. Returns `buf` on success,
// nullptr on poll error.
VisionBuf* MsmVidc::decodeFrame(AVPacket *pkt, VisionBuf *buf) {
  assert(initialized && (pkt != nullptr) && (buf != nullptr));
  this->frame_ready = false;
  this->current_output_buf = buf;

  bool sent_packet = false;
  while (!this->frame_ready) {
    if (!sent_packet) {
      // wait for a free input slot (freed by handleOutput() below)
      int buf_index = getBufferUnlocked();
      if (buf_index >= 0) {
        assert(buf_index < out_buf_cnt);
        sendPacket(buf_index, pkt);
        sent_packet = true;
      }
    }
    if (poll(pfd, nfds, -1) < 0) {
      LOGE("poll() error: %d", errno);
      return nullptr;
    }
    if (VisionBuf* result = processEvents()) {
      return result;
    }
  }
  return buf;
}
// Service every fd poll() marked ready: decoded frames (POLLIN/RDNORM),
// reclaimed input slots (POLLOUT/WRNORM) and driver events (POLLPRI).
// Returns the caller's output buffer as soon as its frame arrives, else nullptr.
VisionBuf* MsmVidc::processEvents() {
  for (int idx = 0; idx < nfds; idx++) {
    short revents = pfd[idx].revents;
    if (!revents) continue;
    if (idx == ev[EV_VIDEO]) {
      if (revents & (POLLIN | POLLRDNORM)) {
        VisionBuf *result = handleCapture();
        if (result == this->current_output_buf) {
          this->frame_ready = true;  // also signals decodeFrame()'s outer loop
        }
      }
      if (revents & (POLLOUT | POLLWRNORM)) {
        handleOutput();
      }
      if (revents & POLLPRI) {
        handleEvent();
      }
    } else {
      LOGE("Unexpected event on fd %d", pfd[idx].fd);
    }
  }
  return nullptr;
}
// Dequeue one decoded CAPTURE buffer. Drops it (returns nullptr) while a
// reconfigure is pending or when the payload is empty; otherwise copies the
// NV12 data into the caller's buffer and re-queues the capture buffer.
VisionBuf* MsmVidc::handleCapture() {
  struct v4l2_buffer buf = {0};
  struct v4l2_plane planes[1] = {0};
  buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  buf.memory = V4L2_MEMORY_USERPTR;
  buf.m.planes = planes;
  buf.length = 1;
  util::safe_ioctl(this->fd, VIDIOC_DQBUF, &buf, "VIDIOC_DQBUF CAPTURE failed");
  if (this->reconfigure_pending || buf.m.planes[0].bytesused == 0) {
    // NOTE(review): on the empty-payload path with no pending reconfigure the
    // buffer is never re-queued — confirm this cannot starve the capture queue.
    // (After a reconfigure, restartCapture() re-queues all capture buffers.)
    return nullptr;
  }
  copyBuffer(&cap_bufs[buf.index], this->current_output_buf);
  queueCaptureBuffer(buf.index);
  return this->current_output_buf;
}
// Subscribe to the msm_vidc private events in `subscriptions` (flush-done and
// port-settings-changed) so poll() can report them via POLLPRI.
bool MsmVidc::subscribeEvents() {
  for (uint32_t event : subscriptions) {
    struct v4l2_event_subscription sub = { .type = event};
    util::safe_ioctl(fd, VIDIOC_SUBSCRIBE_EVENT, &sub, "VIDIOC_SUBSCRIBE_EVENT failed");
  }
  return true;
}
// Set the pixel format/resolution on one queue and allocate its backing memory.
// OUTPUT (compressed input): one ION slab carved into OUTPUT_BUFFER_COUNT
// equal, driver-sized slots. CAPTURE (decoded output): one ION allocation per
// buffer, laid out as linear NV12 (UV plane at y_stride * height).
bool MsmVidc::setPlaneFormat(enum v4l2_buf_type type, uint32_t fourcc) {
  struct v4l2_format fmt = {.type = type};
  struct v4l2_pix_format_mplane *pix = &fmt.fmt.pix_mp;
  *pix = {
    .width = (__u32)this->w,
    .height = (__u32)this->h,
    .pixelformat = fourcc
  };
  util::safe_ioctl(fd, VIDIOC_S_FMT, &fmt, "VIDIOC_S_FMT failed");
  if (type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) {
    // driver reports the per-buffer size it needs for the compressed stream
    this->out_buf_size = pix->plane_fmt[0].sizeimage;
    int ion_size = this->out_buf_size * OUTPUT_BUFFER_COUNT; // Output (input) buffers are ION buffer.
    this->out_buf.allocate(ion_size); // mmap rw
    for (int i = 0; i < OUTPUT_BUFFER_COUNT; i++) {
      this->out_buf_off[i] = i * this->out_buf_size;
      this->out_buf_addr[i] = (char *)this->out_buf.addr + this->out_buf_off[i];
      this->out_buf_flag[i] = false;
    }
    LOGD("Set output buffer size to %d, count %d, addr %p", this->out_buf_size, OUTPUT_BUFFER_COUNT, this->out_buf.addr);
  } else if (type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
    request_buffers(this->fd, type, CAPTURE_BUFFER_COUNT);
    util::safe_ioctl(fd, VIDIOC_G_FMT, &fmt, "VIDIOC_G_FMT failed");  // re-read the (possibly adjusted) format
    const __u32 y_size = pix->plane_fmt[0].sizeimage;
    const __u32 y_stride = pix->plane_fmt[0].bytesperline;
    for (int i = 0; i < CAPTURE_BUFFER_COUNT; i++) {
      size_t uv_offset = (size_t)y_stride * pix->height;
      size_t required = uv_offset + (y_stride * pix->height / 2); // enough for Y + UV. For linear NV12, UV plane starts at y_stride * height.
      size_t alloc_size = std::max<size_t>(y_size, required);
      this->cap_bufs[i].allocate(alloc_size);
      this->cap_bufs[i].init_yuv(pix->width, pix->height, y_stride, uv_offset);
    }
    LOGD("Set capture buffer size to %d, count %d, addr %p, extradata size %d",
         pix->plane_fmt[0].sizeimage, CAPTURE_BUFFER_COUNT, this->cap_bufs[0].addr, pix->plane_fmt[1].sizeimage);
  }
  return true;
}
// Tell the driver the nominal input frame rate (timeperframe = 1/fps) on the
// compressed-input queue via VIDIOC_S_PARM.
bool MsmVidc::setFPS(uint32_t fps) {
  struct v4l2_streamparm streamparam = {
    .type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE,
    .parm.output.timeperframe = {1, fps}
  };
  util::safe_ioctl(fd, VIDIOC_S_PARM, &streamparam, "VIDIOC_S_PARM failed");
  return true;
}
// (Re)build the CAPTURE side. After a port reconfigure: stream off, release the
// driver-side buffers, free and reset each VisionBuf. Then set DPB output mode,
// set the NV12 capture format (which allocates the buffers), stream on and
// queue every capture buffer.
bool MsmVidc::restartCapture() {
  // stop if already initialized
  enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  if (this->initialized) {
    LOGD("Restarting capture, flushing buffers...");
    util::safe_ioctl(this->fd, VIDIOC_STREAMOFF, &type, "VIDIOC_STREAMOFF CAPTURE failed");
    struct v4l2_requestbuffers reqbuf = {.type = type, .memory = V4L2_MEMORY_USERPTR};  // count = 0 releases the driver's buffers
    util::safe_ioctl(this->fd, VIDIOC_REQBUFS, &reqbuf, "VIDIOC_REQBUFS failed");
    for (size_t i = 0; i < CAPTURE_BUFFER_COUNT; ++i) {
      this->cap_bufs[i].free();
      this->cap_buf_flag[i] = false; // mark as not queued
      // destroy + placement-new resets the VisionBuf to a pristine state so
      // allocate() can be called on it again in setPlaneFormat()
      cap_bufs[i].~VisionBuf();
      new (&cap_bufs[i]) VisionBuf();
    }
  }
  // setup, start and queue capture buffers
  setDBP();
  setPlaneFormat(type, V4L2_PIX_FMT_NV12);
  util::safe_ioctl(this->fd, VIDIOC_STREAMON, &type, "VIDIOC_STREAMON CAPTURE failed");
  for (size_t i = 0; i < CAPTURE_BUFFER_COUNT; ++i) {
    queueCaptureBuffer(i);
  }
  return true;
}
// Queue capture buffer `i` (a USERPTR/ION buffer) to the driver so it can
// receive a decoded frame, and mark the slot as in-flight.
bool MsmVidc::queueCaptureBuffer(int i) {
  struct v4l2_buffer buf = {0};
  struct v4l2_plane planes[1] = {0};

  buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
  buf.memory = V4L2_MEMORY_USERPTR;
  buf.index = i;
  buf.m.planes = planes;
  buf.length = 1;
  // decoded frame plane
  planes[0].m.userptr = (unsigned long)this->cap_bufs[i].addr; // no security
  planes[0].length = this->cap_bufs[i].len;
  planes[0].reserved[0] = this->cap_bufs[i].fd; // ION fd
  planes[0].reserved[1] = 0;
  planes[0].bytesused = this->cap_bufs[i].len;
  planes[0].data_offset = 0;
  util::safe_ioctl(this->fd, VIDIOC_QBUF, &buf, "VIDIOC_QBUF failed");
  this->cap_buf_flag[i] = true; // mark as queued
  return true;
}
// Queue input-bitstream slot `i` (holding `size` bytes of compressed data) to
// the driver and mark the slot in-flight. The slot is an offset into the single
// shared ION slab, identified by the slab's fd plus a 4 KiB-aligned offset.
bool MsmVidc::queueOutputBuffer(int i, size_t size) {
  struct v4l2_buffer buf = {0};
  struct v4l2_plane planes[1] = {0};

  buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  buf.memory = V4L2_MEMORY_USERPTR;
  buf.index = i;
  buf.m.planes = planes;
  buf.length = 1;
  // decoded frame plane
  planes[0].m.userptr = (unsigned long)this->out_buf_off[i]; // check this
  planes[0].length = this->out_buf_size;
  planes[0].reserved[0] = this->out_buf.fd; // ION fd
  planes[0].reserved[1] = 0;
  planes[0].bytesused = size;
  planes[0].data_offset = 0;
  assert((this->out_buf_off[i] & 0xfff) == 0); // must be 4 KiB aligned
  assert(this->out_buf_size % 4096 == 0); // ditto for size
  util::safe_ioctl(this->fd, VIDIOC_QBUF, &buf, "VIDIOC_QBUF failed");
  this->out_buf_flag[i] = true; // mark as queued
  return true;
}
// Configure decoded-picture-buffer handling: secondary (split) stream output
// mode with no separate DPB color format.
// NOTE(review): "setDBP" looks like a typo for "setDPB" (decoded picture buffer).
bool MsmVidc::setDBP() {
  struct v4l2_ext_control control[2] = {0};
  struct v4l2_ext_controls controls = {0};
  control[0].id = V4L2_CID_MPEG_VIDC_VIDEO_STREAM_OUTPUT_MODE;
  control[0].value = 1; // V4L2_CID_MPEG_VIDC_VIDEO_STREAM_OUTPUT_SECONDARY
  control[1].id = V4L2_CID_MPEG_VIDC_VIDEO_DPB_COLOR_FORMAT;
  control[1].value = 0; // V4L2_MPEG_VIDC_VIDEO_DPB_COLOR_FMT_NONE
  controls.count = 2;
  controls.ctrl_class = V4L2_CTRL_CLASS_MPEG;
  controls.controls = control;
  util::safe_ioctl(fd, VIDIOC_S_EXT_CTRLS, &controls, "VIDIOC_S_EXT_CTRLS failed");
  return true;
}
// Build the pollfd set: one entry watching the decoder fd for decoded frames
// (POLLIN/RDNORM), freed input slots (POLLOUT/WRNORM) and events (POLLPRI).
bool MsmVidc::setupPolling() {
  // Initialize poll array
  pfd[EV_VIDEO] = {fd, POLLIN | POLLOUT | POLLWRNORM | POLLRDNORM | POLLPRI, 0};
  ev[EV_VIDEO] = EV_VIDEO;
  nfds = 1;
  return true;
}
// Copy one compressed packet into input slot `buf_index` and queue it to the
// driver. The whole slot is zeroed first so bytes past pkt->size are deterministic.
bool MsmVidc::sendPacket(int buf_index, AVPacket *pkt) {
  assert(buf_index >= 0 && buf_index < out_buf_cnt);
  assert(pkt != nullptr && pkt->data != nullptr && pkt->size > 0);
  // Prepare output buffer
  memset(this->out_buf_addr[buf_index], 0, this->out_buf_size);
  uint8_t * data = (uint8_t *)this->out_buf_addr[buf_index];
  memcpy(data, pkt->data, pkt->size);
  queueOutputBuffer(buf_index, pkt->size);
  return true;
}
// Find a free input-bitstream slot (one not currently queued to the driver).
// Returns its index, or -1 when every slot is in flight.
int MsmVidc::getBufferUnlocked() {
  for (int i = 0; i < this->out_buf_cnt; i++) {
    if (!out_buf_flag[i]) {
      return i;
    }
  }
  return -1;
}
// Reclaim one consumed input-bitstream buffer from the driver and mark its
// slot free for reuse by sendPacket().
bool MsmVidc::handleOutput() {
  struct v4l2_buffer buf = {0};
  // NOTE(review): `planes` is not zero-initialized, unlike the other queue
  // helpers — it is only written by the driver here, but `= {0}` would be
  // consistent and safer.
  struct v4l2_plane planes[1];
  buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
  buf.memory = V4L2_MEMORY_USERPTR;
  buf.m.planes = planes;
  buf.length = 1;
  util::safe_ioctl(this->fd, VIDIOC_DQBUF, &buf, "VIDIOC_DQBUF OUTPUT failed");
  this->out_buf_flag[buf.index] = false; // mark as not queued
  return true;
}
// Dequeue and handle one driver event. PORT_SETTINGS_CHANGED_INSUFFICIENT
// (payload: {height, width}) records the new size and requests a capture-side
// flush; the matching FLUSH_DONE then triggers restartCapture().
bool MsmVidc::handleEvent() {
  // dequeue event
  struct v4l2_event event = {0};
  util::safe_ioctl(this->fd, VIDIOC_DQEVENT, &event, "VIDIOC_DQEVENT failed");
  switch (event.type) {
    case V4L2_EVENT_MSM_VIDC_PORT_SETTINGS_CHANGED_INSUFFICIENT: {
      // event payload carries the new coded size: ptr[0]=height, ptr[1]=width
      unsigned int *ptr = (unsigned int *)event.u.data;
      unsigned int height = ptr[0];
      unsigned int width = ptr[1];
      this->w = width;
      this->h = height;
      LOGD("Port Reconfig received insufficient, new size %ux%u, flushing capture bufs...", width, height); // This is normal
      // NOTE(review): `dec` is not zero-initialized; fields other than
      // flags/cmd are passed to the driver as garbage — consider `= {0}`.
      struct v4l2_decoder_cmd dec;
      dec.flags = V4L2_QCOM_CMD_FLUSH_CAPTURE;
      dec.cmd = V4L2_QCOM_CMD_FLUSH;
      util::safe_ioctl(this->fd, VIDIOC_DECODER_CMD, &dec, "VIDIOC_DECODER_CMD FLUSH_CAPTURE failed");
      this->reconfigure_pending = true;
      LOGD("Waiting for flush done event to reconfigure capture queue");
      break;
    }

    case V4L2_EVENT_MSM_VIDC_FLUSH_DONE: {
      // payload ptr[0] holds the flushed-queue flags
      unsigned int *ptr = (unsigned int *)event.u.data;
      unsigned int flags = ptr[0];
      if (flags & V4L2_QCOM_CMD_FLUSH_CAPTURE) {
        if (this->reconfigure_pending) {
          this->restartCapture();
          this->reconfigure_pending = false;
        }
      }
      break;
    }

    default:
      break;
  }
  return true;
}

@ -0,0 +1,88 @@
#pragma once
#include <linux/videodev2.h>
#include <poll.h>
#include "msgq/visionipc/visionbuf.h"
extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
}
#define V4L2_EVENT_MSM_VIDC_START (V4L2_EVENT_PRIVATE_START + 0x00001000)
#define V4L2_EVENT_MSM_VIDC_FLUSH_DONE (V4L2_EVENT_MSM_VIDC_START + 1)
#define V4L2_EVENT_MSM_VIDC_PORT_SETTINGS_CHANGED_INSUFFICIENT (V4L2_EVENT_MSM_VIDC_START + 3)
#define V4L2_CID_MPEG_MSM_VIDC_BASE 0x00992000
#define V4L2_CID_MPEG_VIDC_VIDEO_DPB_COLOR_FORMAT (V4L2_CID_MPEG_MSM_VIDC_BASE + 44)
#define V4L2_CID_MPEG_VIDC_VIDEO_STREAM_OUTPUT_MODE (V4L2_CID_MPEG_MSM_VIDC_BASE + 22)
#define V4L2_QCOM_CMD_FLUSH_CAPTURE (1 << 1)
#define V4L2_QCOM_CMD_FLUSH (4)
#define VIDEO_DEVICE "/dev/video32"
#define OUTPUT_BUFFER_COUNT 8
#define CAPTURE_BUFFER_COUNT 8
#define FPS 20
// Wrapper around the Qualcomm msm_vidc V4L2 decoder (/dev/video32).
// V4L2 naming: the OUTPUT queue carries compressed input, the CAPTURE queue
// carries decoded NV12 frames.
class MsmVidc {
public:
  MsmVidc() = default;
  ~MsmVidc();

  // Open `dev` and configure it for a width x height stream. `codec` is
  // currently unused by the implementation (HEVC is hard-coded there).
  bool init(const char* dev, size_t width, size_t height, uint64_t codec);
  // Feed one compressed packet; blocks until the decoded frame lands in `buf`.
  // Returns `buf` on success, nullptr on poll error.
  VisionBuf* decodeFrame(AVPacket* pkt, VisionBuf* buf);
  AVFormatContext* avctx = nullptr;  // set by the caller before decodeFrame()

  int fd = 0;  // V4L2 device fd

private:
  bool initialized = false;          // set once init() completes
  bool reconfigure_pending = false;  // port-settings-changed seen, awaiting flush-done
  bool frame_ready = false;          // current frame copied into current_output_buf
  VisionBuf* current_output_buf = nullptr;  // destination for the in-flight frame

  VisionBuf out_buf; // Single input buffer
  VisionBuf cap_bufs[CAPTURE_BUFFER_COUNT]; // Capture (output) buffers

  size_t w = 1928, h = 1208;  // coded size; updated on reconfigure events
  size_t cap_height = 0, cap_width = 0;
  int cap_buf_size = 0;
  int out_buf_size = 0;  // bytes per compressed-input slot
  size_t cap_plane_off[CAPTURE_BUFFER_COUNT] = {0};
  size_t cap_plane_stride[CAPTURE_BUFFER_COUNT] = {0};
  bool cap_buf_flag[CAPTURE_BUFFER_COUNT] = {false};  // true while queued to the driver
  size_t out_buf_off[OUTPUT_BUFFER_COUNT] = {0};      // slot offsets into out_buf
  void* out_buf_addr[OUTPUT_BUFFER_COUNT] = {0};      // mapped slot addresses
  bool out_buf_flag[OUTPUT_BUFFER_COUNT] = {false};   // true while queued to the driver
  const int out_buf_cnt = OUTPUT_BUFFER_COUNT;

  // msm_vidc private events subscribed to at init
  const int subscriptions[2] = {
    V4L2_EVENT_MSM_VIDC_FLUSH_DONE,
    V4L2_EVENT_MSM_VIDC_PORT_SETTINGS_CHANGED_INSUFFICIENT
  };

  enum { EV_VIDEO, EV_COUNT };
  struct pollfd pfd[EV_COUNT] = {0};
  int ev[EV_COUNT] = {-1};
  int nfds = 0;

  VisionBuf* processEvents();
  bool setupOutput();  // NOTE(review): no definition visible in qcom_decoder.cc — possibly dead
  bool subscribeEvents();
  bool setPlaneFormat(v4l2_buf_type type, uint32_t fourcc);
  bool setFPS(uint32_t fps);
  bool restartCapture();
  bool queueCaptureBuffer(int i);
  bool queueOutputBuffer(int i, size_t size);
  bool setDBP();
  bool setupPolling();
  bool sendPacket(int buf_index, AVPacket* pkt);
  int getBufferUnlocked();
  VisionBuf* handleCapture();
  bool handleOutput();
  bool handleEvent();
};

@ -451,6 +451,21 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/d2/fc/c0a3f4c4eaa5a22fbef91713474666e13d0ea2a69c84532579490a9f2cc8/dbus_next-0.2.3-py3-none-any.whl", hash = "sha256:58948f9aff9db08316734c0be2a120f6dc502124d9642f55e90ac82ffb16a18b", size = 57885, upload-time = "2021-07-25T22:11:25.466Z" },
]
[[package]]
name = "dearpygui"
version = "2.1.0"
source = { registry = "https://pypi.org/simple" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/92/fe/66293fc40254a29f060efd3398f2b1001ed79263ae1837db9ec42caa8f1d/dearpygui-2.1.0-cp311-cp311-macosx_10_6_x86_64.whl", hash = "sha256:03e5dc0b3dd2f7965e50bbe41f3316a814408064b582586de994d93afedb125c", size = 2100924, upload-time = "2025-07-07T14:20:00.602Z" },
{ url = "https://files.pythonhosted.org/packages/c4/4d/9fa1c3156ba7bbf4dc89e2e322998752fccfdc3575923a98dd6a4da48911/dearpygui-2.1.0-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:b5b37710c3fa135c48e2347f39ecd1f415146e86db5d404707a0bf72d16bd304", size = 1874441, upload-time = "2025-07-07T14:20:09.165Z" },
{ url = "https://files.pythonhosted.org/packages/5a/3c/af5673b50699e1734296a0b5bcef39bb6989175b001ad1f9b0e7888ad90d/dearpygui-2.1.0-cp311-cp311-manylinux1_x86_64.whl", hash = "sha256:b0cfd7ac7eaa090fc22d6aa60fc4b527fc631cee10c348e4d8df92bb39af03d2", size = 2636574, upload-time = "2025-07-07T14:20:14.951Z" },
{ url = "https://files.pythonhosted.org/packages/7f/db/ed4db0bb3d88e7a8c405472641419086bef9632c4b8b0489dc0c43519c0d/dearpygui-2.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:a9af54f96d3ef30c5db9d12cdf3266f005507396fb0da2e12e6b22b662161070", size = 1810266, upload-time = "2025-07-07T14:19:51.565Z" },
{ url = "https://files.pythonhosted.org/packages/55/9d/20a55786cc9d9266395544463d5db3be3528f7d5244bc52ba760de5dcc2d/dearpygui-2.1.0-cp312-cp312-macosx_10_6_x86_64.whl", hash = "sha256:1270ceb9cdb8ecc047c42477ccaa075b7864b314a5d09191f9280a24c8aa90a0", size = 2101499, upload-time = "2025-07-07T14:20:01.701Z" },
{ url = "https://files.pythonhosted.org/packages/a7/b2/39d820796b7ac4d0ebf93306c1f031bf3516b159408286f1fb495c6babeb/dearpygui-2.1.0-cp312-cp312-macosx_13_0_arm64.whl", hash = "sha256:ce9969eb62057b9d4c88a8baaed13b5fbe4058caa9faf5b19fec89da75aece3d", size = 1874385, upload-time = "2025-07-07T14:20:11.226Z" },
{ url = "https://files.pythonhosted.org/packages/fc/26/c29998ffeb5eb8d638f307851e51a81c8bd4aeaf89ad660fc67ea4d1ac1a/dearpygui-2.1.0-cp312-cp312-manylinux1_x86_64.whl", hash = "sha256:a3ca8cf788db63ef7e2e8d6f277631b607d548b37606f080ca1b42b1f0a9b183", size = 2635863, upload-time = "2025-07-07T14:20:17.186Z" },
{ url = "https://files.pythonhosted.org/packages/28/9c/3ab33927f1d8c839c5b7033a33d44fc9f0aeb00c264fc9772cb7555a03c4/dearpygui-2.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:43f0e4db9402f44fc3683a1f5c703564819de18cc15a042de7f1ed1c8cb5d148", size = 1810460, upload-time = "2025-07-07T14:19:53.13Z" },
]
[[package]]
name = "dictdiffer"
version = "0.9.0"
@ -622,10 +637,10 @@ name = "gymnasium"
version = "1.2.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cloudpickle" },
{ name = "farama-notifications" },
{ name = "numpy" },
{ name = "typing-extensions" },
{ name = "cloudpickle", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" },
{ name = "farama-notifications", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" },
{ name = "numpy", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" },
{ name = "typing-extensions", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/fd/17/c2a0e15c2cd5a8e788389b280996db927b923410de676ec5c7b2695e9261/gymnasium-1.2.0.tar.gz", hash = "sha256:344e87561012558f603880baf264ebc97f8a5c997a957b0c9f910281145534b0", size = 821142, upload-time = "2025-06-27T08:21:20.262Z" }
wheels = [
@ -720,6 +735,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/94/9e/820c4b086ad01ba7d77369fb8b11470a01fac9b4977f02e18659cf378b6b/json_rpc-1.15.0-py2.py3-none-any.whl", hash = "sha256:4a4668bbbe7116feb4abbd0f54e64a4adcf4b8f648f19ffa0848ad0f6606a9bf", size = 39450, upload-time = "2023-06-11T09:45:47.136Z" },
]
[[package]]
name = "kaitaistruct"
version = "0.10"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/54/04/dd60b9cb65d580ef6cb6eaee975ad1bdd22d46a3f51b07a1e0606710ea88/kaitaistruct-0.10.tar.gz", hash = "sha256:a044dee29173d6afbacf27bcac39daf89b654dd418cfa009ab82d9178a9ae52a", size = 7061, upload-time = "2022-07-09T00:34:06.729Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/4e/bf/88ad23efc08708bda9a2647169828e3553bb2093a473801db61f75356395/kaitaistruct-0.10-py2.py3-none-any.whl", hash = "sha256:a97350919adbf37fda881f75e9365e2fb88d04832b7a4e57106ec70119efb235", size = 7013, upload-time = "2022-07-09T00:34:03.905Z" },
]
[[package]]
name = "kiwisolver"
version = "1.4.9"
@ -903,22 +927,22 @@ name = "metadrive-simulator"
version = "0.4.2.4"
source = { url = "https://github.com/commaai/metadrive/releases/download/MetaDrive-minimal-0.4.2.4/metadrive_simulator-0.4.2.4-py3-none-any.whl" }
dependencies = [
{ name = "filelock" },
{ name = "gymnasium" },
{ name = "lxml" },
{ name = "matplotlib" },
{ name = "numpy" },
{ name = "opencv-python-headless" },
{ name = "panda3d" },
{ name = "panda3d-gltf" },
{ name = "pillow" },
{ name = "progressbar" },
{ name = "psutil" },
{ name = "pygments" },
{ name = "requests" },
{ name = "shapely" },
{ name = "tqdm" },
{ name = "yapf" },
{ name = "filelock", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" },
{ name = "gymnasium", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" },
{ name = "lxml", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" },
{ name = "matplotlib", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" },
{ name = "numpy", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" },
{ name = "opencv-python-headless", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" },
{ name = "panda3d", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" },
{ name = "panda3d-gltf", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" },
{ name = "pillow", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" },
{ name = "progressbar", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" },
{ name = "psutil", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" },
{ name = "pygments", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" },
{ name = "requests", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" },
{ name = "shapely", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" },
{ name = "tqdm", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" },
{ name = "yapf", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" },
]
wheels = [
{ url = "https://github.com/commaai/metadrive/releases/download/MetaDrive-minimal-0.4.2.4/metadrive_simulator-0.4.2.4-py3-none-any.whl", hash = "sha256:fbf0ea9be67e65cd45d38ff930e3d49f705dd76c9ddbd1e1482e3f87b61efcef" },
@ -1237,9 +1261,11 @@ dependencies = [
{ name = "cffi" },
{ name = "crcmod" },
{ name = "cython" },
{ name = "dearpygui" },
{ name = "future-fstrings" },
{ name = "inputs" },
{ name = "json-rpc" },
{ name = "kaitaistruct" },
{ name = "libusb1" },
{ name = "numpy" },
{ name = "onnx" },
@ -1327,6 +1353,7 @@ requires-dist = [
{ name = "crcmod" },
{ name = "cython" },
{ name = "dbus-next", marker = "extra == 'dev'" },
{ name = "dearpygui", specifier = ">=2.1.0" },
{ name = "dictdiffer", marker = "extra == 'dev'" },
{ name = "future-fstrings" },
{ name = "hypothesis", marker = "extra == 'testing'", specifier = "==6.47.*" },
@ -1334,6 +1361,7 @@ requires-dist = [
{ name = "jeepney", marker = "extra == 'dev'" },
{ name = "jinja2", marker = "extra == 'docs'" },
{ name = "json-rpc" },
{ name = "kaitaistruct" },
{ name = "libusb1" },
{ name = "matplotlib", marker = "extra == 'dev'" },
{ name = "metadrive-simulator", marker = "platform_machine != 'aarch64' and extra == 'tools'", url = "https://github.com/commaai/metadrive/releases/download/MetaDrive-minimal-0.4.2.4/metadrive_simulator-0.4.2.4-py3-none-any.whl" },
@ -1422,8 +1450,8 @@ name = "panda3d-gltf"
version = "0.13"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "panda3d" },
{ name = "panda3d-simplepbr" },
{ name = "panda3d", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" },
{ name = "panda3d-simplepbr", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/07/7f/9f18fc3fa843a080acb891af6bcc12262e7bdf1d194a530f7042bebfc81f/panda3d-gltf-0.13.tar.gz", hash = "sha256:d06d373bdd91cf530909b669f43080e599463bbf6d3ef00c3558bad6c6b19675", size = 25573, upload-time = "2021-05-21T05:46:32.738Z" }
wheels = [
@ -1435,8 +1463,8 @@ name = "panda3d-simplepbr"
version = "0.13.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "panda3d" },
{ name = "typing-extensions" },
{ name = "panda3d", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" },
{ name = "typing-extensions", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/0d/be/c4d1ded04c22b357277cf6e6a44c1ab4abb285a700bd1991460460e05b99/panda3d_simplepbr-0.13.1.tar.gz", hash = "sha256:c83766d7c8f47499f365a07fe1dff078fc8b3054c2689bdc8dceabddfe7f1a35", size = 6216055, upload-time = "2025-03-30T16:57:41.087Z" }
wheels = [
@ -4173,9 +4201,9 @@ name = "pyopencl"
version = "2025.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "numpy" },
{ name = "platformdirs" },
{ name = "pytools" },
{ name = "numpy", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" },
{ name = "platformdirs", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" },
{ name = "pytools", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/28/88/0ac460d3e2def08b2ad6345db6a13613815f616bbbd60c6f4bdf774f4c41/pyopencl-2025.1.tar.gz", hash = "sha256:0116736d7f7920f87b8db4b66a03f27b1d930d2e37ddd14518407cc22dd24779", size = 422510, upload-time = "2025-01-22T00:16:58.421Z" }
wheels = [
@ -4351,7 +4379,7 @@ wheels = [
[[package]]
name = "pytest-xdist"
version = "3.7.1.dev24+g2b4372bd6"
version = "3.7.1.dev24+g2b4372b"
source = { git = "https://github.com/sshane/pytest-xdist?rev=2b4372bd62699fb412c4fe2f95bf9f01bd2018da#2b4372bd62699fb412c4fe2f95bf9f01bd2018da" }
dependencies = [
{ name = "execnet" },
@ -4393,9 +4421,9 @@ name = "pytools"
version = "2024.1.10"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "platformdirs" },
{ name = "siphash24" },
{ name = "typing-extensions" },
{ name = "platformdirs", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" },
{ name = "siphash24", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" },
{ name = "typing-extensions", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ee/0f/56e109c0307f831b5d598ad73976aaaa84b4d0e98da29a642e797eaa940c/pytools-2024.1.10.tar.gz", hash = "sha256:9af6f4b045212c49be32bb31fe19606c478ee4b09631886d05a32459f4ce0a12", size = 81741, upload-time = "2024-07-17T18:47:38.287Z" }
wheels = [
@ -4719,7 +4747,7 @@ name = "shapely"
version = "2.1.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "numpy" },
{ name = "numpy", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/ca/3c/2da625233f4e605155926566c0e7ea8dda361877f48e8b1655e53456f252/shapely-2.1.1.tar.gz", hash = "sha256:500621967f2ffe9642454808009044c21e5b35db89ce69f8a2042c2ffd0e2772", size = 315422, upload-time = "2025-05-19T11:04:41.265Z" }
wheels = [
@ -4948,7 +4976,7 @@ name = "yapf"
version = "0.43.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "platformdirs" },
{ name = "platformdirs", marker = "platform_machine != 'aarch64' or sys_platform != 'linux'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/23/97/b6f296d1e9cc1ec25c7604178b48532fa5901f721bcf1b8d8148b13e5588/yapf-0.43.0.tar.gz", hash = "sha256:00d3aa24bfedff9420b2e0d5d9f5ab6d9d4268e72afbf59bb3fa542781d5218e", size = 254907, upload-time = "2024-11-14T00:11:41.584Z" }
wheels = [

Loading…
Cancel
Save