Merge branch 'commaai:master' into pt-BR

pull/30987/head
Alexandre Nobuharu Sato, committed by GitHub, 1 year ago
commit c80c752cc0
100 changed files:
  1. .github/workflows/badges.yaml (4)
  2. .github/workflows/prebuilt.yaml (5)
  3. .github/workflows/release.yaml (4)
  4. .github/workflows/stale.yaml (2)
  5. .pre-commit-config.yaml (8)
  6. RELEASES.md (2)
  7. SConstruct (3)
  8. cereal (2)
  9. common/params.cc (2)
  10. common/profiler.py (45)
  11. docs/CARS.md (2)
  12. panda (2)
  13. poetry.lock (1884)
  14. pyproject.toml (2)
  15. release/files_common (7)
  16. selfdrive/athena/athenad.py (11)
  17. selfdrive/car/README.md (60)
  18. selfdrive/car/docs_definitions.py (6)
  19. selfdrive/car/gm/carcontroller.py (15)
  20. selfdrive/car/gm/values.py (1)
  21. selfdrive/car/honda/fingerprints.py (3)
  22. selfdrive/car/subaru/interface.py (2)
  23. selfdrive/car/tests/test_lateral_limits.py (4)
  24. selfdrive/car/tests/test_models.py (10)
  25. selfdrive/car/toyota/fingerprints.py (6)
  26. selfdrive/car/toyota/values.py (2)
  27. selfdrive/car/volkswagen/carcontroller.py (13)
  28. selfdrive/car/volkswagen/carstate.py (13)
  29. selfdrive/car/volkswagen/fingerprints.py (3)
  30. selfdrive/car/volkswagen/interface.py (11)
  31. selfdrive/car/volkswagen/mqbcan.py (20)
  32. selfdrive/car/volkswagen/values.py (6)
  33. selfdrive/controls/controlsd.py (9)
  34. selfdrive/controls/lib/longitudinal_planner.py (9)
  35. selfdrive/debug/can_print_changes.py (11)
  36. selfdrive/debug/count_events.py (48)
  37. selfdrive/debug/filter_log_message.py (35)
  38. selfdrive/debug/fingerprint_from_route.py (6)
  39. selfdrive/debug/run_process_on_route.py (9)
  40. selfdrive/debug/sensor_data_to_hist.py (111)
  41. selfdrive/debug/test_fw_query_on_routes.py (4)
  42. selfdrive/debug/toyota_eps_factor.py (6)
  43. selfdrive/manager/manager.py (2)
  44. selfdrive/navd/navd.py (12)
  45. selfdrive/navd/tests/test_navd.py (61)
  46. selfdrive/test/helpers.py (16)
  47. selfdrive/test/process_replay/ref_commit (2)
  48. selfdrive/test/test_onroad.py (2)
  49. selfdrive/thermald/thermald.py (3)
  50. selfdrive/ui/qt/maps/map_eta.cc (2)
  51. selfdrive/ui/qt/network/networking.cc (20)
  52. selfdrive/ui/qt/network/networking.h (2)
  53. selfdrive/ui/qt/offroad/driverview.cc (13)
  54. selfdrive/ui/qt/offroad/driverview.h (2)
  55. selfdrive/ui/qt/onroad.cc (30)
  56. selfdrive/ui/qt/onroad.h (2)
  57. selfdrive/ui/qt/widgets/cameraview.cc (189)
  58. selfdrive/ui/qt/widgets/cameraview.h (49)
  59. selfdrive/ui/tests/test_translations.py (7)
  60. selfdrive/ui/translations/auto_translate.py (138)
  61. selfdrive/ui/translations/create_badges.py (4)
  62. selfdrive/ui/translations/main_ar.ts (20)
  63. selfdrive/ui/translations/main_de.ts (20)
  64. selfdrive/ui/translations/main_fr.ts (20)
  65. selfdrive/ui/translations/main_ja.ts (20)
  66. selfdrive/ui/translations/main_ko.ts (20)
  67. selfdrive/ui/translations/main_pt-BR.ts (20)
  68. selfdrive/ui/translations/main_th.ts (20)
  69. selfdrive/ui/translations/main_tr.ts (20)
  70. selfdrive/ui/translations/main_zh-CHS.ts (20)
  71. selfdrive/ui/translations/main_zh-CHT.ts (20)
  72. selfdrive/ui/update_translations.py (20)
  73. selfdrive/ui/watch3.cc (8)
  74. selfdrive/updated.py (15)
  75. system/camerad/SConscript (2)
  76. system/camerad/cameras/camera_common.cc (1)
  77. system/camerad/cameras/camera_common.h (1)
  78. system/camerad/cameras/camera_qcom2.cc (22)
  79. system/camerad/sensors/ar0231.cc (5)
  80. system/camerad/sensors/os04c10.cc (105)
  81. system/camerad/sensors/os04c10_registers.h (298)
  82. system/camerad/sensors/ox03c10.cc (5)
  83. system/camerad/sensors/sensor.h (16)
  84. system/camerad/test/get_thumbnails_for_segment.py (13)
  85. system/hardware/base.py (3)
  86. system/hardware/tici/hardware.py (11)
  87. tools/cabana/videowidget.cc (5)
  88. tools/cabana/videowidget.h (4)
  89. tools/car_porting/README.md (74)
  90. tools/car_porting/auto_fingerprint.py (6)
  91. tools/car_porting/examples/subaru_long_accel.ipynb (120)
  92. tools/car_porting/examples/subaru_steer_temp_fault.ipynb (110)
  93. tools/car_porting/test_car_model.py (0)
  94. tools/latencylogger/latency_logger.py (4)
  95. tools/lib/README.md (24)
  96. tools/lib/helpers.py (5)
  97. tools/lib/logreader.py (221)
  98. tools/lib/route.py (26)
  99. tools/lib/tests/test_logreader.py (86)
  100. tools/plotjuggler/juggle.py (71)
Some files were not shown because too many files have changed in this diff.

@ -14,6 +14,8 @@ jobs:
name: create badges
runs-on: ubuntu-20.04
if: github.repository == 'commaai/openpilot'
permissions:
contents: write
steps:
- uses: actions/checkout@v4
with:
@ -23,6 +25,8 @@ jobs:
run: |
${{ env.RUN }} "scons -j$(nproc) && python selfdrive/ui/translations/create_badges.py"
rm .gitattributes
git checkout --orphan badges
git rm -rf --cached .
git config user.email "badge-researcher@comma.ai"

@ -15,6 +15,10 @@ jobs:
if: github.repository == 'commaai/openpilot'
env:
PUSH_IMAGE: true
permissions:
checks: read
contents: read
packages: write
steps:
- name: Wait for green check mark
if: ${{ github.event_name != 'workflow_dispatch' }}
@ -23,6 +27,7 @@ jobs:
ref: master
wait-interval: 30
running-workflow-name: 'build prebuilt'
repo-token: ${{ secrets.GITHUB_TOKEN }}
check-regexp: ^((?!.*(build master-ci).*).)*$
- uses: actions/checkout@v4
with:

@ -14,6 +14,9 @@ jobs:
image: ghcr.io/commaai/openpilot-base:latest
runs-on: ubuntu-20.04
if: github.repository == 'commaai/openpilot'
permissions:
checks: read
contents: write
steps:
- name: Install wait-on-check-action dependencies
run: |
@ -26,6 +29,7 @@ jobs:
ref: master
wait-interval: 30
running-workflow-name: 'build master-ci'
repo-token: ${{ secrets.GITHUB_TOKEN }}
check-regexp: ^((?!.*(build prebuilt).*).)*$
- uses: actions/checkout@v4
with:

@ -20,7 +20,7 @@ jobs:
stale-pr-message: 'This PR has had no activity for ${{ env.DAYS_BEFORE_PR_STALE }} days. It will be automatically closed in ${{ env.DAYS_BEFORE_PR_CLOSE }} days if there is no activity.'
close-pr-message: 'This PR has been automatically closed due to inactivity. Feel free to re-open once activity resumes.'
delete-branch: ${{ github.event.pull_request.head.repo.full_name == 'commaai/openpilot' }} # only delete branches on the main repo
exempt-pr-labels: "ignore stale,needs testing" # if wip or it needs testing from the community, don't mark as stale
exempt-pr-labels: "ignore stale,needs testing,car port" # if wip or it needs testing from the community, don't mark as stale
days-before-pr-stale: ${{ env.DAYS_BEFORE_PR_STALE }}
days-before-pr-close: ${{ env.DAYS_BEFORE_PR_CLOSE }}

@ -39,10 +39,12 @@ repos:
entry: mypy
language: system
types: [python]
args: ['--explicit-package-bases', '--local-partial-types']
args:
- --local-partial-types
- --explicit-package-bases
exclude: '^(third_party/)|(cereal/)|(opendbc/)|(panda/)|(rednose/)|(rednose_repo/)|(tinygrad/)|(tinygrad_repo/)|(teleoprtc/)|(teleoprtc_repo/)|(xx/)'
- repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.1.6
rev: v0.1.11
hooks:
- id: ruff
exclude: '^(third_party/)|(cereal/)|(panda/)|(rednose/)|(rednose_repo/)|(tinygrad/)|(tinygrad_repo/)|(teleoprtc/)|(teleoprtc_repo/)'
@ -87,7 +89,7 @@ repos:
args:
- --lock
- repo: https://github.com/python-jsonschema/check-jsonschema
rev: 0.27.2
rev: 0.27.3
hooks:
- id: check-github-workflows
# - repo: local

@ -7,7 +7,7 @@ Version 0.9.6 (20XX-XX-XX)
* comma body streaming and controls over WebRTC
* Hyundai Staria 2023 support thanks to sunnyhaibin!
* Kia Niro Plug-in Hybrid 2022 support thanks to sunnyhaibin!
* Toyota RAV4 2023 support
* Toyota RAV4 2023-24 support
* Toyota RAV4 Hybrid 2023-24 support
Version 0.9.5 (2023-11-17)

@ -9,6 +9,9 @@ import SCons.Errors
SCons.Warnings.warningAsException(True)
# pending upstream fix - https://github.com/SCons/scons/issues/4461
#SetOption('warn', 'all')
TICI = os.path.isfile('/TICI')
AGNOS = TICI

@ -1 +1 @@
Subproject commit e29625c30bb2a4a34ce21134d0e5a91b2d4fa1b1
Subproject commit d81d86e7cd83d1eb40314964a4d194231381d557

@ -199,7 +199,7 @@ std::unordered_map<std::string, uint32_t> keys = {
{"UbloxAvailable", PERSISTENT},
{"UpdateAvailable", CLEAR_ON_MANAGER_START | CLEAR_ON_ONROAD_TRANSITION},
{"UpdateFailedCount", CLEAR_ON_MANAGER_START},
{"UpdaterAvailableBranches", CLEAR_ON_MANAGER_START},
{"UpdaterAvailableBranches", PERSISTENT},
{"UpdaterCurrentDescription", CLEAR_ON_MANAGER_START},
{"UpdaterCurrentReleaseNotes", CLEAR_ON_MANAGER_START},
{"UpdaterFetchAvailable", CLEAR_ON_MANAGER_START},

@ -1,45 +0,0 @@
import time
class Profiler():
def __init__(self, enabled=False):
self.enabled = enabled
self.cp = {}
self.cp_ignored = []
self.iter = 0
self.start_time = time.time()
self.last_time = self.start_time
self.tot = 0.
def reset(self, enabled=False):
self.enabled = enabled
self.cp = {}
self.cp_ignored = []
self.iter = 0
self.start_time = time.time()
self.last_time = self.start_time
def checkpoint(self, name, ignore=False):
# ignore flag needed when benchmarking threads with ratekeeper
if not self.enabled:
return
tt = time.time()
if name not in self.cp:
self.cp[name] = 0.
if ignore:
self.cp_ignored.append(name)
self.cp[name] += tt - self.last_time
if not ignore:
self.tot += tt - self.last_time
self.last_time = tt
def display(self):
if not self.enabled:
return
self.iter += 1
print("******* Profiling %d *******" % self.iter)
for n, ms in sorted(self.cp.items(), key=lambda x: -x[1]):
if n in self.cp_ignored:
print("%30s: %9.2f avg: %7.2f percent: %3.0f IGNORED" % (n, ms*1000.0, ms*1000.0/self.iter, ms/self.tot*100))
else:
print("%30s: %9.2f avg: %7.2f percent: %3.0f" % (n, ms*1000.0, ms*1000.0/self.iter, ms/self.tot*100))
print(f"Iter clock: {self.tot / self.iter:2.6f} TOTAL: {self.tot:2.2f}")

@ -240,7 +240,7 @@ A supported vehicle is one that just works when you install a comma device. All
|Toyota|RAV4 2017-18|All|openpilot available[<sup>2</sup>](#footnotes)|19 mph|0 mph|[![star](assets/icon-star-full.svg)](##)|[![star](assets/icon-star-empty.svg)](##)|<details><summary>Parts</summary><sub>- 1 RJ45 cable (7 ft)<br>- 1 Toyota A connector<br>- 1 comma 3X<br>- 1 comma power v2<br>- 1 harness box<br>- 1 mount<br>- 1 right angle OBD-C cable (1.5 ft)<br><a href="https://comma.ai/shop/comma-3x.html?make=Toyota&model=RAV4 2017-18">Buy Here</a></sub></details>||
|Toyota|RAV4 2019-21|All|openpilot|0 mph|0 mph|[![star](assets/icon-star-full.svg)](##)|[![star](assets/icon-star-full.svg)](##)|<details><summary>Parts</summary><sub>- 1 RJ45 cable (7 ft)<br>- 1 Toyota A connector<br>- 1 comma 3X<br>- 1 comma power v2<br>- 1 harness box<br>- 1 mount<br>- 1 right angle OBD-C cable (1.5 ft)<br><a href="https://comma.ai/shop/comma-3x.html?make=Toyota&model=RAV4 2019-21">Buy Here</a></sub></details>|<a href="https://www.youtube.com/watch?v=wJxjDd42gGA" target="_blank"><img height="18px" src="assets/icon-youtube.svg"></img></a>|
|Toyota|RAV4 2022|All|Stock|0 mph|0 mph|[![star](assets/icon-star-full.svg)](##)|[![star](assets/icon-star-empty.svg)](##)|<details><summary>Parts</summary><sub>- 1 RJ45 cable (7 ft)<br>- 1 Toyota A connector<br>- 1 comma 3X<br>- 1 comma power v2<br>- 1 harness box<br>- 1 mount<br>- 1 right angle OBD-C cable (1.5 ft)<br><a href="https://comma.ai/shop/comma-3x.html?make=Toyota&model=RAV4 2022">Buy Here</a></sub></details>||
|Toyota|RAV4 2023|All|Stock|0 mph|0 mph|[![star](assets/icon-star-full.svg)](##)|[![star](assets/icon-star-empty.svg)](##)|<details><summary>Parts</summary><sub>- 1 RJ45 cable (7 ft)<br>- 1 Toyota A connector<br>- 1 comma 3X<br>- 1 comma power v2<br>- 1 harness box<br>- 1 mount<br>- 1 right angle OBD-C cable (1.5 ft)<br><a href="https://comma.ai/shop/comma-3x.html?make=Toyota&model=RAV4 2023">Buy Here</a></sub></details>||
|Toyota|RAV4 2023-24|All|Stock|0 mph|0 mph|[![star](assets/icon-star-full.svg)](##)|[![star](assets/icon-star-empty.svg)](##)|<details><summary>Parts</summary><sub>- 1 RJ45 cable (7 ft)<br>- 1 Toyota A connector<br>- 1 comma 3X<br>- 1 comma power v2<br>- 1 harness box<br>- 1 mount<br>- 1 right angle OBD-C cable (1.5 ft)<br><a href="https://comma.ai/shop/comma-3x.html?make=Toyota&model=RAV4 2023-24">Buy Here</a></sub></details>||
|Toyota|RAV4 Hybrid 2016|Toyota Safety Sense P|openpilot available[<sup>2</sup>](#footnotes)|0 mph|0 mph|[![star](assets/icon-star-full.svg)](##)|[![star](assets/icon-star-full.svg)](##)|<details><summary>Parts</summary><sub>- 1 RJ45 cable (7 ft)<br>- 1 Toyota A connector<br>- 1 comma 3X<br>- 1 comma power v2<br>- 1 harness box<br>- 1 mount<br>- 1 right angle OBD-C cable (1.5 ft)<br><a href="https://comma.ai/shop/comma-3x.html?make=Toyota&model=RAV4 Hybrid 2016">Buy Here</a></sub></details>|<a href="https://youtu.be/LhT5VzJVfNI?t=26" target="_blank"><img height="18px" src="assets/icon-youtube.svg"></img></a>|
|Toyota|RAV4 Hybrid 2017-18|All|openpilot available[<sup>2</sup>](#footnotes)|0 mph|0 mph|[![star](assets/icon-star-full.svg)](##)|[![star](assets/icon-star-full.svg)](##)|<details><summary>Parts</summary><sub>- 1 RJ45 cable (7 ft)<br>- 1 Toyota A connector<br>- 1 comma 3X<br>- 1 comma power v2<br>- 1 harness box<br>- 1 mount<br>- 1 right angle OBD-C cable (1.5 ft)<br><a href="https://comma.ai/shop/comma-3x.html?make=Toyota&model=RAV4 Hybrid 2017-18">Buy Here</a></sub></details>|<a href="https://youtu.be/LhT5VzJVfNI?t=26" target="_blank"><img height="18px" src="assets/icon-youtube.svg"></img></a>|
|Toyota|RAV4 Hybrid 2019-21|All|openpilot|0 mph|0 mph|[![star](assets/icon-star-full.svg)](##)|[![star](assets/icon-star-full.svg)](##)|<details><summary>Parts</summary><sub>- 1 RJ45 cable (7 ft)<br>- 1 Toyota A connector<br>- 1 comma 3X<br>- 1 comma power v2<br>- 1 harness box<br>- 1 mount<br>- 1 right angle OBD-C cable (1.5 ft)<br><a href="https://comma.ai/shop/comma-3x.html?make=Toyota&model=RAV4 Hybrid 2019-21">Buy Here</a></sub></details>||

@ -1 +1 @@
Subproject commit 20722a59467fc7d697bfe1edd233a38bafbc89d2
Subproject commit 2a0536c63148a02add52555386b5533f3555ef58

poetry.lock (generated, 1884 changes)
File diff suppressed because it is too large.

@ -125,7 +125,7 @@ inputs = "*"
Jinja2 = "*"
lru-dict = "*"
matplotlib = "*"
metadrive-simulator = { git = "https://github.com/metadriverse/metadrive.git", rev ="72e842cd1d025bf676e4af8797a01e4aa282109f", markers = "platform_machine != 'aarch64'" } # no linux/aarch64 wheels for certain dependencies
metadrive-simulator = { git = "https://github.com/metadriverse/metadrive.git", rev ="main", markers = "platform_machine != 'aarch64'" } # no linux/aarch64 wheels for certain dependencies
mpld3 = "*"
mypy = "*"
myst-parser = "*"

@ -291,11 +291,8 @@ system/camerad/main.cc
system/camerad/snapshot/*
system/camerad/cameras/camera_common.h
system/camerad/cameras/camera_common.cc
system/camerad/sensors/ar0231.cc
system/camerad/sensors/ar0231_registers.h
system/camerad/sensors/ox03c10.cc
system/camerad/sensors/ox03c10_registers.h
system/camerad/sensors/sensor.h
system/camerad/sensors/*.h
system/camerad/sensors/*.cc
selfdrive/manager/__init__.py
selfdrive/manager/build.py

@ -637,6 +637,8 @@ def ws_proxy_recv(ws: WebSocket, local_sock: socket.socket, ssock: socket.socket
while not (end_event.is_set() or global_end_event.is_set()):
try:
data = ws.recv()
if isinstance(data, str):
data = data.encode("utf-8")
local_sock.sendall(data)
except WebSocketTimeoutException:
pass
@ -728,10 +730,11 @@ def ws_manage(ws: WebSocket, end_event: threading.Event) -> None:
if onroad != onroad_prev:
onroad_prev = onroad
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_USER_TIMEOUT, 16000 if onroad else 0)
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 7 if onroad else 30)
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 7 if onroad else 10)
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 2 if onroad else 3)
if sock is not None:
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_USER_TIMEOUT, 16000 if onroad else 0)
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPIDLE, 7 if onroad else 30)
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPINTVL, 7 if onroad else 10)
sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_KEEPCNT, 2 if onroad else 3)
if end_event.wait(5):
break

@ -1,63 +1,3 @@
# selfdrive/car
Check out [this blog post](https://blog.comma.ai/how-to-write-a-car-port-for-openpilot/) for a high-level overview of porting a car.
## Useful car porting utilities
Testing car ports in your car is very time-consuming. Check out these utilities to do basic checks on your work before running it in your car.
### [Cabana](/tools/cabana/README.md)
View your car's CAN signals through DBC files, which openpilot uses to parse and create messages that talk to the car.
Example:
```bash
> tools/cabana/cabana '1bbe6bf2d62f58a8|2022-07-14--17-11-43'
```
### [selfdrive/debug/auto_fingerprint.py](/selfdrive/debug/auto_fingerprint.py)
Given a route and platform, automatically inserts FW fingerprints from the platform into the correct place in values.py
Example:
```bash
> python selfdrive/debug/auto_fingerprint.py '1bbe6bf2d62f58a8|2022-07-14--17-11-43' 'SUBARU OUTBACK 6TH GEN'
Attempting to add fw version for: SUBARU OUTBACK 6TH GEN
```
### [selfdrive/car/tests/test_car_interfaces.py](/selfdrive/car/tests/test_car_interfaces.py)
Finds common bugs for car interfaces, without even requiring a route.
#### Example: Typo in signal name
```bash
> pytest selfdrive/car/tests/test_car_interfaces.py -k subaru # replace with the brand you are working on
=====================================================================
FAILED selfdrive/car/tests/test_car_interfaces.py::TestCarInterfaces::test_car_interfaces_165_SUBARU_LEGACY_7TH_GEN - KeyError: 'CruiseControlOOPS'
```
### [selfdrive/debug/test_car_model.py](/selfdrive/debug/test_car_model.py)
Given a route, runs most of the car interface to check for common errors like missing signals, blocked panda messages, and safety mismatches.
#### Example: panda safety mismatch for gasPressed
```bash
> python selfdrive/debug/test_car_model.py '4822a427b188122a|2023-08-14--16-22-21'
=====================================================================
FAIL: test_panda_safety_carstate (__main__.CarModelTestCase.test_panda_safety_carstate)
Assert that panda safety matches openpilot's carState
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/batman/xx/openpilot/openpilot/selfdrive/car/tests/test_models.py", line 380, in test_panda_safety_carstate
self.assertFalse(len(failed_checks), f"panda safety doesn't agree with openpilot: {failed_checks}")
AssertionError: 1 is not false : panda safety doesn't agree with openpilot: {'gasPressed': 116}
```
## Car port structure
### interface.py

@ -244,10 +244,13 @@ class CarInfo:
# all the parts needed for the supported car
car_parts: CarParts = field(default_factory=CarParts)
def __post_init__(self):
self.make, self.model, self.years = split_name(self.name)
self.year_list = get_year_list(self.years)
def init(self, CP: car.CarParams, all_footnotes: Dict[Enum, int]):
self.car_name = CP.carName
self.car_fingerprint = CP.carFingerprint
self.make, self.model, self.years = split_name(self.name)
# longitudinal column
op_long = "Stock"
@ -309,7 +312,6 @@ class CarInfo:
self.row[Column.STEERING_TORQUE] = Star.FULL
self.all_footnotes = all_footnotes
self.year_list = get_year_list(self.years)
self.detail_sentence = self.get_detail_sentence(CP)
return self

@ -154,21 +154,6 @@ class CarController:
if self.frame % 10 == 0:
can_sends.append(gmcan.create_pscm_status(self.packer_pt, CanBus.CAMERA, CS.pscm_status))
# Show green icon when LKA torque is applied, and
# alarming orange icon when approaching torque limit.
# If not sent again, LKA icon disappears in about 5 seconds.
# Conveniently, sending camera message periodically also works as a keepalive.
lka_active = CS.lkas_status == 1
lka_critical = lka_active and abs(actuators.steer) > 0.9
lka_icon_status = (lka_active, lka_critical)
# SW_GMLAN not yet on cam harness, no HUD alerts
if self.CP.networkLocation != NetworkLocation.fwdCamera and \
(self.frame % self.params.CAMERA_KEEPALIVE_STEP == 0 or lka_icon_status != self.lka_icon_status_last):
steer_alert = hud_alert in (VisualAlert.steerRequired, VisualAlert.ldw)
can_sends.append(gmcan.create_lka_icon_command(CanBus.SW_GMLAN, lka_active, lka_critical, steer_alert))
self.lka_icon_status_last = lka_icon_status
new_actuators = actuators.copy()
new_actuators.steer = self.apply_steer_last / self.params.STEER_MAX
new_actuators.steerOutputCan = self.apply_steer_last

@ -139,7 +139,6 @@ class CanBus:
OBSTACLE = 1
CAMERA = 2
CHASSIS = 2
SW_GMLAN = 3
LOOPBACK = 128
DROPPED = 192

@ -744,9 +744,6 @@ FW_VERSIONS = {
b'78109-TPG-A110\x00\x00',
b'78109-TPG-A210\x00\x00',
],
(Ecu.hud, 0x18da61f1, None): [
b'78209-TLA-X010\x00\x00',
],
(Ecu.fwdRadar, 0x18dab0f1, None): [
b'36802-TMB-H040\x00\x00',
b'36802-TPA-E040\x00\x00',

@ -122,7 +122,7 @@ class CarInterface(CarInterfaceBase):
ret.openpilotLongitudinalControl = experimental_long and ret.experimentalLongitudinalAvailable
if candidate in GLOBAL_GEN2 and ret.openpilotLongitudinalControl:
ret.flags |= SubaruFlags.DISABLE_EYESIGHT
ret.flags |= SubaruFlags.DISABLE_EYESIGHT.value
if ret.openpilotLongitudinalControl:
ret.longitudinalTuning.kpBP = [0., 5., 35.]

@ -95,8 +95,8 @@ if __name__ == "__main__":
_jerks["down_jerk"] > MAX_LAT_JERK_DOWN
violation_str = " - VIOLATION" if violation else ""
print(f"{car_model:{max_car_model_len}} - up jerk: {round(_jerks['up_jerk'], 2):5} \
m/s^3, down jerk: {round(_jerks['down_jerk'], 2):5} m/s^3{violation_str}")
print(f"{car_model:{max_car_model_len}} - up jerk: {round(_jerks['up_jerk'], 2):5} " +
f"m/s^3, down jerk: {round(_jerks['down_jerk'], 2):5} m/s^3{violation_str}")
# exit with test result
sys.exit(not result.result.wasSuccessful())

@ -21,6 +21,7 @@ from openpilot.selfdrive.car.car_helpers import FRAME_FINGERPRINT, interfaces
from openpilot.selfdrive.car.honda.values import CAR as HONDA, HONDA_BOSCH
from openpilot.selfdrive.car.tests.routes import non_tested_cars, routes, CarTestRoute
from openpilot.selfdrive.controls.controlsd import Controls
from openpilot.selfdrive.test.helpers import read_segment_list
from openpilot.selfdrive.test.openpilotci import get_url
from openpilot.tools.lib.logreader import LogReader
from openpilot.tools.lib.route import Route, SegmentName, RouteName
@ -52,12 +53,9 @@ def get_test_cases() -> List[Tuple[str, Optional[CarTestRoute]]]:
test_cases.extend(sorted((c.value, r) for r in routes_by_car.get(c, (None,))))
else:
with open(os.path.join(BASEDIR, INTERNAL_SEG_LIST), "r") as f:
seg_list = f.read().splitlines()
seg_list_grouped = [(platform[2:], segment) for platform, segment in zip(seg_list[::2], seg_list[1::2], strict=True)]
seg_list_grouped = random.sample(seg_list_grouped, INTERNAL_SEG_CNT or len(seg_list_grouped))
for platform, segment in seg_list_grouped:
segment_list = read_segment_list(os.path.join(BASEDIR, INTERNAL_SEG_LIST))
segment_list = random.sample(segment_list, INTERNAL_SEG_CNT or len(segment_list))
for platform, segment in segment_list:
segment_name = SegmentName(segment)
test_cases.append((platform, CarTestRoute(segment_name.route_name.canonical_name, platform,
segment=segment_name.segment_num)))

@ -145,6 +145,7 @@ FW_VERSIONS = {
b'8821F0601200 ',
b'8821F0601300 ',
b'8821F0601400 ',
b'8821F0601500 ',
b'8821F0602000 ',
b'8821F0603300 ',
b'8821F0603400 ',
@ -189,6 +190,7 @@ FW_VERSIONS = {
b'8821F0601200 ',
b'8821F0601300 ',
b'8821F0601400 ',
b'8821F0601500 ',
b'8821F0602000 ',
b'8821F0603300 ',
b'8821F0603400 ',
@ -736,6 +738,7 @@ FW_VERSIONS = {
b'\x018966353Q4000\x00\x00\x00\x00',
b'\x018966353R1100\x00\x00\x00\x00',
b'\x018966353R7100\x00\x00\x00\x00',
b'\x018966353R8000\x00\x00\x00\x00',
b'\x018966353R8100\x00\x00\x00\x00',
],
(Ecu.engine, 0x7e0, None): [
@ -1134,6 +1137,7 @@ FW_VERSIONS = {
CAR.RAV4_TSS2_2023: {
(Ecu.abs, 0x7b0, None): [
b'\x01F15260R450\x00\x00\x00\x00\x00\x00',
b'\x01F15260R51000\x00\x00\x00\x00',
b'\x01F15264283200\x00\x00\x00\x00',
b'\x01F15264283300\x00\x00\x00\x00',
b'\x01F152642F1000\x00\x00\x00\x00',
@ -1149,6 +1153,7 @@ FW_VERSIONS = {
b'\x01896634AE1001\x00\x00\x00\x00',
b'\x01896634AF0000\x00\x00\x00\x00',
b'\x01896634AJ2000\x00\x00\x00\x00',
b'\x01896634AL5000\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'\x018821F0R03100\x00\x00\x00\x00',
@ -1407,6 +1412,7 @@ FW_VERSIONS = {
CAR.LEXUS_RC: {
(Ecu.engine, 0x700, None): [
b'\x01896632461100\x00\x00\x00\x00',
b'\x01896632478100\x00\x00\x00\x00',
b'\x01896632478200\x00\x00\x00\x00',
],
(Ecu.engine, 0x7e0, None): [

@ -180,7 +180,7 @@ CAR_INFO: Dict[str, Union[ToyotaCarInfo, List[ToyotaCarInfo]]] = {
ToyotaCarInfo("Toyota RAV4 Hybrid 2022", video_link="https://youtu.be/U0nH9cnrFB0"),
],
CAR.RAV4_TSS2_2023: [
ToyotaCarInfo("Toyota RAV4 2023"),
ToyotaCarInfo("Toyota RAV4 2023-24"),
ToyotaCarInfo("Toyota RAV4 Hybrid 2023-24"),
],
CAR.MIRAI: ToyotaCarInfo("Toyota Mirai 2021"),

@ -5,7 +5,7 @@ from openpilot.common.conversions import Conversions as CV
from openpilot.common.realtime import DT_CTRL
from openpilot.selfdrive.car import apply_driver_steer_torque_limits
from openpilot.selfdrive.car.volkswagen import mqbcan, pqcan
from openpilot.selfdrive.car.volkswagen.values import CANBUS, PQ_CARS, CarControllerParams
from openpilot.selfdrive.car.volkswagen.values import CANBUS, PQ_CARS, CarControllerParams, VolkswagenFlags
VisualAlert = car.CarControl.HUDControl.VisualAlert
LongCtrlState = car.CarControl.Actuators.LongControlState
@ -65,13 +65,22 @@ class CarController:
self.apply_steer_last = apply_steer
can_sends.append(self.CCS.create_steering_control(self.packer_pt, CANBUS.pt, apply_steer, hca_enabled))
if self.CP.flags & VolkswagenFlags.STOCK_HCA_PRESENT:
# Pacify VW Emergency Assist driver inactivity detection by changing its view of driver steering input torque
# to the greatest of actual driver input or 2x openpilot's output (1x openpilot output is not enough to
# consistently reset inactivity detection on straight level roads). See commaai/openpilot#23274 for background.
ea_simulated_torque = clip(apply_steer * 2, -self.CCP.STEER_MAX, self.CCP.STEER_MAX)
if abs(CS.out.steeringTorque) > abs(ea_simulated_torque):
ea_simulated_torque = CS.out.steeringTorque
can_sends.append(self.CCS.create_eps_update(self.packer_pt, CANBUS.cam, CS.eps_stock_values, ea_simulated_torque))
# **** Acceleration Controls ******************************************** #
if self.frame % self.CCP.ACC_CONTROL_STEP == 0 and self.CP.openpilotLongitudinalControl:
acc_control = self.CCS.acc_control_value(CS.out.cruiseState.available, CS.out.accFaulted, CC.longActive)
accel = clip(actuators.accel, self.CCP.ACCEL_MIN, self.CCP.ACCEL_MAX) if CC.longActive else 0
stopping = actuators.longControlState == LongCtrlState.stopping
starting = actuators.longControlState == LongCtrlState.starting
starting = actuators.longControlState == LongCtrlState.pid and (CS.esp_hold_confirmation or CS.out.vEgo < self.CP.vEgoStopping)
can_sends.extend(self.CCS.create_acc_accel_control(self.packer_pt, CANBUS.pt, CS.acc_type, CC.longActive, accel,
acc_control, stopping, starting, CS.esp_hold_confirmation))

@ -4,7 +4,7 @@ from openpilot.common.conversions import Conversions as CV
from openpilot.selfdrive.car.interfaces import CarStateBase
from opendbc.can.parser import CANParser
from openpilot.selfdrive.car.volkswagen.values import DBC, CANBUS, PQ_CARS, NetworkLocation, TransmissionType, GearShifter, \
CarControllerParams
CarControllerParams, VolkswagenFlags
class CarState(CarStateBase):
@ -14,6 +14,7 @@ class CarState(CarStateBase):
self.button_states = {button.event_type: False for button in self.CCP.BUTTONS}
self.esp_hold_confirmation = False
self.upscale_lead_car_signal = False
self.eps_stock_values = False
def create_button_events(self, pt_cp, buttons):
button_events = []
@ -59,6 +60,11 @@ class CarState(CarStateBase):
ret.steerFaultPermanent = hca_status in ("DISABLED", "FAULT")
ret.steerFaultTemporary = hca_status in ("INITIALIZING", "REJECTED")
# VW Emergency Assist status tracking and mitigation
self.eps_stock_values = pt_cp.vl["LH_EPS_03"]
if self.CP.flags & VolkswagenFlags.STOCK_HCA_PRESENT:
ret.carFaultedNonCritical = bool(cam_cp.vl["HCA_01"]["EA_Ruckfreigabe"]) or cam_cp.vl["HCA_01"]["EA_ACC_Sollstatus"] > 0
# Update gas, brakes, and gearshift.
ret.gas = pt_cp.vl["Motor_20"]["MO_Fahrpedalrohwert_01"] / 100.0
ret.gasPressed = ret.gas > 0
@ -293,6 +299,11 @@ class CarState(CarStateBase):
messages = []
if CP.flags & VolkswagenFlags.STOCK_HCA_PRESENT:
messages += [
("HCA_01", 1), # From R242 Driver assistance camera, 50Hz if steering/1Hz if not
]
if CP.networkLocation == NetworkLocation.fwdCamera:
messages += [
# sig_address, frequency

@ -540,6 +540,7 @@ FW_VERSIONS = {
b'\xf1\x875NA907115E \xf1\x890003',
b'\xf1\x875NA907115E \xf1\x890005',
b'\xf1\x875NA907115J \xf1\x890002',
b'\xf1\x875NA907115K \xf1\x890004',
b'\xf1\x8783A907115 \xf1\x890007',
b'\xf1\x8783A907115B \xf1\x890005',
b'\xf1\x8783A907115F \xf1\x890002',
@ -570,6 +571,7 @@ FW_VERSIONS = {
b'\xf1\x870GC300046Q \xf1\x892802',
],
(Ecu.srs, 0x715, None): [
b'\xf1\x875Q0959655AG\xf1\x890336\xf1\x82\x1316143231313500314617011730179333423100',
b'\xf1\x875Q0959655AG\xf1\x890338\xf1\x82\x1316143231313500314617011730179333423100',
b'\xf1\x875Q0959655AR\xf1\x890317\xf1\x82\x1331310031333334313132573732379333313100',
b'\xf1\x875Q0959655BJ\xf1\x890336\xf1\x82\x1312110031333300314232583732379333423100',
@ -587,6 +589,7 @@ FW_VERSIONS = {
(Ecu.eps, 0x712, None): [
b'\xf1\x875Q0909143M \xf1\x892041\xf1\x820529A6060603',
b'\xf1\x875Q0909143P \xf1\x892051\xf1\x820527A6050705',
b'\xf1\x875Q0909143P \xf1\x892051\xf1\x820527A6070705',
b'\xf1\x875Q0909144AB\xf1\x891082\xf1\x82\x0521A60604A1',
b'\xf1\x875Q0910143C \xf1\x892211\xf1\x82\x0567A6000600',
b'\xf1\x875Q0910143C \xf1\x892211\xf1\x82\x0567A6017A00',

@ -3,7 +3,7 @@ from panda import Panda
from openpilot.common.conversions import Conversions as CV
from openpilot.selfdrive.car import get_safety_config
from openpilot.selfdrive.car.interfaces import CarInterfaceBase
from openpilot.selfdrive.car.volkswagen.values import CAR, PQ_CARS, CANBUS, NetworkLocation, TransmissionType, GearShifter
from openpilot.selfdrive.car.volkswagen.values import CAR, PQ_CARS, CANBUS, NetworkLocation, TransmissionType, GearShifter, VolkswagenFlags
ButtonType = car.CarState.ButtonEvent.Type
EventName = car.CarEvent.EventName
@ -67,6 +67,9 @@ class CarInterface(CarInterfaceBase):
else:
ret.networkLocation = NetworkLocation.fwdCamera
if 0x126 in fingerprint[2]: # HCA_01
ret.flags |= VolkswagenFlags.STOCK_HCA_PRESENT.value
# Global lateral tuning defaults, can be overridden per-vehicle
ret.steerActuatorDelay = 0.1
@ -90,11 +93,9 @@ class CarInterface(CarInterfaceBase):
ret.pcmCruise = not ret.openpilotLongitudinalControl
ret.stoppingControl = True
ret.startingState = True
ret.startAccel = 1.0
ret.stopAccel = -0.55
ret.vEgoStarting = 1.0
ret.vEgoStopping = 1.0
ret.vEgoStarting = 0.1
ret.vEgoStopping = 0.5
ret.longitudinalTuning.kpV = [0.1]
ret.longitudinalTuning.kiV = [0.0]

@ -10,6 +10,24 @@ def create_steering_control(packer, bus, apply_steer, lkas_enabled):
return packer.make_can_msg("HCA_01", bus, values)
def create_eps_update(packer, bus, eps_stock_values, ea_simulated_torque):
values = {s: eps_stock_values[s] for s in [
"COUNTER", # Sync counter value to EPS output
"EPS_Lenkungstyp", # EPS rack type
"EPS_Berechneter_LW", # Absolute raw steering angle
"EPS_VZ_BLW", # Raw steering angle sign
"EPS_HCA_Status", # EPS HCA control status
]}
values.update({
# Absolute driver torque input and sign, with EA inactivity mitigation
"EPS_Lenkmoment": abs(ea_simulated_torque),
"EPS_VZ_Lenkmoment": 1 if ea_simulated_torque < 0 else 0,
})
return packer.make_can_msg("LH_EPS_03", bus, values)
def create_lka_hud_control(packer, bus, ldw_stock_values, enabled, steering_pressed, hud_alert, hud_control):
values = {}
if len(ldw_stock_values):
@ -94,7 +112,7 @@ def create_acc_accel_control(packer, bus, acc_type, acc_enabled, accel, acc_cont
acc_hold_type = 0
acc_07_values = {
"ACC_Anhalteweg": 0.75 if stopping else 20.46, # Distance to stop (stopping coordinator handles terminal roll-out)
"ACC_Anhalteweg": 0.3 if stopping else 20.46, # Distance to stop (stopping coordinator handles terminal roll-out)
"ACC_Freilauf_Info": 2 if acc_enabled else 0,
"ACC_Folgebeschl": 3.02, # Not using secondary controller accel unless and until we understand its impact
"ACC_Sollbeschleunigung_02": accel if acc_enabled else 3.01,

@ -1,6 +1,6 @@
from collections import defaultdict, namedtuple
from dataclasses import dataclass, field
from enum import Enum, StrEnum
from enum import Enum, IntFlag, StrEnum
from typing import Dict, List, Union
from cereal import car
@ -109,6 +109,10 @@ class CANBUS:
cam = 2
class VolkswagenFlags(IntFlag):
STOCK_HCA_PRESENT = 1
# Check the 7th and 8th characters of the VIN before adding a new CAR. If the
# chassis code is already listed below, don't add a new CAR, just add to the
# FW_VERSIONS for that existing CAR.

@ -8,7 +8,6 @@ from typing import SupportsFloat
from cereal import car, log
from openpilot.common.numpy_fast import clip
from openpilot.common.realtime import config_realtime_process, Priority, Ratekeeper, DT_CTRL
from openpilot.common.profiler import Profiler
from openpilot.common.params import Params
import cereal.messaging as messaging
from cereal.visionipc import VisionIpcClient, VisionStreamType
@ -195,7 +194,6 @@ class Controls:
# controlsd is driven by can recv, expected at 100Hz
self.rk = Ratekeeper(100, print_delay_threshold=None)
self.prof = Profiler(False) # off by default
def set_initial_state(self):
if REPLAY:
@ -851,12 +849,10 @@ class Controls:
def step(self):
start_time = time.monotonic()
self.prof.checkpoint("Ratekeeper", ignore=True)
# Sample data from sockets and get a carState
CS = self.data_sample()
cloudlog.timestamp("Data sampled")
self.prof.checkpoint("Sample")
self.update_events(CS)
cloudlog.timestamp("Events updated")
@ -864,16 +860,12 @@ class Controls:
if not self.CP.passive and self.initialized:
# Update control state
self.state_transition(CS)
self.prof.checkpoint("State transition")
# Compute actuators (runs PID loops and lateral MPC)
CC, lac_log = self.state_control(CS)
self.prof.checkpoint("State Control")
# Publish data
self.publish_logs(CS, start_time, CC, lac_log)
self.prof.checkpoint("Sent")
self.CS_prev = CS
@ -893,7 +885,6 @@ class Controls:
while True:
self.step()
self.rk.monitor_time()
self.prof.display()
except SystemExit:
e.set()
t.join()

@ -47,13 +47,14 @@ def limit_accel_in_turns(v_ego, angle_steers, a_target, CP):
class LongitudinalPlanner:
def __init__(self, CP, init_v=0.0, init_a=0.0):
def __init__(self, CP, init_v=0.0, init_a=0.0, dt=DT_MDL):
self.CP = CP
self.mpc = LongitudinalMpc()
self.fcw = False
self.dt = dt
self.a_desired = init_a
self.v_desired_filter = FirstOrderFilter(init_v, 2.0, DT_MDL)
self.v_desired_filter = FirstOrderFilter(init_v, 2.0, self.dt)
self.v_model_error = 0.0
self.v_desired_trajectory = np.zeros(CONTROL_N)
@ -148,8 +149,8 @@ class LongitudinalPlanner:
# Interpolate 0.05 seconds and save as starting point for next iteration
a_prev = self.a_desired
self.a_desired = float(interp(DT_MDL, ModelConstants.T_IDXS[:CONTROL_N], self.a_desired_trajectory))
self.v_desired_filter.x = self.v_desired_filter.x + DT_MDL * (self.a_desired + a_prev) / 2.0
self.a_desired = float(interp(self.dt, ModelConstants.T_IDXS[:CONTROL_N], self.a_desired_trajectory))
self.v_desired_filter.x = self.v_desired_filter.x + self.dt * (self.a_desired + a_prev) / 2.0
def publish(self, sm, pm):
plan_send = messaging.new_message('longitudinalPlan')

@ -3,10 +3,11 @@ import argparse
import binascii
import time
from collections import defaultdict
from typing import Optional
import cereal.messaging as messaging
from openpilot.selfdrive.debug.can_table import can_table
from openpilot.tools.lib.logreader import logreader_from_route_or_segment
from openpilot.tools.lib.logreader import LogIterable, LogReader
RED = '\033[91m'
CLEAR = '\033[0m'
@ -95,13 +96,15 @@ if __name__ == "__main__":
args = parser.parse_args()
init_lr, new_lr = None, None
init_lr: Optional[LogIterable] = None
new_lr: Optional[LogIterable] = None
if args.init:
if args.init == '':
init_lr = []
else:
init_lr = logreader_from_route_or_segment(args.init)
init_lr = LogReader(args.init)
if args.comp:
new_lr = logreader_from_route_or_segment(args.comp)
new_lr = LogReader(args.comp)
can_printer(args.bus, init_msgs=init_lr, new_msgs=new_lr, table=args.table)

@ -4,16 +4,12 @@ import math
import datetime
from collections import Counter
from pprint import pprint
from tqdm import tqdm
from typing import List, Tuple, cast
from cereal.services import SERVICE_LIST
from openpilot.tools.lib.route import Route
from openpilot.tools.lib.logreader import LogReader
from openpilot.tools.lib.logreader import LogReader, ReadMode
if __name__ == "__main__":
r = Route(sys.argv[1])
cnt_valid: Counter = Counter()
cnt_events: Counter = Counter()
@ -24,31 +20,29 @@ if __name__ == "__main__":
start_time = math.inf
end_time = -math.inf
ignition_off = None
for q in tqdm(r.qlog_paths()):
if q is None:
continue
lr = list(LogReader(q))
for msg in lr:
end_time = max(end_time, msg.logMonoTime)
start_time = min(start_time, msg.logMonoTime)
for msg in LogReader(sys.argv[1], ReadMode.QLOG):
end_time = max(end_time, msg.logMonoTime)
start_time = min(start_time, msg.logMonoTime)
if msg.which() == 'onroadEvents':
for e in msg.onroadEvents:
cnt_events[e.name] += 1
elif msg.which() == 'controlsState':
if len(alerts) == 0 or alerts[-1][1] != msg.controlsState.alertType:
if msg.which() == 'onroadEvents':
for e in msg.onroadEvents:
cnt_events[e.name] += 1
elif msg.which() == 'controlsState':
at = msg.controlsState.alertType
if "/override" not in at or "lanechange" in at.lower():
if len(alerts) == 0 or alerts[-1][1] != at:
t = (msg.logMonoTime - start_time) / 1e9
alerts.append((t, msg.controlsState.alertType))
elif msg.which() == 'pandaStates':
if ignition_off is None:
ign = any(ps.ignitionLine or ps.ignitionCan for ps in msg.pandaStates)
if not ign:
ignition_off = msg.logMonoTime
elif msg.which() in cams:
cnt_cameras[msg.which()] += 1
alerts.append((t, at))
elif msg.which() == 'pandaStates':
if ignition_off is None:
ign = any(ps.ignitionLine or ps.ignitionCan for ps in msg.pandaStates)
if not ign:
ignition_off = msg.logMonoTime
elif msg.which() in cams:
cnt_cameras[msg.which()] += 1
if not msg.valid:
cnt_valid[msg.which()] += 1
if not msg.valid:
cnt_valid[msg.which()] += 1
duration = (end_time - start_time) / 1e9

@ -1,11 +1,9 @@
#!/usr/bin/env python3
import os
import argparse
import json
import cereal.messaging as messaging
from openpilot.tools.lib.logreader import LogReader
from openpilot.tools.lib.route import Route
LEVELS = {
"DEBUG": 10,
@ -53,31 +51,18 @@ if __name__ == "__main__":
parser.add_argument("route", type=str, nargs='*', help="route name + segment number for offline usage")
args = parser.parse_args()
logs = None
if len(args.route):
if os.path.exists(args.route[0]):
logs = [args.route[0]]
else:
r = Route(args.route[0])
logs = [q_log if r_log is None else r_log for (q_log, r_log) in zip(r.qlog_paths(), r.log_paths(), strict=True)]
if len(args.route) == 2 and logs:
n = int(args.route[1])
logs = [logs[n]]
min_level = LEVELS[args.level]
if logs:
for log in logs:
if log:
lr = LogReader(log)
for m in lr:
if m.which() == 'logMessage':
print_logmessage(m.logMonoTime, m.logMessage, min_level)
elif m.which() == 'errorLogMessage' and 'qlog' in log:
print_logmessage(m.logMonoTime, m.errorLogMessage, min_level)
elif m.which() == 'androidLog':
print_androidlog(m.logMonoTime, m.androidLog)
if args.route:
for route in args.route:
lr = LogReader(route)
for m in lr:
if m.which() == 'logMessage':
print_logmessage(m.logMonoTime, m.logMessage, min_level)
elif m.which() == 'errorLogMessage':
print_logmessage(m.logMonoTime, m.errorLogMessage, min_level)
elif m.which() == 'androidLog':
print_androidlog(m.logMonoTime, m.androidLog)
else:
sm = messaging.SubMaster(['logMessage', 'androidLog'], addr=args.addr)
while True:

@ -1,8 +1,7 @@
#!/usr/bin/env python3
import sys
from openpilot.tools.lib.route import Route
from openpilot.tools.lib.logreader import MultiLogIterator
from openpilot.tools.lib.logreader import LogReader, ReadMode
def get_fingerprint(lr):
@ -40,6 +39,5 @@ if __name__ == "__main__":
print("Usage: ./fingerprint_from_route.py <route>")
sys.exit(1)
route = Route(sys.argv[1])
lr = MultiLogIterator(route.log_paths()[:5])
lr = LogReader(sys.argv[1], ReadMode.QLOG)
get_fingerprint(lr)

@ -3,14 +3,12 @@
import argparse
from openpilot.selfdrive.test.process_replay.process_replay import CONFIGS, replay_process
from openpilot.tools.lib.logreader import MultiLogIterator
from openpilot.tools.lib.route import Route
from openpilot.tools.lib.helpers import save_log
from openpilot.tools.lib.logreader import LogReader
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Run process on route and create new logs",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("--qlog", help="Use qlog instead of log", action="store_true")
parser.add_argument("--fingerprint", help="The fingerprint to use")
parser.add_argument("route", help="The route name to use")
parser.add_argument("process", help="The process to run")
@ -18,8 +16,7 @@ if __name__ == "__main__":
cfg = [c for c in CONFIGS if c.proc_name == args.process][0]
route = Route(args.route)
lr = MultiLogIterator(route.qlog_paths() if args.qlog else route.log_paths())
lr = LogReader(args.route)
inputs = list(lr)
outputs = replay_process(cfg, inputs, fingerprint=args.fingerprint)
@ -29,5 +26,5 @@ if __name__ == "__main__":
inputs = [i for i in inputs if i.which() not in produces]
outputs = sorted(inputs + outputs, key=lambda x: x.logMonoTime)
fn = f"{args.route}_{args.process}.bz2"
fn = f"{args.route.replace('/', '_')}_{args.process}.bz2"
save_log(fn, outputs)

@ -1,111 +0,0 @@
#!/usr/bin/env python3
'''
printing the gap between interrupts in a histogram to check if the
frequency is what we expect, the bmx is not interrupt driven for as we
get interrupts in a 2kHz rate.
'''
import argparse
import sys
import numpy as np
from collections import defaultdict
from openpilot.tools.lib.logreader import LogReader
from openpilot.tools.lib.route import Route
import matplotlib.pyplot as plt
SRC_BMX = "bmx055"
SRC_LSM = "lsm6ds3"
def parseEvents(log_reader):
bmx_data = defaultdict(list)
lsm_data = defaultdict(list)
for m in log_reader:
if m.which() not in ['accelerometer', 'gyroscope']:
continue
d = getattr(m, m.which()).to_dict()
if d["source"] == SRC_BMX and "acceleration" in d:
bmx_data["accel"].append(d["timestamp"] / 1e9)
if d["source"] == SRC_BMX and "gyroUncalibrated" in d:
bmx_data["gyro"].append(d["timestamp"] / 1e9)
if d["source"] == SRC_LSM and "acceleration" in d:
lsm_data["accel"].append(d["timestamp"] / 1e9)
if d["source"] == SRC_LSM and "gyroUncalibrated" in d:
lsm_data["gyro"].append(d["timestamp"] / 1e9)
return bmx_data, lsm_data
def cleanData(data):
if len(data) == 0:
return [], []
data.sort()
diffs = np.diff(data)
return data, diffs
def logAvgValues(data, sensor):
if len(data) == 0:
print(f"{sensor}: no data to average")
return
avg = sum(data) / len(data)
hz = 1 / avg
print(f"{sensor}: data_points: {len(data)} avg [ns]: {avg} avg [Hz]: {hz}")
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("route", type=str, help="route name")
parser.add_argument("segment", type=int, help="segment number")
args = parser.parse_args()
r = Route(args.route)
logs = r.log_paths()
if len(logs) == 0:
print("NO data routes")
sys.exit(0)
if args.segment >= len(logs):
print(f"RouteID: {args.segment} out of range, max: {len(logs) -1}")
sys.exit(0)
lr = LogReader(logs[args.segment])
bmx_data, lsm_data = parseEvents(lr)
# sort bmx accel data, and then cal all the diffs, and to a histogram of those
bmx_accel, bmx_accel_diffs = cleanData(bmx_data["accel"])
bmx_gyro, bmx_gyro_diffs = cleanData(bmx_data["gyro"])
lsm_accel, lsm_accel_diffs = cleanData(lsm_data["accel"])
lsm_gyro, lsm_gyro_diffs = cleanData(lsm_data["gyro"])
# get out the averages
logAvgValues(bmx_accel_diffs, "bmx accel")
logAvgValues(bmx_gyro_diffs, "bmx gyro ")
logAvgValues(lsm_accel_diffs, "lsm accel")
logAvgValues(lsm_gyro_diffs, "lsm gyro ")
fig, axs = plt.subplots(1, 2, tight_layout=True)
axs[0].hist(bmx_accel_diffs, bins=50)
axs[0].set_title("bmx_accel")
axs[1].hist(bmx_gyro_diffs, bins=50)
axs[1].set_title("bmx_gyro")
figl, axsl = plt.subplots(1, 2, tight_layout=True)
axsl[0].hist(lsm_accel_diffs, bins=50)
axsl[0].set_title("lsm_accel")
axsl[1].hist(lsm_gyro_diffs, bins=50)
axsl[1].set_title("lsm_gyro")
print("check plot...")
plt.show()

@ -112,8 +112,8 @@ if __name__ == "__main__":
padding = max([len(fw.brand or UNKNOWN_BRAND) for fw in car_fw])
for version in sorted(car_fw, key=lambda fw: fw.brand):
subaddr = None if version.subAddress == 0 else hex(version.subAddress)
print(f" Brand: {version.brand or UNKNOWN_BRAND:{padding}}, bus: {version.bus} - \
(Ecu.{version.ecu}, {hex(version.address)}, {subaddr}): [{version.fwVersion}],")
print(f" Brand: {version.brand or UNKNOWN_BRAND:{padding}}, bus: {version.bus} - " +
"(Ecu.{version.ecu}, {hex(version.address)}, {subaddr}): [{version.fwVersion}],")
print("Mismatches")
found = False

@ -5,8 +5,7 @@ import matplotlib.pyplot as plt
from sklearn import linear_model
from openpilot.selfdrive.car.toyota.values import STEER_THRESHOLD
from openpilot.tools.lib.route import Route
from openpilot.tools.lib.logreader import MultiLogIterator
from openpilot.tools.lib.logreader import LogReader
MIN_SAMPLES = 30 * 100
@ -58,7 +57,6 @@ def get_eps_factor(lr, plot=False):
if __name__ == "__main__":
r = Route(sys.argv[1])
lr = MultiLogIterator(r.log_paths())
lr = LogReader(sys.argv[1])
n = get_eps_factor(lr, plot="--plot" in sys.argv)
print("EPS torque factor: ", n)

@ -229,6 +229,8 @@ if __name__ == "__main__":
try:
main()
except KeyboardInterrupt:
print("got CTRL-C, exiting")
except Exception:
add_file_handler(cloudlog)
cloudlog.exception("Manager failed to start")

@ -71,8 +71,11 @@ class RouteEngine:
self.ui_pid = ui_pid[0]
self.update_location()
self.recompute_route()
self.send_instruction()
try:
self.recompute_route()
self.send_instruction()
except Exception:
cloudlog.exception("navd.failed_to_compute")
def update_location(self):
location = self.sm['liveLocationKalman']
@ -256,7 +259,10 @@ class RouteEngine:
for i in range(self.step_idx + 1, len(self.route)):
total_distance += self.route[i]['distance']
total_time += self.route[i]['duration']
total_time_typical += self.route[i]['duration_typical']
if self.route[i]['duration_typical'] is None:
total_time_typical += self.route[i]['duration']
else:
total_time_typical += self.route[i]['duration_typical']
msg.navInstruction.distanceRemaining = total_distance
msg.navInstruction.timeRemaining = total_time

@ -0,0 +1,61 @@
#!/usr/bin/env python3
import json
import random
import unittest
import numpy as np
import cereal.messaging as messaging
from openpilot.common.params import Params
from openpilot.selfdrive.manager.process_config import managed_processes
class TestNavd(unittest.TestCase):
def setUp(self):
self.params = Params()
self.sm = messaging.SubMaster(['navRoute', 'navInstruction'])
def tearDown(self):
managed_processes['navd'].stop()
def _check_route(self, start, end, check_coords=True):
self.params.put("NavDestination", json.dumps(end))
self.params.put("LastGPSPosition", json.dumps(start))
managed_processes['navd'].start()
for _ in range(30):
self.sm.update(1000)
if all(f > 0 for f in self.sm.rcv_frame.values()):
break
else:
raise Exception("didn't get a route")
assert managed_processes['navd'].proc.is_alive()
managed_processes['navd'].stop()
# ensure start and end match up
if check_coords:
coords = self.sm['navRoute'].coordinates
assert np.allclose([start['latitude'], start['longitude'], end['latitude'], end['longitude']],
[coords[0].latitude, coords[0].longitude, coords[-1].latitude, coords[-1].longitude],
rtol=1e-3)
def test_simple(self):
start = {
"latitude": 32.7427228,
"longitude": -117.2321177,
}
end = {
"latitude": 32.7557004,
"longitude": -117.268002,
}
self._check_route(start, end)
def test_random(self):
for _ in range(10):
start = {"latitude": random.uniform(-90, 90), "longitude": random.uniform(-180, 180)}
end = {"latitude": random.uniform(-90, 90), "longitude": random.uniform(-180, 180)}
self._check_route(start, end, check_coords=False)
if __name__ == "__main__":
unittest.main()

@ -8,6 +8,7 @@ from openpilot.common.params import Params
from openpilot.selfdrive.manager.process_config import managed_processes
from openpilot.system.hardware import PC
from openpilot.system.version import training_version, terms_version
from openpilot.tools.lib.logreader import LogIterable
def set_params_enabled():
@ -72,3 +73,18 @@ def with_processes(processes, init_time=0, ignore_stopped=None):
def noop(*args, **kwargs):
pass
def read_segment_list(segment_list_path):
with open(segment_list_path, "r") as f:
seg_list = f.read().splitlines()
return [(platform[2:], segment) for platform, segment in zip(seg_list[::2], seg_list[1::2], strict=True)]
# Utilities for sanitizing routes of only essential data for testing car ports and doing validation.
PRESERVE_SERVICES = ["can", "carParams", "pandaStates", "pandaStateDEPRECATED"]
def sanitize(lr: LogIterable) -> LogIterable:
return filter(lambda msg: msg.which() in PRESERVE_SERVICES, lr)

@ -1 +1 @@
ea96f935a7a16c53623c3b03e70c0fbfa6b249e7
1b981ce7f817974d4a7a28b06f01f727a5a7ea7b

@ -34,7 +34,7 @@ PROCS = {
"./encoderd": 17.0,
"./camerad": 14.5,
"./locationd": 11.0,
"./mapsd": (1.0, 10.0),
"./mapsd": (0.5, 10.0),
"selfdrive.controls.plannerd": 11.0,
"./ui": 18.0,
"selfdrive.locationd.paramsd": 9.0,

@ -312,6 +312,9 @@ def thermald_thread(end_event, hw_queue) -> None:
# must be at an engageable thermal band to go onroad
startup_conditions["device_temp_engageable"] = thermal_status < ThermalStatus.red
# ensure device is fully booted
startup_conditions["device_booted"] = startup_conditions.get("device_booted", False) or HARDWARE.booted()
# if the temperature enters the danger zone, go offroad to cool down
onroad_conditions["device_temp_good"] = thermal_status < ThermalStatus.danger
extra_text = f"{offroad_comp_temp:.1f}C"

@ -20,7 +20,7 @@ void MapETA::paintEvent(QPaintEvent *event) {
QPainter p(this);
p.setRenderHint(QPainter::Antialiasing);
p.setPen(Qt::NoPen);
p.setBrush(QColor(0, 0, 0, 150));
p.setBrush(QColor(0, 0, 0, 255));
QSizeF txt_size = eta_doc.size();
p.drawRoundedRect((width() - txt_size.width()) / 2 - UI_BORDER_SIZE, 0, txt_size.width() + UI_BORDER_SIZE * 2, height() + 25, 25, 25);
p.translate((width() - txt_size.width()) / 2, (height() - txt_size.height()) / 2);

@ -48,6 +48,7 @@ Networking::Networking(QWidget* parent, bool show_advanced) : QFrame(parent) {
an = new AdvancedNetworking(this, wifi);
connect(an, &AdvancedNetworking::backPress, [=]() { main_layout->setCurrentWidget(wifiScreen); });
connect(an, &AdvancedNetworking::requestWifiScreen, [=]() { main_layout->setCurrentWidget(wifiScreen); });
main_layout->addWidget(an);
QPalette pal = palette();
@ -181,6 +182,25 @@ AdvancedNetworking::AdvancedNetworking(QWidget* parent, WifiManager* wifi): QWid
});
list->addItem(meteredToggle);
// Hidden Network
hiddenNetworkButton = new ButtonControl(tr("Hidden Network"), tr("CONNECT"));
connect(hiddenNetworkButton, &ButtonControl::clicked, [=]() {
QString ssid = InputDialog::getText(tr("Enter SSID"), this, "", false, 1);
if (!ssid.isEmpty()) {
QString pass = InputDialog::getText(tr("Enter password"), this, tr("for \"%1\"").arg(ssid), true, -1);
Network hidden_network;
hidden_network.ssid = ssid.toUtf8();
if (!pass.isEmpty()) {
hidden_network.security_type = SecurityType::WPA;
wifi->connect(hidden_network, pass);
} else {
wifi->connect(hidden_network);
}
emit requestWifiScreen();
}
});
list->addItem(hiddenNetworkButton);
// Set initial config
wifi->updateGsmSettings(roamingEnabled, QString::fromStdString(params.get("GsmApn")), metered);

@ -62,12 +62,14 @@ private:
ToggleControl* tetheringToggle;
ToggleControl* roamingToggle;
ButtonControl* editApnButton;
ButtonControl* hiddenNetworkButton;
ToggleControl* meteredToggle;
WifiManager* wifi = nullptr;
Params params;
signals:
void backPress();
void requestWifiScreen();
public slots:
void toggleTethering(bool enabled);

@ -7,7 +7,7 @@
const int FACE_IMG_SIZE = 130;
DriverViewWindow::DriverViewWindow(QWidget* parent) : CameraView("camerad", VISION_STREAM_DRIVER, true, parent) {
DriverViewWindow::DriverViewWindow(QWidget* parent) : CameraWidget("camerad", VISION_STREAM_DRIVER, true, parent) {
face_img = loadPixmap("../assets/img_driver_face_static.png", {FACE_IMG_SIZE, FACE_IMG_SIZE});
QObject::connect(this, &CameraWidget::clicked, this, &DriverViewWindow::done);
QObject::connect(device(), &Device::interactiveTimeout, this, [this]() {
@ -20,20 +20,22 @@ DriverViewWindow::DriverViewWindow(QWidget* parent) : CameraView("camerad", VISI
void DriverViewWindow::showEvent(QShowEvent* event) {
params.putBool("IsDriverViewEnabled", true);
device()->resetInteractiveTimeout(60);
CameraView::showEvent(event);
CameraWidget::showEvent(event);
}
void DriverViewWindow::hideEvent(QHideEvent* event) {
params.putBool("IsDriverViewEnabled", false);
CameraView::hideEvent(event);
stopVipcThread();
CameraWidget::hideEvent(event);
}
void DriverViewWindow::paintGL() {
CameraView::paintGL();
CameraWidget::paintGL();
std::lock_guard lk(frame_lock);
QPainter p(this);
// startup msg
if (!frame) {
if (frames.empty()) {
p.setPen(Qt::white);
p.setRenderHint(QPainter::TextAntialiasing);
p.setFont(InterFont(100, QFont::Bold));
@ -45,6 +47,7 @@ void DriverViewWindow::paintGL() {
cereal::DriverStateV2::Reader driver_state = sm["driverStateV2"].getDriverStateV2();
bool is_rhd = driver_state.getWheelOnRightProb() > 0.5;
auto driver_data = is_rhd ? driver_state.getRightDriverData() : driver_state.getLeftDriverData();
bool face_detected = driver_data.getFaceProb() > 0.7;
if (face_detected) {
auto fxy_list = driver_data.getFacePosition();

@ -2,7 +2,7 @@
#include "selfdrive/ui/qt/widgets/cameraview.h"
class DriverViewWindow : public CameraView {
class DriverViewWindow : public CameraWidget {
Q_OBJECT
public:

@ -44,12 +44,12 @@ OnroadWindow::OnroadWindow(QWidget *parent) : QWidget(parent) {
split->addWidget(nvg);
if (getenv("DUAL_CAMERA_VIEW")) {
CameraWidget *arCam = new CameraView("camerad", VISION_STREAM_ROAD, true, this);
CameraWidget *arCam = new CameraWidget("camerad", VISION_STREAM_ROAD, true, this);
split->insertWidget(0, arCam);
}
if (getenv("MAP_RENDER_VIEW")) {
CameraWidget *map_render = new CameraView("navd", VISION_STREAM_MAP, false, this);
CameraWidget *map_render = new CameraWidget("navd", VISION_STREAM_MAP, false, this);
split->insertWidget(0, map_render);
}
@ -126,7 +126,6 @@ void OnroadWindow::offroadTransition(bool offroad) {
#endif
alerts->updateAlert({});
nvg->disconnectVipc();
}
void OnroadWindow::primeChanged(bool prime) {
@ -329,8 +328,6 @@ void AnnotatedCameraWidget::updateState(const UIState &s) {
map_settings_btn->setVisible(!hideBottomIcons);
main_layout->setAlignment(map_settings_btn, (rightHandDM ? Qt::AlignLeft : Qt::AlignRight) | Qt::AlignBottom);
}
update();
}
void AnnotatedCameraWidget::drawHud(QPainter &p) {
@ -612,6 +609,21 @@ void AnnotatedCameraWidget::paintGL() {
// draw camera frame
{
std::lock_guard lk(frame_lock);
if (frames.empty()) {
if (skip_frame_count > 0) {
skip_frame_count--;
qDebug() << "skipping frame, not ready";
return;
}
} else {
// skip drawing up to this many frames if we're
// missing camera frames. this smooths out the
// transitions from the narrow and wide cameras
skip_frame_count = 5;
}
// Wide or narrow cam dependent on speed
bool has_wide_cam = available_streams.count(VISION_STREAM_WIDE_ROAD);
if (has_wide_cam) {
@ -627,18 +639,14 @@ void AnnotatedCameraWidget::paintGL() {
}
CameraWidget::setStreamType(wide_cam_requested ? VISION_STREAM_WIDE_ROAD : VISION_STREAM_ROAD);
s->scene.wide_cam = CameraWidget::streamType() == VISION_STREAM_WIDE_ROAD;
s->scene.wide_cam = CameraWidget::getStreamType() == VISION_STREAM_WIDE_ROAD;
if (s->scene.calibration_valid) {
auto calib = s->scene.wide_cam ? s->scene.view_from_wide_calib : s->scene.view_from_calib;
CameraWidget::updateCalibration(calib);
} else {
CameraWidget::updateCalibration(DEFAULT_CALIBRATION);
}
if (!CameraWidget::receiveFrame(sm["uiPlan"].getUiPlan().getFrameId())) {
qDebug() << "skipping frame, not ready";
return;
}
CameraWidget::setFrameId(model.getFrameId());
CameraWidget::paintGL();
}

@ -93,6 +93,8 @@ private:
bool v_ego_cluster_seen = false;
int status = STATUS_DISENGAGED;
std::unique_ptr<PubMaster> pm;
int skip_frame_count = 0;
bool wide_cam_requested = false;
protected:

@ -6,6 +6,14 @@
#include <GLES3/gl3.h>
#endif
#include <cmath>
#include <set>
#include <string>
#include <utility>
#include <QOpenGLBuffer>
#include <QOffscreenSurface>
namespace {
const char frame_vertex_shader[] =
@ -87,25 +95,24 @@ mat4 get_fit_view_transform(float widget_aspect_ratio, float frame_aspect_ratio)
} // namespace
CameraWidget::CameraWidget(std::string stream_name, VisionStreamType type, bool zoom, QWidget *parent)
: stream_name(stream_name), requested_stream_type(type), zoomed_view(zoom), QOpenGLWidget(parent) {
CameraWidget::CameraWidget(std::string stream_name, VisionStreamType type, bool zoom, QWidget* parent) :
stream_name(stream_name), requested_stream_type(type), zoomed_view(zoom), QOpenGLWidget(parent) {
setAttribute(Qt::WA_OpaquePaintEvent);
qRegisterMetaType<std::set<VisionStreamType>>("availableStreams");
QObject::connect(this, &CameraWidget::vipcThreadConnected, this, &CameraWidget::vipcConnected, Qt::BlockingQueuedConnection);
QObject::connect(this, &CameraWidget::vipcThreadFrameReceived, this, &CameraWidget::vipcFrameReceived, Qt::QueuedConnection);
QObject::connect(this, &CameraWidget::vipcAvailableStreamsUpdated, this, &CameraWidget::availableStreamsUpdated, Qt::QueuedConnection);
}
CameraWidget::~CameraWidget() {
makeCurrent();
stopVipcThread();
if (isValid()) {
glDeleteVertexArrays(1, &frame_vao);
glDeleteBuffers(1, &frame_vbo);
glDeleteBuffers(1, &frame_ibo);
glDeleteBuffers(2, textures);
}
#ifdef QCOM2
EGLDisplay egl_display = eglGetCurrentDisplay();
for (auto &pair : egl_images) {
eglDestroyImageKHR(egl_display, pair.second);
}
egl_images.clear();
#endif
doneCurrent();
}
@ -169,11 +176,45 @@ void CameraWidget::initializeGL() {
#endif
}
void CameraWidget::showEvent(QShowEvent *event) {
if (!vipc_thread) {
clearFrames();
vipc_thread = new QThread();
connect(vipc_thread, &QThread::started, [=]() { vipcThread(); });
connect(vipc_thread, &QThread::finished, vipc_thread, &QObject::deleteLater);
vipc_thread->start();
}
}
void CameraWidget::stopVipcThread() {
makeCurrent();
if (vipc_thread) {
vipc_thread->requestInterruption();
vipc_thread->quit();
vipc_thread->wait();
vipc_thread = nullptr;
}
#ifdef QCOM2
EGLDisplay egl_display = eglGetCurrentDisplay();
assert(egl_display != EGL_NO_DISPLAY);
for (auto &pair : egl_images) {
eglDestroyImageKHR(egl_display, pair.second);
assert(eglGetError() == EGL_SUCCESS);
}
egl_images.clear();
#endif
}
void CameraWidget::availableStreamsUpdated(std::set<VisionStreamType> streams) {
available_streams = streams;
}
void CameraWidget::updateFrameMat() {
int w = glWidth(), h = glHeight();
if (zoomed_view) {
if (streamType() == VISION_STREAM_DRIVER) {
if (active_stream_type == VISION_STREAM_DRIVER) {
if (stream_width > 0 && stream_height > 0) {
frame_mat = get_driver_view_transform(w, h, stream_width, stream_height);
}
@ -182,7 +223,7 @@ void CameraWidget::updateFrameMat() {
// to ensure this ends up in the middle of the screen
// for narrow cam and a little lower for wide cam.
// TODO: use proper perspective transform?
if (streamType() == VISION_STREAM_WIDE_ROAD) {
if (active_stream_type == VISION_STREAM_WIDE_ROAD) {
intrinsic_matrix = ECAM_INTRINSIC_MATRIX;
zoom = 2.0;
} else {
@ -228,15 +269,25 @@ void CameraWidget::paintGL() {
glClearColor(bg.redF(), bg.greenF(), bg.blueF(), bg.alphaF());
glClear(GL_STENCIL_BUFFER_BIT | GL_COLOR_BUFFER_BIT);
if (!frame) return;
std::lock_guard lk(frame_lock);
if (frames.empty()) return;
int frame_idx = frames.size() - 1;
// Always draw latest frame until sync logic is more stable
// for (frame_idx = 0; frame_idx < frames.size() - 1; frame_idx++) {
// if (frames[frame_idx].first == draw_frame_id) break;
// }
// Log duplicate/dropped frames
if (frame_id == prev_frame_id) {
qDebug() << "Drawing same frame twice" << frame_id;
} else if (frame_id != prev_frame_id + 1) {
qDebug() << "Skipped frame" << frame_id;
if (frames[frame_idx].first == prev_frame_id) {
qDebug() << "Drawing same frame twice" << frames[frame_idx].first;
} else if (frames[frame_idx].first != prev_frame_id + 1) {
qDebug() << "Skipped frame" << frames[frame_idx].first;
}
prev_frame_id = frame_id;
prev_frame_id = frames[frame_idx].first;
VisionBuf *frame = frames[frame_idx].second;
assert(frame != nullptr);
updateFrameMat();
@ -276,7 +327,7 @@ void CameraWidget::paintGL() {
glPixelStorei(GL_UNPACK_ROW_LENGTH, 0);
}
void CameraWidget::vipcConnected() {
void CameraWidget::vipcConnected(VisionIpcClient *vipc_client) {
makeCurrent();
stream_width = vipc_client->buffers[0].width;
stream_height = vipc_client->buffers[0].height;
@ -326,69 +377,59 @@ void CameraWidget::vipcConnected() {
#endif
}
bool CameraWidget::receiveFrame(uint64_t request_frame_id) {
if (!vipc_client || vipc_client->type != requested_stream_type) {
qDebug().nospace() << "connecting to stream" << requested_stream_type
<< (vipc_client ? QString(", was connected to %1").arg(vipc_client->type) : "");
vipc_client.reset(new VisionIpcClient(stream_name, requested_stream_type, false));
}
void CameraWidget::vipcFrameReceived() {
update();
}
if (!vipc_client->connected) {
frame = nullptr;
recent_frames.clear();
available_streams = VisionIpcClient::getAvailableStreams(stream_name, false);
if (available_streams.empty() || !vipc_client->connect(false)) {
return false;
void CameraWidget::vipcThread() {
VisionStreamType cur_stream = requested_stream_type;
std::unique_ptr<VisionIpcClient> vipc_client;
VisionIpcBufExtra meta_main = {0};
while (!QThread::currentThread()->isInterruptionRequested()) {
if (!vipc_client || cur_stream != requested_stream_type) {
clearFrames();
qDebug().nospace() << "connecting to stream " << requested_stream_type << ", was connected to " << cur_stream;
cur_stream = requested_stream_type;
vipc_client.reset(new VisionIpcClient(stream_name, cur_stream, false));
}
emit vipcAvailableStreamsUpdated();
vipcConnected();
}
active_stream_type = cur_stream;
if (!vipc_client->connected) {
clearFrames();
auto streams = VisionIpcClient::getAvailableStreams(stream_name, false);
if (streams.empty()) {
QThread::msleep(100);
continue;
}
emit vipcAvailableStreamsUpdated(streams);
VisionIpcBufExtra meta_main = {};
while (auto buf = vipc_client->recv(&meta_main, 0)) {
if (recent_frames.size() > FRAME_BUFFER_SIZE) {
recent_frames.pop_front();
if (!vipc_client->connect(false)) {
QThread::msleep(100);
continue;
}
emit vipcThreadConnected(vipc_client.get());
}
recent_frames.emplace_back(meta_main.frame_id, buf);
}
frame = nullptr;
if (request_frame_id > 0) {
auto it = std::find_if(recent_frames.begin(), recent_frames.end(),
[request_frame_id](auto &f) { return f.first == request_frame_id; });
if (it != recent_frames.end()) {
std::tie(frame_id, frame) = *it;
if (VisionBuf *buf = vipc_client->recv(&meta_main, 1000)) {
{
std::lock_guard lk(frame_lock);
frames.push_back(std::make_pair(meta_main.frame_id, buf));
while (frames.size() > FRAME_BUFFER_SIZE) {
frames.pop_front();
}
}
emit vipcThreadFrameReceived();
} else {
if (!isVisible()) {
vipc_client->connected = false;
}
}
} else if (!recent_frames.empty()) {
std::tie(frame_id, frame) = recent_frames.back();
}
return frame != nullptr;
}
void CameraWidget::disconnectVipc() {
recent_frames.clear();
frame = nullptr;
frame_id = 0;
prev_frame_id = 0;
if (vipc_client) {
vipc_client->connected = false;
}
}
// Cameraview
CameraView::CameraView(const std::string &name, VisionStreamType stream_type, bool zoom, QWidget *parent)
: CameraWidget(name, stream_type, zoom, parent) {
timer = new QTimer(this);
timer->setInterval(1000.0 / UI_FREQ);
timer->callOnTimeout(this, [this]() { if (receiveFrame()) update(); });
}
void CameraView::showEvent(QShowEvent *event) {
timer->start();
}
void CameraView::hideEvent(QHideEvent *event) {
timer->stop();
disconnectVipc();
void CameraWidget::clearFrames() {
std::lock_guard lk(frame_lock);
frames.clear();
available_streams.clear();
}

@ -3,6 +3,7 @@
#include <deque>
#include <map>
#include <memory>
#include <mutex>
#include <set>
#include <string>
#include <utility>
@ -10,7 +11,7 @@
#include <QOpenGLFunctions>
#include <QOpenGLShaderProgram>
#include <QOpenGLWidget>
#include <QTimer>
#include <QThread>
#ifdef QCOM2
#define EGL_EGLEXT_PROTOTYPES
@ -32,27 +33,31 @@ class CameraWidget : public QOpenGLWidget, protected QOpenGLFunctions {
Q_OBJECT
public:
using QOpenGLWidget::QOpenGLWidget;
explicit CameraWidget(std::string stream_name, VisionStreamType stream_type, bool zoom, QWidget* parent = nullptr);
~CameraWidget();
void disconnectVipc();
void setBackgroundColor(const QColor &color) { bg = color; }
void setFrameId(int frame_id) { draw_frame_id = frame_id; }
void setStreamType(VisionStreamType type) { requested_stream_type = type; }
inline VisionStreamType streamType() const { return requested_stream_type; }
inline const std::set<VisionStreamType> &availableStreams() const { return available_streams; }
bool receiveFrame(uint64_t request_frame_id = 0);
VisionStreamType getStreamType() { return active_stream_type; }
void stopVipcThread();
signals:
void vipcAvailableStreamsUpdated();
void clicked();
void vipcThreadConnected(VisionIpcClient *);
void vipcThreadFrameReceived();
void vipcAvailableStreamsUpdated(std::set<VisionStreamType>);
protected:
void paintGL() override;
void initializeGL() override;
void resizeGL(int w, int h) override { updateFrameMat(); }
void showEvent(QShowEvent *event) override;
void mouseReleaseEvent(QMouseEvent *event) override { emit clicked(); }
virtual void updateFrameMat();
void updateCalibration(const mat3 &calib);
void vipcConnected();
void vipcThread();
void clearFrames();
int glWidth();
int glHeight();
@ -68,18 +73,14 @@ protected:
std::map<int, EGLImageKHR> egl_images;
#endif
// vipc
std::string stream_name;
int stream_width = 0;
int stream_height = 0;
int stream_stride = 0;
VisionStreamType requested_stream_type;
std::atomic<VisionStreamType> active_stream_type;
std::atomic<VisionStreamType> requested_stream_type;
std::set<VisionStreamType> available_streams;
std::unique_ptr<VisionIpcClient> vipc_client;
std::deque<std::pair<uint32_t, VisionBuf*>> recent_frames;
VisionBuf *frame = nullptr;
uint64_t frame_id = 0;
uint64_t prev_frame_id = 0;
QThread *vipc_thread = nullptr;
// Calibration
float x_offset = 0;
@ -87,16 +88,16 @@ protected:
float zoom = 1.0;
mat3 calibration = DEFAULT_CALIBRATION;
mat3 intrinsic_matrix = FCAM_INTRINSIC_MATRIX;
};
// update frames based on timer
class CameraView : public CameraWidget {
Q_OBJECT
public:
CameraView(const std::string &name, VisionStreamType stream_type, bool zoom, QWidget *parent = nullptr);
void showEvent(QShowEvent *event) override;
void hideEvent(QHideEvent *event) override;
std::recursive_mutex frame_lock;
std::deque<std::pair<uint32_t, VisionBuf*>> frames;
uint32_t draw_frame_id = 0;
uint32_t prev_frame_id = 0;
private:
QTimer *timer;
protected slots:
void vipcConnected(VisionIpcClient *vipc_client);
void vipcFrameReceived();
void availableStreamsUpdated(std::set<VisionStreamType> streams);
};
Q_DECLARE_METATYPE(std::set<VisionStreamType>);

@ -22,6 +22,9 @@ FORMAT_ARG = re.compile("%[0-9]+")
@parameterized_class(("name", "file"), translation_files.items())
class TestTranslations(unittest.TestCase):
name: str
file: str
@staticmethod
def _read_translation_file(path, file):
tr_file = os.path.join(path, f"{file}.ts")
@ -30,12 +33,12 @@ class TestTranslations(unittest.TestCase):
def test_missing_translation_files(self):
self.assertTrue(os.path.exists(os.path.join(TRANSLATIONS_DIR, f"{self.file}.ts")),
f"{self.name} has no XML translation file, run selfdrive/ui/update_translations.py")
f"{self.name} has no XML translation file, run selfdrive/ui/update_translations.py")
def test_translations_updated(self):
with tempfile.TemporaryDirectory() as tmpdir:
shutil.copytree(TRANSLATIONS_DIR, tmpdir, dirs_exist_ok=True)
update_translations(plural_only=["main_en"], translations_dir=tmpdir)
update_translations(translation_files=[self.file], translations_dir=tmpdir)
cur_translations = self._read_translation_file(TRANSLATIONS_DIR, self.file)
new_translations = self._read_translation_file(tmpdir, self.file)

@ -0,0 +1,138 @@
#!/usr/bin/env python3
import argparse
import json
import os
import pathlib
import xml.etree.ElementTree as ET
from typing import cast
import requests
TRANSLATIONS_DIR = pathlib.Path(__file__).resolve().parent
TRANSLATIONS_LANGUAGES = TRANSLATIONS_DIR / "languages.json"
OPENAI_MODEL = "gpt-4"
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY")
OPENAI_PROMPT = "You are a professional translator from English to {language} (ISO 639 language code). " + \
"The following sentence or word is in the GUI of a software called openpilot, translate it accordingly."
def get_language_files(languages: list[str] | None = None) -> dict[str, pathlib.Path]:
files = {}
with open(TRANSLATIONS_LANGUAGES) as fp:
language_dict = json.load(fp)
for filename in language_dict.values():
path = TRANSLATIONS_DIR / f"{filename}.ts"
language = path.stem.split("main_")[1]
if languages is None or language in languages:
files[language] = path
return files
def translate_phrase(text: str, language: str) -> str:
response = requests.post(
"https://api.openai.com/v1/chat/completions",
json={
"model": OPENAI_MODEL,
"messages": [
{
"role": "system",
"content": OPENAI_PROMPT.format(language=language),
},
{
"role": "user",
"content": text,
},
],
"temperature": 0.8,
"max_tokens": 1024,
"top_p": 1,
},
headers={
"Authorization": f"Bearer {OPENAI_API_KEY}",
"Content-Type": "application/json",
},
)
if 400 <= response.status_code < 600:
raise requests.HTTPError(f'Error {response.status_code}: {response.json()}', response=response)
data = response.json()
return cast(str, data["choices"][0]["message"]["content"])
def translate_file(path: pathlib.Path, language: str, all_: bool) -> None:
tree = ET.parse(path)
root = tree.getroot()
for context in root.findall("./context"):
name = context.find("name")
if name is None:
raise ValueError("name not found")
print(f"Context: {name.text}")
for message in context.findall("./message"):
source = message.find("source")
translation = message.find("translation")
if source is None or translation is None:
raise ValueError("source or translation not found")
if not all_ and translation.attrib.get("type") != "unfinished":
continue
llm_translation = translate_phrase(cast(str, source.text), language)
print(f"Source: {source.text}\n" +
f"Current translation: {translation.text}\n" +
f"LLM translation: {llm_translation}")
translation.text = llm_translation
with path.open("w", encoding="utf-8") as fp:
fp.write('<?xml version="1.0" encoding="utf-8"?>\n' +
'<!DOCTYPE TS>\n' +
ET.tostring(root, encoding="utf-8").decode())
def main():
arg_parser = argparse.ArgumentParser("Auto translate")
group = arg_parser.add_mutually_exclusive_group(required=True)
group.add_argument("-a", "--all-files", action="store_true", help="Translate all files")
group.add_argument("-f", "--file", nargs="+", help="Translate the selected files. (Example: -f fr de)")
arg_parser.add_argument("-t", "--all-translations", action="store_true", default=False, help="Translate all sections. (Default: only unfinished)")
args = arg_parser.parse_args()
if OPENAI_API_KEY is None:
print("OpenAI API key is missing. (Hint: use `export OPENAI_API_KEY=YOUR-KEY` before you run the script).\n" +
"If you don't have one go to: https://beta.openai.com/account/api-keys.")
exit(1)
files = get_language_files(None if args.all_files else args.file)
if args.file:
missing_files = set(args.file) - set(files)
if len(missing_files):
print(f"No language files found: {missing_files}")
exit(1)
print(f"Translation mode: {'all' if args.all_translations else 'only unfinished'}. Files: {list(files)}")
for lang, path in files.items():
print(f"Translate {lang} ({path})")
translate_file(path, lang, args.all_translations)
if __name__ == "__main__":
main()

@ -54,8 +54,8 @@ if __name__ == "__main__":
badge_svg.extend([f'<g transform="translate(0, {idx * BADGE_HEIGHT})">', content_svg, "</g>"])
badge_svg.insert(0, f'<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" \
height="{len(translation_files) * BADGE_HEIGHT}" width="{max_badge_width}">')
badge_svg.insert(0, '<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" ' +
f'height="{len(translation_files) * BADGE_HEIGHT}" width="{max_badge_width}">')
badge_svg.append("</svg>")
with open(os.path.join(BASEDIR, "translation_badge.svg"), "w") as badge_f:

@ -66,6 +66,26 @@
<source>Prevent large data uploads when on a metered connection</source>
<translation>منع تحميل البيانات الكبيرة عندما يكون الاتصال محدوداً</translation>
</message>
<message>
<source>Hidden Network</source>
<translation>شبكة مخفية</translation>
</message>
<message>
<source>CONNECT</source>
<translation>الاتصال</translation>
</message>
<message>
<source>Enter SSID</source>
<translation>أدخل SSID</translation>
</message>
<message>
<source>Enter password</source>
<translation>أدخل كلمة المرور</translation>
</message>
<message>
<source>for &quot;%1&quot;</source>
<translation>من أجل &quot;%1&quot;</translation>
</message>
</context>
<context>
<name>AnnotatedCameraWidget</name>

@ -66,6 +66,26 @@
<source>Prevent large data uploads when on a metered connection</source>
<translation>Hochladen großer Dateien über getaktete Verbindungen unterbinden</translation>
</message>
<message>
<source>Hidden Network</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>CONNECT</source>
<translation type="unfinished">CONNECT</translation>
</message>
<message>
<source>Enter SSID</source>
<translation type="unfinished">SSID eingeben</translation>
</message>
<message>
<source>Enter password</source>
<translation type="unfinished">Passwort eingeben</translation>
</message>
<message>
<source>for &quot;%1&quot;</source>
<translation type="unfinished">für &quot;%1&quot;</translation>
</message>
</context>
<context>
<name>AnnotatedCameraWidget</name>

@ -66,6 +66,26 @@
<source>Prevent large data uploads when on a metered connection</source>
<translation>Éviter les transferts de données importants sur une connexion limitée</translation>
</message>
<message>
<source>Hidden Network</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>CONNECT</source>
<translation type="unfinished">CONNECTER</translation>
</message>
<message>
<source>Enter SSID</source>
<translation type="unfinished">Entrer le SSID</translation>
</message>
<message>
<source>Enter password</source>
<translation type="unfinished">Entrer le mot de passe</translation>
</message>
<message>
<source>for &quot;%1&quot;</source>
<translation type="unfinished">pour &quot;%1&quot;</translation>
</message>
</context>
<context>
<name>AnnotatedCameraWidget</name>

@ -66,6 +66,26 @@
<source>Prevent large data uploads when on a metered connection</source>
<translation></translation>
</message>
<message>
<source>Hidden Network</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>CONNECT</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Enter SSID</source>
<translation type="unfinished">SSID </translation>
</message>
<message>
<source>Enter password</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>for &quot;%1&quot;</source>
<translation type="unfinished">%1</translation>
</message>
</context>
<context>
<name>AnnotatedCameraWidget</name>

@ -66,6 +66,26 @@
<source>Prevent large data uploads when on a metered connection</source>
<translation> </translation>
</message>
<message>
<source>Hidden Network</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>CONNECT</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Enter SSID</source>
<translation type="unfinished">SSID </translation>
</message>
<message>
<source>Enter password</source>
<translation type="unfinished"> </translation>
</message>
<message>
<source>for &quot;%1&quot;</source>
<translation type="unfinished">&quot;%1&quot; </translation>
</message>
</context>
<context>
<name>AnnotatedCameraWidget</name>

@ -66,6 +66,26 @@
<source>Prevent large data uploads when on a metered connection</source>
<translation>Evite grandes uploads de dados quando estiver em uma conexão limitada</translation>
</message>
<message>
<source>Hidden Network</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>CONNECT</source>
<translation type="unfinished">CONEXÃO</translation>
</message>
<message>
<source>Enter SSID</source>
<translation type="unfinished">Insira SSID</translation>
</message>
<message>
<source>Enter password</source>
<translation type="unfinished">Insira a senha</translation>
</message>
<message>
<source>for &quot;%1&quot;</source>
<translation type="unfinished">para &quot;%1&quot;</translation>
</message>
</context>
<context>
<name>AnnotatedCameraWidget</name>

@ -66,6 +66,26 @@
<source>Prevent large data uploads when on a metered connection</source>
<translation></translation>
</message>
<message>
<source>Hidden Network</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>CONNECT</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Enter SSID</source>
<translation type="unfinished"> SSID</translation>
</message>
<message>
<source>Enter password</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>for &quot;%1&quot;</source>
<translation type="unfinished"> &quot;%1&quot;</translation>
</message>
</context>
<context>
<name>AnnotatedCameraWidget</name>

@ -66,6 +66,26 @@
<source>Prevent large data uploads when on a metered connection</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Hidden Network</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>CONNECT</source>
<translation type="unfinished">BAĞLANTI</translation>
</message>
<message>
<source>Enter SSID</source>
<translation type="unfinished">APN Gir</translation>
</message>
<message>
<source>Enter password</source>
<translation type="unfinished">Parolayı girin</translation>
</message>
<message>
<source>for &quot;%1&quot;</source>
<translation type="unfinished">için &quot;%1&quot;</translation>
</message>
</context>
<context>
<name>AnnotatedCameraWidget</name>

@ -66,6 +66,26 @@
<source>Prevent large data uploads when on a metered connection</source>
<translation>使</translation>
</message>
<message>
<source>Hidden Network</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>CONNECT</source>
<translation type="unfinished">CONNECT</translation>
</message>
<message>
<source>Enter SSID</source>
<translation type="unfinished">SSID</translation>
</message>
<message>
<source>Enter password</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>for &quot;%1&quot;</source>
<translation type="unfinished">&quot;%1&quot;</translation>
</message>
</context>
<context>
<name>AnnotatedCameraWidget</name>

@ -66,6 +66,26 @@
<source>Prevent large data uploads when on a metered connection</source>
<translation>使</translation>
</message>
<message>
<source>Hidden Network</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>CONNECT</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>Enter SSID</source>
<translation type="unfinished"> SSID</translation>
</message>
<message>
<source>Enter password</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>for &quot;%1&quot;</source>
<translation type="unfinished"> &quot;%1&quot;</translation>
</message>
</context>
<context>
<name>AnnotatedCameraWidget</name>

@ -9,6 +9,7 @@ UI_DIR = os.path.join(BASEDIR, "selfdrive", "ui")
TRANSLATIONS_DIR = os.path.join(UI_DIR, "translations")
LANGUAGES_FILE = os.path.join(TRANSLATIONS_DIR, "languages.json")
TRANSLATIONS_INCLUDE_FILE = os.path.join(TRANSLATIONS_DIR, "alerts_generated.h")
PLURAL_ONLY = ["main_en"] # base language, only create entries for strings with plural forms
def generate_translations_include():
@ -22,21 +23,20 @@ def generate_translations_include():
with open(TRANSLATIONS_INCLUDE_FILE, "w") as f:
f.write(content)
def update_translations(vanish=False, plural_only=None, translations_dir=TRANSLATIONS_DIR):
generate_translations_include()
if plural_only is None:
plural_only = []
def update_translations(vanish: bool = False, translation_files: None | list[str] = None, translations_dir: str = TRANSLATIONS_DIR):
generate_translations_include()
with open(LANGUAGES_FILE, "r") as f:
translation_files = json.load(f)
if translation_files is None:
with open(LANGUAGES_FILE, "r") as f:
translation_files = json.load(f).values()
for file in translation_files.values():
for file in translation_files:
tr_file = os.path.join(translations_dir, f"{file}.ts")
args = f"lupdate -locations none -recursive {UI_DIR} -ts {tr_file} -I {BASEDIR}"
if vanish:
args += " -no-obsolete"
if file in plural_only:
if file in PLURAL_ONLY:
args += " -pluralonly"
ret = os.system(args)
assert ret == 0
@ -46,8 +46,6 @@ if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Update translation files for UI",
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("--vanish", action="store_true", help="Remove translations with source text no longer found")
parser.add_argument("--plural-only", type=str, nargs="*", default=["main_en"],
help="Translation codes to only create plural translations for (ie. the base language)")
args = parser.parse_args()
update_translations(args.vanish, args.plural_only)
update_translations(args.vanish)

@ -19,15 +19,15 @@ int main(int argc, char *argv[]) {
{
QHBoxLayout *hlayout = new QHBoxLayout();
layout->addLayout(hlayout);
hlayout->addWidget(new CameraView("navd", VISION_STREAM_MAP, false));
hlayout->addWidget(new CameraView("camerad", VISION_STREAM_ROAD, false));
hlayout->addWidget(new CameraWidget("navd", VISION_STREAM_MAP, false));
hlayout->addWidget(new CameraWidget("camerad", VISION_STREAM_ROAD, false));
}
{
QHBoxLayout *hlayout = new QHBoxLayout();
layout->addLayout(hlayout);
hlayout->addWidget(new CameraView("camerad", VISION_STREAM_DRIVER, false));
hlayout->addWidget(new CameraView("camerad", VISION_STREAM_WIDE_ROAD, false));
hlayout->addWidget(new CameraWidget("camerad", VISION_STREAM_DRIVER, false));
hlayout->addWidget(new CameraWidget("camerad", VISION_STREAM_WIDE_ROAD, false));
}
return a.exec();

@ -230,7 +230,6 @@ class Updater:
b: Union[str, None] = self.params.get("UpdaterTargetBranch", encoding='utf-8')
if b is None:
b = self.get_branch(BASEDIR)
self.params.put("UpdaterTargetBranch", b)
return b
@property
@ -245,7 +244,7 @@ class Updater:
@property
def update_available(self) -> bool:
if os.path.isdir(OVERLAY_MERGED):
if os.path.isdir(OVERLAY_MERGED) and len(self.branches) > 0:
hash_mismatch = self.get_commit_hash(OVERLAY_MERGED) != self.branches[self.target_branch]
branch_mismatch = self.get_branch(OVERLAY_MERGED) != self.target_branch
return hash_mismatch or branch_mismatch
@ -259,9 +258,11 @@ class Updater:
def set_params(self, update_success: bool, failed_count: int, exception: Optional[str]) -> None:
self.params.put("UpdateFailedCount", str(failed_count))
self.params.put("UpdaterTargetBranch", self.target_branch)
self.params.put_bool("UpdaterFetchAvailable", self.update_available)
self.params.put("UpdaterAvailableBranches", ','.join(self.branches.keys()))
if len(self.branches):
self.params.put("UpdaterAvailableBranches", ','.join(self.branches.keys()))
last_update = datetime.datetime.utcnow()
if update_success:
@ -428,10 +429,11 @@ def main() -> None:
# invalidate old finalized update
set_consistent_flag(False)
# wait a bit before first cycle
wait_helper.sleep(60)
# set initial state
params.put("UpdaterState", "idle")
# Run the update loop
first_run = True
while True:
wait_helper.ready_event.clear()
@ -444,7 +446,8 @@ def main() -> None:
# ensure we have some params written soon after startup
updater.set_params(False, update_failed_count, exception)
if not system_time_valid():
if not system_time_valid() or first_run:
first_run = False
wait_helper.sleep(60)
continue

@ -3,7 +3,7 @@ Import('env', 'arch', 'cereal', 'messaging', 'common', 'gpucommon', 'visionipc')
libs = ['m', 'pthread', common, 'jpeg', 'OpenCL', 'yuv', cereal, messaging, 'zmq', 'capnp', 'kj', visionipc, gpucommon, 'atomic']
camera_obj = env.Object(['cameras/camera_qcom2.cc', 'cameras/camera_common.cc', 'cameras/camera_util.cc',
'sensors/ar0231.cc', 'sensors/ox03c10.cc'])
'sensors/ar0231.cc', 'sensors/ox03c10.cc', 'sensors/os04c10.cc'])
env.Program('camerad', ['main.cc', camera_obj], LIBS=libs)
if GetOption("extras") and arch == "x86_64":

@ -136,6 +136,7 @@ void CameraBuf::queue(size_t buf_idx) {
void fill_frame_data(cereal::FrameData::Builder &framed, const FrameMetadata &frame_data, CameraState *c) {
framed.setFrameId(frame_data.frame_id);
framed.setRequestId(frame_data.request_id);
framed.setTimestampEof(frame_data.timestamp_eof);
framed.setTimestampSof(frame_data.timestamp_sof);
framed.setIntegLines(frame_data.integ_lines);

@ -26,6 +26,7 @@ const bool env_ctrl_exp_from_params = getenv("CTRL_EXP_FROM_PARAMS") != NULL;
typedef struct FrameMetadata {
uint32_t frame_id;
uint32_t request_id;
// Timestamps
uint64_t timestamp_sof;

@ -150,7 +150,7 @@ int CameraState::sensors_init() {
power->count = 1;
power->cmd_type = CAMERA_SENSOR_CMD_TYPE_PWR_UP;
power->power_settings[0].power_seq_type = 0;
power->power_settings[0].config_val_low = ci->power_config_val_low;
power->power_settings[0].config_val_low = ci->mclk_frequency;
power = power_set_wait(power, 1);
// reset high
@ -316,10 +316,10 @@ void CameraState::config_isp(int io_mem_handle, int fence, int request_id, int b
.h_init = 0x0,
.v_init = 0x0,
};
io_cfg[0].format = CAM_FORMAT_MIPI_RAW_12; // CAM_FORMAT_UBWC_TP10 for YUV
io_cfg[0].format = ci->mipi_format; // CAM_FORMAT_UBWC_TP10 for YUV
io_cfg[0].color_space = CAM_COLOR_SPACE_BASE; // CAM_COLOR_SPACE_BT601_FULL for YUV
io_cfg[0].color_pattern = 0x5; // 0x0 for YUV
io_cfg[0].bpp = 0xc;
io_cfg[0].bpp = (ci->mipi_format == CAM_FORMAT_MIPI_RAW_10 ? 0xa : 0xc); // bits per pixel
io_cfg[0].resource_type = CAM_ISP_IFE_OUT_RES_RDI_0; // CAM_ISP_IFE_OUT_RES_FULL for YUV
io_cfg[0].fence = fence;
io_cfg[0].direction = CAM_BUF_OUTPUT;
@ -459,7 +459,8 @@ void CameraState::camera_open(MultiCameraState *multi_cam_state_, int camera_num
// Try different sensors one by one until it success.
if (!init_sensor_lambda(new AR0231) &&
!init_sensor_lambda(new OX03C10)) {
!init_sensor_lambda(new OX03C10) &&
!init_sensor_lambda(new OS04C10)) {
LOGE("** sensor %d FAILED bringup, disabling", camera_num);
enabled = false;
return;
@ -481,7 +482,6 @@ void CameraState::camera_open(MultiCameraState *multi_cam_state_, int camera_num
LOG("-- Configuring sensor");
sensors_i2c(ci->init_reg_array.data(), ci->init_reg_array.size(), CAM_SENSOR_PACKET_OPCODE_SENSOR_CONFIG, ci->data_word);
printf("dt is %x\n", ci->in_port_info_dt);
// NOTE: to be able to disable road and wide road, we still have to configure the sensor over i2c
// If you don't do this, the strobe GPIO is an output (even in reset it seems!)
@ -495,8 +495,8 @@ void CameraState::camera_open(MultiCameraState *multi_cam_state_, int camera_num
.lane_cfg = 0x3210,
.vc = 0x0,
.dt = ci->in_port_info_dt,
.format = CAM_FORMAT_MIPI_RAW_12,
.dt = ci->frame_data_type,
.format = ci->mipi_format,
.test_pattern = 0x2, // 0x3?
.usage_type = 0x0,
@ -522,7 +522,7 @@ void CameraState::camera_open(MultiCameraState *multi_cam_state_, int camera_num
.num_out_res = 0x1,
.data[0] = (struct cam_isp_out_port_info){
.res_type = CAM_ISP_IFE_OUT_RES_RDI_0,
.format = CAM_FORMAT_MIPI_RAW_12,
.format = ci->mipi_format,
.width = ci->frame_width,
.height = ci->frame_height + ci->extra_height,
.comp_grp_id = 0x0, .split_point = 0x0, .secure_mode = 0x0,
@ -766,6 +766,7 @@ void CameraState::handle_camera_event(void *evdat) {
auto &meta_data = buf.camera_bufs_metadata[buf_idx];
meta_data.frame_id = main_id - idx_offset;
meta_data.request_id = real_id;
meta_data.timestamp_sof = timestamp;
exp_lock.lock();
meta_data.gain = analog_gain_frac * (1 + dc_gain_weight * (ci->dc_gain_factor-1) / ci->dc_gain_max_weight);
@ -970,6 +971,9 @@ void cameras_run(MultiCameraState *s) {
event_data->u.frame_msg.frame_id, event_data->u.frame_msg.request_id, event_data->u.frame_msg.timestamp/1e6, event_data->u.frame_msg.sof_status);
}
// for debugging
//do_exit = do_exit || event_data->u.frame_msg.frame_id > (30*20);
if (event_data->session_hdl == s->road_cam.session_handle) {
s->road_cam.handle_camera_event(event_data);
} else if (event_data->session_hdl == s->wide_road_cam.session_handle) {
@ -980,6 +984,8 @@ void cameras_run(MultiCameraState *s) {
LOGE("Unknown vidioc event source");
assert(false);
}
} else {
LOGE("unhandled event %d\n", ev.type);
}
} else {
LOGE("VIDIOC_DQEVENT failed, errno=%d", errno);

@ -93,8 +93,9 @@ AR0231::AR0231() {
init_reg_array.assign(std::begin(init_array_ar0231), std::end(init_array_ar0231));
probe_reg_addr = 0x3000;
probe_expected_data = 0x354;
in_port_info_dt = 0x12; // Changing stats to 0x2C doesn't work, so change pixels to 0x12 instead
power_config_val_low = 19200000; //Hz
mipi_format = CAM_FORMAT_MIPI_RAW_12;
frame_data_type = 0x12; // Changing stats to 0x2C doesn't work, so change pixels to 0x12 instead
mclk_frequency = 19200000; //Hz
dc_gain_factor = 2.5;
dc_gain_min_weight = 0;

@ -0,0 +1,105 @@
#include "system/camerad/sensors/sensor.h"
namespace {
const float sensor_analog_gains_OS04C10[] = {
1.0, 1.0625, 1.125, 1.1875, 1.25, 1.3125, 1.375, 1.4375, 1.5, 1.5625, 1.6875,
1.8125, 1.9375, 2.0, 2.125, 2.25, 2.375, 2.5, 2.625, 2.75, 2.875, 3.0,
3.125, 3.375, 3.625, 3.875, 4.0, 4.25, 4.5, 4.75, 5.0, 5.25, 5.5,
5.75, 6.0, 6.25, 6.5, 7.0, 7.5, 8.0, 8.5, 9.0, 9.5, 10.0,
10.5, 11.0, 11.5, 12.0, 12.5, 13.0, 13.5, 14.0, 14.5, 15.0, 15.5};
const uint32_t os04c10_analog_gains_reg[] = {
0x100, 0x110, 0x120, 0x130, 0x140, 0x150, 0x160, 0x170, 0x180, 0x190, 0x1B0,
0x1D0, 0x1F0, 0x200, 0x220, 0x240, 0x260, 0x280, 0x2A0, 0x2C0, 0x2E0, 0x300,
0x320, 0x360, 0x3A0, 0x3E0, 0x400, 0x440, 0x480, 0x4C0, 0x500, 0x540, 0x580,
0x5C0, 0x600, 0x640, 0x680, 0x700, 0x780, 0x800, 0x880, 0x900, 0x980, 0xA00,
0xA80, 0xB00, 0xB80, 0xC00, 0xC80, 0xD00, 0xD80, 0xE00, 0xE80, 0xF00, 0xF80};
const uint32_t VS_TIME_MIN_OS04C10 = 1;
//const uint32_t VS_TIME_MAX_OS04C10 = 34; // vs < 35
} // namespace
OS04C10::OS04C10() {
image_sensor = cereal::FrameData::ImageSensor::OS04C10;
data_word = false;
frame_width = 1920;
frame_height = 1080;
frame_stride = (1920*10/8);
/*
frame_width = 0xa80;
frame_height = 0x5f0;
frame_stride = 0xd20;
*/
extra_height = 0;
frame_offset = 0;
start_reg_array.assign(std::begin(start_reg_array_os04c10), std::end(start_reg_array_os04c10));
init_reg_array.assign(std::begin(init_array_os04c10), std::end(init_array_os04c10));
probe_reg_addr = 0x300a;
probe_expected_data = 0x5304;
mipi_format = CAM_FORMAT_MIPI_RAW_10;
frame_data_type = 0x2b;
mclk_frequency = 24000000; // Hz
dc_gain_factor = 7.32;
dc_gain_min_weight = 1; // always on is fine
dc_gain_max_weight = 1;
dc_gain_on_grey = 0.9;
dc_gain_off_grey = 1.0;
exposure_time_min = 2; // 1x
exposure_time_max = 2016;
analog_gain_min_idx = 0x0;
analog_gain_rec_idx = 0x0; // 1x
analog_gain_max_idx = 0x36;
analog_gain_cost_delta = -1;
analog_gain_cost_low = 0.4;
analog_gain_cost_high = 6.4;
for (int i = 0; i <= analog_gain_max_idx; i++) {
sensor_analog_gains[i] = sensor_analog_gains_OS04C10[i];
}
min_ev = (exposure_time_min + VS_TIME_MIN_OS04C10) * sensor_analog_gains[analog_gain_min_idx];
max_ev = exposure_time_max * dc_gain_factor * sensor_analog_gains[analog_gain_max_idx];
target_grey_factor = 0.01;
}
std::vector<i2c_random_wr_payload> OS04C10::getExposureRegisters(int exposure_time, int new_exp_g, bool dc_gain_enabled) const {
// t_HCG&t_LCG + t_VS on LPD, t_SPD on SPD
uint32_t hcg_time = exposure_time;
//uint32_t lcg_time = hcg_time;
//uint32_t spd_time = std::min(std::max((uint32_t)exposure_time, (exposure_time_max + VS_TIME_MAX_OS04C10) / 3), exposure_time_max + VS_TIME_MAX_OS04C10);
//uint32_t vs_time = std::min(std::max((uint32_t)exposure_time / 40, VS_TIME_MIN_OS04C10), VS_TIME_MAX_OS04C10);
uint32_t real_gain = os04c10_analog_gains_reg[new_exp_g];
hcg_time = 100;
real_gain = 0x320;
return {
{0x3501, hcg_time>>8}, {0x3502, hcg_time&0xFF},
//{0x3581, lcg_time>>8}, {0x3582, lcg_time&0xFF},
//{0x3541, spd_time>>8}, {0x3542, spd_time&0xFF},
//{0x35c2, vs_time&0xFF},
{0x3508, real_gain>>8}, {0x3509, real_gain&0xFF},
};
}
int OS04C10::getSlaveAddress(int port) const {
assert(port >= 0 && port <= 2);
return (int[]){0x6C, 0x20, 0x6C}[port];
}
float OS04C10::getExposureScore(float desired_ev, int exp_t, int exp_g_idx, float exp_gain, int gain_idx) const {
float score = std::abs(desired_ev - (exp_t * exp_gain));
float m = exp_g_idx > analog_gain_rec_idx ? analog_gain_cost_high : analog_gain_cost_low;
score += std::abs(exp_g_idx - (int)analog_gain_rec_idx) * m;
score += ((1 - analog_gain_cost_delta) +
analog_gain_cost_delta * (exp_g_idx - analog_gain_min_idx) / (analog_gain_max_idx - analog_gain_min_idx)) *
std::abs(exp_g_idx - gain_idx) * 5.0;
return score;
}

@ -0,0 +1,298 @@
#pragma once
const struct i2c_random_wr_payload start_reg_array_os04c10[] = {{0x100, 1}};
const struct i2c_random_wr_payload stop_reg_array_os04c10[] = {{0x100, 0}};
const struct i2c_random_wr_payload init_array_os04c10[] = {
// OS04C10_AA_00_02_17_wAO_1920x1080_MIPI728Mbps_Linear12bit_20FPS_4Lane_MCLK24MHz
{0x0103, 0x01},
{0x0301, 0x84},
{0x0303, 0x01},
{0x0305, 0x5b},
{0x0306, 0x01},
{0x0307, 0x17},
{0x0323, 0x04},
{0x0324, 0x01},
{0x0325, 0x62},
{0x3012, 0x06},
{0x3013, 0x02},
{0x3016, 0x72},
{0x3021, 0x03},
{0x3106, 0x21},
{0x3107, 0xa1},
{0x3500, 0x00},
{0x3501, 0x00},
{0x3502, 0x40},
{0x3503, 0x88},
{0x3508, 0x07},
{0x3509, 0xc0},
{0x350a, 0x04},
{0x350b, 0x00},
{0x350c, 0x07},
{0x350d, 0xc0},
{0x350e, 0x04},
{0x350f, 0x00},
{0x3510, 0x00},
{0x3511, 0x00},
{0x3512, 0x20},
{0x3624, 0x00},
{0x3625, 0x4c},
{0x3660, 0x00},
{0x3666, 0xa5},
{0x3667, 0xa5},
{0x366a, 0x64},
{0x3673, 0x0d},
{0x3672, 0x0d},
{0x3671, 0x0d},
{0x3670, 0x0d},
{0x3685, 0x00},
{0x3694, 0x0d},
{0x3693, 0x0d},
{0x3692, 0x0d},
{0x3691, 0x0d},
{0x3696, 0x4c},
{0x3697, 0x4c},
{0x3698, 0x40},
{0x3699, 0x80},
{0x369a, 0x18},
{0x369b, 0x1f},
{0x369c, 0x14},
{0x369d, 0x80},
{0x369e, 0x40},
{0x369f, 0x21},
{0x36a0, 0x12},
{0x36a1, 0x5d},
{0x36a2, 0x66},
{0x370a, 0x00},
{0x370e, 0x0c},
{0x3710, 0x00},
{0x3713, 0x00},
{0x3725, 0x02},
{0x372a, 0x03},
{0x3738, 0xce},
{0x3748, 0x00},
{0x374a, 0x00},
{0x374c, 0x00},
{0x374e, 0x00},
{0x3756, 0x00},
{0x3757, 0x0e},
{0x3767, 0x00},
{0x3771, 0x00},
{0x377b, 0x20},
{0x377c, 0x00},
{0x377d, 0x0c},
{0x3781, 0x03},
{0x3782, 0x00},
{0x3789, 0x14},
{0x3795, 0x02},
{0x379c, 0x00},
{0x379d, 0x00},
{0x37b8, 0x04},
{0x37ba, 0x03},
{0x37bb, 0x00},
{0x37bc, 0x04},
{0x37be, 0x08},
{0x37c4, 0x11},
{0x37c5, 0x80},
{0x37c6, 0x14},
{0x37c7, 0x08},
{0x37da, 0x11},
{0x381f, 0x08},
{0x3829, 0x03},
{0x3881, 0x00},
{0x3888, 0x04},
{0x388b, 0x00},
{0x3c80, 0x10},
{0x3c86, 0x00},
{0x3c8c, 0x20},
{0x3c9f, 0x01},
{0x3d85, 0x1b},
{0x3d8c, 0x71},
{0x3d8d, 0xe2},
{0x3f00, 0x0b},
{0x3f06, 0x04},
{0x400a, 0x01},
{0x400b, 0x50},
{0x400e, 0x08},
{0x4043, 0x7e},
{0x4045, 0x7e},
{0x4047, 0x7e},
{0x4049, 0x7e},
{0x4090, 0x14},
{0x40b0, 0x00},
{0x40b1, 0x00},
{0x40b2, 0x00},
{0x40b3, 0x00},
{0x40b4, 0x00},
{0x40b5, 0x00},
{0x40b7, 0x00},
{0x40b8, 0x00},
{0x40b9, 0x00},
{0x40ba, 0x00},
{0x4301, 0x00},
{0x4303, 0x00},
{0x4502, 0x04},
{0x4503, 0x00},
{0x4504, 0x06},
{0x4506, 0x00},
{0x4507, 0x64},
{0x4803, 0x00},
{0x480c, 0x32},
{0x480e, 0x00},
{0x4813, 0x00},
{0x4819, 0x70},
{0x481f, 0x30},
{0x4823, 0x3f},
{0x4825, 0x30},
{0x4833, 0x10},
{0x484b, 0x07},
{0x488b, 0x00},
{0x4d00, 0x04},
{0x4d01, 0xad},
{0x4d02, 0xbc},
{0x4d03, 0xa1},
{0x4d04, 0x1f},
{0x4d05, 0x4c},
{0x4d0b, 0x01},
{0x4e00, 0x2a},
{0x4e0d, 0x00},
{0x5001, 0x09},
{0x5004, 0x00},
{0x5080, 0x04},
{0x5036, 0x00},
{0x5180, 0x70},
{0x5181, 0x10},
{0x520a, 0x03},
{0x520b, 0x06},
{0x520c, 0x0c},
{0x580b, 0x0f},
{0x580d, 0x00},
{0x580f, 0x00},
{0x5820, 0x00},
{0x5821, 0x00},
{0x301c, 0xf8},
{0x301e, 0xb4},
{0x301f, 0xd0},
{0x3022, 0x01},
{0x3109, 0xe7},
{0x3600, 0x00},
{0x3610, 0x65},
{0x3611, 0x85},
{0x3613, 0x3a},
{0x3615, 0x60},
{0x3621, 0x90},
{0x3620, 0x0c},
{0x3629, 0x00},
{0x3661, 0x04},
{0x3664, 0x70},
{0x3665, 0x00},
{0x3681, 0xa6},
{0x3682, 0x53},
{0x3683, 0x2a},
{0x3684, 0x15},
{0x3700, 0x2a},
{0x3701, 0x12},
{0x3703, 0x28},
{0x3704, 0x0e},
{0x3706, 0x4a},
{0x3709, 0x4a},
{0x370b, 0xa2},
{0x370c, 0x01},
{0x370f, 0x04},
{0x3714, 0x24},
{0x3716, 0x24},
{0x3719, 0x11},
{0x371a, 0x1e},
{0x3720, 0x00},
{0x3724, 0x13},
{0x373f, 0xb0},
{0x3741, 0x4a},
{0x3743, 0x4a},
{0x3745, 0x4a},
{0x3747, 0x4a},
{0x3749, 0xa2},
{0x374b, 0xa2},
{0x374d, 0xa2},
{0x374f, 0xa2},
{0x3755, 0x10},
{0x376c, 0x00},
{0x378d, 0x30},
{0x3790, 0x4a},
{0x3791, 0xa2},
{0x3798, 0x40},
{0x379e, 0x00},
{0x379f, 0x04},
{0x37a1, 0x10},
{0x37a2, 0x1e},
{0x37a8, 0x10},
{0x37a9, 0x1e},
{0x37ac, 0xa0},
{0x37b9, 0x01},
{0x37bd, 0x01},
{0x37bf, 0x26},
{0x37c0, 0x11},
{0x37c2, 0x04},
{0x37cd, 0x19},
{0x37e0, 0x08},
{0x37e6, 0x04},
{0x37e5, 0x02},
{0x37e1, 0x0c},
{0x3737, 0x04},
{0x37d8, 0x02},
{0x37e2, 0x10},
{0x3739, 0x10},
{0x3662, 0x10},
{0x37e4, 0x20},
{0x37e3, 0x08},
{0x37d9, 0x08},
{0x4040, 0x00},
{0x4041, 0x07},
{0x4008, 0x02},
{0x4009, 0x0d},
{0x3800, 0x01},
{0x3801, 0x80},
{0x3802, 0x00},
{0x3803, 0xdc},
{0x3804, 0x09},
{0x3805, 0x0f},
{0x3806, 0x05},
{0x3807, 0x23},
{0x3808, 0x07},
{0x3809, 0x80},
{0x380a, 0x04},
{0x380b, 0x38},
{0x380c, 0x04},
{0x380d, 0x2e},
{0x380e, 0x12},
{0x380f, 0x70},
{0x3811, 0x08},
{0x3813, 0x08},
{0x3814, 0x01},
{0x3815, 0x01},
{0x3816, 0x01},
{0x3817, 0x01},
{0x3820, 0x88},
{0x3821, 0x00},
{0x3880, 0x25},
{0x3882, 0x20},
{0x3c91, 0x0b},
{0x3c94, 0x45},
{0x3cad, 0x00},
{0x3cae, 0x00},
{0x4000, 0xf3},
{0x4001, 0x60},
{0x4003, 0x40},
{0x4300, 0xff},
{0x4302, 0x0f},
{0x4305, 0x83},
{0x4505, 0x84},
{0x4809, 0x1e},
{0x480a, 0x04},
{0x4837, 0x15},
{0x4c00, 0x08},
{0x4c01, 0x08},
{0x4c04, 0x00},
{0x4c05, 0x00},
{0x5000, 0xf9},
{0x3c8c, 0x10},
};

@ -34,8 +34,9 @@ OX03C10::OX03C10() {
init_reg_array.assign(std::begin(init_array_ox03c10), std::end(init_array_ox03c10));
probe_reg_addr = 0x300a;
probe_expected_data = 0x5803;
in_port_info_dt = 0x2c; // one is 0x2a, two are 0x2b
power_config_val_low = 24000000; //Hz
mipi_format = CAM_FORMAT_MIPI_RAW_12;
frame_data_type = 0x2c; // one is 0x2a, two are 0x2b
mclk_frequency = 24000000; //Hz
dc_gain_factor = 7.32;
dc_gain_min_weight = 1; // always on is fine

@ -9,12 +9,14 @@
#include "system/camerad/cameras/camera_common.h"
#include "system/camerad/sensors/ar0231_registers.h"
#include "system/camerad/sensors/ox03c10_registers.h"
#include "system/camerad/sensors/os04c10_registers.h"
#define ANALOG_GAIN_MAX_CNT 55
const size_t FRAME_WIDTH = 1928;
const size_t FRAME_HEIGHT = 1208;
const size_t FRAME_STRIDE = 2896; // for 12 bit output. 1928 * 12 / 8 + 4 (alignment)
class SensorInfo {
public:
SensorInfo() = default;
@ -56,8 +58,10 @@ public:
uint32_t probe_expected_data;
std::vector<i2c_random_wr_payload> start_reg_array;
std::vector<i2c_random_wr_payload> init_reg_array;
uint32_t in_port_info_dt;
uint32_t power_config_val_low;
uint32_t mipi_format;
uint32_t mclk_frequency;
uint32_t frame_data_type;
};
class AR0231 : public SensorInfo {
@ -79,3 +83,11 @@ public:
float getExposureScore(float desired_ev, int exp_t, int exp_g_idx, float exp_gain, int gain_idx) const override;
int getSlaveAddress(int port) const override;
};
class OS04C10 : public SensorInfo {
public:
OS04C10();
std::vector<i2c_random_wr_payload> getExposureRegisters(int exposure_time, int new_exp_g, bool dc_gain_enabled) const override;
float getExposureScore(float desired_ev, int exp_t, int exp_g_idx, float exp_gain, int gain_idx) const override;
int getSlaveAddress(int port) const override;
};

@ -1,26 +1,19 @@
#!/usr/bin/env python3
import argparse
import os
from tqdm import tqdm
from openpilot.tools.lib.logreader import LogReader
from openpilot.tools.lib.route import Route
import argparse
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("route", help="The route name")
parser.add_argument("segment", type=int, help="The index of the segment")
args = parser.parse_args()
out_path = os.path.join("jpegs", f"{args.route.replace('|', '_')}_{args.segment}")
out_path = os.path.join("jpegs", f"{args.route.replace('|', '_').replace('/', '_')}")
os.makedirs(out_path, exist_ok=True)
r = Route(args.route)
path = r.log_paths()[args.segment] or r.qlog_paths()[args.segment]
lr = list(LogReader(path))
lr = LogReader(args.route)
for msg in tqdm(lr):
if msg.which() == 'thumbnail':

@ -23,6 +23,9 @@ class HardwareBase(ABC):
except Exception:
return default
def booted(self) -> bool:
return True
@abstractmethod
def reboot(self, reason=None):
pass

@ -74,6 +74,11 @@ def sudo_write(val, path):
# fallback for debugfs files
os.system(f"sudo su -c 'echo {val} > {path}'")
def sudo_read(path: str) -> str:
try:
return subprocess.check_output(f"sudo cat {path}", shell=True, encoding='utf8')
except Exception:
return ""
def affine_irq(val, action):
irqs = get_irqs_for_action(action)
@ -554,6 +559,12 @@ class Tici(HardwareBase):
time.sleep(0.5)
gpio_set(GPIO.STM_BOOT0, 0)
def booted(self):
# this normally boots within 8s, but on rare occasions takes 30+s
encoder_state = sudo_read("/sys/kernel/debug/msm_vidc/core0/info")
if "Core state: 0" in encoder_state and (time.monotonic() < 60*2):
return False
return True
if __name__ == "__main__":
t = Tici()

@ -139,7 +139,7 @@ QWidget *VideoWidget::createCameraWidget() {
QStackedLayout *stacked = new QStackedLayout();
stacked->setStackingMode(QStackedLayout::StackAll);
stacked->addWidget(cam_widget = new CameraView("camerad", VISION_STREAM_ROAD, false));
stacked->addWidget(cam_widget = new CameraWidget("camerad", VISION_STREAM_ROAD, false));
cam_widget->setMinimumHeight(MIN_VIDEO_HEIGHT);
cam_widget->setSizePolicy(QSizePolicy::Preferred, QSizePolicy::MinimumExpanding);
stacked->addWidget(alert_label = new InfoLabel(this));
@ -160,13 +160,12 @@ QWidget *VideoWidget::createCameraWidget() {
return w;
}
void VideoWidget::vipcAvailableStreamsUpdated() {
void VideoWidget::vipcAvailableStreamsUpdated(std::set<VisionStreamType> streams) {
static const QString stream_names[] = {
[VISION_STREAM_ROAD] = "Road camera",
[VISION_STREAM_WIDE_ROAD] = "Wide road camera",
[VISION_STREAM_DRIVER] = "Driver camera"};
const auto &streams = cam_widget->availableStreams();
for (int i = 0; i < streams.size(); ++i) {
if (camera_tab->count() <= i) {
camera_tab->addTab(QString());

@ -73,9 +73,9 @@ protected:
QWidget *createCameraWidget();
QHBoxLayout *createPlaybackController();
void loopPlaybackClicked();
void vipcAvailableStreamsUpdated();
void vipcAvailableStreamsUpdated(std::set<VisionStreamType> streams);
CameraView *cam_widget;
CameraWidget *cam_widget;
double maximum_time = 0;
QToolButton *time_btn = nullptr;
ToolButton *seek_backward_btn = nullptr;

@ -0,0 +1,74 @@
# tools/car_porting
Check out [this blog post](https://blog.comma.ai/how-to-write-a-car-port-for-openpilot/) for a high-level overview of porting a car.
## Useful car porting utilities
Testing car ports in your car is very time-consuming. Check out these utilities to do basic checks on your work before you take it out on the road.
### [Cabana](/tools/cabana/README.md)
View your car's CAN signals through DBC files, which openpilot uses to parse and create messages that talk to the car.
Example:
```bash
> tools/cabana/cabana '1bbe6bf2d62f58a8|2022-07-14--17-11-43'
```
### [tools/car_porting/auto_fingerprint.py](/tools/car_porting/auto_fingerprint.py)
Given a route and platform, automatically inserts the FW versions found in the route's logs into the correct place in fingerprints.py for that platform.
Example:
```bash
> python tools/car_porting/auto_fingerprint.py '1bbe6bf2d62f58a8|2022-07-14--17-11-43' 'SUBARU OUTBACK 6TH GEN'
Attempting to add fw version for: SUBARU OUTBACK 6TH GEN
```
### [selfdrive/car/tests/test_car_interfaces.py](/selfdrive/car/tests/test_car_interfaces.py)
Finds common bugs for car interfaces, without even requiring a route.
#### Example: Typo in signal name
```bash
> pytest selfdrive/car/tests/test_car_interfaces.py -k subaru # replace with the brand you are working on
=====================================================================
FAILED selfdrive/car/tests/test_car_interfaces.py::TestCarInterfaces::test_car_interfaces_165_SUBARU_LEGACY_7TH_GEN - KeyError: 'CruiseControlOOPS'
```
### [tools/car_porting/test_car_model.py](/tools/car_porting/test_car_model.py)
Given a route, runs most of the car interface to check for common errors like missing signals, blocked panda messages, and safety mismatches.
#### Example: panda safety mismatch for gasPressed
```bash
> python tools/car_porting/test_car_model.py '4822a427b188122a|2023-08-14--16-22-21'
=====================================================================
FAIL: test_panda_safety_carstate (__main__.CarModelTestCase.test_panda_safety_carstate)
Assert that panda safety matches openpilot's carState
----------------------------------------------------------------------
Traceback (most recent call last):
File "/home/batman/xx/openpilot/openpilot/selfdrive/car/tests/test_models.py", line 380, in test_panda_safety_carstate
self.assertFalse(len(failed_checks), f"panda safety doesn't agree with openpilot: {failed_checks}")
AssertionError: 1 is not false : panda safety doesn't agree with openpilot: {'gasPressed': 116}
```
### [tools/car_porting/examples/subaru_steer_temp_fault.ipynb](/tools/car_porting/examples/subaru_steer_temp_fault.ipynb)
An example of searching through a database of segments for a specific condition, and plotting the results.
![steer warning example](https://github.com/commaai/openpilot/assets/9648890/d60ad120-4b44-4974-ac79-adc660fb8fe2)
*a plot of steer_warning vs steering angle, where we can see the warning is clearly caused by a large steering angle change.*
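The notebook condenses to a loop like the one below; a minimal sketch assuming the same tooling (`LogReader`, `CANParser`) used throughout this change, with the plotting left out:

```python
import copy

from opendbc.can.parser import CANParser
from openpilot.selfdrive.car.subaru.values import CanBus, DBC
from openpilot.tools.lib.logreader import LogReader

platform = "SUBARU OUTBACK 6TH GEN"
lr = LogReader("c3d1ccb52f5f9d65|2023-07-22--01-23-20/6:10")  # segment range used in the notebook

# parse the message that carries the Steer_Warning signal
cp = CANParser(DBC[platform]["pt"], [("Steering_Torque", 50)], CanBus.main)

steering_torque_history = []
for msg in lr:
  if msg.which() == "can":
    cp.update_strings([msg.as_builder().to_bytes()])
    steering_torque_history.append(copy.copy(cp.vl["Steering_Torque"]))

# positive transitions of Steer_Warning, i.e. frames where the EPS starts rejecting our messages
examples = [i for i in range(1, len(steering_torque_history))
            if steering_torque_history[i]["Steer_Warning"] and not steering_torque_history[i - 1]["Steer_Warning"]]
```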
### [tools/car_porting/examples/subaru_long_accel.ipynb](/tools/car_porting/examples/subaru_long_accel.ipynb)
An example of plotting the response of an actuator when it is active.
![brake pressure example](https://github.com/commaai/openpilot/assets/9648890/8f32cf1d-8fc0-4407-b540-70625ebbf082)
*a plot of brake_pressure vs acceleration, where we can see the response is fairly linear.*

@ -5,10 +5,9 @@ from collections import defaultdict
from typing import Optional
from openpilot.selfdrive.debug.format_fingerprints import format_brand_fw_versions
from openpilot.tools.lib.logreader import MultiLogIterator
from openpilot.tools.lib.route import Route
from openpilot.selfdrive.car.fw_versions import match_fw_to_car
from openpilot.selfdrive.car.interfaces import get_interface_attr
from openpilot.tools.lib.logreader import LogReader, ReadMode
ALL_FW_VERSIONS = get_interface_attr("FW_VERSIONS")
@ -25,8 +24,7 @@ if __name__ == "__main__":
parser.add_argument("platform", help="The platform, or leave empty to auto-determine using fuzzy", default=None, nargs='?')
args = parser.parse_args()
route = Route(args.route)
lr = MultiLogIterator(route.qlog_paths())
lr = LogReader(args.route, ReadMode.QLOG)
carFw = None
carVin = None

@ -0,0 +1,120 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"segments = [\n",
" \"d9df6f87e8feff94|2023-03-28--17-41-10/1:12\"\n",
"]\n",
"platform = \"SUBARU OUTBACK 6TH GEN\"\n"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import copy\n",
"import numpy as np\n",
"\n",
"from opendbc.can.parser import CANParser\n",
"\n",
"from openpilot.selfdrive.car.subaru.values import DBC\n",
"from openpilot.tools.lib.logreader import LogReader\n",
"\n",
"\"\"\"\n",
"In this example, we plot the relationship between Cruise_Brake and Acceleration for stock eyesight.\n",
"\"\"\"\n",
"\n",
"for segment in segments:\n",
" lr = LogReader(segment)\n",
"\n",
" messages = [\n",
" (\"ES_Distance\", 20),\n",
" (\"ES_Brake\", 20),\n",
" (\"ES_Status\", 20),\n",
" ]\n",
"\n",
" cp = CANParser(DBC[platform][\"pt\"], messages, 1)\n",
"\n",
" es_distance_history = []\n",
" es_status_history = []\n",
" es_brake_history = []\n",
" acceleration_history = []\n",
"\n",
" last_acc = 0\n",
"\n",
" for msg in lr:\n",
" if msg.which() == \"can\":\n",
" cp.update_strings([msg.as_builder().to_bytes()])\n",
" es_distance_history.append(copy.copy(cp.vl[\"ES_Distance\"]))\n",
" es_brake_history.append(copy.copy(cp.vl[\"ES_Brake\"]))\n",
" es_status_history.append(copy.copy(cp.vl[\"ES_Status\"]))\n",
"\n",
" acceleration_history.append(last_acc)\n",
" \n",
" if msg.which() == \"carState\":\n",
" last_acc = msg.carState.aEgo"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"def process(history, func):\n",
" return np.array([func(h) for h in history])\n",
"\n",
"cruise_activated = process(es_status_history, lambda es_status: es_status[\"Cruise_Activated\"])\n",
"cruise_throttle = process(es_distance_history, lambda es_distance: es_distance[\"Cruise_Throttle\"])\n",
"cruise_rpm = process(es_status_history, lambda es_status: es_status[\"Cruise_RPM\"])\n",
"cruise_brake = process(es_brake_history, lambda es_brake: es_brake[\"Brake_Pressure\"])\n",
"acceleration = process(acceleration_history, lambda acc: acc)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import matplotlib.pyplot as plt\n",
"\n",
"valid_brake = (cruise_activated==1) & (cruise_brake>0) # only when cruise is activated and eyesight is braking\n",
"\n",
"ax = plt.figure().add_subplot()\n",
"\n",
"ax.set_title(\"Brake_Pressure vs Acceleration\")\n",
"ax.set_xlabel(\"Brake_Pessure\")\n",
"ax.set_ylabel(\"Acceleration\")\n",
"ax.scatter(cruise_brake[valid_brake], -acceleration[valid_brake])"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.4"
}
},
"nbformat": 4,
"nbformat_minor": 2
}

@ -0,0 +1,110 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# An example of searching through a database of segments for a specific condition, and plotting the results.\n",
"\n",
"segments = [\n",
" \"c3d1ccb52f5f9d65|2023-07-22--01-23-20/6:10\",\n",
"]\n",
"platform = \"SUBARU OUTBACK 6TH GEN\""
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import copy\n",
"import matplotlib.pyplot as plt\n",
"import numpy as np\n",
"\n",
"from opendbc.can.parser import CANParser\n",
"\n",
"from openpilot.selfdrive.car.subaru.values import CanBus, DBC\n",
"from openpilot.tools.lib.logreader import LogReader\n",
"\n",
"\"\"\"\n",
"In this example, we search for positive transitions of Steer_Warning, which indicate that the EPS\n",
"has stopped responding to our messages. This analysis would allow you to find the cause of these\n",
"steer warnings and potentially work around them.\n",
"\"\"\"\n",
"\n",
"for segment in segments:\n",
" lr = LogReader(segment)\n",
"\n",
" can_msgs = [msg for msg in lr if msg.which() == \"can\"]\n",
"\n",
" messages = [\n",
" (\"Steering_Torque\", 50)\n",
" ]\n",
"\n",
" cp = CANParser(DBC[platform][\"pt\"], messages, CanBus.main)\n",
"\n",
" steering_torque_history = []\n",
" examples = []\n",
"\n",
" for msg in can_msgs:\n",
" cp.update_strings([msg.as_builder().to_bytes()])\n",
" steering_torque_history.append(copy.copy(cp.vl[\"Steering_Torque\"]))\n",
" \n",
" steer_warning_last = False\n",
" for i, steering_torque_msg in enumerate(steering_torque_history):\n",
" steer_warning = steering_torque_msg[\"Steer_Warning\"]\n",
"\n",
" steer_angle = steering_torque_msg[\"Steering_Angle\"]\n",
"\n",
" if steer_warning and not steer_warning_last: # positive transition of \"Steer_Warning\"\n",
" examples.append(i)\n",
"\n",
" steer_warning_last = steer_warning\n",
"\n",
" FRAME_DELTA = 100 # plot this many frames around the positive transition\n",
"\n",
" for example in examples:\n",
" fig, axs = plt.subplots(2)\n",
"\n",
" min_frame = int(example-FRAME_DELTA/2)\n",
" max_frame = int(example+FRAME_DELTA/2)\n",
"\n",
" steering_angle_history = [msg[\"Steering_Angle\"] for msg in steering_torque_history[min_frame:max_frame]]\n",
" steering_warning_history = [msg[\"Steer_Warning\"] for msg in steering_torque_history[min_frame:max_frame]]\n",
"\n",
" xs = np.arange(-FRAME_DELTA/2, FRAME_DELTA/2)\n",
"\n",
" axs[0].plot(xs, steering_angle_history)\n",
" axs[0].set_ylabel(\"Steering Angle (deg)\")\n",
" axs[1].plot(xs, steering_warning_history)\n",
" axs[1].set_ylabel(\"Steer Warning\")\n",
"\n",
" plt.show()\n"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 3",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.4"
}
},
"nbformat": 4,
"nbformat_minor": 2
}

@ -8,7 +8,7 @@ import sys
from bisect import bisect_left, bisect_right
from collections import defaultdict
from openpilot.tools.lib.logreader import logreader_from_route_or_segment
from openpilot.tools.lib.logreader import LogReader
DEMO_ROUTE = "9f583b1d93915c31|2022-05-18--10-49-51--0"
@ -236,7 +236,7 @@ if __name__ == "__main__":
args = parser.parse_args()
r = DEMO_ROUTE if args.demo else args.route_or_segment_name.strip()
lr = logreader_from_route_or_segment(r, sort_by_time=True)
lr = LogReader(r, sort_by_time=True)
data, _ = get_timestamps(lr)
print_timestamps(data['timestamp'], data['duration'], data['start'], args.relative)

@ -32,20 +32,22 @@ for msg in lr:
print(msg.carState.steeringAngleDeg)
```
### MultiLogIterator
### Segment Ranges
`MultiLogIterator` is similar to `LogReader`, but reads multiple logs.
We also support a new format called a "segment range", where you can specify which segments from a route to load.
```python
from openpilot.tools.lib.route import Route
from openpilot.tools.lib.logreader import MultiLogIterator
# setup a MultiLogIterator to read all the logs in the route
r = Route("a2a0ccea32023010|2023-07-27--13-01-19")
lr = MultiLogIterator(r.log_paths())
lr = LogReader("a2a0ccea32023010|2023-07-27--13-01-19/4") # 4th segment
lr = LogReader("a2a0ccea32023010|2023-07-27--13-01-19/4:6") # 4th and 5th segment
lr = LogReader("a2a0ccea32023010|2023-07-27--13-01-19/-1") # last segment
lr = LogReader("a2a0ccea32023010|2023-07-27--13-01-19/:5") # first 5 segments
lr = LogReader("a2a0ccea32023010|2023-07-27--13-01-19/1:") # all except first segment
```
# print all the steering angles values from all the logs in the route
for msg in lr:
if msg.which() == "carState":
print(msg.carState.steeringAngleDeg)
You can also select which type of logs to grab:
```python
lr = LogReader("a2a0ccea32023010|2023-07-27--13-01-19/4/q") # get qlogs
lr = LogReader("a2a0ccea32023010|2023-07-27--13-01-19/4/r") # get rlogs (default)
```
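Both features compose; as a quick check, a sliced, qlog-only reader iterates the same way as the single-segment examples above (a minimal sketch reusing the route from this README):
```python
from openpilot.tools.lib.logreader import LogReader

lr = LogReader("a2a0ccea32023010|2023-07-27--13-01-19/:2/q")  # first 2 segments, qlogs
for msg in lr:
  if msg.which() == "carState":
    print(msg.carState.steeringAngleDeg)
```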

@ -7,8 +7,11 @@ TIME_FMT = "%Y-%m-%d--%H-%M-%S"
class RE:
DONGLE_ID = r'(?P<dongle_id>[a-z0-9]{16})'
TIMESTAMP = r'(?P<timestamp>[0-9]{4}-[0-9]{2}-[0-9]{2}--[0-9]{2}-[0-9]{2}-[0-9]{2})'
ROUTE_NAME = r'{}[|_/]{}'.format(DONGLE_ID, TIMESTAMP)
ROUTE_NAME = r'(?P<route_name>{}[|_/]{})'.format(DONGLE_ID, TIMESTAMP)
SEGMENT_NAME = r'{}(?:--|/)(?P<segment_num>[0-9]+)'.format(ROUTE_NAME)
INDEX = r'-?[0-9]+'
SLICE = r'(?P<start>{})?:?(?P<end>{})?:?(?P<step>{})?'.format(INDEX, INDEX, INDEX)
SEGMENT_RANGE = r'{}(?:--|/)?(?P<slice>({}))?/?(?P<selector>([qr]))?'.format(ROUTE_NAME, SLICE)
BOOTLOG_NAME = ROUTE_NAME
EXPLORER_FILE = r'^(?P<segment_name>{})--(?P<file_name>[a-z]+\.[a-z0-9]+)$'.format(SEGMENT_NAME)
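These named groups are what route.py's SegmentRange reads back out; a minimal sketch of matching an identifier against the new pattern (the route string is borrowed from the LogReader README above):
```python
import re

from openpilot.tools.lib.helpers import RE

m = re.fullmatch(RE.SEGMENT_RANGE, "a2a0ccea32023010|2023-07-27--13-01-19/4:6/q")
assert m is not None
print(m.group("route_name"))  # a2a0ccea32023010|2023-07-27--13-01-19
print(m.group("slice"))       # 4:6
print(m.group("selector"))    # q
```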

@ -1,82 +1,29 @@
#!/usr/bin/env python3
import bz2
import capnp
import enum
import itertools
import numpy as np
import os
import pathlib
import re
import sys
import bz2
import urllib.parse
import capnp
import warnings
from typing import Iterable, Iterator
from urllib.parse import parse_qs, urlparse
from cereal import log as capnp_log
from openpilot.selfdrive.test.openpilotci import get_url
from openpilot.tools.lib.filereader import FileReader
from openpilot.tools.lib.route import Route, SegmentName
from openpilot.tools.lib.helpers import RE
from openpilot.tools.lib.route import Route, SegmentRange
LogIterable = Iterable[capnp._DynamicStructReader]
# this is an iterator itself, and uses private variables from LogReader
class MultiLogIterator:
def __init__(self, log_paths, sort_by_time=False):
self._log_paths = log_paths
self.sort_by_time = sort_by_time
self._first_log_idx = next(i for i in range(len(log_paths)) if log_paths[i] is not None)
self._current_log = self._first_log_idx
self._idx = 0
self._log_readers = [None]*len(log_paths)
self.start_time = self._log_reader(self._first_log_idx)._ts[0]
def _log_reader(self, i):
if self._log_readers[i] is None and self._log_paths[i] is not None:
log_path = self._log_paths[i]
self._log_readers[i] = LogReader(log_path, sort_by_time=self.sort_by_time)
return self._log_readers[i]
def __iter__(self) -> Iterator[capnp._DynamicStructReader]:
return self
def _inc(self):
lr = self._log_reader(self._current_log)
if self._idx < len(lr._ents)-1:
self._idx += 1
else:
self._idx = 0
self._current_log = next(i for i in range(self._current_log + 1, len(self._log_readers) + 1)
if i == len(self._log_readers) or self._log_paths[i] is not None)
if self._current_log == len(self._log_readers):
raise StopIteration
def __next__(self):
while 1:
lr = self._log_reader(self._current_log)
ret = lr._ents[self._idx]
self._inc()
return ret
def tell(self):
# returns seconds from start of log
return (self._log_reader(self._current_log)._ts[self._idx] - self.start_time) * 1e-9
def seek(self, ts):
# seek to nearest minute
minute = int(ts/60)
if minute >= len(self._log_paths) or self._log_paths[minute] is None:
return False
self._current_log = minute
# HACK: O(n) seek afterward
self._idx = 0
while self.tell() < ts:
self._inc()
return True
def reset(self):
self.__init__(self._log_paths, sort_by_time=self.sort_by_time)
class LogReader:
class _LogFileReader:
def __init__(self, fn, canonicalize=True, only_union_types=False, sort_by_time=False, dat=None):
self.data_version = None
self._only_union_types = only_union_types
@ -106,10 +53,6 @@ class LogReader:
self._ents = list(sorted(_ents, key=lambda x: x.logMonoTime) if sort_by_time else _ents)
self._ts = [x.logMonoTime for x in self._ents]
@classmethod
def from_bytes(cls, dat):
return cls("", dat=dat)
def __iter__(self) -> Iterator[capnp._DynamicStructReader]:
for ent in self._ents:
if self._only_union_types:
@ -121,13 +64,139 @@ class LogReader:
else:
yield ent
def logreader_from_route_or_segment(r, sort_by_time=False):
sn = SegmentName(r, allow_route_name=True)
route = Route(sn.route_name.canonical_name)
if sn.segment_num < 0:
return MultiLogIterator(route.log_paths(), sort_by_time=sort_by_time)
else:
return LogReader(route.log_paths()[sn.segment_num], sort_by_time=sort_by_time)
class ReadMode(enum.StrEnum):
RLOG = "r" # only read rlogs
QLOG = "q" # only read qlogs
#AUTO = "a" # default to rlogs, fallback to qlogs, not supported yet
def create_slice_from_string(s: str):
m = re.fullmatch(RE.SLICE, s)
assert m is not None, f"Invalid slice: {s}"
start, end, step = m.groups()
start = int(start) if start is not None else None
end = int(end) if end is not None else None
step = int(step) if step is not None else None
if start is not None and ":" not in s and end is None and step is None:
return start
return slice(start, end, step)
def parse_slice(sr: SegmentRange):
route = Route(sr.route_name)
segs = np.arange(route.max_seg_number+1)
s = create_slice_from_string(sr._slice)
return segs[s] if isinstance(s, slice) else [segs[s]]
def comma_api_source(sr: SegmentRange, mode=ReadMode.RLOG, sort_by_time=False):
segs = parse_slice(sr)
route = Route(sr.route_name)
log_paths = route.log_paths() if mode == ReadMode.RLOG else route.qlog_paths()
invalid_segs = [seg for seg in segs if log_paths[seg] is None]
assert not len(invalid_segs), f"Some of the requested segments are not available: {invalid_segs}"
for seg in segs:
yield _LogFileReader(log_paths[seg], sort_by_time=sort_by_time)
def internal_source(sr: SegmentRange, mode=ReadMode.RLOG, sort_by_time=False):
segs = parse_slice(sr)
for seg in segs:
yield _LogFileReader(f"cd:/{sr.dongle_id}/{sr.timestamp}/{seg}/{'rlog' if mode == ReadMode.RLOG else 'qlog'}.bz2", sort_by_time=sort_by_time)
def openpilotci_source(sr: SegmentRange, mode=ReadMode.RLOG, sort_by_time=False):
segs = parse_slice(sr)
for seg in segs:
yield _LogFileReader(get_url(sr.route_name, seg, 'rlog' if mode == ReadMode.RLOG else 'qlog'), sort_by_time=sort_by_time)
def direct_source(file_or_url, sort_by_time):
yield _LogFileReader(file_or_url, sort_by_time=sort_by_time)
def auto_source(*args, **kwargs):
# Automatically determine viable source
try:
next(internal_source(*args, **kwargs))
return internal_source(*args, **kwargs)
except Exception:
pass
try:
next(openpilotci_source(*args, **kwargs))
return openpilotci_source(*args, **kwargs)
except Exception:
pass
return comma_api_source(*args, **kwargs)
def parse_useradmin(identifier):
if "useradmin.comma.ai" in identifier:
query = parse_qs(urlparse(identifier).query)
return query["onebox"][0]
return None
def parse_cabana(identifier):
if "cabana.comma.ai" in identifier:
query = parse_qs(urlparse(identifier).query)
return query["route"][0]
return None
def parse_direct(identifier):
if identifier.startswith(("http://", "https://", "cd:/")) or pathlib.Path(identifier).exists():
return identifier
return None
def parse_indirect(identifier):
parsed = parse_useradmin(identifier) or parse_cabana(identifier)
if parsed is not None:
return parsed, comma_api_source, True
return identifier, None, False
class LogReader:
def _logreaders_from_identifier(self, identifier):
parsed, source, is_indirect = parse_indirect(identifier)
if not is_indirect:
direct_parsed = parse_direct(identifier)
if direct_parsed is not None:
return direct_source(identifier, sort_by_time=self.sort_by_time)
sr = SegmentRange(parsed)
mode = self.default_mode if sr.selector is None else ReadMode(sr.selector)
source = self.default_source if source is None else source
return source(sr, mode, sort_by_time=self.sort_by_time)
def __init__(self, identifier: str, default_mode=ReadMode.RLOG, default_source=auto_source, sort_by_time=False):
self.default_mode = default_mode
self.default_source = default_source
self.sort_by_time = sort_by_time
self.identifier = identifier
self.reset()
def __iter__(self):
self.reset()
return self
def __next__(self):
return next(self.chain)
def reset(self):
self.lrs = self._logreaders_from_identifier(self.identifier)
self.chain = itertools.chain(*self.lrs)
@staticmethod
def from_bytes(dat):
return _LogFileReader("", dat=dat)
if __name__ == "__main__":
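As a usage sketch of the rewritten class (every keyword shown is from the constructor above; the route string is the README example):
```python
from openpilot.tools.lib.logreader import LogReader, ReadMode, comma_api_source

# default behaviour: auto_source tries internal, then openpilotci, then the comma API
lr = LogReader("a2a0ccea32023010|2023-07-27--13-01-19/4:6")

# pin the source, and prefer qlogs when the identifier carries no /q or /r selector
lr = LogReader("a2a0ccea32023010|2023-07-27--13-01-19/4:6",
               default_mode=ReadMode.QLOG, default_source=comma_api_source, sort_by_time=True)

for msg in lr:
  if msg.which() == "carState":
    print(msg.carState.steeringAngleDeg)
```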

@ -229,3 +229,29 @@ class SegmentName:
def data_dir(self) -> Optional[str]: return self._data_dir
def __str__(self) -> str: return self._canonical_name
class SegmentRange:
def __init__(self, segment_range: str):
self.m = re.fullmatch(RE.SEGMENT_RANGE, segment_range)
assert self.m, f"Segment range is not valid {segment_range}"
@property
def route_name(self):
return self.m.group("route_name")
@property
def dongle_id(self):
return self.m.group("dongle_id")
@property
def timestamp(self):
return self.m.group("timestamp")
@property
def _slice(self):
return self.m.group("slice")
@property
def selector(self):
return self.m.group("selector")
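A minimal round-trip sketch, assuming network access so the route's segment count can be resolved (parse_slice is the helper from logreader.py above; the route string is the README example):
```python
from openpilot.tools.lib.logreader import parse_slice
from openpilot.tools.lib.route import SegmentRange

sr = SegmentRange("a2a0ccea32023010|2023-07-27--13-01-19/-2:")
print(sr.route_name, sr._slice)  # route name and the raw "-2:" slice
print(parse_slice(sr))           # the last two segment indices of the route
```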

@ -0,0 +1,86 @@
import shutil
import tempfile
import numpy as np
import unittest
from parameterized import parameterized
import requests
from openpilot.tools.lib.logreader import LogReader, parse_indirect, parse_slice, ReadMode
from openpilot.tools.lib.route import SegmentRange
NUM_SEGS = 17 # number of segments in the test route
ALL_SEGS = list(np.arange(NUM_SEGS))
TEST_ROUTE = "344c5c15b34f2d8a/2024-01-03--09-37-12"
QLOG_FILE = "https://commadataci.blob.core.windows.net/openpilotci/0375fdf7b1ce594d/2019-06-13--08-32-25/3/qlog.bz2"
class TestLogReader(unittest.TestCase):
@parameterized.expand([
(f"{TEST_ROUTE}", ALL_SEGS),
(f"{TEST_ROUTE.replace('/', '|')}", ALL_SEGS),
(f"{TEST_ROUTE}--0", [0]),
(f"{TEST_ROUTE}--5", [5]),
(f"{TEST_ROUTE}/0", [0]),
(f"{TEST_ROUTE}/5", [5]),
(f"{TEST_ROUTE}/0:10", ALL_SEGS[0:10]),
(f"{TEST_ROUTE}/0:0", []),
(f"{TEST_ROUTE}/4:6", ALL_SEGS[4:6]),
(f"{TEST_ROUTE}/0:-1", ALL_SEGS[0:-1]),
(f"{TEST_ROUTE}/:5", ALL_SEGS[:5]),
(f"{TEST_ROUTE}/2:", ALL_SEGS[2:]),
(f"{TEST_ROUTE}/2:-1", ALL_SEGS[2:-1]),
(f"{TEST_ROUTE}/-1", [ALL_SEGS[-1]]),
(f"{TEST_ROUTE}/-2", [ALL_SEGS[-2]]),
(f"{TEST_ROUTE}/-2:-1", ALL_SEGS[-2:-1]),
(f"{TEST_ROUTE}/-4:-2", ALL_SEGS[-4:-2]),
(f"{TEST_ROUTE}/:10:2", ALL_SEGS[:10:2]),
(f"{TEST_ROUTE}/5::2", ALL_SEGS[5::2]),
(f"https://useradmin.comma.ai/?onebox={TEST_ROUTE}", ALL_SEGS),
(f"https://useradmin.comma.ai/?onebox={TEST_ROUTE.replace('/', '|')}", ALL_SEGS),
(f"https://useradmin.comma.ai/?onebox={TEST_ROUTE.replace('/', '%7C')}", ALL_SEGS),
(f"https://cabana.comma.ai/?route={TEST_ROUTE}", ALL_SEGS),
])
def test_indirect_parsing(self, identifier, expected):
parsed, _, _ = parse_indirect(identifier)
sr = SegmentRange(parsed)
segs = parse_slice(sr)
self.assertListEqual(list(segs), expected)
def test_direct_parsing(self):
qlog = tempfile.NamedTemporaryFile(mode='wb', delete=False)
with requests.get(QLOG_FILE, stream=True) as r:
with qlog as f:
shutil.copyfileobj(r.raw, f)
for f in [QLOG_FILE, qlog.name]:
l = len(list(LogReader(f)))
self.assertGreater(l, 100)
@parameterized.expand([
(f"{TEST_ROUTE}///",),
(f"{TEST_ROUTE}---",),
(f"{TEST_ROUTE}/-4:--2",),
(f"{TEST_ROUTE}/-a",),
(f"{TEST_ROUTE}/j",),
(f"{TEST_ROUTE}/0:1:2:3",),
(f"{TEST_ROUTE}/:::3",),
])
def test_bad_ranges(self, segment_range):
with self.assertRaises(AssertionError):
sr = SegmentRange(segment_range)
parse_slice(sr)
def test_modes(self):
qlog_len = len(list(LogReader(f"{TEST_ROUTE}/0", ReadMode.QLOG)))
rlog_len = len(list(LogReader(f"{TEST_ROUTE}/0", ReadMode.RLOG)))
self.assertLess(qlog_len * 6, rlog_len)
def test_modes_from_name(self):
qlog_len = len(list(LogReader(f"{TEST_ROUTE}/0/q")))
rlog_len = len(list(LogReader(f"{TEST_ROUTE}/0/r")))
self.assertLess(qlog_len * 6, rlog_len)
if __name__ == "__main__":
unittest.main()

@ -1,7 +1,7 @@
#!/usr/bin/env python3
import multiprocessing
import os
import sys
import multiprocessing
import platform
import shutil
import subprocess
@ -9,13 +9,12 @@ import tarfile
import tempfile
import requests
import argparse
from functools import partial
from openpilot.common.basedir import BASEDIR
from openpilot.selfdrive.test.openpilotci import get_url
from openpilot.tools.lib.logreader import LogReader
from openpilot.tools.lib.route import Route, SegmentName
from openpilot.tools.lib.helpers import save_log
from urllib.parse import urlparse, parse_qs
from openpilot.tools.lib.logreader import LogReader
juggle_dir = os.path.dirname(os.path.realpath(__file__))
@ -53,17 +52,6 @@ def get_plotjuggler_version():
return tuple(map(int, version.split(".")))
def load_segment(segment_name):
if segment_name is None:
return []
try:
return list(LogReader(segment_name))
except (AssertionError, ValueError) as e:
print(f"Error parsing {segment_name}: {e}")
return []
def start_juggler(fn=None, dbc=None, layout=None, route_or_segment_name=None):
env = os.environ.copy()
env["BASEDIR"] = BASEDIR
@ -82,48 +70,16 @@ def start_juggler(fn=None, dbc=None, layout=None, route_or_segment_name=None):
cmd = f'{PLOTJUGGLER_BIN} --buffer_size {MAX_STREAMING_BUFFER_SIZE} --plugin_folders {INSTALL_DIR}{extra_args}'
subprocess.call(cmd, shell=True, env=env, cwd=juggle_dir)
def process(can, lr):
return [d for d in lr if can or d.which() not in ['can', 'sendcan']]
def juggle_route(route_or_segment_name, segment_count, qlog, can, layout, dbc=None, ci=False):
segment_start = 0
if 'cabana' in route_or_segment_name:
query = parse_qs(urlparse(route_or_segment_name).query)
route_or_segment_name = query["route"][0]
def juggle_route(route_or_segment_name, can, layout, dbc=None):
sr = LogReader(route_or_segment_name)
if route_or_segment_name.startswith(("http://", "https://", "cd:/")) or os.path.isfile(route_or_segment_name):
logs = [route_or_segment_name]
elif ci:
route_or_segment_name = SegmentName(route_or_segment_name, allow_route_name=True)
route = route_or_segment_name.route_name.canonical_name
segment_start = max(route_or_segment_name.segment_num, 0)
logs = [get_url(route, i) for i in range(100)] # Assume there not more than 100 segments
else:
route_or_segment_name = SegmentName(route_or_segment_name, allow_route_name=True)
segment_start = max(route_or_segment_name.segment_num, 0)
if route_or_segment_name.segment_num != -1 and segment_count is None:
segment_count = 1
r = Route(route_or_segment_name.route_name.canonical_name, route_or_segment_name.data_dir)
logs = r.qlog_paths() if qlog else r.log_paths()
segment_end = segment_start + segment_count if segment_count else None
logs = logs[segment_start:segment_end]
if None in logs:
resp = input(f"{logs.count(None)}/{len(logs)} of the rlogs in this segment are missing, would you like to fall back to the qlogs? (y/n) ")
if resp == 'y':
logs = r.qlog_paths()[segment_start:segment_end]
else:
print("Please try a different route or segment")
return
all_data = []
with multiprocessing.Pool(24) as pool:
for d in pool.map(load_segment, logs):
all_data += d
if not can:
all_data = [d for d in all_data if d.which() not in ['can', 'sendcan']]
all_data = []
for p in pool.map(partial(process, can), sr.lrs):
all_data.extend(p)
# Infer DBC name from logs
if dbc is None:
@ -146,15 +102,12 @@ if __name__ == "__main__":
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument("--demo", action="store_true", help="Use the demo route instead of providing one")
parser.add_argument("--qlog", action="store_true", help="Use qlogs")
parser.add_argument("--ci", action="store_true", help="Download data from openpilot CI bucket")
parser.add_argument("--can", action="store_true", help="Parse CAN data")
parser.add_argument("--stream", action="store_true", help="Start PlotJuggler in streaming mode")
parser.add_argument("--layout", nargs='?', help="Run PlotJuggler with a pre-defined layout")
parser.add_argument("--install", action="store_true", help="Install or update PlotJuggler + plugins")
parser.add_argument("--dbc", help="Set the DBC name to load for parsing CAN data. If not set, the DBC will be automatically inferred from the logs.")
parser.add_argument("route_or_segment_name", nargs='?', help="The route or segment name to plot (cabana share URL accepted)")
parser.add_argument("segment_count", type=int, nargs='?', help="The number of segments to plot")
if len(sys.argv) == 1:
parser.print_help()
@ -177,4 +130,4 @@ if __name__ == "__main__":
start_juggler(layout=args.layout)
else:
route_or_segment_name = DEMO_ROUTE if args.demo else args.route_or_segment_name.strip()
juggle_route(route_or_segment_name, args.segment_count, args.qlog, args.can, args.layout, args.dbc, args.ci)
juggle_route(route_or_segment_name, args.can, args.layout, args.dbc)
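juggle_route now leans on LogReader exposing its per-segment readers as lrs; a minimal standalone sketch of the same fan-out-and-filter pattern (the route string is the demo route used elsewhere in this diff):
```python
import multiprocessing
from functools import partial

from openpilot.tools.lib.logreader import LogReader

def process(can, lr):
  # keep everything except raw CAN traffic unless it was explicitly requested
  return [d for d in lr if can or d.which() not in ['can', 'sendcan']]

sr = LogReader("9f583b1d93915c31|2022-05-18--10-49-51--0")
all_data = []
with multiprocessing.Pool(24) as pool:
  for p in pool.map(partial(process, False), sr.lrs):
    all_data.extend(p)
print(len(all_data))
```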
