Merge remote-tracking branch 'origin/master' into ford-platform-codes

pull/31124/head
Cameron Clough
commit 7a3980edd5
36 changed files (changed line counts in parentheses):

  1. cereal (2)
  2. docs/CARS.md (2)
  3. release/check-submodules.sh (2)
  4. selfdrive/car/ford/fingerprints.py (3)
  5. selfdrive/car/ford/values.py (2)
  6. selfdrive/car/honda/fingerprints.py (1)
  7. selfdrive/car/subaru/fingerprints.py (1)
  8. selfdrive/car/subaru/values.py (21)
  9. selfdrive/car/tests/test_fw_fingerprint.py (4)
  10. selfdrive/car/toyota/carcontroller.py (6)
  11. selfdrive/car/toyota/carstate.py (1)
  12. selfdrive/car/toyota/fingerprints.py (4)
  13. selfdrive/controls/controlsd.py (1)
  14. selfdrive/locationd/locationd.cc (4)
  15. selfdrive/locationd/test/test_locationd.py (1)
  16. selfdrive/locationd/test/test_locationd_scenarios.py (3)
  17. selfdrive/test/helpers.py (9)
  18. selfdrive/test/process_replay/migration.py (17)
  19. selfdrive/test/process_replay/ref_commit (2)
  20. selfdrive/updated/common.py (115)
  21. selfdrive/updated/git.py (236)
  22. selfdrive/updated/tests/test_base.py (99)
  23. selfdrive/updated/tests/test_git.py (22)
  24. selfdrive/updated/updated.py (300)
  25. system/camerad/cameras/camera_common.cc (8)
  26. system/camerad/cameras/real_debayer.cl (175)
  27. system/camerad/sensors/ar0231.cc (8)
  28. system/camerad/sensors/os04c10.cc (53)
  29. system/camerad/sensors/os04c10_registers.h (155)
  30. system/camerad/sensors/ox03c10.cc (6)
  31. system/camerad/sensors/sensor.h (4)
  32. system/hardware/tici/hardware.py (5)
  33. system/qcomgpsd/cgpsd.py (2)
  34. system/qcomgpsd/qcomgpsd.py (4)
  35. system/ubloxd/ublox_msg.cc (1)
  36. tools/sim/scenarios/metadrive/stay_in_lane.py (1)

@@ -1 +1 @@
-Subproject commit c5c2a60f1aa796e7de464015349db3c336b79220
+Subproject commit cf7bb3e74974879abef94286fab4d39398fe402b

@@ -35,7 +35,7 @@ A supported vehicle is one that just works when you install a comma device. All
 |Chrysler|Pacifica Hybrid 2019-23|Adaptive Cruise Control (ACC)|Stock|0 mph|39 mph|[![star](assets/icon-star-full.svg)](##)|[![star](assets/icon-star-full.svg)](##)|<details><summary>Parts</summary><sub>- 1 FCA connector<br>- 1 RJ45 cable (7 ft)<br>- 1 comma 3X<br>- 1 comma power v2<br>- 1 harness box<br>- 1 mount<br>- 1 right angle OBD-C cable (1.5 ft)<br><a href="https://comma.ai/shop/comma-3x.html?make=Chrysler&model=Pacifica Hybrid 2019-23">Buy Here</a></sub></details>||
 |comma|body|All|openpilot|0 mph|0 mph|[![star](assets/icon-star-full.svg)](##)|[![star](assets/icon-star-full.svg)](##)|None||
 |Dodge|Durango 2020-21|Adaptive Cruise Control (ACC)|Stock|0 mph|39 mph|[![star](assets/icon-star-full.svg)](##)|[![star](assets/icon-star-full.svg)](##)|<details><summary>Parts</summary><sub>- 1 FCA connector<br>- 1 RJ45 cable (7 ft)<br>- 1 comma 3X<br>- 1 comma power v2<br>- 1 harness box<br>- 1 mount<br>- 1 right angle OBD-C cable (1.5 ft)<br><a href="https://comma.ai/shop/comma-3x.html?make=Dodge&model=Durango 2020-21">Buy Here</a></sub></details>||
-|Ford|Bronco Sport 2021-22|Co-Pilot360 Assist+|openpilot available[<sup>1</sup>](#footnotes)|0 mph|0 mph|[![star](assets/icon-star-full.svg)](##)|[![star](assets/icon-star-full.svg)](##)|<details><summary>Parts</summary><sub>- 1 Ford Q3 connector<br>- 1 RJ45 cable (7 ft)<br>- 1 angled mount (8 degrees)<br>- 1 comma 3X<br>- 1 comma power v2<br>- 1 harness box<br>- 1 right angle OBD-C cable (1.5 ft)<br><a href="https://comma.ai/shop/comma-3x.html?make=Ford&model=Bronco Sport 2021-22">Buy Here</a></sub></details>||
+|Ford|Bronco Sport 2021-23|Co-Pilot360 Assist+|openpilot available[<sup>1</sup>](#footnotes)|0 mph|0 mph|[![star](assets/icon-star-full.svg)](##)|[![star](assets/icon-star-full.svg)](##)|<details><summary>Parts</summary><sub>- 1 Ford Q3 connector<br>- 1 RJ45 cable (7 ft)<br>- 1 angled mount (8 degrees)<br>- 1 comma 3X<br>- 1 comma power v2<br>- 1 harness box<br>- 1 right angle OBD-C cable (1.5 ft)<br><a href="https://comma.ai/shop/comma-3x.html?make=Ford&model=Bronco Sport 2021-23">Buy Here</a></sub></details>||
 |Ford|Escape 2020-22|Co-Pilot360 Assist+|openpilot available[<sup>1</sup>](#footnotes)|0 mph|0 mph|[![star](assets/icon-star-full.svg)](##)|[![star](assets/icon-star-full.svg)](##)|<details><summary>Parts</summary><sub>- 1 Ford Q3 connector<br>- 1 RJ45 cable (7 ft)<br>- 1 comma 3X<br>- 1 comma power v2<br>- 1 harness box<br>- 1 mount<br>- 1 right angle OBD-C cable (1.5 ft)<br><a href="https://comma.ai/shop/comma-3x.html?make=Ford&model=Escape 2020-22">Buy Here</a></sub></details>||
 |Ford|Escape Hybrid 2020-22|Co-Pilot360 Assist+|openpilot available[<sup>1</sup>](#footnotes)|0 mph|0 mph|[![star](assets/icon-star-full.svg)](##)|[![star](assets/icon-star-full.svg)](##)|<details><summary>Parts</summary><sub>- 1 Ford Q3 connector<br>- 1 RJ45 cable (7 ft)<br>- 1 comma 3X<br>- 1 comma power v2<br>- 1 harness box<br>- 1 mount<br>- 1 right angle OBD-C cable (1.5 ft)<br><a href="https://comma.ai/shop/comma-3x.html?make=Ford&model=Escape Hybrid 2020-22">Buy Here</a></sub></details>||
 |Ford|Escape Plug-in Hybrid 2020-22|Co-Pilot360 Assist+|openpilot available[<sup>1</sup>](#footnotes)|0 mph|0 mph|[![star](assets/icon-star-full.svg)](##)|[![star](assets/icon-star-full.svg)](##)|<details><summary>Parts</summary><sub>- 1 Ford Q3 connector<br>- 1 RJ45 cable (7 ft)<br>- 1 comma 3X<br>- 1 comma power v2<br>- 1 harness box<br>- 1 mount<br>- 1 right angle OBD-C cable (1.5 ft)<br><a href="https://comma.ai/shop/comma-3x.html?make=Ford&model=Escape Plug-in Hybrid 2020-22">Buy Here</a></sub></details>||

@@ -1,7 +1,7 @@
 #!/bin/bash
 while read hash submodule ref; do
-git -C $submodule fetch --depth 1000 origin master
+git -C $submodule fetch --depth 2000 origin master
 git -C $submodule branch -r --contains $hash | grep "origin/master"
 if [ "$?" -eq 0 ]; then
 echo "$submodule ok"

@@ -8,16 +8,19 @@ FW_VERSIONS = {
 (Ecu.eps, 0x730, None): [
 b'LX6C-14D003-AH\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
 b'LX6C-14D003-AK\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
+b'LX6C-14D003-AL\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
 ],
 (Ecu.abs, 0x760, None): [
 b'LX6C-2D053-RD\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
 b'LX6C-2D053-RE\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
+b'LX6C-2D053-RF\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
 ],
 (Ecu.fwdRadar, 0x764, None): [
 b'LB5T-14D049-AB\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
 ],
 (Ecu.fwdCamera, 0x706, None): [
 b'M1PT-14F397-AC\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
+b'M1PT-14F397-AD\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
 ],
 },
 CAR.ESCAPE_MK4: {

@@ -88,7 +88,7 @@ class FordCANFDPlatformConfig(FordPlatformConfig):
 class CAR(Platforms):
 BRONCO_SPORT_MK1 = FordPlatformConfig(
 "FORD BRONCO SPORT 1ST GEN",
-FordCarInfo("Ford Bronco Sport 2021-22"),
+FordCarInfo("Ford Bronco Sport 2021-23"),
 CarSpecs(mass=1625, wheelbase=2.67, steerRatio=17.7),
 )
 ESCAPE_MK4 = FordPlatformConfig(

@@ -39,6 +39,7 @@ FW_VERSIONS = {
 b'37805-6B2-A810\x00\x00',
 b'37805-6B2-A820\x00\x00',
 b'37805-6B2-A920\x00\x00',
+b'37805-6B2-A960\x00\x00',
 b'37805-6B2-AA10\x00\x00',
 b'37805-6B2-C520\x00\x00',
 b'37805-6B2-C540\x00\x00',

@@ -26,6 +26,7 @@ FW_VERSIONS = {
 b'\xd1,\xa0q\x07',
 ],
 (Ecu.transmission, 0x7e1, None): [
+b'\x00>\xf0\x00\x00',
 b'\x00\xfe\xf7\x00\x00',
 b'\x01\xfe\xf7\x00\x00',
 b'\x01\xfe\xf9\x00\x00',

@@ -228,6 +228,13 @@ SUBARU_VERSION_REQUEST = bytes([uds.SERVICE_TYPE.READ_DATA_BY_IDENTIFIER]) + \
 SUBARU_VERSION_RESPONSE = bytes([uds.SERVICE_TYPE.READ_DATA_BY_IDENTIFIER + 0x40]) + \
 p16(uds.DATA_IDENTIFIER_TYPE.APPLICATION_DATA_IDENTIFICATION)
+# The EyeSight ECU takes 10s to respond to SUBARU_VERSION_REQUEST properly,
+# log this alternate manufacturer-specific query
+SUBARU_ALT_VERSION_REQUEST = bytes([uds.SERVICE_TYPE.READ_DATA_BY_IDENTIFIER]) + \
+  p16(0xf100)
+SUBARU_ALT_VERSION_RESPONSE = bytes([uds.SERVICE_TYPE.READ_DATA_BY_IDENTIFIER + 0x40]) + \
+  p16(0xf100)
 FW_QUERY_CONFIG = FwQueryConfig(
 requests=[
 Request(
@@ -245,6 +252,20 @@ FW_QUERY_CONFIG = FwQueryConfig(
 whitelist_ecus=[Ecu.fwdCamera],
 bus=0,
 ),
+    Request(
+      [SUBARU_ALT_VERSION_REQUEST],
+      [SUBARU_ALT_VERSION_RESPONSE],
+      whitelist_ecus=[Ecu.fwdCamera],
+      bus=0,
+      logging=True,
+    ),
+    Request(
+      [StdQueries.DEFAULT_DIAGNOSTIC_REQUEST, StdQueries.TESTER_PRESENT_REQUEST, SUBARU_VERSION_REQUEST],
+      [StdQueries.DEFAULT_DIAGNOSTIC_RESPONSE, StdQueries.TESTER_PRESENT_RESPONSE, SUBARU_VERSION_RESPONSE],
+      whitelist_ecus=[Ecu.fwdCamera],
+      bus=0,
+      logging=True,
+    ),
 Request(
 [StdQueries.TESTER_PRESENT_REQUEST, SUBARU_VERSION_REQUEST],
 [StdQueries.TESTER_PRESENT_RESPONSE, SUBARU_VERSION_RESPONSE],
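The alternate query added above is a plain UDS ReadDataByIdentifier exchange on data identifier 0xf100. A minimal standalone sketch of how those byte strings are composed; the 0x22 service ID and the local p16 helper here are assumptions standing in for the constants openpilot takes from panda's uds module:

```python
import struct

# UDS ReadDataByIdentifier service ID; in the real code this comes from
# panda.python.uds.SERVICE_TYPE.READ_DATA_BY_IDENTIFIER (value assumed here).
READ_DATA_BY_IDENTIFIER = 0x22

def p16(val: int) -> bytes:
  # pack a 16-bit data identifier big-endian, mirroring the p16 helper in the diff
  return struct.pack("!H", val)

# request: service ID + identifier; positive response: (service ID + 0x40) + identifier
alt_version_request = bytes([READ_DATA_BY_IDENTIFIER]) + p16(0xf100)
alt_version_response = bytes([READ_DATA_BY_IDENTIFIER + 0x40]) + p16(0xf100)

assert alt_version_request == b"\x22\xf1\x00"
assert alt_version_response == b"\x62\xf1\x00"
```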

@@ -263,7 +263,7 @@ class TestFwFingerprintTiming(unittest.TestCase):
 print(f'get_vin {name} case, query time={self.total_time / self.N} seconds')
 def test_fw_query_timing(self):
-total_ref_time = {1: 8.4, 2: 9.3}
+total_ref_time = {1: 8.6, 2: 9.5}
 brand_ref_times = {
 1: {
 'gm': 1.0,
@@ -274,7 +274,7 @@ class TestFwFingerprintTiming(unittest.TestCase):
 'hyundai': 1.05,
 'mazda': 0.1,
 'nissan': 0.8,
-'subaru': 0.45,
+'subaru': 0.65,
 'tesla': 0.3,
 'toyota': 1.6,
 'volkswagen': 0.65,

@@ -141,9 +141,9 @@ class CarController(CarControllerBase):
 lead = hud_control.leadVisible or CS.out.vEgo < 12.  # at low speed we always assume the lead is present so ACC can be engaged
 # Press distance button until we are at the correct bar length. Only change while enabled to avoid skipping startup popup
-if self.frame % 6 == 0:
-if CS.pcm_follow_distance_values.get(CS.pcm_follow_distance, "UNKNOWN") != "FAR" and CS.out.cruiseState.enabled and \
-self.CP.carFingerprint not in UNSUPPORTED_DSU_CAR:
+if self.frame % 6 == 0 and self.CP.openpilotLongitudinalControl:
+desired_distance = 4 - hud_control.leadDistanceBars
+if CS.out.cruiseState.enabled and CS.pcm_follow_distance != desired_distance:
 self.distance_button = not self.distance_button
 else:
 self.distance_button = 0
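The new logic ties the PCM follow-distance setting to the leadDistanceBars value from hudControl instead of reading the "FAR" setting from CAN. A simplified sketch of the toggling behaviour as a plain function rather than the CarController class; the assumption that the PCM encodes the farthest setting as the lowest number follows from desired_distance = 4 - leadDistanceBars:

```python
def desired_pcm_distance(lead_distance_bars: int) -> int:
  # leadDistanceBars is 1..3; the PCM value is assumed to run the opposite way
  return 4 - lead_distance_bars

def next_distance_button(frame: int, long_control: bool, cruise_enabled: bool,
                         pcm_follow_distance: int, lead_distance_bars: int,
                         prev_button: int) -> int:
  # every 6th frame, "press" (toggle) the distance button until the PCM setting matches
  if frame % 6 == 0 and long_control:
    if cruise_enabled and pcm_follow_distance != desired_pcm_distance(lead_distance_bars):
      return int(not prev_button)
    return 0
  return prev_button
```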

@@ -44,7 +44,6 @@ class CarState(CarStateBase):
 self.distance_button = 0
 self.pcm_follow_distance = 0
-self.pcm_follow_distance_values = can_define.dv['PCM_CRUISE_2']['PCM_FOLLOW_DISTANCE']
 self.low_speed_lockout = False
 self.acc_type = 1

@@ -639,6 +639,7 @@ FW_VERSIONS = {
 (Ecu.dsu, 0x791, None): [
 b'881510E01100\x00\x00\x00\x00',
 b'881510E01200\x00\x00\x00\x00',
+b'881510E02200\x00\x00\x00\x00',
 ],
 (Ecu.fwdRadar, 0x750, 0xf): [
 b'8821F4702100\x00\x00\x00\x00',
@@ -686,6 +687,7 @@ FW_VERSIONS = {
 b'\x01896630EB1000\x00\x00\x00\x00',
 b'\x01896630EB1100\x00\x00\x00\x00',
 b'\x01896630EB1200\x00\x00\x00\x00',
+b'\x01896630EB1300\x00\x00\x00\x00',
 b'\x01896630EB2000\x00\x00\x00\x00',
 b'\x01896630EB2100\x00\x00\x00\x00',
 b'\x01896630EB2200\x00\x00\x00\x00',
@@ -1141,6 +1143,7 @@ FW_VERSIONS = {
 b'\x01F15264283300\x00\x00\x00\x00',
 b'\x01F152642F1000\x00\x00\x00\x00',
 b'\x01F152642F8000\x00\x00\x00\x00',
+b'\x01F152642F8100\x00\x00\x00\x00',
 ],
 (Ecu.eps, 0x7a1, None): [
 b'\x028965B0R11000\x00\x00\x00\x008965B0R12000\x00\x00\x00\x00',
@@ -1153,6 +1156,7 @@ FW_VERSIONS = {
 b'\x01896634AF0000\x00\x00\x00\x00',
 b'\x01896634AJ2000\x00\x00\x00\x00',
 b'\x01896634AL5000\x00\x00\x00\x00',
+b'\x01896634AL6000\x00\x00\x00\x00',
 ],
 (Ecu.fwdRadar, 0x750, 0xf): [
 b'\x018821F0R03100\x00\x00\x00\x00',

@@ -680,6 +680,7 @@ class Controls:
 hudControl.speedVisible = self.enabled
 hudControl.lanesVisible = self.enabled
 hudControl.leadVisible = self.sm['longitudinalPlan'].hasLead
+hudControl.leadDistanceBars = self.sm['longitudinalPlan'].personality.raw + 1
 hudControl.rightLaneVisible = True
 hudControl.leftLaneVisible = True

@@ -308,14 +308,12 @@ void Localizer::input_fake_gps_observations(double current_time) {
 }
 void Localizer::handle_gps(double current_time, const cereal::GpsLocationData::Reader& log, const double sensor_time_offset) {
-// ignore the message if the fix is invalid
-bool gps_invalid_flag = (log.getFlags() % 2 == 0);
 bool gps_unreasonable = (Vector2d(log.getHorizontalAccuracy(), log.getVerticalAccuracy()).norm() >= SANE_GPS_UNCERTAINTY);
 bool gps_accuracy_insane = ((log.getVerticalAccuracy() <= 0) || (log.getSpeedAccuracy() <= 0) || (log.getBearingAccuracyDeg() <= 0));
 bool gps_lat_lng_alt_insane = ((std::abs(log.getLatitude()) > 90) || (std::abs(log.getLongitude()) > 180) || (std::abs(log.getAltitude()) > ALTITUDE_SANITY_CHECK));
 bool gps_vel_insane = (floatlist2vector(log.getVNED()).norm() > TRANS_SANITY_CHECK);
-if (gps_invalid_flag || gps_unreasonable || gps_accuracy_insane || gps_lat_lng_alt_insane || gps_vel_insane) {
+if (!log.getHasFix() || gps_unreasonable || gps_accuracy_insane || gps_lat_lng_alt_insane || gps_vel_insane) {
 //this->gps_valid = false;
 this->determine_gps_mode(current_time);
 return;

@@ -38,6 +38,7 @@ class TestLocationdProc(unittest.TestCase):
 if name == "gpsLocationExternal":
 msg.gpsLocationExternal.flags = 1
+msg.gpsLocationExternal.hasFix = True
 msg.gpsLocationExternal.verticalAccuracy = 1.0
 msg.gpsLocationExternal.speedAccuracy = 1.0
 msg.gpsLocationExternal.bearingAccuracyDeg = 1.0

@@ -6,6 +6,7 @@ from collections import defaultdict
 from enum import Enum
 from openpilot.tools.lib.logreader import LogReader
+from openpilot.selfdrive.test.process_replay.migration import migrate_all
 from openpilot.selfdrive.test.process_replay.process_replay import replay_process_with_name
 TEST_ROUTE = "ff2bd20623fcaeaa|2023-09-05--10-14-54/4"
@@ -107,7 +108,7 @@ class TestLocationdScenarios(unittest.TestCase):
 @classmethod
 def setUpClass(cls):
-cls.logs = list(LogReader(TEST_ROUTE))
+cls.logs = migrate_all(LogReader(TEST_ROUTE))
 def test_base(self):
 """

@@ -113,3 +113,12 @@ def with_http_server(func, handler=http.server.BaseHTTPRequestHandler, setup=Non
 with http_server_context(handler, setup) as (host, port):
 return func(*args, f"http://{host}:{port}", **kwargs)
 return inner
+def DirectoryHttpServer(directory) -> type[http.server.SimpleHTTPRequestHandler]:
+  # creates an http server that serves files from directory
+  class Handler(http.server.SimpleHTTPRequestHandler):
+    def __init__(self, *args, **kwargs):
+      super().__init__(*args, directory=str(directory), **kwargs)
+
+  return Handler
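DirectoryHttpServer returns a request-handler class bound to a directory, so it can be passed anywhere a handler class is expected. A hypothetical usage with the standard-library server; the temporary directory and port 0 are just for illustration:

```python
import http.server
import tempfile

from openpilot.selfdrive.test.helpers import DirectoryHttpServer  # added in this diff

with tempfile.TemporaryDirectory() as d:
  handler = DirectoryHttpServer(d)
  # bind to an ephemeral port; call srv.serve_forever() to actually serve requests
  with http.server.ThreadingHTTPServer(("127.0.0.1", 0), handler) as srv:
    host, port = srv.server_address
    print(f"files in {d} would be served at http://{host}:{port}")
```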

@@ -7,9 +7,11 @@ from openpilot.selfdrive.manager.process_config import managed_processes
 from panda import Panda
+# TODO: message migration should happen in-place
 def migrate_all(lr, old_logtime=False, manager_states=False, panda_states=False, camera_states=False):
 msgs = migrate_sensorEvents(lr, old_logtime)
 msgs = migrate_carParams(msgs, old_logtime)
+msgs = migrate_gpsLocation(msgs)
 if manager_states:
 msgs = migrate_managerState(msgs)
 if panda_states:
@@ -35,6 +37,21 @@ def migrate_managerState(lr):
 return all_msgs
+def migrate_gpsLocation(lr):
+  all_msgs = []
+  for msg in lr:
+    if msg.which() in ('gpsLocation', 'gpsLocationExternal'):
+      new_msg = msg.as_builder()
+      g = getattr(new_msg, new_msg.which())
+      # hasFix is a newer field
+      if not g.hasFix and g.flags == 1:
+        g.hasFix = True
+      all_msgs.append(new_msg.as_reader())
+    else:
+      all_msgs.append(msg)
+  return all_msgs
 def migrate_pandaStates(lr):
 all_msgs = []
 # TODO: safety param migration should be handled automatically
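A standalone sketch of the hasFix backfill above, using a plain dataclass instead of a capnp message builder, just to show the rule old logs follow (flags == 1 meant a valid fix):

```python
from dataclasses import dataclass

@dataclass
class FakeGpsLocation:
  flags: int = 0
  hasFix: bool = False

def backfill_has_fix(g: FakeGpsLocation) -> FakeGpsLocation:
  # hasFix is the newer field; older logs only set flags on a valid fix
  if not g.hasFix and g.flags == 1:
    g.hasFix = True
  return g

assert backfill_has_fix(FakeGpsLocation(flags=1)).hasFix
assert not backfill_has_fix(FakeGpsLocation(flags=0)).hasFix
```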

@@ -1 +1 @@
-43efe1cf08cba8c86bc1ae8234b3d3d084a40e5d
+653f68e6be4689dc9dce1a93cb726d37b9c588d3

@ -0,0 +1,115 @@
import abc
import os
from pathlib import Path
import subprocess
from typing import List
from markdown_it import MarkdownIt
from openpilot.common.params import Params
from openpilot.common.swaglog import cloudlog
LOCK_FILE = os.getenv("UPDATER_LOCK_FILE", "/tmp/safe_staging_overlay.lock")
STAGING_ROOT = os.getenv("UPDATER_STAGING_ROOT", "/data/safe_staging")
FINALIZED = os.path.join(STAGING_ROOT, "finalized")
def run(cmd: list[str], cwd: str = None) -> str:
return subprocess.check_output(cmd, cwd=cwd, stderr=subprocess.STDOUT, encoding='utf8')
class UpdateStrategy(abc.ABC):
def __init__(self):
self.params = Params()
@abc.abstractmethod
def init(self) -> None:
pass
@abc.abstractmethod
def cleanup(self) -> None:
pass
@abc.abstractmethod
def get_available_channels(self) -> List[str]:
"""List of available channels to install, (branches, releases, etc)"""
@abc.abstractmethod
def current_channel(self) -> str:
"""Current channel installed"""
@abc.abstractmethod
def fetched_path(self) -> str:
"""Path to the fetched update"""
@property
def target_channel(self) -> str:
"""Target Channel"""
b: str | None = self.params.get("UpdaterTargetBranch", encoding='utf-8')
if b is None:
b = self.current_channel()
return b
@abc.abstractmethod
def update_ready(self) -> bool:
"""Check if an update is ready to be installed"""
@abc.abstractmethod
def update_available(self) -> bool:
"""Check if an update is available for the current channel"""
@abc.abstractmethod
def describe_current_channel(self) -> tuple[str, str]:
"""Describe the current channel installed, (description, release_notes)"""
@abc.abstractmethod
def describe_ready_channel(self) -> tuple[str, str]:
"""Describe the channel that is ready to be installed, (description, release_notes)"""
@abc.abstractmethod
def fetch_update(self) -> None:
pass
@abc.abstractmethod
def finalize_update(self) -> None:
pass
def set_consistent_flag(consistent: bool) -> None:
os.sync()
consistent_file = Path(os.path.join(FINALIZED, ".overlay_consistent"))
if consistent:
consistent_file.touch()
elif not consistent:
consistent_file.unlink(missing_ok=True)
os.sync()
def get_consistent_flag() -> bool:
consistent_file = Path(os.path.join(FINALIZED, ".overlay_consistent"))
return consistent_file.is_file()
def parse_release_notes(releases_md: str) -> str:
try:
r = releases_md.split('\n\n', 1)[0] # Slice latest release notes
try:
return str(MarkdownIt().render(r))
except Exception:
return r + "\n"
except FileNotFoundError:
pass
except Exception:
cloudlog.exception("failed to parse release notes")
return ""
def get_version(path) -> str:
with open(os.path.join(path, "common", "version.h")) as f:
return f.read().split('"')[1]
def get_release_notes(path) -> str:
with open(os.path.join(path, "RELEASES.md"), "r") as f:
return parse_release_notes(f.read())
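UpdateStrategy is the new abstract interface that updated.py drives; git.py below provides the concrete GitUpdateStrategy. An illustrative no-op subclass, only to show which methods a strategy has to implement; every return value here is a made-up placeholder:

```python
from openpilot.selfdrive.updated.common import UpdateStrategy

class NoopUpdateStrategy(UpdateStrategy):
  # reports a single channel and never any update; values are placeholders
  def init(self) -> None: pass
  def cleanup(self) -> None: pass
  def get_available_channels(self) -> list[str]: return [self.current_channel()]
  def current_channel(self) -> str: return "release3"
  def fetched_path(self) -> str: return "/data/safe_staging/finalized"
  def update_ready(self) -> bool: return False
  def update_available(self) -> bool: return False
  def describe_current_channel(self) -> tuple[str, str]: return ("0.9.x / release3", "")
  def describe_ready_channel(self) -> tuple[str, str]: return ("", "")
  def fetch_update(self) -> None: pass
  def finalize_update(self) -> None: pass
```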

@ -0,0 +1,236 @@
import datetime
import os
import re
import shutil
import subprocess
import time
from collections import defaultdict
from pathlib import Path
from typing import List
from openpilot.common.basedir import BASEDIR
from openpilot.common.params import Params
from openpilot.common.swaglog import cloudlog
from openpilot.selfdrive.updated.common import FINALIZED, STAGING_ROOT, UpdateStrategy, \
get_consistent_flag, get_release_notes, get_version, set_consistent_flag, run
OVERLAY_UPPER = os.path.join(STAGING_ROOT, "upper")
OVERLAY_METADATA = os.path.join(STAGING_ROOT, "metadata")
OVERLAY_MERGED = os.path.join(STAGING_ROOT, "merged")
OVERLAY_INIT = Path(os.path.join(BASEDIR, ".overlay_init"))
def setup_git_options(cwd: str) -> None:
# We sync FS object atimes (which NEOS doesn't use) and mtimes, but ctimes
# are outside user control. Make sure Git is set up to ignore system ctimes,
# because they change when we make hard links during finalize. Otherwise,
# there is a lot of unnecessary churn. This appears to be a common need on
# OSX as well: https://www.git-tower.com/blog/make-git-rebase-safe-on-osx/
# We are using copytree to copy the directory, which also changes
# inode numbers. Ignore those changes too.
# Set protocol to the new version (default after git 2.26) to reduce data
# usage on git fetch --dry-run from about 400KB to 18KB.
git_cfg = [
("core.trustctime", "false"),
("core.checkStat", "minimal"),
("protocol.version", "2"),
("gc.auto", "0"),
("gc.autoDetach", "false"),
]
for option, value in git_cfg:
run(["git", "config", option, value], cwd)
def dismount_overlay() -> None:
if os.path.ismount(OVERLAY_MERGED):
cloudlog.info("unmounting existing overlay")
run(["sudo", "umount", "-l", OVERLAY_MERGED])
def init_overlay() -> None:
# Re-create the overlay if BASEDIR/.git has changed since we created the overlay
if OVERLAY_INIT.is_file() and os.path.ismount(OVERLAY_MERGED):
git_dir_path = os.path.join(BASEDIR, ".git")
new_files = run(["find", git_dir_path, "-newer", str(OVERLAY_INIT)])
if not len(new_files.splitlines()):
# A valid overlay already exists
return
else:
cloudlog.info(".git directory changed, recreating overlay")
cloudlog.info("preparing new safe staging area")
params = Params()
params.put_bool("UpdateAvailable", False)
set_consistent_flag(False)
dismount_overlay()
run(["sudo", "rm", "-rf", STAGING_ROOT])
if os.path.isdir(STAGING_ROOT):
shutil.rmtree(STAGING_ROOT)
for dirname in [STAGING_ROOT, OVERLAY_UPPER, OVERLAY_METADATA, OVERLAY_MERGED]:
os.mkdir(dirname, 0o755)
if os.lstat(BASEDIR).st_dev != os.lstat(OVERLAY_MERGED).st_dev:
raise RuntimeError("base and overlay merge directories are on different filesystems; not valid for overlay FS!")
# Leave a timestamped canary in BASEDIR to check at startup. The device clock
# should be correct by the time we get here. If the init file disappears, or
# critical mtimes in BASEDIR are newer than .overlay_init, continue.sh can
# assume that BASEDIR has used for local development or otherwise modified,
# and skips the update activation attempt.
consistent_file = Path(os.path.join(BASEDIR, ".overlay_consistent"))
if consistent_file.is_file():
consistent_file.unlink()
OVERLAY_INIT.touch()
os.sync()
overlay_opts = f"lowerdir={BASEDIR},upperdir={OVERLAY_UPPER},workdir={OVERLAY_METADATA}"
mount_cmd = ["mount", "-t", "overlay", "-o", overlay_opts, "none", OVERLAY_MERGED]
run(["sudo"] + mount_cmd)
run(["sudo", "chmod", "755", os.path.join(OVERLAY_METADATA, "work")])
git_diff = run(["git", "diff"], OVERLAY_MERGED)
params.put("GitDiff", git_diff)
cloudlog.info(f"git diff output:\n{git_diff}")
class GitUpdateStrategy(UpdateStrategy):
def init(self) -> None:
init_overlay()
def cleanup(self) -> None:
OVERLAY_INIT.unlink(missing_ok=True)
def sync_branches(self):
excluded_branches = ('release2', 'release2-staging')
output = run(["git", "ls-remote", "--heads"], OVERLAY_MERGED)
self.branches = defaultdict(lambda: None)
for line in output.split('\n'):
ls_remotes_re = r'(?P<commit_sha>\b[0-9a-f]{5,40}\b)(\s+)(refs\/heads\/)(?P<branch_name>.*$)'
x = re.fullmatch(ls_remotes_re, line.strip())
if x is not None and x.group('branch_name') not in excluded_branches:
self.branches[x.group('branch_name')] = x.group('commit_sha')
return self.branches
def get_available_channels(self) -> List[str]:
self.sync_branches()
return list(self.branches.keys())
def update_ready(self) -> bool:
if get_consistent_flag():
hash_mismatch = self.get_commit_hash(BASEDIR) != self.branches[self.target_channel]
branch_mismatch = self.get_branch(BASEDIR) != self.target_channel
on_target_channel = self.get_branch(FINALIZED) == self.target_channel
return ((hash_mismatch or branch_mismatch) and on_target_channel)
return False
def update_available(self) -> bool:
if os.path.isdir(OVERLAY_MERGED) and len(self.get_available_channels()) > 0:
hash_mismatch = self.get_commit_hash(OVERLAY_MERGED) != self.branches[self.target_channel]
branch_mismatch = self.get_branch(OVERLAY_MERGED) != self.target_channel
return hash_mismatch or branch_mismatch
return False
def get_branch(self, path: str) -> str:
return run(["git", "rev-parse", "--abbrev-ref", "HEAD"], path).rstrip()
def get_commit_hash(self, path) -> str:
return run(["git", "rev-parse", "HEAD"], path).rstrip()
def get_current_channel(self) -> str:
return self.get_branch(BASEDIR)
def current_channel(self) -> str:
return self.get_branch(BASEDIR)
def describe_branch(self, basedir) -> str:
if not os.path.exists(basedir):
return ""
version = ""
branch = ""
commit = ""
commit_date = ""
try:
branch = self.get_branch(basedir)
commit = self.get_commit_hash(basedir)[:7]
version = get_version(basedir)
commit_unix_ts = run(["git", "show", "-s", "--format=%ct", "HEAD"], basedir).rstrip()
dt = datetime.datetime.fromtimestamp(int(commit_unix_ts))
commit_date = dt.strftime("%b %d")
except Exception:
cloudlog.exception("updater.get_description")
return f"{version} / {branch} / {commit} / {commit_date}"
def describe_current_channel(self) -> tuple[str, str]:
return self.describe_branch(BASEDIR), get_release_notes(BASEDIR)
def describe_ready_channel(self) -> tuple[str, str]:
if self.update_ready():
return self.describe_branch(FINALIZED), get_release_notes(FINALIZED)
return "", ""
def fetch_update(self):
cloudlog.info("attempting git fetch inside staging overlay")
setup_git_options(OVERLAY_MERGED)
branch = self.target_channel
git_fetch_output = run(["git", "fetch", "origin", branch], OVERLAY_MERGED)
cloudlog.info("git fetch success: %s", git_fetch_output)
cloudlog.info("git reset in progress")
cmds = [
["git", "checkout", "--force", "--no-recurse-submodules", "-B", branch, "FETCH_HEAD"],
["git", "reset", "--hard"],
["git", "clean", "-xdff"],
["git", "submodule", "sync"],
["git", "submodule", "update", "--init", "--recursive"],
["git", "submodule", "foreach", "--recursive", "git", "reset", "--hard"],
]
r = [run(cmd, OVERLAY_MERGED) for cmd in cmds]
cloudlog.info("git reset success: %s", '\n'.join(r))
def fetched_path(self):
return str(OVERLAY_MERGED)
def finalize_update(self) -> None:
"""Take the current OverlayFS merged view and finalize a copy outside of
OverlayFS, ready to be swapped-in at BASEDIR. Copy using shutil.copytree"""
# Remove the update ready flag and any old updates
cloudlog.info("creating finalized version of the overlay")
set_consistent_flag(False)
# Copy the merged overlay view and set the update ready flag
if os.path.exists(FINALIZED):
shutil.rmtree(FINALIZED)
shutil.copytree(OVERLAY_MERGED, FINALIZED, symlinks=True)
run(["git", "reset", "--hard"], FINALIZED)
run(["git", "submodule", "foreach", "--recursive", "git", "reset", "--hard"], FINALIZED)
cloudlog.info("Starting git cleanup in finalized update")
t = time.monotonic()
try:
run(["git", "gc"], FINALIZED)
run(["git", "lfs", "prune"], FINALIZED)
cloudlog.event("Done git cleanup", duration=time.monotonic() - t)
except subprocess.CalledProcessError:
cloudlog.exception(f"Failed git cleanup, took {time.monotonic() - t:.3f} s")
set_consistent_flag(True)
cloudlog.info("done finalizing overlay")

@ -1,4 +1,3 @@
#!/usr/bin/env python3
import os import os
import pathlib import pathlib
import shutil import shutil
@ -33,12 +32,14 @@ def update_release(directory, name, version, agnos_version, release_notes):
with open(directory / "launch_env.sh", "w") as f: with open(directory / "launch_env.sh", "w") as f:
f.write(f'export AGNOS_VERSION="{agnos_version}"') f.write(f'export AGNOS_VERSION="{agnos_version}"')
run(["git", "add", "."], cwd=directory)
run(["git", "commit", "-m", f"openpilot release {version}"], cwd=directory)
@pytest.mark.slow # TODO: can we test overlayfs in GHA? @pytest.mark.slow # TODO: can we test overlayfs in GHA?
class TestUpdateD(unittest.TestCase): class BaseUpdateTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
if "Base" in cls.__name__:
raise unittest.SkipTest
def setUp(self): def setUp(self):
self.tmpdir = tempfile.mkdtemp() self.tmpdir = tempfile.mkdtemp()
@ -73,21 +74,24 @@ class TestUpdateD(unittest.TestCase):
def setup_basedir_release(self, release): def setup_basedir_release(self, release):
self.params = Params() self.params = Params()
self.set_target_branch(release) self.set_target_branch(release)
run(["git", "clone", "-b", release, self.remote_dir, self.basedir])
def update_remote_release(self, release): def update_remote_release(self, release):
update_release(self.remote_dir, release, *self.MOCK_RELEASES[release]) raise NotImplementedError("")
def setup_remote_release(self, release): def setup_remote_release(self, release):
run(["git", "init"], cwd=self.remote_dir) raise NotImplementedError("")
run(["git", "checkout", "-b", release], cwd=self.remote_dir)
self.update_remote_release(release) def additional_context(self):
raise NotImplementedError("")
def tearDown(self): def tearDown(self):
mock.patch.stopall() mock.patch.stopall()
run(["sudo", "umount", "-l", str(self.staging_root / "merged")]) try:
run(["sudo", "umount", "-l", self.tmpdir]) run(["sudo", "umount", "-l", str(self.staging_root / "merged")])
shutil.rmtree(self.tmpdir) run(["sudo", "umount", "-l", self.tmpdir])
shutil.rmtree(self.tmpdir)
except Exception:
print("cleanup failed...")
def send_check_for_updates_signal(self, updated: ManagerProcess): def send_check_for_updates_signal(self, updated: ManagerProcess):
updated.signal(signal.SIGUSR1.value) updated.signal(signal.SIGUSR1.value)
@ -100,33 +104,43 @@ class TestUpdateD(unittest.TestCase):
self.assertEqual(self.params.get_bool("UpdaterFetchAvailable"), fetch_available) self.assertEqual(self.params.get_bool("UpdaterFetchAvailable"), fetch_available)
self.assertEqual(self.params.get_bool("UpdateAvailable"), update_available) self.assertEqual(self.params.get_bool("UpdateAvailable"), update_available)
def _test_update_params(self, branch, version, agnos_version, release_notes): def _test_finalized_update(self, branch, version, agnos_version, release_notes):
from openpilot.selfdrive.updated.common import get_version, get_consistent_flag # this needs to be inline because common uses environment variables
self.assertTrue(self.params.get("UpdaterNewDescription", encoding="utf-8").startswith(f"{version} / {branch}")) self.assertTrue(self.params.get("UpdaterNewDescription", encoding="utf-8").startswith(f"{version} / {branch}"))
self.assertEqual(self.params.get("UpdaterNewReleaseNotes", encoding="utf-8"), f"<p>{release_notes}</p>\n") self.assertEqual(self.params.get("UpdaterNewReleaseNotes", encoding="utf-8"), f"<p>{release_notes}</p>\n")
self.assertEqual(get_version(str(self.staging_root / "finalized")), version)
self.assertEqual(get_consistent_flag(), True)
def wait_for_idle(self, timeout=5, min_wait_time=2): def wait_for_condition(self, condition, timeout=12):
start = time.monotonic() start = time.monotonic()
time.sleep(min_wait_time)
while True: while True:
waited = time.monotonic() - start waited = time.monotonic() - start
if self.params.get("UpdaterState", encoding="utf-8") == "idle": if condition():
print(f"waited {waited}s for idle") print(f"waited {waited}s for condition ")
break return waited
if waited > timeout: if waited > timeout:
raise TimeoutError("timed out waiting for idle") raise TimeoutError("timed out waiting for condition")
time.sleep(1) time.sleep(1)
def wait_for_idle(self):
self.wait_for_condition(lambda: self.params.get("UpdaterState", encoding="utf-8") == "idle")
def wait_for_fetch_available(self):
self.wait_for_condition(lambda: self.params.get_bool("UpdaterFetchAvailable"))
def wait_for_update_available(self):
self.wait_for_condition(lambda: self.params.get_bool("UpdateAvailable"))
def test_no_update(self): def test_no_update(self):
# Start on release3, ensure we don't fetch any updates # Start on release3, ensure we don't fetch any updates
self.setup_remote_release("release3") self.setup_remote_release("release3")
self.setup_basedir_release("release3") self.setup_basedir_release("release3")
with processes_context(["updated"]) as [updated]: with self.additional_context(), processes_context(["updated"]) as [updated]:
self._test_params("release3", False, False) self._test_params("release3", False, False)
time.sleep(1) self.wait_for_idle()
self._test_params("release3", False, False) self._test_params("release3", False, False)
self.send_check_for_updates_signal(updated) self.send_check_for_updates_signal(updated)
@ -140,9 +154,9 @@ class TestUpdateD(unittest.TestCase):
self.setup_remote_release("release3") self.setup_remote_release("release3")
self.setup_basedir_release("release3") self.setup_basedir_release("release3")
with processes_context(["updated"]) as [updated]: with self.additional_context(), processes_context(["updated"]) as [updated]:
self._test_params("release3", False, False) self._test_params("release3", False, False)
time.sleep(1) self.wait_for_idle()
self._test_params("release3", False, False) self._test_params("release3", False, False)
self.MOCK_RELEASES["release3"] = ("0.1.3", "1.2", "0.1.3 release notes") self.MOCK_RELEASES["release3"] = ("0.1.3", "1.2", "0.1.3 release notes")
@ -150,16 +164,16 @@ class TestUpdateD(unittest.TestCase):
self.send_check_for_updates_signal(updated) self.send_check_for_updates_signal(updated)
self.wait_for_idle() self.wait_for_fetch_available()
self._test_params("release3", True, False) self._test_params("release3", True, False)
self.send_download_signal(updated) self.send_download_signal(updated)
self.wait_for_idle() self.wait_for_update_available()
self._test_params("release3", False, True) self._test_params("release3", False, True)
self._test_update_params("release3", *self.MOCK_RELEASES["release3"]) self._test_finalized_update("release3", *self.MOCK_RELEASES["release3"])
def test_switch_branches(self): def test_switch_branches(self):
# Start on release3, request to switch to master manually, ensure we switched # Start on release3, request to switch to master manually, ensure we switched
@ -167,7 +181,7 @@ class TestUpdateD(unittest.TestCase):
self.setup_remote_release("master") self.setup_remote_release("master")
self.setup_basedir_release("release3") self.setup_basedir_release("release3")
with processes_context(["updated"]) as [updated]: with self.additional_context(), processes_context(["updated"]) as [updated]:
self._test_params("release3", False, False) self._test_params("release3", False, False)
self.wait_for_idle() self.wait_for_idle()
self._test_params("release3", False, False) self._test_params("release3", False, False)
@ -175,30 +189,31 @@ class TestUpdateD(unittest.TestCase):
self.set_target_branch("master") self.set_target_branch("master")
self.send_check_for_updates_signal(updated) self.send_check_for_updates_signal(updated)
self.wait_for_idle() self.wait_for_fetch_available()
self._test_params("master", True, False) self._test_params("master", True, False)
self.send_download_signal(updated) self.send_download_signal(updated)
self.wait_for_idle() self.wait_for_update_available()
self._test_params("master", False, True) self._test_params("master", False, True)
self._test_update_params("master", *self.MOCK_RELEASES["master"]) self._test_finalized_update("master", *self.MOCK_RELEASES["master"])
def test_agnos_update(self): def test_agnos_update(self):
# Start on release3, push an update with an agnos change # Start on release3, push an update with an agnos change
self.setup_remote_release("release3") self.setup_remote_release("release3")
self.setup_basedir_release("release3") self.setup_basedir_release("release3")
with mock.patch("openpilot.system.hardware.AGNOS", "True"), \ with self.additional_context(), \
mock.patch("openpilot.system.hardware.tici.hardware.Tici.get_os_version", "1.2"), \ mock.patch("openpilot.system.hardware.AGNOS", "True"), \
mock.patch("openpilot.system.hardware.tici.agnos.get_target_slot_number"), \ mock.patch("openpilot.system.hardware.tici.hardware.Tici.get_os_version", "1.2"), \
mock.patch("openpilot.system.hardware.tici.agnos.flash_agnos_update"), \ mock.patch("openpilot.system.hardware.tici.agnos.get_target_slot_number"), \
mock.patch("openpilot.system.hardware.tici.agnos.flash_agnos_update"), \
processes_context(["updated"]) as [updated]: processes_context(["updated"]) as [updated]:
self._test_params("release3", False, False) self._test_params("release3", False, False)
time.sleep(1) self.wait_for_idle()
self._test_params("release3", False, False) self._test_params("release3", False, False)
self.MOCK_RELEASES["release3"] = ("0.1.3", "1.3", "0.1.3 release notes") self.MOCK_RELEASES["release3"] = ("0.1.3", "1.3", "0.1.3 release notes")
@ -206,17 +221,13 @@ class TestUpdateD(unittest.TestCase):
self.send_check_for_updates_signal(updated) self.send_check_for_updates_signal(updated)
self.wait_for_idle() self.wait_for_fetch_available()
self._test_params("release3", True, False) self._test_params("release3", True, False)
self.send_download_signal(updated) self.send_download_signal(updated)
self.wait_for_idle() self.wait_for_update_available()
self._test_params("release3", False, True) self._test_params("release3", False, True)
self._test_update_params("release3", *self.MOCK_RELEASES["release3"]) self._test_finalized_update("release3", *self.MOCK_RELEASES["release3"])
if __name__ == "__main__":
unittest.main()
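The test refactor above turns TestUpdateD into BaseUpdateTest, with strategy-specific subclasses (see test_git.py below) filling in the remote setup. The skip-in-setUpClass trick it relies on is a common unittest pattern; a minimal sketch with a hypothetical concrete subclass:

```python
import unittest

class BaseUpdateTest(unittest.TestCase):
  @classmethod
  def setUpClass(cls):
    # abstract base: only concrete subclasses without "Base" in the name run
    if "Base" in cls.__name__:
      raise unittest.SkipTest("base class")

class TestConcreteStrategy(BaseUpdateTest):
  def test_something(self):
    self.assertTrue(True)
```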

@ -0,0 +1,22 @@
import contextlib
from openpilot.selfdrive.updated.tests.test_base import BaseUpdateTest, run, update_release
class TestUpdateDGitStrategy(BaseUpdateTest):
def update_remote_release(self, release):
update_release(self.remote_dir, release, *self.MOCK_RELEASES[release])
run(["git", "add", "."], cwd=self.remote_dir)
run(["git", "commit", "-m", f"openpilot release {release}"], cwd=self.remote_dir)
def setup_remote_release(self, release):
run(["git", "init"], cwd=self.remote_dir)
run(["git", "checkout", "-b", release], cwd=self.remote_dir)
self.update_remote_release(release)
def setup_basedir_release(self, release):
super().setup_basedir_release(release)
run(["git", "clone", "-b", release, self.remote_dir, self.basedir])
@contextlib.contextmanager
def additional_context(self):
yield

@ -1,35 +1,21 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
import os import os
import re from pathlib import Path
import datetime import datetime
import subprocess import subprocess
import psutil import psutil
import shutil
import signal import signal
import fcntl import fcntl
import time
import threading import threading
from collections import defaultdict
from pathlib import Path
from markdown_it import MarkdownIt
from openpilot.common.basedir import BASEDIR
from openpilot.common.params import Params from openpilot.common.params import Params
from openpilot.common.time import system_time_valid from openpilot.common.time import system_time_valid
from openpilot.selfdrive.updated.common import LOCK_FILE, STAGING_ROOT, UpdateStrategy, run, set_consistent_flag
from openpilot.system.hardware import AGNOS, HARDWARE from openpilot.system.hardware import AGNOS, HARDWARE
from openpilot.common.swaglog import cloudlog from openpilot.common.swaglog import cloudlog
from openpilot.selfdrive.controls.lib.alertmanager import set_offroad_alert from openpilot.selfdrive.controls.lib.alertmanager import set_offroad_alert
from openpilot.system.version import is_tested_branch from openpilot.system.version import is_tested_branch
from openpilot.selfdrive.updated.git import GitUpdateStrategy
LOCK_FILE = os.getenv("UPDATER_LOCK_FILE", "/tmp/safe_staging_overlay.lock")
STAGING_ROOT = os.getenv("UPDATER_STAGING_ROOT", "/data/safe_staging")
OVERLAY_UPPER = os.path.join(STAGING_ROOT, "upper")
OVERLAY_METADATA = os.path.join(STAGING_ROOT, "metadata")
OVERLAY_MERGED = os.path.join(STAGING_ROOT, "merged")
FINALIZED = os.path.join(STAGING_ROOT, "finalized")
OVERLAY_INIT = Path(os.path.join(BASEDIR, ".overlay_init"))
DAYS_NO_CONNECTIVITY_MAX = 14 # do not allow to engage after this many days DAYS_NO_CONNECTIVITY_MAX = 14 # do not allow to engage after this many days
DAYS_NO_CONNECTIVITY_PROMPT = 10 # send an offroad prompt after this many days DAYS_NO_CONNECTIVITY_PROMPT = 10 # send an offroad prompt after this many days
@ -71,147 +57,13 @@ def read_time_from_param(params, param) -> datetime.datetime | None:
pass pass
return None return None
def run(cmd: list[str], cwd: str = None) -> str:
return subprocess.check_output(cmd, cwd=cwd, stderr=subprocess.STDOUT, encoding='utf8')
def set_consistent_flag(consistent: bool) -> None:
os.sync()
consistent_file = Path(os.path.join(FINALIZED, ".overlay_consistent"))
if consistent:
consistent_file.touch()
elif not consistent:
consistent_file.unlink(missing_ok=True)
os.sync()
def parse_release_notes(basedir: str) -> bytes:
try:
with open(os.path.join(basedir, "RELEASES.md"), "rb") as f:
r = f.read().split(b'\n\n', 1)[0] # Slice latest release notes
try:
return bytes(MarkdownIt().render(r.decode("utf-8")), encoding="utf-8")
except Exception:
return r + b"\n"
except FileNotFoundError:
pass
except Exception:
cloudlog.exception("failed to parse release notes")
return b""
def setup_git_options(cwd: str) -> None:
# We sync FS object atimes (which NEOS doesn't use) and mtimes, but ctimes
# are outside user control. Make sure Git is set up to ignore system ctimes,
# because they change when we make hard links during finalize. Otherwise,
# there is a lot of unnecessary churn. This appears to be a common need on
# OSX as well: https://www.git-tower.com/blog/make-git-rebase-safe-on-osx/
# We are using copytree to copy the directory, which also changes
# inode numbers. Ignore those changes too.
# Set protocol to the new version (default after git 2.26) to reduce data
# usage on git fetch --dry-run from about 400KB to 18KB.
git_cfg = [
("core.trustctime", "false"),
("core.checkStat", "minimal"),
("protocol.version", "2"),
("gc.auto", "0"),
("gc.autoDetach", "false"),
]
for option, value in git_cfg:
run(["git", "config", option, value], cwd)
def dismount_overlay() -> None:
if os.path.ismount(OVERLAY_MERGED):
cloudlog.info("unmounting existing overlay")
run(["sudo", "umount", "-l", OVERLAY_MERGED])
def init_overlay() -> None:
# Re-create the overlay if BASEDIR/.git has changed since we created the overlay
if OVERLAY_INIT.is_file() and os.path.ismount(OVERLAY_MERGED):
git_dir_path = os.path.join(BASEDIR, ".git")
new_files = run(["find", git_dir_path, "-newer", str(OVERLAY_INIT)])
if not len(new_files.splitlines()):
# A valid overlay already exists
return
else:
cloudlog.info(".git directory changed, recreating overlay")
cloudlog.info("preparing new safe staging area")
params = Params()
params.put_bool("UpdateAvailable", False)
set_consistent_flag(False)
dismount_overlay()
run(["sudo", "rm", "-rf", STAGING_ROOT])
if os.path.isdir(STAGING_ROOT):
shutil.rmtree(STAGING_ROOT)
for dirname in [STAGING_ROOT, OVERLAY_UPPER, OVERLAY_METADATA, OVERLAY_MERGED]:
os.mkdir(dirname, 0o755)
if os.lstat(BASEDIR).st_dev != os.lstat(OVERLAY_MERGED).st_dev:
raise RuntimeError("base and overlay merge directories are on different filesystems; not valid for overlay FS!")
# Leave a timestamped canary in BASEDIR to check at startup. The device clock
# should be correct by the time we get here. If the init file disappears, or
# critical mtimes in BASEDIR are newer than .overlay_init, continue.sh can
# assume that BASEDIR has used for local development or otherwise modified,
# and skips the update activation attempt.
consistent_file = Path(os.path.join(BASEDIR, ".overlay_consistent"))
if consistent_file.is_file():
consistent_file.unlink()
OVERLAY_INIT.touch()
os.sync()
overlay_opts = f"lowerdir={BASEDIR},upperdir={OVERLAY_UPPER},workdir={OVERLAY_METADATA}"
mount_cmd = ["mount", "-t", "overlay", "-o", overlay_opts, "none", OVERLAY_MERGED]
run(["sudo"] + mount_cmd)
run(["sudo", "chmod", "755", os.path.join(OVERLAY_METADATA, "work")])
git_diff = run(["git", "diff"], OVERLAY_MERGED)
params.put("GitDiff", git_diff)
cloudlog.info(f"git diff output:\n{git_diff}")
def finalize_update() -> None:
"""Take the current OverlayFS merged view and finalize a copy outside of
OverlayFS, ready to be swapped-in at BASEDIR. Copy using shutil.copytree"""
# Remove the update ready flag and any old updates
cloudlog.info("creating finalized version of the overlay")
set_consistent_flag(False)
# Copy the merged overlay view and set the update ready flag
if os.path.exists(FINALIZED):
shutil.rmtree(FINALIZED)
shutil.copytree(OVERLAY_MERGED, FINALIZED, symlinks=True)
run(["git", "reset", "--hard"], FINALIZED)
run(["git", "submodule", "foreach", "--recursive", "git", "reset", "--hard"], FINALIZED)
cloudlog.info("Starting git cleanup in finalized update")
t = time.monotonic()
try:
run(["git", "gc"], FINALIZED)
run(["git", "lfs", "prune"], FINALIZED)
cloudlog.event("Done git cleanup", duration=time.monotonic() - t)
except subprocess.CalledProcessError:
cloudlog.exception(f"Failed git cleanup, took {time.monotonic() - t:.3f} s")
set_consistent_flag(True)
cloudlog.info("done finalizing overlay")
def handle_agnos_update() -> None: def handle_agnos_update(fetched_path) -> None:
from openpilot.system.hardware.tici.agnos import flash_agnos_update, get_target_slot_number from openpilot.system.hardware.tici.agnos import flash_agnos_update, get_target_slot_number
cur_version = HARDWARE.get_os_version() cur_version = HARDWARE.get_os_version()
updated_version = run(["bash", "-c", r"unset AGNOS_VERSION && source launch_env.sh && \ updated_version = run(["bash", "-c", r"unset AGNOS_VERSION && source launch_env.sh && \
echo -n $AGNOS_VERSION"], OVERLAY_MERGED).strip() echo -n $AGNOS_VERSION"], fetched_path).strip()
cloudlog.info(f"AGNOS version check: {cur_version} vs {updated_version}") cloudlog.info(f"AGNOS version check: {cur_version} vs {updated_version}")
if cur_version == updated_version: if cur_version == updated_version:
@ -223,61 +75,44 @@ def handle_agnos_update() -> None:
cloudlog.info(f"Beginning background installation for AGNOS {updated_version}") cloudlog.info(f"Beginning background installation for AGNOS {updated_version}")
set_offroad_alert("Offroad_NeosUpdate", True) set_offroad_alert("Offroad_NeosUpdate", True)
manifest_path = os.path.join(OVERLAY_MERGED, "system/hardware/tici/agnos.json") manifest_path = os.path.join(fetched_path, "system/hardware/tici/agnos.json")
target_slot_number = get_target_slot_number() target_slot_number = get_target_slot_number()
flash_agnos_update(manifest_path, target_slot_number, cloudlog) flash_agnos_update(manifest_path, target_slot_number, cloudlog)
set_offroad_alert("Offroad_NeosUpdate", False) set_offroad_alert("Offroad_NeosUpdate", False)
STRATEGY = {
"git": GitUpdateStrategy,
}
class Updater: class Updater:
def __init__(self): def __init__(self):
self.params = Params() self.params = Params()
self.branches = defaultdict(str)
self._has_internet: bool = False self._has_internet: bool = False
self.strategy: UpdateStrategy = STRATEGY[os.environ.get("UPDATER_STRATEGY", "git")]()
@property @property
def has_internet(self) -> bool: def has_internet(self) -> bool:
return self._has_internet return self._has_internet
@property def init(self):
def target_branch(self) -> str: self.strategy.init()
b: str | None = self.params.get("UpdaterTargetBranch", encoding='utf-8')
if b is None:
b = self.get_branch(BASEDIR)
return b
@property
def update_ready(self) -> bool:
consistent_file = Path(os.path.join(FINALIZED, ".overlay_consistent"))
if consistent_file.is_file():
hash_mismatch = self.get_commit_hash(BASEDIR) != self.branches[self.target_branch]
branch_mismatch = self.get_branch(BASEDIR) != self.target_branch
on_target_branch = self.get_branch(FINALIZED) == self.target_branch
return ((hash_mismatch or branch_mismatch) and on_target_branch)
return False
@property def cleanup(self):
def update_available(self) -> bool: self.strategy.cleanup()
if os.path.isdir(OVERLAY_MERGED) and len(self.branches) > 0:
hash_mismatch = self.get_commit_hash(OVERLAY_MERGED) != self.branches[self.target_branch]
branch_mismatch = self.get_branch(OVERLAY_MERGED) != self.target_branch
return hash_mismatch or branch_mismatch
return False
def get_branch(self, path: str) -> str:
return run(["git", "rev-parse", "--abbrev-ref", "HEAD"], path).rstrip()
def get_commit_hash(self, path: str = OVERLAY_MERGED) -> str:
return run(["git", "rev-parse", "HEAD"], path).rstrip()
def set_params(self, update_success: bool, failed_count: int, exception: str | None) -> None: def set_params(self, update_success: bool, failed_count: int, exception: str | None) -> None:
self.params.put("UpdateFailedCount", str(failed_count)) self.params.put("UpdateFailedCount", str(failed_count))
self.params.put("UpdaterTargetBranch", self.target_branch)
self.params.put_bool("UpdaterFetchAvailable", self.update_available) if self.params.get("UpdaterTargetBranch") is None:
if len(self.branches): self.params.put("UpdaterTargetBranch", self.strategy.current_channel())
self.params.put("UpdaterAvailableBranches", ','.join(self.branches.keys()))
self.params.put_bool("UpdaterFetchAvailable", self.strategy.update_available())
available_channels = self.strategy.get_available_channels()
self.params.put("UpdaterAvailableBranches", ','.join(available_channels))
last_update = datetime.datetime.utcnow() last_update = datetime.datetime.utcnow()
if update_success: if update_success:
@@ -292,32 +127,14 @@ class Updater:
    else:
      self.params.put("LastUpdateException", exception)
-    # Write out current and new version info
-    def get_description(basedir: str) -> str:
-      if not os.path.exists(basedir):
-        return ""
-      version = ""
-      branch = ""
-      commit = ""
-      commit_date = ""
-      try:
-        branch = self.get_branch(basedir)
-        commit = self.get_commit_hash(basedir)[:7]
-        with open(os.path.join(basedir, "common", "version.h")) as f:
-          version = f.read().split('"')[1]
-        commit_unix_ts = run(["git", "show", "-s", "--format=%ct", "HEAD"], basedir).rstrip()
-        dt = datetime.datetime.fromtimestamp(int(commit_unix_ts))
-        commit_date = dt.strftime("%b %d")
-      except Exception:
-        cloudlog.exception("updater.get_description")
-      return f"{version} / {branch} / {commit} / {commit_date}"
-    self.params.put("UpdaterCurrentDescription", get_description(BASEDIR))
-    self.params.put("UpdaterCurrentReleaseNotes", parse_release_notes(BASEDIR))
-    self.params.put("UpdaterNewDescription", get_description(FINALIZED))
-    self.params.put("UpdaterNewReleaseNotes", parse_release_notes(FINALIZED))
-    self.params.put_bool("UpdateAvailable", self.update_ready)
+    description_current, release_notes_current = self.strategy.describe_current_channel()
+    description_ready, release_notes_ready = self.strategy.describe_ready_channel()
+    self.params.put("UpdaterCurrentDescription", description_current)
+    self.params.put("UpdaterCurrentReleaseNotes", release_notes_current)
+    self.params.put("UpdaterNewDescription", description_ready)
+    self.params.put("UpdaterNewReleaseNotes", release_notes_ready)
+    self.params.put_bool("UpdateAvailable", self.strategy.update_ready())
    # Handle user prompt
    for alert in ("Offroad_UpdateFailed", "Offroad_ConnectivityNeeded", "Offroad_ConnectivityNeededPrompt"):
@@ -341,67 +158,24 @@ class Updater:
  def check_for_update(self) -> None:
    cloudlog.info("checking for updates")
-    excluded_branches = ('release2', 'release2-staging')
-    try:
-      run(["git", "ls-remote", "origin", "HEAD"], OVERLAY_MERGED)
-      self._has_internet = True
-    except subprocess.CalledProcessError:
-      self._has_internet = False
-    setup_git_options(OVERLAY_MERGED)
-    output = run(["git", "ls-remote", "--heads"], OVERLAY_MERGED)
-    self.branches = defaultdict(lambda: None)
-    for line in output.split('\n'):
-      ls_remotes_re = r'(?P<commit_sha>\b[0-9a-f]{5,40}\b)(\s+)(refs\/heads\/)(?P<branch_name>.*$)'
-      x = re.fullmatch(ls_remotes_re, line.strip())
-      if x is not None and x.group('branch_name') not in excluded_branches:
-        self.branches[x.group('branch_name')] = x.group('commit_sha')
-    cur_branch = self.get_branch(OVERLAY_MERGED)
-    cur_commit = self.get_commit_hash(OVERLAY_MERGED)
-    new_branch = self.target_branch
-    new_commit = self.branches[new_branch]
-    if (cur_branch, cur_commit) != (new_branch, new_commit):
-      cloudlog.info(f"update available, {cur_branch} ({str(cur_commit)[:7]}) -> {new_branch} ({str(new_commit)[:7]})")
-    else:
-      cloudlog.info(f"up to date on {cur_branch} ({str(cur_commit)[:7]})")
+    self.strategy.update_available()
  def fetch_update(self) -> None:
-    cloudlog.info("attempting git fetch inside staging overlay")
    self.params.put("UpdaterState", "downloading...")
    # TODO: cleanly interrupt this and invalidate old update
    set_consistent_flag(False)
    self.params.put_bool("UpdateAvailable", False)
-    setup_git_options(OVERLAY_MERGED)
-    branch = self.target_branch
-    git_fetch_output = run(["git", "fetch", "origin", branch], OVERLAY_MERGED)
-    cloudlog.info("git fetch success: %s", git_fetch_output)
-    cloudlog.info("git reset in progress")
-    cmds = [
-      ["git", "checkout", "--force", "--no-recurse-submodules", "-B", branch, "FETCH_HEAD"],
-      ["git", "reset", "--hard"],
-      ["git", "clean", "-xdff"],
-      ["git", "submodule", "sync"],
-      ["git", "submodule", "update", "--init", "--recursive"],
-      ["git", "submodule", "foreach", "--recursive", "git", "reset", "--hard"],
-    ]
-    r = [run(cmd, OVERLAY_MERGED) for cmd in cmds]
-    cloudlog.info("git reset success: %s", '\n'.join(r))
+    self.strategy.fetch_update()
    # TODO: show agnos download progress
    if AGNOS:
-      handle_agnos_update()
+      handle_agnos_update(self.strategy.fetched_path())
    # Create the finalized, ready-to-swap update
    self.params.put("UpdaterState", "finalizing update...")
-    finalize_update()
+    self.strategy.finalize_update()
    cloudlog.info("finalize success!")
@@ -450,7 +224,7 @@ def main() -> None:
    exception = None
    try:
      # TODO: reuse overlay from previous updated instance if it looks clean
-      init_overlay()
+      updater.init()
      # ensure we have some params written soon after startup
      updater.set_params(False, update_failed_count, exception)
@@ -486,11 +260,11 @@ def main() -> None:
          returncode=e.returncode
        )
      exception = f"command failed: {e.cmd}\n{e.output}"
-      OVERLAY_INIT.unlink(missing_ok=True)
+      updater.cleanup()
    except Exception as e:
      cloudlog.exception("uncaught updated exception, shouldn't happen")
      exception = str(e)
-      OVERLAY_INIT.unlink(missing_ok=True)
+      updater.cleanup()
    try:
      params.put("UpdaterState", "idle")

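The git-specific logic removed above now sits behind a strategy object on the Updater. For reference, the interface implied by the calls in this diff looks roughly like the sketch below; the method names come straight from the calls above, while the class name, base class, type hints, and comments are assumptions, not the actual selfdrive/updated/common.py.

    from abc import ABC, abstractmethod

    class UpdateStrategy(ABC):
      """Sketch of the interface the refactored Updater appears to call into."""

      @abstractmethod
      def init(self) -> None: ...                 # prepare the working area (overlay, cache, ...)
      @abstractmethod
      def cleanup(self) -> None: ...              # undo init() after a failed update
      @abstractmethod
      def get_available_channels(self) -> list[str]: ...
      @abstractmethod
      def current_channel(self) -> str: ...
      @abstractmethod
      def update_available(self) -> bool: ...     # remote has something newer than what is installed
      @abstractmethod
      def update_ready(self) -> bool: ...         # a finalized update is staged and consistent
      @abstractmethod
      def describe_current_channel(self) -> tuple[str, str]: ...  # (description, release notes)
      @abstractmethod
      def describe_ready_channel(self) -> tuple[str, str]: ...
      @abstractmethod
      def fetch_update(self) -> None: ...
      @abstractmethod
      def fetched_path(self) -> str: ...          # handed to handle_agnos_update()
      @abstractmethod
      def finalize_update(self) -> None: ...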
@@ -27,14 +27,18 @@ public:
                     "-cl-fast-relaxed-math -cl-denorms-are-zero "
                     "-DFRAME_WIDTH=%d -DFRAME_HEIGHT=%d -DFRAME_STRIDE=%d -DFRAME_OFFSET=%d "
                     "-DRGB_WIDTH=%d -DRGB_HEIGHT=%d -DYUV_STRIDE=%d -DUV_OFFSET=%d "
-                    "-DIS_OX=%d -DCAM_NUM=%d%s",
+                    "-DIS_OX=%d -DIS_OS=%d -DIS_BGGR=%d -DCAM_NUM=%d%s",
                     ci->frame_width, ci->frame_height, ci->frame_stride, ci->frame_offset,
                     b->rgb_width, b->rgb_height, buf_width, uv_offset,
-                    ci->image_sensor == cereal::FrameData::ImageSensor::OX03C10, s->camera_num, s->camera_num==1 ? " -DVIGNETTING" : "");
+                    ci->image_sensor == cereal::FrameData::ImageSensor::OX03C10,
+                    ci->image_sensor == cereal::FrameData::ImageSensor::OS04C10,
+                    ci->image_sensor == cereal::FrameData::ImageSensor::OS04C10,
+                    s->camera_num, s->camera_num==1 ? " -DVIGNETTING" : "");
    const char *cl_file = "cameras/real_debayer.cl";
    cl_program prg_debayer = cl_program_from_file(context, device_id, cl_file, args);
    krnl_ = CL_CHECK_ERR(clCreateKernel(prg_debayer, "debayer10", &err));
    CL_CHECK(clReleaseProgram(prg_debayer));
  }
  void queue(cl_command_queue q, cl_mem cam_buf_cl, cl_mem buf_cl, int width, int height, cl_event *debayer_event) {

@@ -8,7 +8,7 @@
float3 color_correct(float3 rgb) {
  // color correction
-  #if IS_OX
+  #if IS_OX | IS_OS
  float3 x = rgb.x * (float3)(1.5664815 , -0.29808738, -0.03973474);
  x += rgb.y * (float3)(-0.48672447, 1.41914433, -0.40295248);
  x += rgb.z * (float3)(-0.07975703, -0.12105695, 1.44268722);
@@ -20,6 +20,8 @@ float3 color_correct(float3 rgb) {
  #if IS_OX
  return -0.507089*exp(-12.54124638*x)+0.9655*powr(x,0.5)-0.472597*x+0.507089;
+  #elif IS_OS
+  return powr(x,0.7);
  #else
  // tone mapping params
  const float gamma_k = 0.75;
@@ -35,6 +37,9 @@ float3 color_correct(float3 rgb) {
}
float get_vignetting_s(float r) {
+  #if IS_OS
+  r = r / 2.2545f;
+  #endif
  if (r < 62500) {
    return (1.0f + 0.0000008f*r);
  } else if (r < 490000) {
@@ -85,6 +90,24 @@ float4 val4_from_12(uchar8 pvs, float gain) {
}
+float4 val4_from_10(uchar8 pvs, uchar ext, bool aligned, float gain) {
+  uint4 parsed;
+  if (aligned) {
+    parsed = (uint4)(((uint)pvs.s0 << 2) + (pvs.s1 & 0b00000011),
+                     ((uint)pvs.s2 << 2) + ((pvs.s6 & 0b11000000) / 64),
+                     ((uint)pvs.s3 << 2) + ((pvs.s6 & 0b00110000) / 16),
+                     ((uint)pvs.s4 << 2) + ((pvs.s6 & 0b00001100) / 4));
+  } else {
+    parsed = (uint4)(((uint)pvs.s0 << 2) + ((pvs.s3 & 0b00110000) / 16),
+                     ((uint)pvs.s1 << 2) + ((pvs.s3 & 0b00001100) / 4),
+                     ((uint)pvs.s2 << 2) + ((pvs.s3 & 0b00000011)),
+                     ((uint)pvs.s4 << 2) + ((ext & 0b11000000) / 64));
+  }
+  float4 pv = convert_float4(parsed) / 1024.0;
+  return clamp(pv*gain, 0.0, 1.0);
+}
float get_k(float a, float b, float c, float d) {
  return 2.0 - (fabs(a - b) + fabs(c - d));
}
@@ -94,20 +117,51 @@ __kernel void debayer10(const __global uchar * in, __global uchar * out)
  const int gid_x = get_global_id(0);
  const int gid_y = get_global_id(1);
-  const int y_top_mod = (gid_y == 0) ? 2: 0;
-  const int y_bot_mod = (gid_y == (RGB_HEIGHT/2 - 1)) ? 1: 3;
+  const int row_before_offset = (gid_y == 0) ? 2 : 0;
+  const int row_after_offset = (gid_y == (RGB_HEIGHT/2 - 1)) ? 1 : 3;
  float3 rgb;
  uchar3 rgb_out[4];
-  int start = (2 * gid_y - 1) * FRAME_STRIDE + (3 * gid_x - 2) + (FRAME_STRIDE * FRAME_OFFSET);
+  #if IS_BGGR
+  constant int row_read_order[] = {3, 2, 1, 0};
+  constant int rgb_write_order[] = {2, 3, 0, 1};
+  #else
+  constant int row_read_order[] = {0, 1, 2, 3};
+  constant int rgb_write_order[] = {0, 1, 2, 3};
+  #endif
+  int start_idx;
+  #if IS_10BIT
+  bool aligned10;
+  if (gid_x % 2 == 0) {
+    aligned10 = true;
+    start_idx = (2 * gid_y - 1) * FRAME_STRIDE + (5 * gid_x / 2 - 2) + (FRAME_STRIDE * FRAME_OFFSET);
+  } else {
+    aligned10 = false;
+    start_idx = (2 * gid_y - 1) * FRAME_STRIDE + (5 * (gid_x - 1) / 2 + 1) + (FRAME_STRIDE * FRAME_OFFSET);
+  }
+  #else
+  start_idx = (2 * gid_y - 1) * FRAME_STRIDE + (3 * gid_x - 2) + (FRAME_STRIDE * FRAME_OFFSET);
+  #endif
  // read in 8x4 chars
  uchar8 dat[4];
-  dat[0] = vload8(0, in + start + FRAME_STRIDE*y_top_mod);
-  dat[1] = vload8(0, in + start + FRAME_STRIDE*1);
-  dat[2] = vload8(0, in + start + FRAME_STRIDE*2);
-  dat[3] = vload8(0, in + start + FRAME_STRIDE*y_bot_mod);
+  dat[0] = vload8(0, in + start_idx + FRAME_STRIDE*row_before_offset);
+  dat[1] = vload8(0, in + start_idx + FRAME_STRIDE*1);
+  dat[2] = vload8(0, in + start_idx + FRAME_STRIDE*2);
+  dat[3] = vload8(0, in + start_idx + FRAME_STRIDE*row_after_offset);
+  // need extra bit for 10-bit
+  #if IS_10BIT
+  uchar extra[4];
+  if (!aligned10) {
+    extra[0] = in[start_idx + FRAME_STRIDE*row_before_offset + 8];
+    extra[1] = in[start_idx + FRAME_STRIDE*1 + 8];
+    extra[2] = in[start_idx + FRAME_STRIDE*2 + 8];
+    extra[3] = in[start_idx + FRAME_STRIDE*row_after_offset + 8];
+  }
+  #endif
  // correct vignetting
  #if VIGNETTING
@@ -118,60 +172,69 @@ __kernel void debayer10(const __global uchar * in, __global uchar * out)
  const float gain = 1.0;
  #endif
-  // process them to floats
-  float4 va = val4_from_12(dat[0], gain);
-  float4 vb = val4_from_12(dat[1], gain);
-  float4 vc = val4_from_12(dat[2], gain);
-  float4 vd = val4_from_12(dat[3], gain);
+  float4 v_rows[4];
+  // parse into floats
+  #if IS_10BIT
+  v_rows[row_read_order[0]] = val4_from_10(dat[0], extra[0], aligned10, 1.0);
+  v_rows[row_read_order[1]] = val4_from_10(dat[1], extra[1], aligned10, 1.0);
+  v_rows[row_read_order[2]] = val4_from_10(dat[2], extra[2], aligned10, 1.0);
+  v_rows[row_read_order[3]] = val4_from_10(dat[3], extra[3], aligned10, 1.0);
+  #else
+  v_rows[row_read_order[0]] = val4_from_12(dat[0], gain);
+  v_rows[row_read_order[1]] = val4_from_12(dat[1], gain);
+  v_rows[row_read_order[2]] = val4_from_12(dat[2], gain);
+  v_rows[row_read_order[3]] = val4_from_12(dat[3], gain);
+  #endif
+  // mirror padding
  if (gid_x == 0) {
-    va.s0 = va.s2;
-    vb.s0 = vb.s2;
-    vc.s0 = vc.s2;
-    vd.s0 = vd.s2;
+    v_rows[0].s0 = v_rows[0].s2;
+    v_rows[1].s0 = v_rows[1].s2;
+    v_rows[2].s0 = v_rows[2].s2;
+    v_rows[3].s0 = v_rows[3].s2;
  } else if (gid_x == RGB_WIDTH/2 - 1) {
-    va.s3 = va.s1;
-    vb.s3 = vb.s1;
-    vc.s3 = vc.s1;
-    vd.s3 = vd.s1;
+    v_rows[0].s3 = v_rows[0].s1;
+    v_rows[1].s3 = v_rows[1].s1;
+    v_rows[2].s3 = v_rows[2].s1;
+    v_rows[3].s3 = v_rows[3].s1;
  }
  // a simplified version of https://opensignalprocessingjournal.com/contents/volumes/V6/TOSIGPJ-6-1/TOSIGPJ-6-1.pdf
-  const float k01 = get_k(va.s0, vb.s1, va.s2, vb.s1);
-  const float k02 = get_k(va.s2, vb.s1, vc.s2, vb.s1);
-  const float k03 = get_k(vc.s0, vb.s1, vc.s2, vb.s1);
-  const float k04 = get_k(va.s0, vb.s1, vc.s0, vb.s1);
-  rgb.x = (k02*vb.s2+k04*vb.s0)/(k02+k04); // R_G1
-  rgb.y = vb.s1; // G1(R)
-  rgb.z = (k01*va.s1+k03*vc.s1)/(k01+k03); // B_G1
-  rgb_out[0] = convert_uchar3_sat(color_correct(clamp(rgb, 0.0, 1.0)) * 255.0);
+  const float k01 = get_k(v_rows[0].s0, v_rows[1].s1, v_rows[0].s2, v_rows[1].s1);
+  const float k02 = get_k(v_rows[0].s2, v_rows[1].s1, v_rows[2].s2, v_rows[1].s1);
+  const float k03 = get_k(v_rows[2].s0, v_rows[1].s1, v_rows[2].s2, v_rows[1].s1);
+  const float k04 = get_k(v_rows[0].s0, v_rows[1].s1, v_rows[2].s0, v_rows[1].s1);
+  rgb.x = (k02*v_rows[1].s2+k04*v_rows[1].s0)/(k02+k04); // R_G1
+  rgb.y = v_rows[1].s1; // G1(R)
+  rgb.z = (k01*v_rows[0].s1+k03*v_rows[2].s1)/(k01+k03); // B_G1
+  rgb_out[rgb_write_order[0]] = convert_uchar3_sat(color_correct(clamp(rgb, 0.0, 1.0)) * 255.0);
-  const float k11 = get_k(va.s1, vc.s1, va.s3, vc.s3);
-  const float k12 = get_k(va.s2, vb.s1, vb.s3, vc.s2);
-  const float k13 = get_k(va.s1, va.s3, vc.s1, vc.s3);
-  const float k14 = get_k(va.s2, vb.s3, vc.s2, vb.s1);
-  rgb.x = vb.s2; // R
-  rgb.y = (k11*(va.s2+vc.s2)*0.5+k13*(vb.s3+vb.s1)*0.5)/(k11+k13); // G_R
-  rgb.z = (k12*(va.s3+vc.s1)*0.5+k14*(va.s1+vc.s3)*0.5)/(k12+k14); // B_R
-  rgb_out[1] = convert_uchar3_sat(color_correct(clamp(rgb, 0.0, 1.0)) * 255.0);
+  const float k11 = get_k(v_rows[0].s1, v_rows[2].s1, v_rows[0].s3, v_rows[2].s3);
+  const float k12 = get_k(v_rows[0].s2, v_rows[1].s1, v_rows[1].s3, v_rows[2].s2);
+  const float k13 = get_k(v_rows[0].s1, v_rows[0].s3, v_rows[2].s1, v_rows[2].s3);
+  const float k14 = get_k(v_rows[0].s2, v_rows[1].s3, v_rows[2].s2, v_rows[1].s1);
+  rgb.x = v_rows[1].s2; // R
+  rgb.y = (k11*(v_rows[0].s2+v_rows[2].s2)*0.5+k13*(v_rows[1].s3+v_rows[1].s1)*0.5)/(k11+k13); // G_R
+  rgb.z = (k12*(v_rows[0].s3+v_rows[2].s1)*0.5+k14*(v_rows[0].s1+v_rows[2].s3)*0.5)/(k12+k14); // B_R
+  rgb_out[rgb_write_order[1]] = convert_uchar3_sat(color_correct(clamp(rgb, 0.0, 1.0)) * 255.0);
-  const float k21 = get_k(vb.s0, vd.s0, vb.s2, vd.s2);
-  const float k22 = get_k(vb.s1, vc.s0, vc.s2, vd.s1);
-  const float k23 = get_k(vb.s0, vb.s2, vd.s0, vd.s2);
-  const float k24 = get_k(vb.s1, vc.s2, vd.s1, vc.s0);
-  rgb.x = (k22*(vb.s2+vd.s0)*0.5+k24*(vb.s0+vd.s2)*0.5)/(k22+k24); // R_B
-  rgb.y = (k21*(vb.s1+vd.s1)*0.5+k23*(vc.s2+vc.s0)*0.5)/(k21+k23); // G_B
-  rgb.z = vc.s1; // B
-  rgb_out[2] = convert_uchar3_sat(color_correct(clamp(rgb, 0.0, 1.0)) * 255.0);
+  const float k21 = get_k(v_rows[1].s0, v_rows[3].s0, v_rows[1].s2, v_rows[3].s2);
+  const float k22 = get_k(v_rows[1].s1, v_rows[2].s0, v_rows[2].s2, v_rows[3].s1);
+  const float k23 = get_k(v_rows[1].s0, v_rows[1].s2, v_rows[3].s0, v_rows[3].s2);
+  const float k24 = get_k(v_rows[1].s1, v_rows[2].s2, v_rows[3].s1, v_rows[2].s0);
+  rgb.x = (k22*(v_rows[1].s2+v_rows[3].s0)*0.5+k24*(v_rows[1].s0+v_rows[3].s2)*0.5)/(k22+k24); // R_B
+  rgb.y = (k21*(v_rows[1].s1+v_rows[3].s1)*0.5+k23*(v_rows[2].s2+v_rows[2].s0)*0.5)/(k21+k23); // G_B
+  rgb.z = v_rows[2].s1; // B
+  rgb_out[rgb_write_order[2]] = convert_uchar3_sat(color_correct(clamp(rgb, 0.0, 1.0)) * 255.0);
-  const float k31 = get_k(vb.s1, vc.s2, vb.s3, vc.s2);
-  const float k32 = get_k(vb.s3, vc.s2, vd.s3, vc.s2);
-  const float k33 = get_k(vd.s1, vc.s2, vd.s3, vc.s2);
-  const float k34 = get_k(vb.s1, vc.s2, vd.s1, vc.s2);
-  rgb.x = (k31*vb.s2+k33*vd.s2)/(k31+k33); // R_G2
-  rgb.y = vc.s2; // G2(B)
-  rgb.z = (k32*vc.s3+k34*vc.s1)/(k32+k34); // B_G2
-  rgb_out[3] = convert_uchar3_sat(color_correct(clamp(rgb, 0.0, 1.0)) * 255.0);
+  const float k31 = get_k(v_rows[1].s1, v_rows[2].s2, v_rows[1].s3, v_rows[2].s2);
+  const float k32 = get_k(v_rows[1].s3, v_rows[2].s2, v_rows[3].s3, v_rows[2].s2);
+  const float k33 = get_k(v_rows[3].s1, v_rows[2].s2, v_rows[3].s3, v_rows[2].s2);
+  const float k34 = get_k(v_rows[1].s1, v_rows[2].s2, v_rows[3].s1, v_rows[2].s2);
+  rgb.x = (k31*v_rows[1].s2+k33*v_rows[3].s2)/(k31+k33); // R_G2
+  rgb.y = v_rows[2].s2; // G2(B)
+  rgb.z = (k32*v_rows[2].s3+k34*v_rows[2].s1)/(k32+k34); // B_G2
+  rgb_out[rgb_write_order[3]] = convert_uchar3_sat(color_correct(clamp(rgb, 0.0, 1.0)) * 255.0);
  // write ys
  uchar2 yy = (uchar2)(

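The new val4_from_10 helper pulls four 10-bit pixels out of eight packed bytes (four high bytes plus shared low-bit bytes). Below is a rough CPU-side transliteration of the aligned branch, handy for sanity-checking the bit layout against a raw frame dump; the Python helper is illustrative only and not part of the change.

    def unpack4_from_10(pvs: bytes) -> list[float]:
        # pvs holds 8 consecutive bytes of a packed 10-bit row (aligned case in the kernel):
        # high bytes live in pvs[0], pvs[2], pvs[3], pvs[4]; the low two bits of pixel 0
        # sit in pvs[1], and the low bits of pixels 1..3 are packed into pvs[6]
        parsed = [
            (pvs[0] << 2) | (pvs[1] & 0b00000011),
            (pvs[2] << 2) | ((pvs[6] & 0b11000000) >> 6),
            (pvs[3] << 2) | ((pvs[6] & 0b00110000) >> 4),
            (pvs[4] << 2) | ((pvs[6] & 0b00001100) >> 2),
        ]
        # normalize 10-bit values to [0, 1), mirroring convert_float4(parsed) / 1024.0
        return [p / 1024.0 for p in parsed]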
@@ -80,14 +80,14 @@ float ar0231_parse_temp_sensor(uint16_t calib1, uint16_t calib2, uint16_t data_r
AR0231::AR0231() {
  image_sensor = cereal::FrameData::ImageSensor::AR0231;
  data_word = true;
-  frame_width = FRAME_WIDTH;
-  frame_height = FRAME_HEIGHT;
-  frame_stride = FRAME_STRIDE;
+  frame_width = 1928;
+  frame_height = 1208;
+  frame_stride = (frame_width * 12 / 8) + 4;
  extra_height = AR0231_REGISTERS_HEIGHT + AR0231_STATS_HEIGHT;
  registers_offset = 0;
  frame_offset = AR0231_REGISTERS_HEIGHT;
-  stats_offset = AR0231_REGISTERS_HEIGHT + FRAME_HEIGHT;
+  stats_offset = AR0231_REGISTERS_HEIGHT + frame_height;
  start_reg_array.assign(std::begin(start_reg_array_ar0231), std::end(start_reg_array_ar0231));
  init_reg_array.assign(std::begin(init_array_ar0231), std::end(init_array_ar0231));

@@ -10,14 +10,11 @@ const float sensor_analog_gains_OS04C10[] = {
  10.5, 11.0, 11.5, 12.0, 12.5, 13.0, 13.5, 14.0, 14.5, 15.0, 15.5};
const uint32_t os04c10_analog_gains_reg[] = {
-  0x100, 0x110, 0x120, 0x130, 0x140, 0x150, 0x160, 0x170, 0x180, 0x190, 0x1B0,
-  0x1D0, 0x1F0, 0x200, 0x220, 0x240, 0x260, 0x280, 0x2A0, 0x2C0, 0x2E0, 0x300,
-  0x320, 0x360, 0x3A0, 0x3E0, 0x400, 0x440, 0x480, 0x4C0, 0x500, 0x540, 0x580,
-  0x5C0, 0x600, 0x640, 0x680, 0x700, 0x780, 0x800, 0x880, 0x900, 0x980, 0xA00,
-  0xA80, 0xB00, 0xB80, 0xC00, 0xC80, 0xD00, 0xD80, 0xE00, 0xE80, 0xF00, 0xF80};
+  0x080, 0x088, 0x090, 0x098, 0x0A0, 0x0A8, 0x0B0, 0x0B8, 0x0C0, 0x0C8, 0x0D8,
+  0x0E8, 0x0F8, 0x100, 0x110, 0x120, 0x130, 0x140, 0x150, 0x160, 0x170, 0x180,
+  0x190, 0x1B0, 0x1D0, 0x1F0, 0x200, 0x220, 0x240, 0x260, 0x280, 0x2A0, 0x2C0,
+  0x2E0, 0x300, 0x320, 0x340, 0x380, 0x3C0, 0x400, 0x440, 0x480, 0x4C0, 0x500,
+  0x540, 0x580, 0x5C0, 0x600, 0x640, 0x680, 0x6C0, 0x700, 0x740, 0x780, 0x7C0};
-const uint32_t VS_TIME_MIN_OS04C10 = 1;
-//const uint32_t VS_TIME_MAX_OS04C10 = 34; // vs < 35
} // namespace
@@ -25,15 +22,9 @@ OS04C10::OS04C10() {
  image_sensor = cereal::FrameData::ImageSensor::OS04C10;
  data_word = false;
-  frame_width = 1920;
-  frame_height = 1080;
-  frame_stride = (1920*10/8);
-  /*
-  frame_width = 0xa80;
-  frame_height = 0x5f0;
-  frame_stride = 0xd20;
-  */
+  frame_width = 2688;
+  frame_height = 1520;
+  frame_stride = (frame_width * 12 / 8); // no alignment
  extra_height = 0;
  frame_offset = 0;
@@ -42,17 +33,17 @@ OS04C10::OS04C10() {
  init_reg_array.assign(std::begin(init_array_os04c10), std::end(init_array_os04c10));
  probe_reg_addr = 0x300a;
  probe_expected_data = 0x5304;
-  mipi_format = CAM_FORMAT_MIPI_RAW_10;
-  frame_data_type = 0x2b;
+  mipi_format = CAM_FORMAT_MIPI_RAW_12;
+  frame_data_type = 0x2c;
  mclk_frequency = 24000000; // Hz
-  dc_gain_factor = 7.32;
+  dc_gain_factor = 1;
  dc_gain_min_weight = 1; // always on is fine
  dc_gain_max_weight = 1;
  dc_gain_on_grey = 0.9;
  dc_gain_off_grey = 1.0;
  exposure_time_min = 2; // 1x
-  exposure_time_max = 2016;
+  exposure_time_max = 2200;
  analog_gain_min_idx = 0x0;
  analog_gain_rec_idx = 0x0; // 1x
  analog_gain_max_idx = 0x36;
@@ -62,30 +53,22 @@ OS04C10::OS04C10() {
  for (int i = 0; i <= analog_gain_max_idx; i++) {
    sensor_analog_gains[i] = sensor_analog_gains_OS04C10[i];
  }
-  min_ev = (exposure_time_min + VS_TIME_MIN_OS04C10) * sensor_analog_gains[analog_gain_min_idx];
+  min_ev = (exposure_time_min) * sensor_analog_gains[analog_gain_min_idx];
  max_ev = exposure_time_max * dc_gain_factor * sensor_analog_gains[analog_gain_max_idx];
  target_grey_factor = 0.01;
}
std::vector<i2c_random_wr_payload> OS04C10::getExposureRegisters(int exposure_time, int new_exp_g, bool dc_gain_enabled) const {
-  // t_HCG&t_LCG + t_VS on LPD, t_SPD on SPD
-  uint32_t hcg_time = exposure_time;
-  //uint32_t lcg_time = hcg_time;
-  //uint32_t spd_time = std::min(std::max((uint32_t)exposure_time, (exposure_time_max + VS_TIME_MAX_OS04C10) / 3), exposure_time_max + VS_TIME_MAX_OS04C10);
-  //uint32_t vs_time = std::min(std::max((uint32_t)exposure_time / 40, VS_TIME_MIN_OS04C10), VS_TIME_MAX_OS04C10);
+  uint32_t long_time = exposure_time;
  uint32_t real_gain = os04c10_analog_gains_reg[new_exp_g];
-  hcg_time = 100;
-  real_gain = 0x320;
+  // uint32_t short_time = long_time > exposure_time_min*8 ? long_time / 8 : exposure_time_min;
  return {
-    {0x3501, hcg_time>>8}, {0x3502, hcg_time&0xFF},
-    //{0x3581, lcg_time>>8}, {0x3582, lcg_time&0xFF},
-    //{0x3541, spd_time>>8}, {0x3542, spd_time&0xFF},
-    //{0x35c2, vs_time&0xFF},
+    {0x3501, long_time>>8}, {0x3502, long_time&0xFF},
+    // {0x3511, short_time>>8}, {0x3512, short_time&0xFF},
    {0x3508, real_gain>>8}, {0x3509, real_gain&0xFF},
+    // {0x350c, real_gain>>8}, {0x350d, real_gain&0xFF},
  };
}

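The new exposure path simply splits 16-bit values across byte-wide registers. A quick sketch of that packing (Python); the register addresses come from the return list above, while the helper name and the example values are illustrative.

    def os04c10_exposure_writes(exposure_time: int, gain_reg: int) -> list[tuple[int, int]]:
        # split the long-exposure time across 0x3501 (high byte) / 0x3502 (low byte),
        # and the analog gain register value across 0x3508 / 0x3509, as in the diff above
        return [
            (0x3501, (exposure_time >> 8) & 0xFF), (0x3502, exposure_time & 0xFF),
            (0x3508, (gain_reg >> 8) & 0xFF), (0x3509, gain_reg & 0xFF),
        ]

    # e.g. exposure_time=0x0080 and gain_reg=0x0080 reproduce the init-table values
    # {0x3501, 0x00}, {0x3502, 0x80}, {0x3508, 0x00}, {0x3509, 0x80}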
@@ -4,43 +4,33 @@ const struct i2c_random_wr_payload start_reg_array_os04c10[] = {{0x100, 1}};
const struct i2c_random_wr_payload stop_reg_array_os04c10[] = {{0x100, 0}};
const struct i2c_random_wr_payload init_array_os04c10[] = {
-  // OS04C10_AA_00_02_17_wAO_1920x1080_MIPI728Mbps_Linear12bit_20FPS_4Lane_MCLK24MHz
+  // OS04C10_AA_00_02_17_wAO_2688x1524_MIPI728Mbps_Linear12bit_20FPS_4Lane_MCLK24MHz
  {0x0103, 0x01},
-  {0x0301, 0x84},
+  // PLL
+  {0x0301, 0xe4},
  {0x0303, 0x01},
-  {0x0305, 0x5b},
+  {0x0305, 0xb6},
  {0x0306, 0x01},
  {0x0307, 0x17},
  {0x0323, 0x04},
  {0x0324, 0x01},
  {0x0325, 0x62},
  {0x3012, 0x06},
  {0x3013, 0x02},
  {0x3016, 0x72},
  {0x3021, 0x03},
  {0x3106, 0x21},
  {0x3107, 0xa1},
-  {0x3500, 0x00},
-  {0x3501, 0x00}, // ?
-  {0x3502, 0x40},
-  {0x3503, 0x88},
-  {0x3508, 0x07},
-  {0x3509, 0xc0},
-  {0x350a, 0x04},
-  {0x350b, 0x00},
-  {0x350c, 0x07},
-  {0x350d, 0xc0},
-  {0x350e, 0x04},
-  {0x350f, 0x00},
-  {0x3510, 0x00},
-  {0x3511, 0x00},
-  {0x3512, 0x20},
  {0x3624, 0x00},
  {0x3625, 0x4c},
-  {0x3660, 0x00},
+  {0x3660, 0x04},
  {0x3666, 0xa5},
  {0x3667, 0xa5},
-  {0x366a, 0x64},
+  {0x366a, 0x50},
  {0x3673, 0x0d},
  {0x3672, 0x0d},
  {0x3671, 0x0d},
@@ -63,22 +53,22 @@ const struct i2c_random_wr_payload init_array_os04c10[] = {
  {0x36a0, 0x12},
  {0x36a1, 0x5d},
  {0x36a2, 0x66},
-  {0x370a, 0x00},
+  {0x370a, 0x02},
  {0x370e, 0x0c},
  {0x3710, 0x00},
  {0x3713, 0x00},
  {0x3725, 0x02},
  {0x372a, 0x03},
  {0x3738, 0xce},
-  {0x3748, 0x00},
-  {0x374a, 0x00},
-  {0x374c, 0x00},
-  {0x374e, 0x00},
+  {0x3748, 0x02},
+  {0x374a, 0x02},
+  {0x374c, 0x02},
+  {0x374e, 0x02},
  {0x3756, 0x00},
-  {0x3757, 0x0e},
+  {0x3757, 0x00},
  {0x3767, 0x00},
  {0x3771, 0x00},
-  {0x377b, 0x20},
+  {0x377b, 0x28},
  {0x377c, 0x00},
  {0x377d, 0x0c},
  {0x3781, 0x03},
@@ -111,6 +101,8 @@ const struct i2c_random_wr_payload init_array_os04c10[] = {
  {0x3d8d, 0xe2},
  {0x3f00, 0x0b},
  {0x3f06, 0x04},
+  // BLC
  {0x400a, 0x01},
  {0x400b, 0x50},
  {0x400e, 0x08},
@@ -118,7 +110,7 @@ const struct i2c_random_wr_payload init_array_os04c10[] = {
  {0x4045, 0x7e},
  {0x4047, 0x7e},
  {0x4049, 0x7e},
-  {0x4090, 0x14},
+  {0x4090, 0x04},
  {0x40b0, 0x00},
  {0x40b1, 0x00},
  {0x40b2, 0x00},
@@ -128,24 +120,25 @@ const struct i2c_random_wr_payload init_array_os04c10[] = {
  {0x40b7, 0x00},
  {0x40b8, 0x00},
  {0x40b9, 0x00},
-  {0x40ba, 0x00},
+  {0x40ba, 0x01},
  {0x4301, 0x00},
  {0x4303, 0x00},
  {0x4502, 0x04},
  {0x4503, 0x00},
  {0x4504, 0x06},
  {0x4506, 0x00},
-  {0x4507, 0x64},
+  {0x4507, 0x47},
  {0x4803, 0x00},
  {0x480c, 0x32},
-  {0x480e, 0x00},
-  {0x4813, 0x00},
+  {0x480e, 0x04},
+  {0x4813, 0xe4},
  {0x4819, 0x70},
  {0x481f, 0x30},
  {0x4823, 0x3f},
  {0x4825, 0x30},
  {0x4833, 0x10},
-  {0x484b, 0x07},
+  {0x484b, 0x27},
  {0x488b, 0x00},
  {0x4d00, 0x04},
  {0x4d01, 0xad},
@@ -156,31 +149,37 @@ const struct i2c_random_wr_payload init_array_os04c10[] = {
  {0x4d0b, 0x01},
  {0x4e00, 0x2a},
  {0x4e0d, 0x00},
+  // ISP
  {0x5001, 0x09},
  {0x5004, 0x00},
  {0x5080, 0x04},
-  {0x5036, 0x00},
+  {0x5036, 0x80},
  {0x5180, 0x70},
  {0x5181, 0x10},
+  // DPC
  {0x520a, 0x03},
  {0x520b, 0x06},
  {0x520c, 0x0c},
  {0x580b, 0x0f},
  {0x580d, 0x00},
  {0x580f, 0x00},
  {0x5820, 0x00},
  {0x5821, 0x00},
  {0x301c, 0xf8},
  {0x301e, 0xb4},
-  {0x301f, 0xd0},
-  {0x3022, 0x01},
+  {0x301f, 0xf0},
+  {0x3022, 0x61},
  {0x3109, 0xe7},
  {0x3600, 0x00},
  {0x3610, 0x65},
  {0x3611, 0x85},
  {0x3613, 0x3a},
  {0x3615, 0x60},
-  {0x3621, 0x90},
+  {0x3621, 0xb0},
  {0x3620, 0x0c},
  {0x3629, 0x00},
  {0x3661, 0x04},
@@ -194,9 +193,9 @@ const struct i2c_random_wr_payload init_array_os04c10[] = {
  {0x3701, 0x12},
  {0x3703, 0x28},
  {0x3704, 0x0e},
-  {0x3706, 0x4a},
+  {0x3706, 0x9d},
  {0x3709, 0x4a},
-  {0x370b, 0xa2},
+  {0x370b, 0x48},
  {0x370c, 0x01},
  {0x370f, 0x04},
  {0x3714, 0x24},
@@ -206,19 +205,19 @@ const struct i2c_random_wr_payload init_array_os04c10[] = {
  {0x3720, 0x00},
  {0x3724, 0x13},
  {0x373f, 0xb0},
-  {0x3741, 0x4a},
-  {0x3743, 0x4a},
-  {0x3745, 0x4a},
-  {0x3747, 0x4a},
-  {0x3749, 0xa2},
-  {0x374b, 0xa2},
-  {0x374d, 0xa2},
-  {0x374f, 0xa2},
+  {0x3741, 0x9d},
+  {0x3743, 0x9d},
+  {0x3745, 0x9d},
+  {0x3747, 0x9d},
+  {0x3749, 0x48},
+  {0x374b, 0x48},
+  {0x374d, 0x48},
+  {0x374f, 0x48},
  {0x3755, 0x10},
  {0x376c, 0x00},
-  {0x378d, 0x30},
-  {0x3790, 0x4a},
-  {0x3791, 0xa2},
+  {0x378d, 0x3c},
+  {0x3790, 0x01},
+  {0x3791, 0x01},
  {0x3798, 0x40},
  {0x379e, 0x00},
  {0x379f, 0x04},
@@ -249,29 +248,25 @@ const struct i2c_random_wr_payload init_array_os04c10[] = {
  {0x4041, 0x07},
  {0x4008, 0x02},
  {0x4009, 0x0d},
-  {0x3800, 0x01},
-  {0x3801, 0x80},
-  {0x3802, 0x00},
-  {0x3803, 0xdc},
-  {0x3804, 0x09},
-  {0x3805, 0x0f},
-  {0x3806, 0x05},
-  {0x3807, 0x23},
-  {0x3808, 0x07},
-  {0x3809, 0x80},
-  {0x380a, 0x04},
-  {0x380b, 0x38},
-  {0x380c, 0x04},
-  {0x380d, 0x2e},
-  {0x380e, 0x12},
-  {0x380f, 0x70},
+  // 2704x1536 -> 2688x1520 out
+  {0x3800, 0x00}, {0x3801, 0x00},
+  {0x3802, 0x00}, {0x3803, 0x00},
+  {0x3804, 0x0a}, {0x3805, 0x8f},
+  {0x3806, 0x05}, {0x3807, 0xff},
+  {0x3808, 0x0a}, {0x3809, 0x80},
+  {0x380a, 0x05}, {0x380b, 0xf0},
  {0x3811, 0x08},
  {0x3813, 0x08},
  {0x3814, 0x01},
  {0x3815, 0x01},
  {0x3816, 0x01},
  {0x3817, 0x01},
-  {0x3820, 0xB0},
+  {0x380c, 0x08}, {0x380d, 0x5c}, // HTS
+  {0x380e, 0x09}, {0x380f, 0x38}, // VTS
+  {0x3820, 0xb0},
  {0x3821, 0x00},
  {0x3880, 0x25},
  {0x3882, 0x20},
@@ -281,12 +276,12 @@ const struct i2c_random_wr_payload init_array_os04c10[] = {
  {0x3cae, 0x00},
  {0x4000, 0xf3},
  {0x4001, 0x60},
-  {0x4003, 0x40},
+  {0x4003, 0x80},
  {0x4300, 0xff},
  {0x4302, 0x0f},
  {0x4305, 0x83},
  {0x4505, 0x84},
-  {0x4809, 0x1e},
+  {0x4809, 0x0e},
  {0x480a, 0x04},
  {0x4837, 0x15},
  {0x4c00, 0x08},
@@ -294,5 +289,25 @@ const struct i2c_random_wr_payload init_array_os04c10[] = {
  {0x4c04, 0x00},
  {0x4c05, 0x00},
  {0x5000, 0xf9},
-  {0x3c8c, 0x10},
+  {0x3822, 0x14},
+  // initialize exposure
+  {0x3503, 0x88},
+  // long
+  {0x3500, 0x00}, {0x3501, 0x00}, {0x3502, 0x80},
+  {0x3508, 0x00}, {0x3509, 0x80},
+  {0x350a, 0x04}, {0x350b, 0x00},
+  // short
+  // {0x3510, 0x00}, {0x3511, 0x00}, {0x3512, 0x10},
+  // {0x350c, 0x00}, {0x350d, 0x80},
+  // {0x350e, 0x04}, {0x350f, 0x00},
+  // wb
+  {0x5100, 0x06}, {0x5101, 0xcb},
+  {0x5102, 0x04}, {0x5103, 0x00},
+  {0x5104, 0x08}, {0x5105, 0xde},
+  {0x5106, 0x02}, {0x5107, 0x00},
};

@@ -24,9 +24,9 @@ const uint32_t VS_TIME_MAX_OX03C10 = 34; // vs < 35
OX03C10::OX03C10() {
  image_sensor = cereal::FrameData::ImageSensor::OX03C10;
  data_word = false;
-  frame_width = FRAME_WIDTH;
-  frame_height = FRAME_HEIGHT;
-  frame_stride = FRAME_STRIDE; // (0xa80*12//8)
+  frame_width = 1928;
+  frame_height = 1208;
+  frame_stride = (frame_width * 12 / 8) + 4;
  extra_height = 16; // top 2 + bot 14
  frame_offset = 2;

@@ -12,10 +12,6 @@
#include "system/camerad/sensors/os04c10_registers.h"
#define ANALOG_GAIN_MAX_CNT 55
-const size_t FRAME_WIDTH = 1928;
-const size_t FRAME_HEIGHT = 1208;
-const size_t FRAME_STRIDE = 2896; // for 12 bit output. 1928 * 12 / 8 + 4 (alignment)
class SensorInfo {
 public:

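With the global FRAME_WIDTH/FRAME_HEIGHT/FRAME_STRIDE constants gone, each sensor now derives its stride from its own width and bit depth. A quick check of that arithmetic, using the numbers from the constructors above (12-bit RAW packs two pixels into three bytes):

    # stride = width * 12 / 8, plus any alignment pad
    assert 1928 * 12 // 8 + 4 == 2896   # AR0231 / OX03C10, 4-byte alignment pad
    assert 2688 * 12 // 8 == 4032       # OS04C10, "no alignment" per the comment above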
@@ -468,8 +468,9 @@ class Tici(HardwareBase):
          # use sim slot
          'AT^SIMSWAP=1',
-          # configure ECM mode
-          'AT$QCPCFG=usbNet,1'
+          # ethernet config
+          'AT$QCPCFG=usbNet,0',
+          'AT$QCNETDEVCTL=3,1',
        ]
      else:
        cmds += [

@@ -83,7 +83,7 @@ def main():
      dt = datetime.datetime.strptime(f"{date} {gnrmc[1]}", '%d%m%y %H%M%S.%f')
      gps.unixTimestampMillis = dt.timestamp()*1e3
-      gps.flags = 1 if gnrmc[1] == 'A' else 0
+      gps.hasFix = gnrmc[1] == 'A'
      # TODO: make our own source
      gps.source = log.GpsLocationData.SensorSource.qcomdiag

@@ -352,8 +352,8 @@ def main() -> NoReturn:
    gps.bearingAccuracyDeg = report["q_FltHeadingUncRad"] * 180/math.pi if (report["q_FltHeadingUncRad"] != 0) else 180
    gps.speedAccuracy = math.sqrt(sum([x**2 for x in vNEDsigma]))
    # quectel gps verticalAccuracy is clipped to 500, set invalid if so
-    gps.flags = 1 if gps.verticalAccuracy != 500 else 0
-    if gps.flags:
+    gps.hasFix = gps.verticalAccuracy != 500
+    if gps.hasFix:
      want_assistance = False
      stop_download_event.set()
    pm.send('gpsLocation', msg)

@@ -127,6 +127,7 @@ kj::Array<capnp::word> UbloxMsgParser::gen_nav_pvt(ubx_t::nav_pvt_t *msg) {
  auto gpsLoc = msg_builder.initEvent().initGpsLocationExternal();
  gpsLoc.setSource(cereal::GpsLocationData::SensorSource::UBLOX);
  gpsLoc.setFlags(msg->flags());
+  gpsLoc.setHasFix((msg->flags() % 2) == 1);
  gpsLoc.setLatitude(msg->lat() * 1e-07);
  gpsLoc.setLongitude(msg->lon() * 1e-07);
  gpsLoc.setAltitude(msg->height() * 1e-03);

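All three GPS sources above now publish an explicit hasFix instead of overloading flags. A small sketch of the equivalent logic (Python); the conditions are lifted from the hunks above, the helper itself is illustrative.

    def has_fix(source: str, flags: int = 0, vertical_accuracy: float = 0.0, nmea_status: str = "V") -> bool:
        # ublox NAV-PVT: bit 0 of flags marks a valid fix
        if source == "ublox":
            return (flags & 1) == 1
        # quectel/qcom: verticalAccuracy is clipped to 500 when there is no valid fix
        if source == "qcom":
            return vertical_accuracy != 500
        # NMEA GNRMC: status field is 'A' (valid) or 'V' (void)
        return nmea_status == "A"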
@@ -67,6 +67,7 @@ class MetaDriveBridge(SimulatorBridge):
        crash_object_done=True,
        traffic_density=0.0,
        map_config=create_map(),
+        map_region_size=2048,
        decision_repeat=1,
        physics_world_step_size=self.TICKS_PER_FRAME/100,
        preload_models=False
