diff --git a/.github/workflows/selfdrive_tests.yaml b/.github/workflows/selfdrive_tests.yaml
index aba630f67c..4e622f68a5 100644
--- a/.github/workflows/selfdrive_tests.yaml
+++ b/.github/workflows/selfdrive_tests.yaml
@@ -27,7 +27,6 @@ env:
RUN_CL: docker run --shm-size 1G -v $PWD:/tmp/openpilot -w /tmp/openpilot -e PYTHONWARNINGS=error -e PYTHONPATH=/tmp/openpilot -e NUM_JOBS -e JOB_ID -e GITHUB_ACTION -e GITHUB_REF -e GITHUB_HEAD_REF -e GITHUB_SHA -e GITHUB_REPOSITORY -e GITHUB_RUN_ID -v $GITHUB_WORKSPACE/.ci_cache/scons_cache:/tmp/scons_cache -v $GITHUB_WORKSPACE/.ci_cache/comma_download_cache:/tmp/comma_download_cache -v $GITHUB_WORKSPACE/.ci_cache/openpilot_cache:/tmp/openpilot_cache $CL_BASE_IMAGE /bin/sh -c
PYTEST: pytest --continue-on-collection-errors --cov --cov-report=xml --cov-append --durations=0 --durations-min=5 --hypothesis-seed 0
- XDIST: -n auto --dist=loadscope
jobs:
build_release:
@@ -58,7 +57,7 @@ jobs:
run: |
cd $STRIPPED_DIR
${{ env.RUN }} "release/check-dirty.sh && \
- MAX_EXAMPLES=5 $PYTEST $XDIST selfdrive/car"
+ MAX_EXAMPLES=5 $PYTEST selfdrive/car"
- name: pre-commit
timeout-minutes: 3
run: |
@@ -176,7 +175,7 @@ jobs:
- name: Run unit tests
timeout-minutes: 15
run: |
- ${{ env.RUN }} "$PYTEST $XDIST --timeout 30 -o cpp_files=test_* -m 'not slow' && \
+ ${{ env.RUN }} "$PYTEST --timeout 30 -m 'not slow' && \
./selfdrive/ui/tests/create_test_translations.sh && \
QT_QPA_PLATFORM=offscreen ./selfdrive/ui/tests/test_translations && \
./selfdrive/ui/tests/test_translations.py && \
@@ -209,30 +208,20 @@ jobs:
run: |
${{ env.RUN }} "scons -j$(nproc)"
- name: Run replay
+ id: run-replay
timeout-minutes: 30
run: |
- ${{ env.RUN }} "CI=1 coverage run selfdrive/test/process_replay/test_processes.py -j$(nproc) && \
- chmod -R 777 /tmp/comma_download_cache && \
- coverage combine && \
- coverage xml"
- - name: Print diff
- id: print-diff
- if: always()
- run: cat selfdrive/test/process_replay/diff.txt
- - uses: actions/upload-artifact@v3
- if: always()
- continue-on-error: true
- with:
- name: process_replay_diff.txt
- path: selfdrive/test/process_replay/diff.txt
- - name: Upload reference logs
- if: ${{ failure() && steps.print-diff.outcome == 'success' && github.repository == 'commaai/openpilot' && env.AZURE_TOKEN != '' }}
- run: |
- ${{ env.RUN }} "unset PYTHONWARNINGS && CI=1 AZURE_TOKEN='$AZURE_TOKEN' python selfdrive/test/process_replay/test_processes.py -j$(nproc) --upload-only"
+ ${{ env.RUN }} "CI=1 $PYTEST -n auto --dist=loadscope selfdrive/test/process_replay/test_processes.py --long-diff && \
+ chmod -R 777 /tmp/comma_download_cache"
- name: "Upload coverage to Codecov"
uses: codecov/codecov-action@v3
with:
name: ${{ github.job }}
+ - name: Upload reference logs
+ if: ${{ failure() && github.repository == 'commaai/openpilot' && env.AZURE_TOKEN != '' }}
+ run: |
+ ${{ env.RUN }} "unset PYTHONWARNINGS && CI=1 AZURE_TOKEN='$AZURE_TOKEN' \
+ pytest -n auto --dist=loadscope selfdrive/test/process_replay/test_processes.py --upload-only"
regen:
name: regen
@@ -258,7 +247,7 @@ jobs:
- name: Run regen
timeout-minutes: 30
run: |
- ${{ env.RUN_CL }} "ONNXCPU=1 $PYTEST $XDIST selfdrive/test/process_replay/test_regen.py && \
+ ${{ env.RUN_CL }} "ONNXCPU=1 $PYTEST selfdrive/test/process_replay/test_regen.py && \
chmod -R 777 /tmp/comma_download_cache"
test_modeld:
@@ -318,7 +307,7 @@ jobs:
- name: Test car models
timeout-minutes: 25
run: |
- ${{ env.RUN }} "$PYTEST $XDIST selfdrive/car/tests/test_models.py && \
+ ${{ env.RUN }} "$PYTEST selfdrive/car/tests/test_models.py && \
chmod -R 777 /tmp/comma_download_cache"
env:
NUM_JOBS: 5
diff --git a/Jenkinsfile b/Jenkinsfile
index 6857ce36e1..1e1d485b10 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -14,6 +14,8 @@ export GIT_BRANCH=${env.GIT_BRANCH}
export GIT_COMMIT=${env.GIT_COMMIT}
export AZURE_TOKEN='${env.AZURE_TOKEN}'
export MAPBOX_TOKEN='${env.MAPBOX_TOKEN}'
+export PYTEST_ADDOPTS="-c selfdrive/test/pytest-tici.ini --rootdir ."
+
export GIT_SSH_COMMAND="ssh -i /data/gitkey"
@@ -159,7 +161,7 @@ node {
["build openpilot", "cd selfdrive/manager && ./build.py"],
["check dirty", "release/check-dirty.sh"],
["onroad tests", "pytest selfdrive/test/test_onroad.py -s"],
- ["time to onroad", "cd selfdrive/test/ && pytest test_time_to_onroad.py"],
+ ["time to onroad", "pytest selfdrive/test/test_time_to_onroad.py"],
])
},
'HW + Unit Tests': {
@@ -194,17 +196,17 @@ node {
'sensord': {
deviceStage("LSM + MMC", "tici-lsmc", ["UNSAFE=1"], [
["build", "cd selfdrive/manager && ./build.py"],
- ["test sensord", "cd system/sensord/tests && pytest test_sensord.py"],
+ ["test sensord", "pytest system/sensord/tests/test_sensord.py"],
])
deviceStage("BMX + LSM", "tici-bmx-lsm", ["UNSAFE=1"], [
["build", "cd selfdrive/manager && ./build.py"],
- ["test sensord", "cd system/sensord/tests && pytest test_sensord.py"],
+ ["test sensord", "pytest system/sensord/tests/test_sensord.py"],
])
},
'replay': {
deviceStage("tici", "tici-replay", ["UNSAFE=1"], [
["build", "cd selfdrive/manager && ./build.py"],
- ["model replay", "cd selfdrive/test/process_replay && ./model_replay.py"],
+ ["model replay", "selfdrive/test/process_replay/model_replay.py"],
])
},
'tizi': {
@@ -231,7 +233,7 @@ node {
sh label: "build", script: "selfdrive/manager/build.py"
sh label: "test_models.py", script: "INTERNAL_SEG_CNT=250 INTERNAL_SEG_LIST=selfdrive/car/tests/test_models_segs.txt FILEREADER_CACHE=1 \
pytest -n42 --dist=loadscope selfdrive/car/tests/test_models.py"
- sh label: "test_car_interfaces.py", script: "MAX_EXAMPLES=100 pytest -n42 selfdrive/car/tests/test_car_interfaces.py"
+ sh label: "test_car_interfaces.py", script: "MAX_EXAMPLES=100 pytest -n42 --dist=load selfdrive/car/tests/test_car_interfaces.py"
}
},
diff --git a/RELEASES.md b/RELEASES.md
index 1d88480278..ec1e67e3a7 100644
--- a/RELEASES.md
+++ b/RELEASES.md
@@ -13,6 +13,7 @@ Version 0.9.5 (2023-11-16)
* Kia K8 Hybrid (with HDA II) 2023 support thanks to sunnyhaibin!
* Kia Sorento Hybrid 2023 support thanks to sunnyhaibin!
* Kia Optima Hybrid 2019 support
+* Lexus GS F 2016 support thanks to snyperifle!
* Lexus IS 2023 support thanks to L3R5!
Version 0.9.4 (2023-07-27)
diff --git a/common/prefix.py b/common/prefix.py
index c1744e8ff7..c5ae4393cd 100644
--- a/common/prefix.py
+++ b/common/prefix.py
@@ -21,6 +21,7 @@ class OpenpilotPrefix:
except FileExistsError:
pass
os.makedirs(Paths.log_root(), exist_ok=True)
+ os.makedirs(Paths.download_cache_root(), exist_ok=True)
return self
diff --git a/docs/CARS.md b/docs/CARS.md
index b7f083b8e8..044db1f0ec 100644
--- a/docs/CARS.md
+++ b/docs/CARS.md
@@ -4,7 +4,7 @@
A supported vehicle is one that just works when you install a comma device. All supported cars provide a better experience than any stock system. Supported vehicles reference the US market unless otherwise specified.
-# 267 Supported Cars
+# 268 Supported Cars
|Make|Model|Supported Package|ACC|No ACC accel below|No ALC below|Steering Torque|Resume from stop|Hardware Needed|Video|
|---|---|---|:---:|:---:|:---:|:---:|:---:|:---:|:---:|
@@ -146,13 +146,14 @@ A supported vehicle is one that just works when you install a comma device. All
|Kia|Sportage 2023[6](#footnotes)|Smart Cruise Control (SCC)|Stock|0 mph|0 mph|[](##)|[](##)|Parts: 1 Hyundai N connector, 1 RJ45 cable (7 ft), 1 comma 3X, 1 comma power v2, 1 harness box, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here||
|Kia|Sportage Hybrid 2023[6](#footnotes)|Smart Cruise Control (SCC)|openpilot available[1](#footnotes)|0 mph|0 mph|[](##)|[](##)|Parts: 1 Hyundai N connector, 1 RJ45 cable (7 ft), 1 comma 3X, 1 comma power v2, 1 harness box, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here||
|Kia|Stinger 2018-20|Smart Cruise Control (SCC)|openpilot available[1](#footnotes)|0 mph|0 mph|[](##)|[](##)|Parts: 1 Hyundai C connector, 1 RJ45 cable (7 ft), 1 comma 3X, 1 comma power v2, 1 harness box, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here||
-|Kia|Stinger 2022|All|openpilot available[1](#footnotes)|0 mph|0 mph|[](##)|[](##)|Parts: 1 Hyundai K connector, 1 RJ45 cable (7 ft), 1 comma 3X, 1 comma power v2, 1 harness box, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here||
+|Kia|Stinger 2022-23|All|openpilot available[1](#footnotes)|0 mph|0 mph|[](##)|[](##)|Parts: 1 Hyundai K connector, 1 RJ45 cable (7 ft), 1 comma 3X, 1 comma power v2, 1 harness box, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here||
|Kia|Telluride 2020-22|All|openpilot available[1](#footnotes)|0 mph|0 mph|[](##)|[](##)|Parts: 1 Hyundai H connector, 1 RJ45 cable (7 ft), 1 comma 3X, 1 comma power v2, 1 harness box, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here||
|Lexus|CT Hybrid 2017-18|Lexus Safety System+|openpilot available[2](#footnotes)|0 mph|0 mph|[](##)|[](##)|Parts: 1 RJ45 cable (7 ft), 1 Toyota A connector, 1 comma 3X, 1 comma power v2, 1 harness box, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here||
|Lexus|ES 2017-18|All|openpilot available[2](#footnotes)|19 mph|0 mph|[](##)|[](##)|Parts: 1 RJ45 cable (7 ft), 1 Toyota A connector, 1 comma 3X, 1 comma power v2, 1 harness box, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here||
|Lexus|ES 2019-24|All|openpilot|0 mph|0 mph|[](##)|[](##)|Parts: 1 RJ45 cable (7 ft), 1 Toyota A connector, 1 comma 3X, 1 comma power v2, 1 harness box, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here||
|Lexus|ES Hybrid 2017-18|All|openpilot available[2](#footnotes)|0 mph|0 mph|[](##)|[](##)|Parts: 1 RJ45 cable (7 ft), 1 Toyota A connector, 1 comma 3X, 1 comma power v2, 1 harness box, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here||
|Lexus|ES Hybrid 2019-23|All|openpilot|0 mph|0 mph|[](##)|[](##)|Parts: 1 RJ45 cable (7 ft), 1 Toyota A connector, 1 comma 3X, 1 comma power v2, 1 harness box, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here||
+|Lexus|GS F 2016|All|Stock|19 mph|0 mph|[](##)|[](##)|Parts: 1 RJ45 cable (7 ft), 1 Toyota A connector, 1 comma 3X, 1 comma power v2, 1 harness box, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here||
|Lexus|IS 2017-19|All|Stock|19 mph|0 mph|[](##)|[](##)|Parts: 1 RJ45 cable (7 ft), 1 Toyota A connector, 1 comma 3X, 1 comma power v2, 1 harness box, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here||
|Lexus|IS 2022-23|All|openpilot|0 mph|0 mph|[](##)|[](##)|Parts: 1 RJ45 cable (7 ft), 1 Toyota A connector, 1 comma 3X, 1 comma power v2, 1 harness box, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here||
|Lexus|NX 2018-19|All|openpilot available[2](#footnotes)|0 mph|0 mph|[](##)|[](##)|Parts: 1 RJ45 cable (7 ft), 1 Toyota A connector, 1 comma 3X, 1 comma power v2, 1 harness box, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here||
@@ -190,7 +191,7 @@ A supported vehicle is one that just works when you install a comma device. All
|Subaru|XV 2018-19|EyeSight Driver Assistance[7](#footnotes)|Stock|0 mph|0 mph|[](##)|[](##)|Parts: 1 RJ45 cable (7 ft), 1 Subaru A connector, 1 comma 3X, 1 comma power v2, 1 harness box, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here; Tools: 1 Pry Tool, 1 Socket Wrench 8mm or 5/16" (deep)||
|Subaru|XV 2020-21|EyeSight Driver Assistance[7](#footnotes)|Stock|0 mph|0 mph|[](##)|[](##)|Parts: 1 RJ45 cable (7 ft), 1 Subaru A connector, 1 comma 3X, 1 comma power v2, 1 harness box, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here; Tools: 1 Pry Tool, 1 Socket Wrench 8mm or 5/16" (deep)||
|Škoda|Fabia 2022-23[11](#footnotes)|Adaptive Cruise Control (ACC) & Lane Assist|openpilot available[1,12](#footnotes)|0 mph|0 mph|[](##)|[](##)|Parts: 1 J533 connector, 1 USB-C coupler, 1 comma 3X, 1 harness box, 1 long OBD-C cable, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here [13](#footnotes)||
-|Škoda|Kamiq 2021[9,11](#footnotes)|Adaptive Cruise Control (ACC) & Lane Assist|openpilot available[1,12](#footnotes)|0 mph|0 mph|[](##)|[](##)|Parts: 1 J533 connector, 1 USB-C coupler, 1 comma 3X, 1 harness box, 1 long OBD-C cable, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here [13](#footnotes)||
+|Škoda|Kamiq 2021-23[9,11](#footnotes)|Adaptive Cruise Control (ACC) & Lane Assist|openpilot available[1,12](#footnotes)|0 mph|0 mph|[](##)|[](##)|Parts: 1 J533 connector, 1 USB-C coupler, 1 comma 3X, 1 harness box, 1 long OBD-C cable, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here [13](#footnotes)||
|Škoda|Karoq 2019-23[11](#footnotes)|Adaptive Cruise Control (ACC) & Lane Assist|openpilot available[1,12](#footnotes)|0 mph|0 mph|[](##)|[](##)|Parts: 1 J533 connector, 1 USB-C coupler, 1 comma 3X, 1 harness box, 1 long OBD-C cable, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here||
|Škoda|Kodiaq 2017-23[11](#footnotes)|Adaptive Cruise Control (ACC) & Lane Assist|openpilot available[1,12](#footnotes)|0 mph|0 mph|[](##)|[](##)|Parts: 1 J533 connector, 1 USB-C coupler, 1 comma 3X, 1 harness box, 1 long OBD-C cable, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here||
|Škoda|Octavia 2015-19[11](#footnotes)|Adaptive Cruise Control (ACC) & Lane Assist|openpilot available[1,12](#footnotes)|0 mph|0 mph|[](##)|[](##)|Parts: 1 J533 connector, 1 USB-C coupler, 1 comma 3X, 1 harness box, 1 long OBD-C cable, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here||
@@ -210,7 +211,7 @@ A supported vehicle is one that just works when you install a comma device. All
|Toyota|C-HR Hybrid 2017-20|All|Stock|0 mph|0 mph|[](##)|[](##)|Parts: 1 RJ45 cable (7 ft), 1 Toyota A connector, 1 comma 3X, 1 comma power v2, 1 harness box, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here||
|Toyota|C-HR Hybrid 2021-22|All|Stock|0 mph|0 mph|[](##)|[](##)|Parts: 1 RJ45 cable (7 ft), 1 Toyota A connector, 1 comma 3X, 1 comma power v2, 1 harness box, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here||
|Toyota|Camry 2018-20|All|Stock|0 mph[8](#footnotes)|0 mph|[](##)|[](##)|Parts: 1 RJ45 cable (7 ft), 1 Toyota A connector, 1 comma 3X, 1 comma power v2, 1 harness box, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here||
-|Toyota|Camry 2021-23|All|openpilot|0 mph[8](#footnotes)|0 mph|[](##)|[](##)|Parts: 1 RJ45 cable (7 ft), 1 Toyota A connector, 1 comma 3X, 1 comma power v2, 1 harness box, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here||
+|Toyota|Camry 2021-24|All|openpilot|0 mph[8](#footnotes)|0 mph|[](##)|[](##)|Parts: 1 RJ45 cable (7 ft), 1 Toyota A connector, 1 comma 3X, 1 comma power v2, 1 harness box, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here||
|Toyota|Camry Hybrid 2018-20|All|Stock|0 mph|0 mph|[](##)|[](##)|Parts: 1 RJ45 cable (7 ft), 1 Toyota A connector, 1 comma 3X, 1 comma power v2, 1 harness box, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here||
|Toyota|Camry Hybrid 2021-24|All|openpilot|0 mph|0 mph|[](##)|[](##)|Parts: 1 RJ45 cable (7 ft), 1 Toyota A connector, 1 comma 3X, 1 comma power v2, 1 harness box, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here||
|Toyota|Corolla 2017-19|All|openpilot available[2](#footnotes)|19 mph|0 mph|[](##)|[](##)|Parts: 1 RJ45 cable (7 ft), 1 Toyota A connector, 1 comma 3X, 1 comma power v2, 1 harness box, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here||
@@ -267,7 +268,7 @@ A supported vehicle is one that just works when you install a comma device. All
|Volkswagen|Polo 2018-23|Adaptive Cruise Control (ACC) & Lane Assist|openpilot available[1,12](#footnotes)|0 mph|0 mph|[](##)|[](##)|Parts: 1 J533 connector, 1 USB-C coupler, 1 comma 3X, 1 harness box, 1 long OBD-C cable, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here [13](#footnotes)||
|Volkswagen|Polo GTI 2018-23|Adaptive Cruise Control (ACC) & Lane Assist|openpilot available[1,12](#footnotes)|0 mph|0 mph|[](##)|[](##)|Parts: 1 J533 connector, 1 USB-C coupler, 1 comma 3X, 1 harness box, 1 long OBD-C cable, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here [13](#footnotes)||
|Volkswagen|T-Cross 2021|Adaptive Cruise Control (ACC) & Lane Assist|openpilot available[1,12](#footnotes)|0 mph|0 mph|[](##)|[](##)|Parts: 1 J533 connector, 1 USB-C coupler, 1 comma 3X, 1 harness box, 1 long OBD-C cable, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here [13](#footnotes)||
-|Volkswagen|T-Roc 2021|Adaptive Cruise Control (ACC) & Lane Assist|openpilot available[1,12](#footnotes)|0 mph|0 mph|[](##)|[](##)|Parts: 1 J533 connector, 1 USB-C coupler, 1 comma 3X, 1 harness box, 1 long OBD-C cable, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here [13](#footnotes)||
+|Volkswagen|T-Roc 2018-22|Adaptive Cruise Control (ACC) & Lane Assist|openpilot available[1,12](#footnotes)|0 mph|0 mph|[](##)|[](##)|Parts: 1 J533 connector, 1 USB-C coupler, 1 comma 3X, 1 harness box, 1 long OBD-C cable, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here [13](#footnotes)||
|Volkswagen|Taos 2022-23|Adaptive Cruise Control (ACC) & Lane Assist|openpilot available[1,12](#footnotes)|0 mph|0 mph|[](##)|[](##)|Parts: 1 J533 connector, 1 USB-C coupler, 1 comma 3X, 1 harness box, 1 long OBD-C cable, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here||
|Volkswagen|Teramont 2018-22|Adaptive Cruise Control (ACC) & Lane Assist|openpilot available[1,12](#footnotes)|0 mph|0 mph|[](##)|[](##)|Parts: 1 J533 connector, 1 USB-C coupler, 1 comma 3X, 1 harness box, 1 long OBD-C cable, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here||
|Volkswagen|Teramont Cross Sport 2021-22|Adaptive Cruise Control (ACC) & Lane Assist|openpilot available[1,12](#footnotes)|0 mph|0 mph|[](##)|[](##)|Parts: 1 J533 connector, 1 USB-C coupler, 1 comma 3X, 1 harness box, 1 long OBD-C cable, 1 mount, 1 right angle OBD-C cable (1.5 ft); Buy Here||
diff --git a/pyproject.toml b/pyproject.toml
index 16dad2c643..4e494e65fc 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,7 +1,7 @@
[tool.pytest.ini_options]
minversion = "6.0"
-addopts = "--ignore=openpilot/ --ignore=cereal/ --ignore=opendbc/ --ignore=panda/ --ignore=rednose_repo/ --ignore=tinygrad_repo/ --ignore=laika_repo/ -Werror --strict-config --strict-markers --durations=10"
-#cpp_files = "test_*" # uncomment when agnos has pytest-cpp and remove from CI
+addopts = "--ignore=openpilot/ --ignore=cereal/ --ignore=opendbc/ --ignore=panda/ --ignore=rednose_repo/ --ignore=tinygrad_repo/ --ignore=laika_repo/ -Werror --strict-config --strict-markers --durations=10 -n auto --dist=loadscope"
+cpp_files = "test_*"
python_files = "test_*.py"
#timeout = "30" # you get this long by default
markers = [
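The `-n auto --dist=loadscope` options that each CI job previously passed via `$XDIST` now live in the repo-wide pytest addopts, so a plain `pytest` invocation parallelizes by default. As a rough illustration (not pytest-xdist's actual scheduler), `--dist=loadscope` groups tests by their module/class scope so every test in a group runs on the same worker, keeping class-scoped fixtures and setUpClass work from being repeated:

```python
# Illustrative sketch of a loadscope-style grouping key, assuming xdist-like
# nodeids; pytest-xdist's real scheduler is more involved.
def loadscope_key(nodeid: str) -> str:
  # "pkg/test_mod.py::TestClass::test_a" -> "pkg/test_mod.py::TestClass"
  # "pkg/test_mod.py::test_b"            -> "pkg/test_mod.py"
  return nodeid.rsplit("::", 1)[0]

assert loadscope_key("selfdrive/car/tests/test_models.py::TestCarModel::test_panda_safety") \
       == "selfdrive/car/tests/test_models.py::TestCarModel"
```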
diff --git a/release/files_common b/release/files_common
index 5a076f2b1a..b2623e0113 100644
--- a/release/files_common
+++ b/release/files_common
@@ -293,6 +293,7 @@ selfdrive/test/helpers.py
selfdrive/test/setup_device_ci.sh
selfdrive/test/test_onroad.py
selfdrive/test/test_time_to_onroad.py
+selfdrive/test/pytest-tici.ini
selfdrive/ui/.gitignore
selfdrive/ui/SConscript
diff --git a/selfdrive/car/hyundai/values.py b/selfdrive/car/hyundai/values.py
index edd4f5177c..2d64d7a100 100644
--- a/selfdrive/car/hyundai/values.py
+++ b/selfdrive/car/hyundai/values.py
@@ -267,7 +267,7 @@ CAR_INFO: Dict[str, Optional[Union[HyundaiCarInfo, List[HyundaiCarInfo]]]] = {
CAR.KIA_SPORTAGE_HYBRID_5TH_GEN: HyundaiCarInfo("Kia Sportage Hybrid 2023", car_parts=CarParts.common([CarHarness.hyundai_n])),
CAR.KIA_STINGER: HyundaiCarInfo("Kia Stinger 2018-20", video_link="https://www.youtube.com/watch?v=MJ94qoofYw0",
car_parts=CarParts.common([CarHarness.hyundai_c])),
- CAR.KIA_STINGER_2022: HyundaiCarInfo("Kia Stinger 2022", "All", car_parts=CarParts.common([CarHarness.hyundai_k])),
+ CAR.KIA_STINGER_2022: HyundaiCarInfo("Kia Stinger 2022-23", "All", car_parts=CarParts.common([CarHarness.hyundai_k])),
CAR.KIA_CEED: HyundaiCarInfo("Kia Ceed 2019", car_parts=CarParts.common([CarHarness.hyundai_e])),
CAR.KIA_EV6: [
HyundaiCarInfo("Kia EV6 (Southeast Asia only) 2022-23", "All", car_parts=CarParts.common([CarHarness.hyundai_p])),
@@ -1130,21 +1130,26 @@ FW_VERSIONS = {
(Ecu.fwdRadar, 0x7d0, None): [
b'\xf1\x00CK__ SCC F-CUP 1.00 1.00 99110-J5500 ',
b'\xf1\x00CK__ SCC FHCUP 1.00 1.00 99110-J5500 ',
+ b'\xf1\x00CK__ SCC FHCUP 1.00 1.00 99110-J5600 ',
],
(Ecu.engine, 0x7e0, None): [
b'\xf1\x81640R0051\x00\x00\x00\x00\x00\x00\x00\x00',
+ b'\xf1\x81640N2051\x00\x00\x00\x00\x00\x00\x00\x00',
b'\xf1\x81HM6M1_0a0_H00',
],
(Ecu.eps, 0x7d4, None): [
b'\xf1\x00CK MDPS R 1.00 5.03 57700-J5380 4C2VR503',
b'\xf1\x00CK MDPS R 1.00 5.03 57700-J5300 4C2CL503',
+ b'\xf1\x00CK MDPS R 1.00 5.04 57700-J5520 4C4VL504',
],
(Ecu.fwdCamera, 0x7c4, None): [
b'\xf1\x00CK MFC AT AUS RHD 1.00 1.00 99211-J5500 210622',
b'\xf1\x00CK MFC AT KOR LHD 1.00 1.00 99211-J5500 210622',
+ b'\xf1\x00CK MFC AT USA LHD 1.00 1.00 99211-J5500 210622',
],
(Ecu.transmission, 0x7e1, None): [
b'\xf1\x87VCNLF11383972DK1vffV\x99\x99\x89\x98\x86eUU\x88wg\x89vfff\x97fff\x99\x87o\xff"\xc1\xf1\x81E30\x00\x00\x00\x00\x00\x00\x00\xf1\x00bcsh8p54 E30\x00\x00\x00\x00\x00\x00\x00SCK0T33GH0\xbe`\xfb\xc6',
+ b'\xf1\x00bcsh8p54 E31\x00\x00\x00\x00\x00\x00\x00SCK0T33NH07\xdf\xf0\xc1',
b'\xf1\x00bcsh8p54 E31\x00\x00\x00\x00\x00\x00\x00SCK0T25KH2B\xfbI\xe2',
],
},
diff --git a/selfdrive/car/mazda/values.py b/selfdrive/car/mazda/values.py
index 849cf229a3..1547f69b04 100644
--- a/selfdrive/car/mazda/values.py
+++ b/selfdrive/car/mazda/values.py
@@ -93,6 +93,7 @@ FW_VERSIONS = {
b'PX85-188K2-E\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
b'SH54-188K2-D\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
b'PXFG-188K2-C\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
+ b'PEW5-188K2-A\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x764, None): [
b'K131-67XK2-F\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
@@ -113,6 +114,7 @@ FW_VERSIONS = {
b'PXDL-21PS1-B\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
b'PXFG-21PS1-A\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
b'PXFG-21PS1-B\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
+ b'PG69-21PS1-A\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
],
},
CAR.CX5: {
diff --git a/selfdrive/car/tests/routes.py b/selfdrive/car/tests/routes.py
index 8cf5f3debd..dc9fa32b97 100755
--- a/selfdrive/car/tests/routes.py
+++ b/selfdrive/car/tests/routes.py
@@ -193,6 +193,7 @@ routes = [
CarTestRoute("da23c367491f53e2|2021-05-21--09-09-11", TOYOTA.LEXUS_CTH, segment=3),
CarTestRoute("37041c500fd30100|2020-12-30--12-17-24", TOYOTA.LEXUS_ESH),
CarTestRoute("32696cea52831b02|2021-11-19--18-13-30", TOYOTA.LEXUS_RC),
+ CarTestRoute("ab9b64a5e5960cba|2023-10-24--17-32-08", TOYOTA.LEXUS_GS_F),
CarTestRoute("886fcd8408d570e9|2020-01-29--02-18-55", TOYOTA.LEXUS_RX),
CarTestRoute("d27ad752e9b08d4f|2021-05-26--19-39-51", TOYOTA.LEXUS_RXH),
CarTestRoute("01b22eb2ed121565|2020-02-02--11-25-51", TOYOTA.LEXUS_RX_TSS2),
diff --git a/selfdrive/car/torque_data/override.yaml b/selfdrive/car/torque_data/override.yaml
index 29e616b102..12b483cfa7 100644
--- a/selfdrive/car/torque_data/override.yaml
+++ b/selfdrive/car/torque_data/override.yaml
@@ -63,6 +63,7 @@ HYUNDAI AZERA 6TH GEN: [1.8, 1.8, 0.1]
HYUNDAI AZERA HYBRID 6TH GEN: [1.8, 1.8, 0.1]
KIA K8 HYBRID 1ST GEN: [2.5, 2.5, 0.1]
HYUNDAI CUSTIN 1ST GEN: [2.5, 2.5, 0.1]
+LEXUS GS F 2016: [2.5, 2.5, 0.08]
# Dashcam or fallback configured as ideal car
mock: [10.0, 10, 0.0]
diff --git a/selfdrive/car/toyota/interface.py b/selfdrive/car/toyota/interface.py
index 84f12c19cc..5380891574 100644
--- a/selfdrive/car/toyota/interface.py
+++ b/selfdrive/car/toyota/interface.py
@@ -163,6 +163,12 @@ class CarInterface(CarInterfaceBase):
ret.tireStiffnessFactor = 0.444
ret.mass = 3736.8 * CV.LB_TO_KG
+ elif candidate == CAR.LEXUS_GS_F:
+ ret.wheelbase = 2.84988
+ ret.steerRatio = 13.3
+ ret.tireStiffnessFactor = 0.444
+ ret.mass = 4034. * CV.LB_TO_KG
+
elif candidate == CAR.LEXUS_CTH:
stop_and_go = True
ret.wheelbase = 2.60
diff --git a/selfdrive/car/toyota/values.py b/selfdrive/car/toyota/values.py
index bac1fb84b9..4a1011982c 100644
--- a/selfdrive/car/toyota/values.py
+++ b/selfdrive/car/toyota/values.py
@@ -86,6 +86,7 @@ class CAR(StrEnum):
LEXUS_RX = "LEXUS RX 2016"
LEXUS_RXH = "LEXUS RX HYBRID 2017"
LEXUS_RX_TSS2 = "LEXUS RX 2020"
+ LEXUS_GS_F = "LEXUS GS F 2016"
class Footnote(Enum):
@@ -123,7 +124,7 @@ CAR_INFO: Dict[str, Union[ToyotaCarInfo, List[ToyotaCarInfo]]] = {
ToyotaCarInfo("Toyota Camry Hybrid 2018-20", video_link="https://www.youtube.com/watch?v=Q2DYY0AWKgk"),
],
CAR.CAMRY_TSS2: [
- ToyotaCarInfo("Toyota Camry 2021-23", footnotes=[Footnote.CAMRY]),
+ ToyotaCarInfo("Toyota Camry 2021-24", footnotes=[Footnote.CAMRY]),
ToyotaCarInfo("Toyota Camry Hybrid 2021-24"),
],
CAR.CHR: [
@@ -194,6 +195,7 @@ CAR_INFO: Dict[str, Union[ToyotaCarInfo, List[ToyotaCarInfo]]] = {
],
CAR.LEXUS_IS: ToyotaCarInfo("Lexus IS 2017-19"),
CAR.LEXUS_IS_TSS2: ToyotaCarInfo("Lexus IS 2022-23"),
+ CAR.LEXUS_GS_F: ToyotaCarInfo("Lexus GS F 2016"),
CAR.LEXUS_NX: [
ToyotaCarInfo("Lexus NX 2018-19"),
ToyotaCarInfo("Lexus NX Hybrid 2018-19"),
@@ -1717,6 +1719,26 @@ FW_VERSIONS = {
b'8646F3302200\x00\x00\x00\x00',
],
},
+ CAR.LEXUS_GS_F: {
+ (Ecu.engine, 0x7E0, None): [
+ b'\x0233075200\x00\x00\x00\x00\x00\x00\x00\x00530B9000\x00\x00\x00\x00\x00\x00\x00\x00',
+ ],
+ (Ecu.abs, 0x7b0, None): [
+ b'F152630700\x00\x00\x00\x00\x00\x00',
+ ],
+ (Ecu.dsu, 0x791, None): [
+ b'881513016200\x00\x00\x00\x00',
+ ],
+ (Ecu.eps, 0x7a1, None): [
+ b'8965B30551\x00\x00\x00\x00\x00\x00',
+ ],
+ (Ecu.fwdRadar, 0x750, 0xf): [
+ b'8821F4702000\x00\x00\x00\x00',
+ ],
+ (Ecu.fwdCamera, 0x750, 0x6d): [
+ b'8646F3002100\x00\x00\x00\x00',
+ ],
+ },
CAR.LEXUS_NX: {
(Ecu.engine, 0x700, None): [
b'\x01896637850000\x00\x00\x00\x00',
@@ -1794,6 +1816,7 @@ FW_VERSIONS = {
},
CAR.LEXUS_RC: {
(Ecu.engine, 0x700, None): [
+ b'\x01896632461100\x00\x00\x00\x00',
b'\x01896632478200\x00\x00\x00\x00',
],
(Ecu.engine, 0x7e0, None): [
@@ -1804,17 +1827,20 @@ FW_VERSIONS = {
b'F152624221\x00\x00\x00\x00\x00\x00',
],
(Ecu.dsu, 0x791, None): [
+ b'881512404100\x00\x00\x00\x00',
b'881512407000\x00\x00\x00\x00',
b'881512409100\x00\x00\x00\x00',
],
(Ecu.eps, 0x7a1, None): [
b'8965B24081\x00\x00\x00\x00\x00\x00',
+ b'8965B24240\x00\x00\x00\x00\x00\x00',
b'8965B24320\x00\x00\x00\x00\x00\x00',
],
(Ecu.fwdRadar, 0x750, 0xf): [
b'8821F4702300\x00\x00\x00\x00',
],
(Ecu.fwdCamera, 0x750, 0x6d): [
+ b'8646F2401100\x00\x00\x00\x00',
b'8646F2401200\x00\x00\x00\x00',
b'8646F2402200\x00\x00\x00\x00',
],
@@ -2081,6 +2107,7 @@ DBC = {
CAR.PRIUS_TSS2: dbc_dict('toyota_nodsu_pt_generated', 'toyota_tss2_adas'),
CAR.MIRAI: dbc_dict('toyota_nodsu_pt_generated', 'toyota_tss2_adas'),
CAR.ALPHARD_TSS2: dbc_dict('toyota_nodsu_pt_generated', 'toyota_tss2_adas'),
+ CAR.LEXUS_GS_F: dbc_dict('toyota_new_mc_pt_generated', 'toyota_adas'),
}
# These cars have non-standard EPS torque scale factors. All others are 73
@@ -2094,7 +2121,7 @@ TSS2_CAR = {CAR.RAV4_TSS2, CAR.RAV4_TSS2_2022, CAR.RAV4_TSS2_2023, CAR.COROLLA_T
NO_DSU_CAR = TSS2_CAR | {CAR.CHR, CAR.CAMRY}
# the DSU uses the AEB message for longitudinal on these cars
-UNSUPPORTED_DSU_CAR = {CAR.LEXUS_IS, CAR.LEXUS_RC}
+UNSUPPORTED_DSU_CAR = {CAR.LEXUS_IS, CAR.LEXUS_RC, CAR.LEXUS_GS_F}
# these cars have a radar which sends ACC messages instead of the camera
RADAR_ACC_CAR = {CAR.RAV4_TSS2_2022, CAR.RAV4_TSS2_2023, CAR.CHR_TSS2}
diff --git a/selfdrive/car/volkswagen/values.py b/selfdrive/car/volkswagen/values.py
index 3c9aeb55d7..c8e6daaef8 100644
--- a/selfdrive/car/volkswagen/values.py
+++ b/selfdrive/car/volkswagen/values.py
@@ -247,7 +247,7 @@ CAR_INFO: Dict[str, Union[VWCarInfo, List[VWCarInfo]]] = {
VWCarInfo("Volkswagen Caravelle 2020"),
VWCarInfo("Volkswagen California 2021-23"),
],
- CAR.TROC_MK1: VWCarInfo("Volkswagen T-Roc 2021", footnotes=[Footnote.VW_MQB_A0]),
+ CAR.TROC_MK1: VWCarInfo("Volkswagen T-Roc 2018-22", footnotes=[Footnote.VW_MQB_A0]),
CAR.AUDI_A3_MK3: [
VWCarInfo("Audi A3 2014-19"),
VWCarInfo("Audi A3 Sportback e-tron 2017-18"),
@@ -259,7 +259,7 @@ CAR_INFO: Dict[str, Union[VWCarInfo, List[VWCarInfo]]] = {
CAR.SEAT_ATECA_MK1: VWCarInfo("SEAT Ateca 2018"),
CAR.SEAT_LEON_MK3: VWCarInfo("SEAT Leon 2014-20"),
CAR.SKODA_FABIA_MK4: VWCarInfo("Škoda Fabia 2022-23", footnotes=[Footnote.VW_MQB_A0]),
- CAR.SKODA_KAMIQ_MK1: VWCarInfo("Škoda Kamiq 2021", footnotes=[Footnote.VW_MQB_A0, Footnote.KAMIQ]),
+ CAR.SKODA_KAMIQ_MK1: VWCarInfo("Škoda Kamiq 2021-23", footnotes=[Footnote.VW_MQB_A0, Footnote.KAMIQ]),
CAR.SKODA_KAROQ_MK1: VWCarInfo("Škoda Karoq 2019-23"),
CAR.SKODA_KODIAQ_MK1: VWCarInfo("Škoda Kodiaq 2017-23"),
CAR.SKODA_SCALA_MK1: VWCarInfo("Škoda Scala 2020-23", footnotes=[Footnote.VW_MQB_A0]),
@@ -491,6 +491,7 @@ FW_VERSIONS = {
b'\xf1\x870CW300047E \xf1\x895261',
b'\xf1\x870CW300048J \xf1\x890611',
b'\xf1\x870CW300049H \xf1\x890905',
+ b'\xf1\x870CW300050G \xf1\x891905',
b'\xf1\x870D9300012 \xf1\x894904',
b'\xf1\x870D9300012 \xf1\x894913',
b'\xf1\x870D9300012 \xf1\x894937',
@@ -532,6 +533,7 @@ FW_VERSIONS = {
b'\xf1\x875Q0959655BT\xf1\x890403\xf1\x82\x13141600111233003142404A2252229333463100',
b'\xf1\x875Q0959655BT\xf1\x890403\xf1\x82\x13141600111233003142405A2251229333463100',
b'\xf1\x875Q0959655BT\xf1\x890403\xf1\x82\x13141600111233003142405A2252229333463100',
+ b'\xf1\x875Q0959655CA\xf1\x890403\xf1\x82\x1314160011123300314240012250229333463100',
b'\xf1\x875Q0959655C \xf1\x890361\xf1\x82\x111413001112120004110415121610169112',
b'\xf1\x875Q0959655D \xf1\x890388\xf1\x82\x111413001113120006110417121A101A9113',
b'\xf1\x875Q0959655J \xf1\x890830\xf1\x82\x13271112111312--071104171825102591131211',
@@ -658,6 +660,7 @@ FW_VERSIONS = {
(Ecu.engine, 0x7e0, None): [
b'\xf1\x8703N906026E \xf1\x892114',
b'\xf1\x8704E906023AH\xf1\x893379',
+ b'\xf1\x8704E906023BM\xf1\x894522',
b'\xf1\x8704L906026DP\xf1\x891538',
b'\xf1\x8704L906026ET\xf1\x891990',
b'\xf1\x8704L906026FP\xf1\x892012',
@@ -673,6 +676,7 @@ FW_VERSIONS = {
b'\xf1\x870D9300014L \xf1\x895002',
b'\xf1\x870D9300041A \xf1\x894801',
b'\xf1\x870DD300045T \xf1\x891601',
+ b'\xf1\x870DD300046H \xf1\x891601',
b'\xf1\x870DL300011H \xf1\x895201',
b'\xf1\x870CW300042H \xf1\x891601',
b'\xf1\x870CW300042H \xf1\x891607',
@@ -826,6 +830,7 @@ FW_VERSIONS = {
b'\xf1\x8704E906027NB\xf1\x899504',
b'\xf1\x8704L906026EJ\xf1\x893661',
b'\xf1\x8704L906027G \xf1\x899893',
+ b'\xf1\x8705E906018BS\xf1\x890914',
b'\xf1\x875N0906259 \xf1\x890002',
b'\xf1\x875NA906259H \xf1\x890002',
b'\xf1\x875NA907115E \xf1\x890003',
@@ -857,6 +862,7 @@ FW_VERSIONS = {
b'\xf1\x870DL300014C \xf1\x893703',
b'\xf1\x870DD300046K \xf1\x892302',
b'\xf1\x870GC300013P \xf1\x892401',
+ b'\xf1\x870GC300046Q \xf1\x892802',
],
(Ecu.srs, 0x715, None): [
b'\xf1\x875Q0959655AR\xf1\x890317\xf1\x82\02331310031333334313132573732379333313100',
@@ -891,6 +897,7 @@ FW_VERSIONS = {
],
(Ecu.fwdRadar, 0x757, None): [
b'\xf1\x872Q0907572AA\xf1\x890396',
+ b'\xf1\x872Q0907572AB\xf1\x890397',
b'\xf1\x872Q0907572J \xf1\x890156',
b'\xf1\x872Q0907572M \xf1\x890233',
b'\xf1\x872Q0907572Q \xf1\x890342',
@@ -955,20 +962,29 @@ FW_VERSIONS = {
CAR.TROC_MK1: {
(Ecu.engine, 0x7e0, None): [
b'\xf1\x8705E906018AT\xf1\x899640',
+ b'\xf1\x8705E906018CK\xf1\x890863',
+ b'\xf1\x8705E906018P \xf1\x896020',
],
(Ecu.transmission, 0x7e1, None): [
b'\xf1\x870CW300050J \xf1\x891911',
b'\xf1\x870CW300051M \xf1\x891925',
+ b'\xf1\x870CW300051M \xf1\x891928',
+ b'\xf1\x870CW300041S \xf1\x891615',
],
(Ecu.srs, 0x715, None): [
b'\xf1\x875Q0959655BT\xf1\x890403\xf1\x82\x1311110012333300314240681152119333463100',
b'\xf1\x875Q0959655CG\xf1\x890421\xf1\x82\x13111100123333003142404M1152119333613100',
+ b'\xf1\x875Q0959655CF\xf1\x890421\xf1\x82\x1311110012333300314240021150119333613100',
+ b'\xf1\x873Q0959655BH\xf1\x890712\xf1\x82\x0e1111001111001105111111052900',
],
(Ecu.eps, 0x712, None): [
b'\xf1\x875Q0909144AB\xf1\x891082\xf1\x82\x0521060405A1',
+ b'\xf1\x875WA907144M \xf1\x891051\xf1\x82\x001T06081T7N',
+ b'\xf1\x875Q0909144AA\xf1\x891081\xf1\x82\x0521060403A1',
],
(Ecu.fwdRadar, 0x757, None): [
b'\xf1\x872Q0907572T \xf1\x890383',
+ b'\xf1\x872Q0907572M \xf1\x890233',
],
},
CAR.AUDI_A3_MK3: {
@@ -1186,18 +1202,23 @@ FW_VERSIONS = {
CAR.SKODA_KAMIQ_MK1: {
(Ecu.engine, 0x7e0, None): [
b'\xf1\x8705C906032M \xf1\x891333',
+ b'\xf1\x8705E906013CK\xf1\x892540',
],
(Ecu.transmission, 0x7e1, None): [
b'\xf1\x870CW300020 \xf1\x891906',
+ b'\xf1\x870CW300020T \xf1\x892204',
],
(Ecu.srs, 0x715, None): [
b'\xf1\x872Q0959655AM\xf1\x890351\xf1\x82\0222221042111042121040404042E2711152H14',
+ b'\xf1\x872Q0959655BJ\xf1\x890412\xf1\x82\x132223042111042121040404042B251215391423',
],
(Ecu.eps, 0x712, None): [
b'\xf1\x872Q1909144M \xf1\x896041',
+ b'\xf1\x872Q1909144AB\xf1\x896050',
],
(Ecu.fwdRadar, 0x757, None): [
b'\xf1\x872Q0907572T \xf1\x890383',
+ b'\xf1\x872Q0907572AA\xf1\x890396',
],
},
CAR.SKODA_KAROQ_MK1: {
diff --git a/selfdrive/test/process_replay/conftest.py b/selfdrive/test/process_replay/conftest.py
new file mode 100644
index 0000000000..f3794d26ac
--- /dev/null
+++ b/selfdrive/test/process_replay/conftest.py
@@ -0,0 +1,37 @@
+import pytest
+
+from openpilot.selfdrive.test.process_replay.helpers import ALL_PROCS
+from openpilot.selfdrive.test.process_replay.test_processes import ALL_CARS
+
+
+def pytest_addoption(parser: pytest.Parser):
+ parser.addoption("--whitelist-procs", type=str, nargs="*", default=ALL_PROCS,
+ help="Whitelist given processes from the test (e.g. controlsd)")
+ parser.addoption("--whitelist-cars", type=str, nargs="*", default=ALL_CARS,
+ help="Whitelist given cars from the test (e.g. HONDA)")
+ parser.addoption("--blacklist-procs", type=str, nargs="*", default=[],
+ help="Blacklist given processes from the test (e.g. controlsd)")
+ parser.addoption("--blacklist-cars", type=str, nargs="*", default=[],
+ help="Blacklist given cars from the test (e.g. HONDA)")
+ parser.addoption("--ignore-fields", type=str, nargs="*", default=[],
+ help="Extra fields or msgs to ignore (e.g. carState.events)")
+ parser.addoption("--ignore-msgs", type=str, nargs="*", default=[],
+ help="Msgs to ignore (e.g. carEvents)")
+ parser.addoption("--update-refs", action="store_true",
+ help="Updates reference logs using current commit")
+ parser.addoption("--upload-only", action="store_true",
+ help="Skips testing processes and uploads logs from previous test run")
+ parser.addoption("--long-diff", action="store_true",
+ help="Outputs diff in long format")
+
+
+@pytest.fixture(scope="class", autouse=True)
+def process_replay_test_arguments(request):
+ if hasattr(request.cls, "segment"): # check if a subclass of TestProcessReplayBase
+ request.cls.tested_procs = list(set(request.config.getoption("--whitelist-procs")) - set(request.config.getoption("--blacklist-procs")))
+ request.cls.tested_cars = list({c.upper() for c in set(request.config.getoption("--whitelist-cars")) - set(request.config.getoption("--blacklist-cars"))})
+ request.cls.ignore_fields = request.config.getoption("--ignore-fields")
+ request.cls.ignore_msgs = request.config.getoption("--ignore-msgs")
+ request.cls.upload_only = request.config.getoption("--upload-only")
+ request.cls.update_refs = request.config.getoption("--update-refs")
+ request.cls.long_diff = request.config.getoption("--long-diff")
\ No newline at end of file
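This conftest moves the old argparse flags from test_processes.py into pytest options; the class-scoped autouse fixture then copies the parsed values onto the unittest-style test classes before they run. A minimal sketch of the same pattern, with a hypothetical option and attribute name:

```python
# Minimal sketch of the conftest pattern above; "--example-flag" and
# .example_flag are hypothetical names, for illustration only.
import pytest

def pytest_addoption(parser: pytest.Parser):
  parser.addoption("--example-flag", action="store_true",
                   help="hypothetical flag, for illustration")

@pytest.fixture(scope="class", autouse=True)
def inject_example_flag(request):
  if request.cls is not None:  # only class-based tests get the attribute
    request.cls.example_flag = request.config.getoption("--example-flag")
```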
diff --git a/selfdrive/test/process_replay/helpers.py b/selfdrive/test/process_replay/helpers.py
new file mode 100755
index 0000000000..0952a01870
--- /dev/null
+++ b/selfdrive/test/process_replay/helpers.py
@@ -0,0 +1,150 @@
+#!/usr/bin/env python3
+import os
+import sys
+import unittest
+
+from parameterized import parameterized
+from typing import Optional, Union, List
+
+
+from openpilot.selfdrive.test.openpilotci import get_url, upload_file
+from openpilot.selfdrive.test.process_replay.compare_logs import compare_logs, format_process_diff
+from openpilot.selfdrive.test.process_replay.process_replay import CONFIGS, PROC_REPLAY_DIR, FAKEDATA, replay_process
+from openpilot.system.version import get_commit
+from openpilot.tools.lib.filereader import FileReader
+from openpilot.tools.lib.helpers import save_log
+from openpilot.tools.lib.logreader import LogReader, LogIterable
+
+
+BASE_URL = "https://commadataci.blob.core.windows.net/openpilotci/"
+REF_COMMIT_FN = os.path.join(PROC_REPLAY_DIR, "ref_commit")
+EXCLUDED_PROCS = {"modeld", "dmonitoringmodeld"}
+
+
+def get_log_data(segment):
+ r, n = segment.rsplit("--", 1)
+ with FileReader(get_url(r, n)) as f:
+ return f.read()
+
+
+ALL_PROCS = sorted({cfg.proc_name for cfg in CONFIGS if cfg.proc_name not in EXCLUDED_PROCS})
+PROC_TO_CFG = {cfg.proc_name: cfg for cfg in CONFIGS}
+
+cpu_count = os.cpu_count() or 1
+
+
+class TestProcessReplayBase(unittest.TestCase):
+ """
+ Base class that replays all processes within test_processes from a segment,
+ and puts the log messages in self.log_msgs for analysis by other tests.
+ """
+ segment: Optional[Union[str, LogIterable]] = None
+ tested_procs: List[str] = ALL_PROCS
+
+ @classmethod
+ def setUpClass(cls, create_logs=True):
+ if "Base" in cls.__name__:
+ raise unittest.SkipTest("skipping base class")
+
+ if isinstance(cls.segment, str):
+ cls.log_reader = LogReader.from_bytes(get_log_data(cls.segment))
+ else:
+ cls.log_reader = cls.segment
+
+ if create_logs:
+ cls._create_log_msgs()
+
+ @classmethod
+ def _run_replay(cls, cfg):
+ try:
+ return replay_process(cfg, cls.log_reader, disable_progress=True)
+ except Exception as e:
+ raise Exception(f"failed on segment: {cls.segment} \n{e}") from e
+
+ @classmethod
+ def _create_log_msgs(cls):
+ cls.log_msgs = {}
+ cls.proc_cfg = {}
+
+ for proc in cls.tested_procs:
+ cfg = PROC_TO_CFG[proc]
+
+ log_msgs = cls._run_replay(cfg)
+
+ cls.log_msgs[proc] = log_msgs
+ cls.proc_cfg[proc] = cfg
+
+
+class TestProcessReplayDiffBase(TestProcessReplayBase):
+ """
+ Base class for checking for diff between process outputs.
+ """
+ update_refs = False
+ upload_only = False
+ long_diff = False
+ ignore_msgs: List[str] = []
+ ignore_fields: List[str] = []
+
+ def setUp(self):
+ super().setUp()
+ if self.upload_only:
+ raise unittest.SkipTest("skipping test, uploading only")
+
+ @classmethod
+ def setUpClass(cls):
+ super().setUpClass(not cls.upload_only)
+
+ if cls.long_diff:
+ cls.maxDiff = None
+
+ os.makedirs(os.path.dirname(FAKEDATA), exist_ok=True)
+
+ cls.cur_commit = get_commit()
+ assert cls.cur_commit is not None, "Couldn't get current commit"
+
+ cls.upload = cls.update_refs or cls.upload_only
+
+ try:
+ with open(REF_COMMIT_FN) as f:
+ cls.ref_commit = f.read().strip()
+ except FileNotFoundError:
+ print("Couldn't find reference commit")
+ sys.exit(1)
+
+ cls._create_ref_log_msgs()
+
+ @classmethod
+ def _create_ref_log_msgs(cls):
+ cls.ref_log_msgs = {}
+
+ for proc in cls.tested_procs:
+ cur_log_fn = os.path.join(FAKEDATA, f"{cls.segment}_{proc}_{cls.cur_commit}.bz2")
+ if cls.update_refs: # reference logs will not exist if routes were just regenerated
+ ref_log_path = get_url(*cls.segment.rsplit("--", 1))
+ else:
+ ref_log_fn = os.path.join(FAKEDATA, f"{cls.segment}_{proc}_{cls.ref_commit}.bz2")
+ ref_log_path = ref_log_fn if os.path.exists(ref_log_fn) else BASE_URL + os.path.basename(ref_log_fn)
+
+ if not cls.upload_only:
+ save_log(cur_log_fn, cls.log_msgs[proc])
+ cls.ref_log_msgs[proc] = list(LogReader(ref_log_path))
+
+ if cls.upload:
+ assert os.path.exists(cur_log_fn), f"Cannot find log to upload: {cur_log_fn}"
+ upload_file(cur_log_fn, os.path.basename(cur_log_fn))
+ os.remove(cur_log_fn)
+
+ @parameterized.expand(ALL_PROCS)
+ def test_process_diff(self, proc):
+ if proc not in self.tested_procs:
+ raise unittest.SkipTest(f"{proc} was not requested to be tested")
+
+ cfg = self.proc_cfg[proc]
+ log_msgs = self.log_msgs[proc]
+ ref_log_msgs = self.ref_log_msgs[proc]
+
+ diff = compare_logs(ref_log_msgs, log_msgs, self.ignore_fields + cfg.ignore, self.ignore_msgs)
+
+ diff_short, diff_long = format_process_diff(diff)
+
+ self.assertEqual(len(diff), 0, "\n" + diff_long if self.long_diff else diff_short)
\ No newline at end of file
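With these base classes, a new replay-diff test only needs to point a subclass at a segment and list the processes to replay. A hedged usage sketch (the class name and segment below are made up, not a real CI route):

```python
# Hypothetical subclass of the diff base; segment and class name are
# illustrative only.
from openpilot.selfdrive.test.process_replay.helpers import TestProcessReplayDiffBase

class TestExampleSegmentReplay(TestProcessReplayDiffBase):
  segment = "0123456789abcdef|2023-01-01--00-00-00--0"  # made-up route
  tested_procs = ["controlsd"]
```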
diff --git a/selfdrive/test/process_replay/regen_all.py b/selfdrive/test/process_replay/regen_all.py
index 656a5b89e1..070cb5f783 100755
--- a/selfdrive/test/process_replay/regen_all.py
+++ b/selfdrive/test/process_replay/regen_all.py
@@ -8,7 +8,8 @@ from tqdm import tqdm
from openpilot.common.prefix import OpenpilotPrefix
from openpilot.selfdrive.test.process_replay.regen import regen_and_save
-from openpilot.selfdrive.test.process_replay.test_processes import FAKEDATA, source_segments as segments
+from openpilot.selfdrive.test.process_replay.process_replay import FAKEDATA
+from openpilot.selfdrive.test.process_replay.test_processes import source_segments as segments
from openpilot.tools.lib.route import SegmentName
diff --git a/selfdrive/test/process_replay/test_processes.py b/selfdrive/test/process_replay/test_processes.py
index 5429c9b63e..efdd166cac 100755
--- a/selfdrive/test/process_replay/test_processes.py
+++ b/selfdrive/test/process_replay/test_processes.py
@@ -1,20 +1,15 @@
#!/usr/bin/env python3
-import argparse
-import concurrent.futures
-import os
+import unittest
+import pytest
import sys
-from collections import defaultdict
-from tqdm import tqdm
-from typing import Any, DefaultDict, Dict
+
+from parameterized import parameterized_class
+from typing import List, Optional
from openpilot.selfdrive.car.car_helpers import interface_names
-from openpilot.selfdrive.test.openpilotci import get_url, upload_file
-from openpilot.selfdrive.test.process_replay.compare_logs import compare_logs, format_diff
-from openpilot.selfdrive.test.process_replay.process_replay import CONFIGS, PROC_REPLAY_DIR, FAKEDATA, check_openpilot_enabled, replay_process
-from openpilot.system.version import get_commit
-from openpilot.tools.lib.filereader import FileReader
-from openpilot.tools.lib.logreader import LogReader
-from openpilot.tools.lib.helpers import save_log
+from openpilot.selfdrive.test.process_replay.process_replay import check_openpilot_enabled
+from openpilot.selfdrive.test.process_replay.helpers import TestProcessReplayDiffBase
+
source_segments = [
("BODY", "937ccb7243511b65|2022-05-24--16-03-09--1"), # COMMA.BODY
@@ -63,166 +58,41 @@ segments = [
# dashcamOnly makes don't need to be tested until a full port is done
excluded_interfaces = ["mock", "tesla"]
-BASE_URL = "https://commadataci.blob.core.windows.net/openpilotci/"
-REF_COMMIT_FN = os.path.join(PROC_REPLAY_DIR, "ref_commit")
-EXCLUDED_PROCS = {"modeld", "dmonitoringmodeld"}
-
-
-def run_test_process(data):
- segment, cfg, args, cur_log_fn, ref_log_path, lr_dat = data
- res = None
- if not args.upload_only:
- lr = LogReader.from_bytes(lr_dat)
- res, log_msgs = test_process(cfg, lr, segment, ref_log_path, cur_log_fn, args.ignore_fields, args.ignore_msgs)
- # save logs so we can upload when updating refs
- save_log(cur_log_fn, log_msgs)
-
- if args.update_refs or args.upload_only:
- print(f'Uploading: {os.path.basename(cur_log_fn)}')
- assert os.path.exists(cur_log_fn), f"Cannot find log to upload: {cur_log_fn}"
- upload_file(cur_log_fn, os.path.basename(cur_log_fn))
- os.remove(cur_log_fn)
- return (segment, cfg.proc_name, res)
-
-
-def get_log_data(segment):
- r, n = segment.rsplit("--", 1)
- with FileReader(get_url(r, n)) as f:
- return (segment, f.read())
-
-
-def test_process(cfg, lr, segment, ref_log_path, new_log_path, ignore_fields=None, ignore_msgs=None):
- if ignore_fields is None:
- ignore_fields = []
- if ignore_msgs is None:
- ignore_msgs = []
-
- ref_log_msgs = list(LogReader(ref_log_path))
-
- try:
- log_msgs = replay_process(cfg, lr, disable_progress=True)
- except Exception as e:
- raise Exception("failed on segment: " + segment) from e
-
- # check to make sure openpilot is engaged in the route
- if cfg.proc_name == "controlsd":
- if not check_openpilot_enabled(log_msgs):
- return f"Route did not enable at all or for long enough: {new_log_path}", log_msgs
-
- try:
- return compare_logs(ref_log_msgs, log_msgs, ignore_fields + cfg.ignore, ignore_msgs, cfg.tolerance), log_msgs
- except Exception as e:
- return str(e), log_msgs
-
-
-if __name__ == "__main__":
- all_cars = {car for car, _ in segments}
- all_procs = {cfg.proc_name for cfg in CONFIGS if cfg.proc_name not in EXCLUDED_PROCS}
-
- cpu_count = os.cpu_count() or 1
-
- parser = argparse.ArgumentParser(description="Regression test to identify changes in a process's output")
- parser.add_argument("--whitelist-procs", type=str, nargs="*", default=all_procs,
- help="Whitelist given processes from the test (e.g. controlsd)")
- parser.add_argument("--whitelist-cars", type=str, nargs="*", default=all_cars,
- help="Whitelist given cars from the test (e.g. HONDA)")
- parser.add_argument("--blacklist-procs", type=str, nargs="*", default=[],
- help="Blacklist given processes from the test (e.g. controlsd)")
- parser.add_argument("--blacklist-cars", type=str, nargs="*", default=[],
- help="Blacklist given cars from the test (e.g. HONDA)")
- parser.add_argument("--ignore-fields", type=str, nargs="*", default=[],
- help="Extra fields or msgs to ignore (e.g. carState.events)")
- parser.add_argument("--ignore-msgs", type=str, nargs="*", default=[],
- help="Msgs to ignore (e.g. carEvents)")
- parser.add_argument("--update-refs", action="store_true",
- help="Updates reference logs using current commit")
- parser.add_argument("--upload-only", action="store_true",
- help="Skips testing processes and uploads logs from previous test run")
- parser.add_argument("-j", "--jobs", type=int, default=max(cpu_count - 2, 1),
- help="Max amount of parallel jobs")
- args = parser.parse_args()
-
- tested_procs = set(args.whitelist_procs) - set(args.blacklist_procs)
- tested_cars = set(args.whitelist_cars) - set(args.blacklist_cars)
- tested_cars = {c.upper() for c in tested_cars}
-
- full_test = (tested_procs == all_procs) and (tested_cars == all_cars) and all(len(x) == 0 for x in (args.ignore_fields, args.ignore_msgs))
- upload = args.update_refs or args.upload_only
- os.makedirs(os.path.dirname(FAKEDATA), exist_ok=True)
-
- if upload:
- assert full_test, "Need to run full test when updating refs"
-
- try:
- ref_commit = open(REF_COMMIT_FN).read().strip()
- except FileNotFoundError:
- print("Couldn't find reference commit")
- sys.exit(1)
-
- cur_commit = get_commit()
- if cur_commit is None:
- raise Exception("Couldn't get current commit")
-
- print(f"***** testing against commit {ref_commit} *****")
-
- # check to make sure all car brands are tested
- if full_test:
- untested = (set(interface_names) - set(excluded_interfaces)) - {c.lower() for c in tested_cars}
- assert len(untested) == 0, f"Cars missing routes: {str(untested)}"
-
- log_paths: DefaultDict[str, Dict[str, Dict[str, str]]] = defaultdict(lambda: defaultdict(dict))
- with concurrent.futures.ProcessPoolExecutor(max_workers=args.jobs) as pool:
- if not args.upload_only:
- download_segments = [seg for car, seg in segments if car in tested_cars]
- log_data: Dict[str, LogReader] = {}
- p1 = pool.map(get_log_data, download_segments)
- for segment, lr in tqdm(p1, desc="Getting Logs", total=len(download_segments)):
- log_data[segment] = lr
-
- pool_args: Any = []
- for car_brand, segment in segments:
- if car_brand not in tested_cars:
- continue
-
- for cfg in CONFIGS:
- if cfg.proc_name not in tested_procs:
- continue
-
- cur_log_fn = os.path.join(FAKEDATA, f"{segment}_{cfg.proc_name}_{cur_commit}.bz2")
- if args.update_refs: # reference logs will not exist if routes were just regenerated
- ref_log_path = get_url(*segment.rsplit("--", 1))
- else:
- ref_log_fn = os.path.join(FAKEDATA, f"{segment}_{cfg.proc_name}_{ref_commit}.bz2")
- ref_log_path = ref_log_fn if os.path.exists(ref_log_fn) else BASE_URL + os.path.basename(ref_log_fn)
-
- dat = None if args.upload_only else log_data[segment]
- pool_args.append((segment, cfg, args, cur_log_fn, ref_log_path, dat))
-
- log_paths[segment][cfg.proc_name]['ref'] = ref_log_path
- log_paths[segment][cfg.proc_name]['new'] = cur_log_fn
-
- results: Any = defaultdict(dict)
- p2 = pool.map(run_test_process, pool_args)
- for (segment, proc, result) in tqdm(p2, desc="Running Tests", total=len(pool_args)):
- if not args.upload_only:
- results[segment][proc] = result
-
- diff_short, diff_long, failed = format_diff(results, log_paths, ref_commit)
- if not upload:
- with open(os.path.join(PROC_REPLAY_DIR, "diff.txt"), "w") as f:
- f.write(diff_long)
- print(diff_short)
-
- if failed:
- print("TEST FAILED")
- print("\n\nTo push the new reference logs for this commit run:")
- print("./test_processes.py --upload-only")
- else:
- print("TEST SUCCEEDED")
-
- else:
- with open(REF_COMMIT_FN, "w") as f:
- f.write(cur_commit)
- print(f"\n\nUpdated reference logs for commit: {cur_commit}")
-
- sys.exit(int(failed))
+ALL_CARS = sorted({car for car, _ in segments})
+
+
+@pytest.mark.slow
+@parameterized_class(('case_name', 'segment'), segments)
+class TestCarProcessReplay(TestProcessReplayDiffBase):
+ """
+ Runs a replay diff on a segment for each car.
+ """
+
+ case_name: Optional[str] = None
+ tested_cars: List[str] = ALL_CARS
+
+ @classmethod
+ def setUpClass(cls):
+ if cls.case_name not in cls.tested_cars:
+ raise unittest.SkipTest(f"{cls.case_name} was not requested to be tested")
+ super().setUpClass()
+
+ def test_all_makes_are_tested(self):
+ if set(self.tested_cars) != set(ALL_CARS):
+ raise unittest.SkipTest("skipping check because some cars were skipped via command line")
+
+ # check to make sure all car brands are tested
+ untested = (set(interface_names) - set(excluded_interfaces)) - {c.lower() for c in self.tested_cars}
+ self.assertEqual(len(untested), 0, f"Cars missing routes: {str(untested)}")
+
+ def test_controlsd_engaged(self):
+ if "controlsd" not in self.tested_procs:
+ raise unittest.SkipTest("controlsd was not requested to be tested")
+
+ # check to make sure openpilot is engaged in the route
+ log_msgs = self.log_msgs["controlsd"]
+ self.assertTrue(check_openpilot_enabled(log_msgs), f"Route did not enable at all or for long enough: {self.segment}")
+
+
+if __name__ == '__main__':
+ pytest.main([*sys.argv[1:], __file__])
\ No newline at end of file
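The old argparse interface survives as the pytest options registered in conftest.py, so a targeted run looks like this (paths and option names come from this diff; the car/process selection is only an example, and it assumes the command is issued from the repo root so the conftest is picked up):

```python
# Example of a targeted run using the options defined in conftest.py,
# mirroring what the __main__ block above does.
import pytest

pytest.main([
  "selfdrive/test/process_replay/test_processes.py",
  "--whitelist-procs", "controlsd",
  "--whitelist-cars", "TOYOTA",
  "--long-diff",
])
```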
diff --git a/selfdrive/test/pytest-tici.ini b/selfdrive/test/pytest-tici.ini
new file mode 100644
index 0000000000..a553018309
--- /dev/null
+++ b/selfdrive/test/pytest-tici.ini
@@ -0,0 +1,5 @@
+[pytest]
+addopts = -Werror --strict-config --strict-markers
+markers =
+ slow: tests that take a while to run and can be skipped with -m 'not slow'
+ tici: tests that are only meant to run on the C3/C3X
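On-device runs pick this file up through the PYTEST_ADDOPTS export added to the Jenkinsfile above; since `-c` selects the config file, the device jobs presumably skip the xdist and coverage defaults that pyproject.toml applies on PC. A sketch of the mechanism (the test path is just one example taken from the Jenkinsfile):

```python
# PYTEST_ADDOPTS is appended by pytest itself to every invocation, so a plain
# `pytest ...` call on the device picks up the tici config.
import os
import subprocess

os.environ["PYTEST_ADDOPTS"] = "-c selfdrive/test/pytest-tici.ini --rootdir ."
subprocess.run(["pytest", "selfdrive/test/test_onroad.py", "-s"], check=True)
```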
diff --git a/system/hardware/tici/esim.nmconnection b/system/hardware/tici/esim.nmconnection
new file mode 100644
index 0000000000..d170f9d066
--- /dev/null
+++ b/system/hardware/tici/esim.nmconnection
@@ -0,0 +1,28 @@
+[connection]
+id=esim
+uuid=fff6553c-3284-4707-a6b1-acc021caaafb
+type=gsm
+permissions=
+autoconnect=true
+autoconnect-retries=100
+
+[gsm]
+apn=
+home-only=false
+auto-config=true
+sim-id=
+
+[ipv4]
+route-metric=1000
+dns-priority=1000
+dns-search=
+method=auto
+
+[ipv6]
+ddr-gen-mode=stable-privacy
+dns-search=
+route-metric=1000
+dns-priority=1000
+method=auto
+
+[proxy]
diff --git a/system/hardware/tici/hardware.py b/system/hardware/tici/hardware.py
index d3375831ad..8b1f9abe74 100644
--- a/system/hardware/tici/hardware.py
+++ b/system/hardware/tici/hardware.py
@@ -3,6 +3,7 @@ import math
import os
import subprocess
import time
+import tempfile
from enum import IntEnum
from functools import cached_property, lru_cache
from pathlib import Path
@@ -532,9 +533,22 @@ class Tici(HardwareBase):
except Exception:
pass
- # blue prime config
- if sim_id.startswith('8901410'):
- os.system('mmcli -m any --3gpp-set-initial-eps-bearer-settings="apn=Broadband"')
+ # blue prime
+ blue_prime = sim_id.startswith('8901410')
+ initial_apn = "Broadband" if blue_prime else ""
+ os.system(f'mmcli -m any --3gpp-set-initial-eps-bearer-settings="apn={initial_apn}"')
+
+ # eSIM prime
+ if sim_id.startswith('8985235'):
+ with open('/data/openpilot/system/hardware/tici/esim.nmconnection') as f, tempfile.NamedTemporaryFile(mode='w') as tf:
+ dat = f.read()
+ dat = dat.replace("sim-id=", f"sim-id={sim_id}")
+ tf.write(dat)
+ tf.flush()
+
+ # needs to be root
+ os.system(f"sudo cp {tf.name} /data/etc/NetworkManager/system-connections/esim.nmconnection")
+ os.system("sudo nmcli con reload")
def get_networks(self):
r = {}
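The eSIM handling fills in the `sim-id=` field of the checked-in NetworkManager profile and installs it as root. A standalone sketch of that step (the helper function is illustrative, not part of openpilot; the paths match the diff above):

```python
# Illustrative helper reproducing the sim-id templating above.
import subprocess
import tempfile

def install_esim_profile(sim_id: str,
                         template="/data/openpilot/system/hardware/tici/esim.nmconnection",
                         dest="/data/etc/NetworkManager/system-connections/esim.nmconnection"):
  with open(template) as f:
    dat = f.read().replace("sim-id=", f"sim-id={sim_id}")
  with tempfile.NamedTemporaryFile(mode='w') as tf:
    tf.write(dat)
    tf.flush()
    # NetworkManager requires the connection file to be root-owned
    subprocess.run(["sudo", "cp", tf.name, dest], check=True)
    subprocess.run(["sudo", "nmcli", "con", "reload"], check=True)
```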
diff --git a/system/sensord/rawgps/compare.py b/system/sensord/rawgps/compare.py
deleted file mode 100755
index e1daa7f918..0000000000
--- a/system/sensord/rawgps/compare.py
+++ /dev/null
@@ -1,66 +0,0 @@
-#!/usr/bin/env python3
-import cereal.messaging as messaging
-from laika import constants
-
-if __name__ == "__main__":
- sm = messaging.SubMaster(['ubloxGnss', 'qcomGnss'])
-
- meas = None
- while 1:
- sm.update()
- if sm['ubloxGnss'].which() == "measurementReport":
- meas = sm['ubloxGnss'].measurementReport.measurements
- if not sm.updated['qcomGnss'] or meas is None:
- continue
- report = sm['qcomGnss'].measurementReport
- if report.source not in [0, 1]:
- continue
- GLONASS = report.source == 1
- recv_time = report.milliseconds / 1000
-
- car = []
- print("qcom has ", sorted([x.svId for x in report.sv]))
- print("ublox has", sorted([x.svId for x in meas if x.gnssId == (6 if GLONASS else 0)]))
- for i in report.sv:
- # match to ublox
- tm = None
- for m in meas:
- if i.svId == m.svId and m.gnssId == 0 and m.sigId == 0 and not GLONASS:
- tm = m
- if (i.svId-64) == m.svId and m.gnssId == 6 and m.sigId == 0 and GLONASS:
- tm = m
- if tm is None:
- continue
-
- if not i.measurementStatus.measurementNotUsable and i.measurementStatus.satelliteTimeIsKnown:
- sat_time = (i.unfilteredMeasurementIntegral + i.unfilteredMeasurementFraction + i.latency) / 1000
- ublox_psuedorange = tm.pseudorange
- qcom_psuedorange = (recv_time - sat_time)*constants.SPEED_OF_LIGHT
- if GLONASS:
- glonass_freq = tm.glonassFrequencyIndex - 7
- ublox_speed = -(constants.SPEED_OF_LIGHT / (constants.GLONASS_L1 + glonass_freq*constants.GLONASS_L1_DELTA)) * (tm.doppler)
- else:
- ublox_speed = -(constants.SPEED_OF_LIGHT / constants.GPS_L1) * tm.doppler
- qcom_speed = i.unfilteredSpeed
- car.append((i.svId, tm.pseudorange, ublox_speed, qcom_psuedorange, qcom_speed, tm.cno))
-
- if len(car) == 0:
- print("nothing to compare")
- continue
-
- pr_err, speed_err = 0., 0.
- for c in car:
- ublox_psuedorange, ublox_speed, qcom_psuedorange, qcom_speed = c[1:5]
- pr_err += ublox_psuedorange - qcom_psuedorange
- speed_err += ublox_speed - qcom_speed
- pr_err /= len(car)
- speed_err /= len(car)
- print("avg psuedorange err %f avg speed err %f" % (pr_err, speed_err))
- for c in sorted(car, key=lambda x: abs(x[1] - x[3] - pr_err)):
- svid, ublox_psuedorange, ublox_speed, qcom_psuedorange, qcom_speed, cno = c
- print("svid: %3d pseudorange: %10.2f m speed: %8.2f m/s meas: %12.2f speed: %10.2f meas_err: %10.3f speed_err: %8.3f cno: %d" %
- (svid, ublox_psuedorange, ublox_speed, qcom_psuedorange, qcom_speed,
- ublox_psuedorange - qcom_psuedorange - pr_err, ublox_speed - qcom_speed - speed_err, cno))
-
-
-
diff --git a/system/sensord/rawgps/rawgpsd.py b/system/sensord/rawgps/rawgpsd.py
index b947c54872..787a9316d3 100755
--- a/system/sensord/rawgps/rawgpsd.py
+++ b/system/sensord/rawgps/rawgpsd.py
@@ -8,7 +8,7 @@ import time
import pycurl
import shutil
import subprocess
-from datetime import datetime
+import datetime
from multiprocessing import Process, Event
from typing import NoReturn, Optional
from struct import unpack_from, calcsize, pack
@@ -16,9 +16,6 @@ from struct import unpack_from, calcsize, pack
from cereal import log
import cereal.messaging as messaging
from openpilot.common.gpio import gpio_init, gpio_set
-from laika.gps_time import GPSTime, utc_to_gpst, get_leap_seconds
-from laika.helpers import get_prn_from_nmea_id
-from laika.constants import SECS_IN_HR, SECS_IN_DAY, SECS_IN_WEEK
from openpilot.system.hardware.tici.pins import GPIO
from openpilot.system.swaglog import cloudlog
from openpilot.system.sensord.rawgps.modemdiag import ModemDiag, DIAG_LOG_F, setup_logs, send_recv
@@ -211,7 +208,7 @@ def setup_quectel(diag: ModemDiag) -> bool:
inject_assistance()
os.remove(ASSIST_DATA_FILE)
#at_cmd("AT+QGPSXTRADATA?")
- time_str = datetime.utcnow().strftime("%Y/%m/%d,%H:%M:%S")
+ time_str = datetime.datetime.utcnow().strftime("%Y/%m/%d,%H:%M:%S")
at_cmd(f"AT+QGPSXTRATIME=0,\"{time_str}\",1,1,1000")
at_cmd("AT+QGPSCFG=\"outport\",\"usbnmea\"")
@@ -293,7 +290,6 @@ def main() -> NoReturn:
diag = ModemDiag()
r = setup_quectel(diag)
want_assistance = not r
- current_gps_time = utc_to_gpst(GPSTime.from_datetime(datetime.utcnow()))
cloudlog.warning("quectel setup done")
gpio_init(GPIO.GNSS_PWR_EN, True)
gpio_set(GPIO.GNSS_PWR_EN, True)
@@ -366,8 +362,6 @@ def main() -> NoReturn:
setattr(sv.measurementStatus, kk, bool(v & (1 << …)))
@@ … @@ def main() -> NoReturn:
gps.altitude = report["q_FltFinalPosAlt"]
gps.speed = math.sqrt(sum([x**2 for x in vNED]))
gps.bearingDeg = report["q_FltHeadingRad"] * 180/math.pi
- gps.unixTimestampMillis = GPSTime(report['w_GpsWeekNumber'],
- 1e-3*report['q_GpsFixTimeMs']).as_unix_timestamp()*1e3
+
+  # TODO: needs updating if another leap second is added after June 2024
+  dt_timestamp = (datetime.datetime(1980, 1, 6, 0, 0, 0, 0, None) +
+                  datetime.timedelta(weeks=report['w_GpsWeekNumber']) +
+                  datetime.timedelta(seconds=(1e-3*report['q_GpsFixTimeMs'] - 18)))
+  gps.unixTimestampMillis = dt_timestamp.timestamp()*1e3
gps.source = log.GpsLocationData.SensorSource.qcomdiag
gps.vNED = vNED
gps.verticalAccuracy = report["q_FltVdop"]
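Editor's note on the hunk above (not part of the patch): the removed laika GPSTime call is replaced by plain datetime arithmetic, i.e. the GPS epoch (1980-01-06) plus the reported week number and millisecond time-of-week, minus the current 18 s GPS-UTC leap-second offset. A minimal standalone sketch of that conversion; the function name and test values are illustrative, and it uses an explicit UTC tzinfo where the patch relies on the device clock being UTC:

```python
import datetime

GPS_EPOCH = datetime.datetime(1980, 1, 6, tzinfo=datetime.timezone.utc)
GPS_UTC_LEAP_SECONDS = 18  # valid since 2017; revisit if another leap second is introduced

def gps_week_tow_to_unix_ms(week: int, tow_ms: float) -> float:
  # GPS time runs ahead of UTC by the accumulated leap seconds
  t = GPS_EPOCH + datetime.timedelta(weeks=week, seconds=tow_ms * 1e-3 - GPS_UTC_LEAP_SECONDS)
  return t.timestamp() * 1e3

# e.g. week 2250, 4 days into the week
print(gps_week_tow_to_unix_ms(2250, 4 * 24 * 3600 * 1000))
```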
@@ -395,8 +393,6 @@ def main() -> NoReturn:
if gps.flags:
want_assistance = False
stop_download_event.set()
-
-
pm.send('gpsLocation', msg)
elif log_type == LOG_GNSS_OEMDRE_SVPOLY_REPORT:
@@ -415,6 +411,10 @@ def main() -> NoReturn:
else:
setattr(poly, k, v)
+ '''
+ # Timestamp glonass polys with GPSTime
+ from laika.gps_time import GPSTime, utc_to_gpst, get_leap_seconds
+ from laika.helpers import get_prn_from_nmea_id
prn = get_prn_from_nmea_id(poly.svId)
if prn[0] == 'R':
epoch = GPSTime(current_gps_time.week, (poly.t0 - 3*SECS_IN_HR + SECS_IN_DAY) % (SECS_IN_WEEK) + get_leap_seconds(current_gps_time))
@@ -429,6 +429,7 @@ def main() -> NoReturn:
poly.gpsWeek = epoch.week
poly.gpsTow = epoch.tow
+ '''
pm.send('qcomGnss', msg)
elif log_type in [LOG_GNSS_GPS_MEASUREMENT_REPORT, LOG_GNSS_GLONASS_MEASUREMENT_REPORT]:
diff --git a/system/sensord/tests/test_sensord.py b/system/sensord/tests/test_sensord.py
index 0905248071..3075c8a343 100755
--- a/system/sensord/tests/test_sensord.py
+++ b/system/sensord/tests/test_sensord.py
@@ -106,7 +106,7 @@ class TestSensord(unittest.TestCase):
os.environ["LSM_SELF_TEST"] = "1"
# read initial sensor values every test case can use
- os.system("pkill -f ./sensord")
+ os.system("pkill -f \\\\./sensord")
try:
managed_processes["sensord"].start()
cls.sample_secs = int(os.getenv("SAMPLE_SECS", "10"))
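Editor's aside on the pkill change above (not part of the patch): `pkill -f` treats its argument as an extended regular expression, so an unescaped `.` matches any character. The doubled backslashes exist because the pattern passes through two layers of unescaping: the Python literal `\\\\./sensord` becomes `\\./sensord` in the string, the shell reduces that to `\./sensord`, and pkill finally sees a literal dot. A small illustration of the matching difference, using Python's `re` module only to mimic the regex behaviour:

```python
import re

cmdlines = ["./sensord", "src/sensord"]

unescaped = re.compile(r"./sensord")  # '.' matches any character before "/sensord"
escaped = re.compile(r"\./sensord")   # '\.' matches only a literal dot

print([c for c in cmdlines if unescaped.search(c)])  # ['./sensord', 'src/sensord']
print([c for c in cmdlines if escaped.search(c)])    # ['./sensord']
```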
diff --git a/system/ubloxd/tests/test_ublox_processing.py b/system/ubloxd/tests/test_ublox_processing.py
deleted file mode 100755
index 311604881a..0000000000
--- a/system/ubloxd/tests/test_ublox_processing.py
+++ /dev/null
@@ -1,117 +0,0 @@
-#!/usr/bin/env python3
-import unittest
-import time
-import numpy as np
-
-from laika import AstroDog
-from laika.helpers import ConstellationId
-from laika.raw_gnss import correct_measurements, process_measurements, read_raw_ublox
-from laika.opt import calc_pos_fix
-from openpilot.selfdrive.test.openpilotci import get_url
-from openpilot.system.hardware.hw import Paths
-from openpilot.tools.lib.logreader import LogReader
-from openpilot.selfdrive.test.helpers import with_processes
-import cereal.messaging as messaging
-
-def get_gnss_measurements(log_reader):
- gnss_measurements = []
- for msg in log_reader:
- if msg.which() == "ubloxGnss":
- ublox_msg = msg.ubloxGnss
- if ublox_msg.which == 'measurementReport':
- report = ublox_msg.measurementReport
- if len(report.measurements) > 0:
- gnss_measurements.append(read_raw_ublox(report))
- return gnss_measurements
-
-def get_ublox_raw(log_reader):
- ublox_raw = []
- for msg in log_reader:
- if msg.which() == "ubloxRaw":
- ublox_raw.append(msg)
- return ublox_raw
-
-class TestUbloxProcessing(unittest.TestCase):
- NUM_TEST_PROCESS_MEAS = 10
-
- @classmethod
- def setUpClass(cls):
- lr = LogReader(get_url("4cf7a6ad03080c90|2021-09-29--13-46-36", 0))
- cls.gnss_measurements = get_gnss_measurements(lr)
-
- # test gps ephemeris continuity check (drive has ephemeris issues with cutover data)
- lr = LogReader(get_url("37b6542f3211019a|2023-01-15--23-45-10", 14))
- cls.ublox_raw = get_ublox_raw(lr)
-
- def test_read_ublox_raw(self):
- count_gps = 0
- count_glonass = 0
- for measurements in self.gnss_measurements:
- for m in measurements:
- if m.constellation_id == ConstellationId.GPS:
- count_gps += 1
- elif m.constellation_id == ConstellationId.GLONASS:
- count_glonass += 1
-
- self.assertEqual(count_gps, 5036)
- self.assertEqual(count_glonass, 3651)
-
- def test_get_fix(self):
- dog = AstroDog(cache_dir=Paths.download_cache_root())
- position_fix_found = 0
- count_processed_measurements = 0
- count_corrected_measurements = 0
- position_fix_found_after_correcting = 0
-
- pos_ests = []
- for measurements in self.gnss_measurements[:self.NUM_TEST_PROCESS_MEAS]:
- processed_meas = process_measurements(measurements, dog)
- count_processed_measurements += len(processed_meas)
- pos_fix = calc_pos_fix(processed_meas)
- if len(pos_fix) > 0 and all(p != 0 for p in pos_fix[0]):
- position_fix_found += 1
-
- corrected_meas = correct_measurements(processed_meas, pos_fix[0][:3], dog)
- count_corrected_measurements += len(corrected_meas)
-
- pos_fix = calc_pos_fix(corrected_meas)
- if len(pos_fix) > 0 and all(p != 0 for p in pos_fix[0]):
- pos_ests.append(pos_fix[0])
- position_fix_found_after_correcting += 1
-
- mean_fix = np.mean(np.array(pos_ests)[:, :3], axis=0)
- np.testing.assert_allclose(mean_fix, [-2452306.662377, -4778343.136806, 3428550.090557], rtol=0, atol=1)
-
-    # Note that there can be fewer corrected measurements than processed ones when some are invalid.
-    # However, that is not the case for the current segment.
- self.assertEqual(position_fix_found, self.NUM_TEST_PROCESS_MEAS)
- self.assertEqual(position_fix_found_after_correcting, self.NUM_TEST_PROCESS_MEAS)
- self.assertEqual(count_processed_measurements, 69)
- self.assertEqual(count_corrected_measurements, 69)
-
- @with_processes(['ubloxd'])
- def test_ublox_gps_cutover(self):
- time.sleep(2)
- ugs = messaging.sub_sock("ubloxGnss", timeout=0.1)
- ur_pm = messaging.PubMaster(['ubloxRaw'])
-
- def replay_segment():
- rcv_msgs = []
- for msg in self.ublox_raw:
- ur_pm.send(msg.which(), msg.as_builder())
- time.sleep(0.001)
- rcv_msgs += messaging.drain_sock(ugs)
-
- time.sleep(0.1)
- rcv_msgs += messaging.drain_sock(ugs)
- return rcv_msgs
-
- # replay twice to enforce cutover data on rewind
- rcv_msgs = replay_segment()
- rcv_msgs += replay_segment()
-
- ephems_cnt = sum(m.ubloxGnss.which() == 'ephemeris' for m in rcv_msgs)
- self.assertEqual(ephems_cnt, 15)
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/tools/cabana/streams/replaystream.cc b/tools/cabana/streams/replaystream.cc
index e94aefec2b..c61c81d56f 100644
--- a/tools/cabana/streams/replaystream.cc
+++ b/tools/cabana/streams/replaystream.cc
@@ -46,7 +46,7 @@ void ReplayStream::mergeSegments() {
bool ReplayStream::loadRoute(const QString &route, const QString &data_dir, uint32_t replay_flags) {
replay.reset(new Replay(route, {"can", "roadEncodeIdx", "driverEncodeIdx", "wideRoadEncodeIdx", "carParams"},
- {}, {}, nullptr, replay_flags, data_dir, this));
+ {}, nullptr, replay_flags, data_dir, this));
replay->setSegmentCacheLimit(settings.max_cached_minutes);
replay->installEventFilter(event_filter, this);
QObject::connect(replay.get(), &Replay::seekedTo, this, &AbstractStream::seekedTo);
diff --git a/tools/cabana/videowidget.cc b/tools/cabana/videowidget.cc
index 1ecdc8da1c..a6fd0b2b64 100644
--- a/tools/cabana/videowidget.cc
+++ b/tools/cabana/videowidget.cc
@@ -14,7 +14,6 @@
#include
#include "tools/cabana/streams/replaystream.h"
-#include "tools/cabana/util.h"
const int MIN_VIDEO_HEIGHT = 100;
const int THUMBNAIL_MARGIN = 3;
@@ -290,12 +289,23 @@ void Slider::paintEvent(QPaintEvent *ev) {
double min = minimum() / factor;
double max = maximum() / factor;
-  for (auto [begin, end, type] : qobject_cast<ReplayStream *>(can)->getReplay()->getTimeline()) {
- if (begin > max || end < min)
- continue;
+ auto fillRange = [&](double begin, double end, const QColor &color) {
+ if (begin > max || end < min) return;
r.setLeft(((std::max(min, begin) - min) / (max - min)) * width());
r.setRight(((std::min(max, end) - min) / (max - min)) * width());
- p.fillRect(r, timeline_colors[(int)type]);
+ p.fillRect(r, color);
+ };
+
+  const auto replay = qobject_cast<ReplayStream *>(can)->getReplay();
+ for (auto [begin, end, type] : replay->getTimeline()) {
+ fillRange(begin, end, timeline_colors[(int)type]);
+ }
+
+ QColor empty_color = palette().color(QPalette::Window);
+ empty_color.setAlpha(160);
+ for (const auto &[n, seg] : replay->segments()) {
+ if (!(seg && seg->isLoaded()))
+ fillRange(n * 60.0, (n + 1) * 60.0, empty_color);
}
QStyleOptionSlider opt;
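Editor's sketch (not part of the patch) of what the new fillRange lambda does: it clamps a [begin, end] time interval to the visible range and scales it to slider pixels, and is now reused both for timeline entries and for segments that are not loaded yet. Roughly, in Python, with illustrative names:

```python
def range_to_pixels(begin, end, t_min, t_max, width):
  """Clamp a [begin, end] time span (seconds) to the view and map it to slider x-coordinates."""
  if begin > t_max or end < t_min:
    return None  # entirely outside the visible range
  left = (max(t_min, begin) - t_min) / (t_max - t_min) * width
  right = (min(t_max, end) - t_min) / (t_max - t_min) * width
  return left, right

# a 60 s unloaded segment starting at t=120 s, on a 10 min route drawn into a 600 px slider
print(range_to_pixels(120, 180, 0, 600, 600))  # (120.0, 180.0)
```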
diff --git a/tools/cabana/videowidget.h b/tools/cabana/videowidget.h
index e163241fb1..67c2c8a29f 100644
--- a/tools/cabana/videowidget.h
+++ b/tools/cabana/videowidget.h
@@ -8,7 +8,6 @@
#include
#include
#include
-#include
#include "selfdrive/ui/qt/widgets/cameraview.h"
#include "tools/cabana/util.h"
diff --git a/tools/gpstest/.gitignore b/tools/gpstest/.gitignore
deleted file mode 100644
index 992088ef34..0000000000
--- a/tools/gpstest/.gitignore
+++ /dev/null
@@ -1,4 +0,0 @@
-LimeGPS/
-LimeSuite/
-hackrf/
-gps-sdr-sim/
diff --git a/tools/gpstest/README.md b/tools/gpstest/README.md
deleted file mode 100644
index 01f44df0ce..0000000000
--- a/tools/gpstest/README.md
+++ /dev/null
@@ -1,33 +0,0 @@
-# GPS test setup
-Testing the GPS receiver using GPS spoofing. At the moment only
-static location replay is supported.
-
-# Usage
-on C3 run `rpc_server.py`, on host PC run `fuzzy_testing.py`
-
-`simulate_gps_signal.py` downloads the latest ephemeris file from
-https://cddis.nasa.gov/archive/gnss/data/daily/20xx/brdc/.
-
-
-# Hardware Setup
-* [LimeSDR USB](https://wiki.myriadrf.org/LimeSDR-USB)
-* Asus AX58BT antenna
-
-# Software Setup
-* https://github.com/myriadrf/LimeSuite
-To communicate with the LimeSDR, LimeSuite is needed; it abstracts the direct
-communication. It also contains examples for a quick start.
-
-The latest stable version (22.09) does not have the corresponding firmware
-download available at https://downloads.myriadrf.org/project/limesuite. Therefore
-version 20.10 was chosen.
-
-* https://github.com/osqzss/LimeGPS
-Built on top of LimeSuite (libLimeSuite.so.20.10-1), generates the GPS signal.
-
-```
-./LimeGPS -e -l
-
-# Example
-./LimeGPS -e /pathTo/brdc2660.22n -l 47.202028,15.740394,100
-```
diff --git a/tools/gpstest/fuzzy_testing.py b/tools/gpstest/fuzzy_testing.py
deleted file mode 100755
index 532fd2d34c..0000000000
--- a/tools/gpstest/fuzzy_testing.py
+++ /dev/null
@@ -1,111 +0,0 @@
-#!/usr/bin/env python3
-import argparse
-import multiprocessing
-import rpyc
-from collections import defaultdict
-
-from helper import download_rinex, exec_LimeGPS_bin
-from helper import get_random_coords, get_continuous_coords
-
-#------------------------------------------------------------------------------
-# this script is supposed to run on HOST PC
-# limeSDR is unreliable via c3 USB
-#------------------------------------------------------------------------------
-
-
-def run_lime_gps(rinex_file: str, location: str, timeout: int):
- # needs to run longer than the checker
- timeout += 10
- print(f"LimeGPS {location} {timeout}")
- p = multiprocessing.Process(target=exec_LimeGPS_bin,
- args=(rinex_file, location, timeout))
- p.start()
- return p
-
-con = None
-def run_remote_checker(lat, lon, alt, duration, ip_addr):
- global con
- try:
- con = rpyc.connect(ip_addr, 18861)
- con._config['sync_request_timeout'] = duration+20
- except ConnectionRefusedError:
- print("could not run remote checker is 'rpc_server.py' running???")
- return False, None, None
-
- matched, log, info = con.root.exposed_run_checker(lat, lon, alt,
- timeout=duration)
- con.close() # TODO: might wanna fetch more logs here
- con = None
-
- print(f"Remote Checker: {log} {info}")
- return matched, log, info
-
-
-stats = defaultdict(int) # type: ignore
-keys = ['success', 'failed', 'ublox_fail', 'proc_crash', 'checker_crash']
-
-def print_report():
- print("\nFuzzy testing report summary:")
- for k in keys:
- print(f" {k}: {stats[k]}")
-
-
-def update_stats(matched, log, info):
- if matched:
- stats['success'] += 1
- return
-
- stats['failed'] += 1
- if log == "PROC CRASH":
- stats['proc_crash'] += 1
- if log == "CHECKER CRASHED":
- stats['checker_crash'] += 1
- if log == "TIMEOUT":
- stats['ublox_fail'] += 1
-
-
-def main(ip_addr, continuous_mode, timeout, pos):
- rinex_file = download_rinex()
-
- lat, lon, alt = pos
- if lat == 0 and lon == 0 and alt == 0:
- lat, lon, alt = get_random_coords(47.2020, 15.7403)
-
- try:
- while True:
- # spoof random location
- spoof_proc = run_lime_gps(rinex_file, f"{lat},{lon},{alt}", timeout)
-
- # remote checker execs blocking
- matched, log, info = run_remote_checker(lat, lon, alt, timeout, ip_addr)
- update_stats(matched, log, info)
- spoof_proc.terminate()
- spoof_proc = None
-
- if continuous_mode:
- lat, lon, alt = get_continuous_coords(lat, lon, alt)
- else:
- lat, lon, alt = get_random_coords(lat, lon)
- except KeyboardInterrupt:
- if spoof_proc is not None:
- spoof_proc.terminate()
-
- if con is not None and not con.closed:
- con.root.exposed_kill_procs()
- con.close()
-
- print_report()
-
-
-if __name__ == "__main__":
- parser = argparse.ArgumentParser(description="Fuzzy test GPS stack with random locations.")
- parser.add_argument("ip_addr", type=str)
- parser.add_argument("-c", "--contin", type=bool, nargs='?', default=False, help='Continous location change')
- parser.add_argument("-t", "--timeout", type=int, nargs='?', default=180, help='Timeout to get location')
-
- # for replaying a location
- parser.add_argument("lat", type=float, nargs='?', default=0)
- parser.add_argument("lon", type=float, nargs='?', default=0)
- parser.add_argument("alt", type=float, nargs='?', default=0)
- args = parser.parse_args()
- main(args.ip_addr, args.contin, args.timeout, (args.lat, args.lon, args.alt))
diff --git a/tools/gpstest/helper.py b/tools/gpstest/helper.py
deleted file mode 100644
index 4f62e60db0..0000000000
--- a/tools/gpstest/helper.py
+++ /dev/null
@@ -1,53 +0,0 @@
-import random
-import datetime as dt
-import subprocess as sp
-from typing import Tuple
-
-from laika.downloader import download_nav
-from laika.gps_time import GPSTime
-from laika.helpers import ConstellationId
-
-
-def download_rinex():
- # TODO: check if there is a better way to get the full brdc file for LimeGPS
- gps_time = GPSTime.from_datetime(dt.datetime.utcnow())
- utc_time = dt.datetime.utcnow() - dt.timedelta(1)
- gps_time = GPSTime.from_datetime(dt.datetime(utc_time.year, utc_time.month, utc_time.day))
- return download_nav(gps_time, '/tmp/gpstest/', ConstellationId.GPS)
-
-
-def exec_LimeGPS_bin(rinex_file: str, location: str, duration: int):
-  # this function should never return; returning means the timeout was
-  # reached or it crashed
- try:
- cmd = ["LimeGPS/LimeGPS", "-e", rinex_file, "-l", location]
- sp.check_output(cmd, timeout=duration)
- except sp.TimeoutExpired:
- print("LimeGPS timeout reached!")
- except Exception as e:
- print(f"LimeGPS crashed: {str(e)}")
-
-
-def get_random_coords(lat, lon) -> Tuple[float, float, int]:
- # jump around the world
- # max values, lat: -90 to 90, lon: -180 to 180
-
- lat_add = random.random()*20 + 10
- lon_add = random.random()*20 + 20
- alt = random.randint(-10**3, 4*10**3)
-
- lat = ((lat + lat_add + 90) % 180) - 90
- lon = ((lon + lon_add + 180) % 360) - 180
- return round(lat, 5), round(lon, 5), alt
-
-
-def get_continuous_coords(lat, lon, alt) -> Tuple[float, float, int]:
- # continuously move around the world
- lat_add = random.random()*0.01
- lon_add = random.random()*0.01
- alt_add = random.randint(-100, 100)
-
- lat = ((lat + lat_add + 90) % 180) - 90
- lon = ((lon + lon_add + 180) % 360) - 180
- alt += alt_add
- return round(lat, 5), round(lon, 5), alt
diff --git a/tools/gpstest/patches/hackrf.patch b/tools/gpstest/patches/hackrf.patch
deleted file mode 100644
index afc9ac437b..0000000000
--- a/tools/gpstest/patches/hackrf.patch
+++ /dev/null
@@ -1,44 +0,0 @@
-diff --git a/host/hackrf-tools/src/CMakeLists.txt b/host/hackrf-tools/src/CMakeLists.txt
-index 7115151c..a51388ba 100644
---- a/host/hackrf-tools/src/CMakeLists.txt
-+++ b/host/hackrf-tools/src/CMakeLists.txt
-@@ -23,20 +23,20 @@
-
- set(INSTALL_DEFAULT_BINDIR "bin" CACHE STRING "Appended to CMAKE_INSTALL_PREFIX")
-
--find_package(FFTW REQUIRED)
--include_directories(${FFTW_INCLUDES})
--get_filename_component(FFTW_LIBRARY_DIRS ${FFTW_LIBRARIES} DIRECTORY)
--link_directories(${FFTW_LIBRARY_DIRS})
-+#find_package(FFTW REQUIRED)
-+#include_directories(${FFTW_INCLUDES})
-+#get_filename_component(FFTW_LIBRARY_DIRS ${FFTW_LIBRARIES} DIRECTORY)
-+#link_directories(${FFTW_LIBRARY_DIRS})
-
- SET(TOOLS
- hackrf_transfer
-- hackrf_spiflash
-- hackrf_cpldjtag
-+ #hackrf_spiflash
-+ #hackrf_cpldjtag
- hackrf_info
-- hackrf_debug
-- hackrf_clock
-- hackrf_sweep
-- hackrf_operacake
-+ #hackrf_debug
-+ #hackrf_clock
-+ #hackrf_sweep
-+ #hackrf_operacake
- )
-
- if(MSVC)
-@@ -45,7 +45,7 @@ if(MSVC)
- )
- LIST(APPEND TOOLS_LINK_LIBS ${FFTW_LIBRARIES})
- else()
-- LIST(APPEND TOOLS_LINK_LIBS m fftw3f)
-+ LIST(APPEND TOOLS_LINK_LIBS m)# fftw3f)
- endif()
-
- if(NOT libhackrf_SOURCE_DIR)
diff --git a/tools/gpstest/patches/limeGPS/inc_ephem_array_size.patch b/tools/gpstest/patches/limeGPS/inc_ephem_array_size.patch
deleted file mode 100644
index 9a3525d346..0000000000
--- a/tools/gpstest/patches/limeGPS/inc_ephem_array_size.patch
+++ /dev/null
@@ -1,13 +0,0 @@
-diff --git a/gpssim.h b/gpssim.h
-index c30b227..2ae0802 100644
---- a/gpssim.h
-+++ b/gpssim.h
-@@ -75,7 +75,7 @@
- #define SC08 (8)
- #define SC16 (16)
-
--#define EPHEM_ARRAY_SIZE (13) // for daily GPS broadcast ephemers file (brdc)
-+#define EPHEM_ARRAY_SIZE (20) // for daily GPS broadcast ephemers file (brdc)
-
- /*! \brief Structure representing GPS time */
- typedef struct
diff --git a/tools/gpstest/patches/limeGPS/makefile.patch b/tools/gpstest/patches/limeGPS/makefile.patch
deleted file mode 100644
index f99ce551db..0000000000
--- a/tools/gpstest/patches/limeGPS/makefile.patch
+++ /dev/null
@@ -1,11 +0,0 @@
-diff --git a/makefile b/makefile
-index 51bfabf..d0ea1eb 100644
---- a/makefile
-+++ b/makefile
-@@ -1,5 +1,4 @@
- CC=gcc -O2 -Wall
-
- all: limegps.c gpssim.c
-- $(CC) -o LimeGPS limegps.c gpssim.c -lm -lpthread -lLimeSuite
--
-+ $(CC) -o LimeGPS limegps.c gpssim.c -lm -lpthread -lLimeSuite -I../LimeSuite/src -L../LimeSuite/builddir/src -Wl,-rpath="$(PWD)/../LimeSuite/builddir/src"
diff --git a/tools/gpstest/patches/limeSuite/mcu_error.patch b/tools/gpstest/patches/limeSuite/mcu_error.patch
deleted file mode 100644
index 91790a4a2b..0000000000
--- a/tools/gpstest/patches/limeSuite/mcu_error.patch
+++ /dev/null
@@ -1,13 +0,0 @@
-diff --git a/src/lms7002m/LMS7002M_RxTxCalibrations.cpp b/src/lms7002m/LMS7002M_RxTxCalibrations.cpp
-index 41a37044..ac29c6b6 100644
---- a/src/lms7002m/LMS7002M_RxTxCalibrations.cpp
-+++ b/src/lms7002m/LMS7002M_RxTxCalibrations.cpp
-@@ -254,7 +254,7 @@ int LMS7002M::CalibrateTx(float_type bandwidth_Hz, bool useExtLoopback)
- mcuControl->RunProcedure(useExtLoopback ? MCU_FUNCTION_CALIBRATE_TX_EXTLOOPB : MCU_FUNCTION_CALIBRATE_TX);
- status = mcuControl->WaitForMCU(1000);
- if(status != MCU_BD::MCU_NO_ERROR)
-- return ReportError(EINVAL, "Tx Calibration: MCU error %i (%s)", status, MCU_BD::MCUStatusMessage(status));
-+ return -1; //ReportError(EINVAL, "Tx Calibration: MCU error %i (%s)", status, MCU_BD::MCUStatusMessage(status));
- }
-
- //sync registers to cache
diff --git a/tools/gpstest/patches/limeSuite/reference_print.patch b/tools/gpstest/patches/limeSuite/reference_print.patch
deleted file mode 100644
index 5bd7cdf1ed..0000000000
--- a/tools/gpstest/patches/limeSuite/reference_print.patch
+++ /dev/null
@@ -1,13 +0,0 @@
-diff --git a/src/FPGA_common/FPGA_common.cpp b/src/FPGA_common/FPGA_common.cpp
-index 4e81f33e..7381c475 100644
---- a/src/FPGA_common/FPGA_common.cpp
-+++ b/src/FPGA_common/FPGA_common.cpp
-@@ -946,7 +946,7 @@ double FPGA::DetectRefClk(double fx3Clk)
-
- if (i == 0)
- return -1;
-- lime::info("Reference clock %1.2f MHz", clkTbl[i - 1] / 1e6);
-+ //lime::info("Reference clock %1.2f MHz", clkTbl[i - 1] / 1e6);
- return clkTbl[i - 1];
- }
-
diff --git a/tools/gpstest/run_unittest.sh b/tools/gpstest/run_unittest.sh
deleted file mode 100755
index e0ca017a6d..0000000000
--- a/tools/gpstest/run_unittest.sh
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/bin/bash
-
-# NOTE: can only run inside limeGPS test box!
-
-# run limeGPS with random static location
-timeout 300 ./simulate_gps_signal.py 32.7518 -117.1962 &
-gps_PID=$(ps -aux | grep -m 1 "timeout 300" | awk '{print $2}')
-
-echo "starting limeGPS..."
-sleep 10
-
-# run unit tests (skipped when module not present)
-python -m unittest test_gps.py
-python -m unittest test_gps_qcom.py
-
-kill $gps_PID
diff --git a/tools/gpstest/setup.sh b/tools/gpstest/setup.sh
deleted file mode 100755
index ddf41dd260..0000000000
--- a/tools/gpstest/setup.sh
+++ /dev/null
@@ -1,25 +0,0 @@
-#!/bin/bash
-set -e
-
-DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
-cd $DIR
-
-if [ ! -d LimeSuite ]; then
- git clone https://github.com/myriadrf/LimeSuite.git
- cd LimeSuite
- # checkout latest version which has firmware updates available
- git checkout v20.10.0
- git apply ../patches/limeSuite/*
- mkdir builddir && cd builddir
- cmake -DCMAKE_BUILD_TYPE=Release ..
- make -j4
- cd ../..
-fi
-
-if [ ! -d LimeGPS ]; then
- git clone https://github.com/osqzss/LimeGPS.git
- cd LimeGPS
- git apply ../patches/limeGPS/*
- make
- cd ..
-fi
diff --git a/tools/gpstest/setup_hackrf.sh b/tools/gpstest/setup_hackrf.sh
deleted file mode 100755
index e504ec9447..0000000000
--- a/tools/gpstest/setup_hackrf.sh
+++ /dev/null
@@ -1,21 +0,0 @@
-#!/bin/bash
-set -e
-
-DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
-cd $DIR
-
-if [ ! -d gps-sdr-sim ]; then
- git clone https://github.com/osqzss/gps-sdr-sim.git
- cd gps-sdr-sim
- make
- cd ..
-fi
-
-if [ ! -d hackrf ]; then
- git clone https://github.com/greatscottgadgets/hackrf.git
- cd hackrf/host
- git apply ../../patches/hackrf.patch
- cmake .
- make
-fi
-
diff --git a/tools/gpstest/simulate_gps_signal.py b/tools/gpstest/simulate_gps_signal.py
deleted file mode 100755
index da0f64eaca..0000000000
--- a/tools/gpstest/simulate_gps_signal.py
+++ /dev/null
@@ -1,151 +0,0 @@
-#!/usr/bin/env python3
-import os
-import random
-import argparse
-import datetime as dt
-import subprocess as sp
-from typing import Tuple
-
-from laika.downloader import download_nav
-from laika.gps_time import GPSTime
-from laika.helpers import ConstellationId
-
-cache_dir = '/tmp/gpstest/'
-
-
-def download_rinex():
- # TODO: check if there is a better way to get the full brdc file for LimeGPS
- gps_time = GPSTime.from_datetime(dt.datetime.utcnow())
- utc_time = dt.datetime.utcnow()# - dt.timedelta(1)
- gps_time = GPSTime.from_datetime(dt.datetime(utc_time.year, utc_time.month, utc_time.day))
- return download_nav(gps_time, cache_dir, ConstellationId.GPS)
-
-def get_coords(lat, lon, s1, s2, o1=0, o2=0) -> Tuple[int, int]:
- lat_add = random.random()*s1 + o1
- lon_add = random.random()*s2 + o2
-
- lat = ((lat + lat_add + 90) % 180) - 90
- lon = ((lon + lon_add + 180) % 360) - 180
- return round(lat, 5), round(lon, 5)
-
-def get_continuous_coords(lat, lon) -> Tuple[int, int]:
- # continuously move around the world
- return get_coords(lat, lon, 0.01, 0.01)
-
-def get_random_coords(lat, lon) -> Tuple[int, int]:
- # jump around the world
- return get_coords(lat, lon, 20, 20, 10, 20)
-
-def run_limeSDR_loop(lat, lon, alt, contin_sim, rinex_file, timeout):
- while True:
- try:
- # TODO: add starttime setting and altitude
- # -t 2023/01/15,00:00:00 -T 2023/01/15,00:00:00
- # this needs to match the date of the navigation file
- print(f"starting LimeGPS, Location: {lat} {lon} {alt}")
- cmd = ["LimeGPS/LimeGPS", "-e", rinex_file, "-l", f"{lat},{lon},{alt}"]
- print(f"CMD: {cmd}")
- sp.check_output(cmd, stderr=sp.PIPE, timeout=timeout)
- except KeyboardInterrupt:
- print("stopping LimeGPS")
- return
- except sp.TimeoutExpired:
- print("LimeGPS timeout reached!")
- except Exception as e:
- out_stderr = e.stderr.decode('utf-8')# pylint:disable=no-member
- if "Device is busy." in out_stderr:
- print("GPS simulation is already running, Device is busy!")
- return
-
- print(f"LimeGPS crashed: {str(e)}")
- print(f"stderr:\n{e.stderr.decode('utf-8')}")# pylint:disable=no-member
- return
-
- if contin_sim:
- lat, lon = get_continuous_coords(lat, lon)
- else:
- lat, lon = get_random_coords(lat, lon)
-
-def run_hackRF_loop(lat, lon, rinex_file, timeout):
-
- if timeout is not None:
- print("no jump mode for hackrf!")
- return
-
- try:
- print(f"starting gps-sdr-sim, Location: {lat},{lon}")
- # create 30second file and replay with hackrf endless
- cmd = ["gps-sdr-sim/gps-sdr-sim", "-e", rinex_file, "-l", f"{lat},{lon},-200", "-d", "30"]
- sp.check_output(cmd, stderr=sp.PIPE, timeout=timeout)
- # created in current working directory
- except Exception:
- print("Failed to generate gpssim.bin")
-
- try:
- print("starting hackrf_transfer")
- # create 30second file and replay with hackrf endless
- cmd = ["hackrf/host/hackrf-tools/src/hackrf_transfer", "-t", "gpssim.bin",
- "-f", "1575420000", "-s", "2600000", "-a", "1", "-R"]
- sp.check_output(cmd, stderr=sp.PIPE, timeout=timeout)
- except KeyboardInterrupt:
- print("stopping hackrf_transfer")
- return
- except Exception as e:
- print(f"hackrf_transfer crashed:{str(e)}")
-
-
-def main(lat, lon, alt, jump_sim, contin_sim, hackrf_mode):
-
- if hackrf_mode:
- if not os.path.exists('hackrf'):
- print("hackrf not found run 'setup_hackrf.sh' first")
- return
-
- if not os.path.exists('gps-sdr-sim'):
- print("gps-sdr-sim not found run 'setup_hackrf.sh' first")
- return
-
- output = sp.check_output(["hackrf/host/hackrf-tools/src/hackrf_info"])
- if output.strip() == b"" or b"No HackRF boards found." in output:
- print("No HackRF boards found!")
- return
-
- else:
- if not os.path.exists('LimeGPS'):
- print("LimeGPS not found run 'setup.sh' first")
- return
-
- if not os.path.exists('LimeSuite'):
- print("LimeSuite not found run 'setup.sh' first")
- return
-
- output = sp.check_output(["LimeSuite/builddir/LimeUtil/LimeUtil", "--find"])
- if output.strip() == b"":
- print("No LimeSDR device found!")
- return
- print(f"Device: {output.strip().decode('utf-8')}")
-
- if lat == 0 and lon == 0:
- lat, lon = get_random_coords(47.2020, 15.7403)
-
- rinex_file = download_rinex()
-
- timeout = None
- if jump_sim:
- timeout = 30
-
- if hackrf_mode:
- run_hackRF_loop(lat, lon, rinex_file, timeout)
- else:
- run_limeSDR_loop(lat, lon, alt, contin_sim, rinex_file, timeout)
-
-if __name__ == "__main__":
- parser = argparse.ArgumentParser(description="Simulate static [or random jumping] GPS signal.")
- parser.add_argument("lat", type=float, nargs='?', default=0)
- parser.add_argument("lon", type=float, nargs='?', default=0)
- parser.add_argument("alt", type=float, nargs='?', default=0)
- parser.add_argument("--jump", action="store_true", help="signal that jumps around the world")
- parser.add_argument("--contin", action="store_true", help="continuously/slowly moving around the world")
- parser.add_argument("--hackrf", action="store_true", help="hackrf mode (DEFAULT: LimeSDR)")
- args = parser.parse_args()
- main(args.lat, args.lon, args.alt, args.jump, args.contin, args.hackrf)
diff --git a/tools/gpstest/test_gps.py b/tools/gpstest/test_gps.py
deleted file mode 100755
index bbd53ebfff..0000000000
--- a/tools/gpstest/test_gps.py
+++ /dev/null
@@ -1,189 +0,0 @@
-#!/usr/bin/env python3
-import pytest
-import time
-import unittest
-import struct
-
-from openpilot.common.params import Params
-import cereal.messaging as messaging
-import openpilot.system.sensord.pigeond as pd
-from openpilot.selfdrive.test.helpers import with_processes
-
-
-def read_events(service, duration_sec):
- service_sock = messaging.sub_sock(service, timeout=0.1)
- start_time_sec = time.monotonic()
- events = []
- while time.monotonic() - start_time_sec < duration_sec:
- events += messaging.drain_sock(service_sock)
- time.sleep(0.1)
-
- assert len(events) != 0, f"No '{service}'events collected!"
- return events
-
-
-def create_backup(pigeon):
- # controlled GNSS stop
- pigeon.send(b"\xB5\x62\x06\x04\x04\x00\x00\x00\x08\x00\x16\x74")
-
- # store almanac in flash
- pigeon.send(b"\xB5\x62\x09\x14\x04\x00\x00\x00\x00\x00\x21\xEC")
- try:
- if not pigeon.wait_for_ack(ack=pd.UBLOX_SOS_ACK, nack=pd.UBLOX_SOS_NACK):
- raise RuntimeError("Could not store almanac")
- except TimeoutError:
- pass
-
-
-def verify_ubloxgnss_data(socket: messaging.SubSocket, max_time: int):
- start_time = 0
- end_time = 0
- events = messaging.drain_sock(socket)
- assert len(events) != 0, "no ublxGnss measurements"
-
- for event in events:
- if event.ubloxGnss.which() != "measurementReport":
- continue
-
- if start_time == 0:
- start_time = event.logMonoTime
-
- if event.ubloxGnss.measurementReport.numMeas != 0:
- end_time = event.logMonoTime
- break
-
- assert end_time != 0, "no ublox measurements received!"
-
- ttfm = (end_time - start_time)/1e9
- assert ttfm < max_time, f"Time to first measurement > {max_time}s, {ttfm}"
-
- # check for satellite count in measurements
- sat_count = []
- end_id = events.index(event)# pylint:disable=undefined-loop-variable
- for event in events[end_id:]:
- if event.ubloxGnss.which() == "measurementReport":
- sat_count.append(event.ubloxGnss.measurementReport.numMeas)
-
- num_sat = int(sum(sat_count)/len(sat_count))
- assert num_sat >= 5, f"Not enough satellites {num_sat} (TestBox setup!)"
-
-
-def verify_gps_location(socket: messaging.SubSocket, max_time: int):
- events = messaging.drain_sock(socket)
- assert len(events) != 0, "no gpsLocationExternal measurements"
-
- start_time = events[0].logMonoTime
- end_time = 0
- for event in events:
- gps_valid = event.gpsLocationExternal.flags % 2
-
- if gps_valid:
- end_time = event.logMonoTime
- break
-
- assert end_time != 0, "GPS location never converged!"
-
- ttfl = (end_time - start_time)/1e9
- assert ttfl < max_time, f"Time to first location > {max_time}s, {ttfl}"
-
- hacc = events[-1].gpsLocationExternal.accuracy
- vacc = events[-1].gpsLocationExternal.verticalAccuracy
- assert hacc < 20, f"Horizontal accuracy too high, {hacc}"
- assert vacc < 45, f"Vertical accuracy too high, {vacc}"
-
-
-def verify_time_to_first_fix(pigeon):
- # get time to first fix from nav status message
- nav_status = b""
- while True:
- pigeon.send(b"\xb5\x62\x01\x03\x00\x00\x04\x0d")
- nav_status = pigeon.receive()
- if nav_status[:4] == b"\xb5\x62\x01\x03":
- break
-
-    values = struct.unpack("<…", nav_status[…])
-    ttff = …
-    assert ttff < 40, f"Time to first fix > 40s, {ttff}"
-
-
-@pytest.mark.tici
-class TestGPS(unittest.TestCase):
- @classmethod
- def setUpClass(cls):
- ublox_available = Params().get_bool("UbloxAvailable")
- if not ublox_available:
- raise unittest.SkipTest
-
-
- def tearDown(self):
- pd.set_power(False)
-
- @with_processes(['ubloxd'])
- def test_a_ublox_reset(self):
-
- pigeon, pm = pd.create_pigeon()
- pd.init_baudrate(pigeon)
- assert pigeon.reset_device(), "Could not reset device!"
-
- pd.initialize_pigeon(pigeon)
-
- ugs = messaging.sub_sock("ubloxGnss", timeout=0.1)
- gle = messaging.sub_sock("gpsLocationExternal", timeout=0.1)
-
- # receive some messages (restart after cold start takes up to 30seconds)
- pd.run_receiving(pigeon, pm, 60)
-
- # store almanac for next test
- create_backup(pigeon)
-
- verify_ubloxgnss_data(ugs, 60)
- verify_gps_location(gle, 60)
-
- # skip for now, this might hang for a while
- #verify_time_to_first_fix(pigeon)
-
-
- @with_processes(['ubloxd'])
- def test_b_ublox_almanac(self):
- pigeon, pm = pd.create_pigeon()
- pd.init_baudrate(pigeon)
-
- # device cold start
- pigeon.send(b"\xb5\x62\x06\x04\x04\x00\xff\xff\x00\x00\x0c\x5d")
- time.sleep(1) # wait for cold start
- pd.init_baudrate(pigeon)
-
- # clear configuration
- pigeon.send_with_ack(b"\xb5\x62\x06\x09\x0d\x00\x00\x00\x1f\x1f\x00\x00\x00\x00\x00\x00\x00\x00\x17\x71\x5b")
-
- # restoring almanac backup
- pigeon.send(b"\xB5\x62\x09\x14\x00\x00\x1D\x60")
- status = pigeon.wait_for_backup_restore_status()
- assert status == 2, "Could not restore almanac backup"
-
- pd.initialize_pigeon(pigeon)
-
- ugs = messaging.sub_sock("ubloxGnss", timeout=0.1)
- gle = messaging.sub_sock("gpsLocationExternal", timeout=0.1)
-
- pd.run_receiving(pigeon, pm, 15)
- verify_ubloxgnss_data(ugs, 15)
- verify_gps_location(gle, 20)
-
-
- @with_processes(['ubloxd'])
- def test_c_ublox_startup(self):
- pigeon, pm = pd.create_pigeon()
- pd.init_baudrate(pigeon)
- pd.initialize_pigeon(pigeon)
-
- ugs = messaging.sub_sock("ubloxGnss", timeout=0.1)
- gle = messaging.sub_sock("gpsLocationExternal", timeout=0.1)
- pd.run_receiving(pigeon, pm, 10)
- verify_ubloxgnss_data(ugs, 10)
- verify_gps_location(gle, 10)
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/tools/gpstest/test_gps_qcom.py b/tools/gpstest/test_gps_qcom.py
deleted file mode 100755
index 2ea5556684..0000000000
--- a/tools/gpstest/test_gps_qcom.py
+++ /dev/null
@@ -1,78 +0,0 @@
-#!/usr/bin/env python3
-import pytest
-import time
-import unittest
-import subprocess as sp
-
-from openpilot.common.params import Params
-import cereal.messaging as messaging
-from openpilot.selfdrive.manager.process_config import managed_processes
-
-
-def exec_mmcli(cmd):
- cmd = "mmcli -m 0 " + cmd
- p = sp.Popen(cmd, shell=True, stdout=sp.PIPE, stderr=sp.PIPE)
- return p.communicate()
-
-
-def wait_for_location(socket, timeout):
- while True:
- events = messaging.drain_sock(socket)
- for event in events:
- if event.gpsLocation.flags % 2:
- return False
-
- timeout -= 1
- if timeout <= 0:
- return True
-
- time.sleep(0.1)
- continue
-
-
-@pytest.mark.tici
-class TestGPS(unittest.TestCase):
- @classmethod
- def setUpClass(cls):
- ublox_available = Params().get_bool("UbloxAvailable")
- if ublox_available:
- raise unittest.SkipTest
-
- def test_a_quectel_cold_start(self):
- # delete assistance data to enforce cold start for GNSS
- # testing shows that this takes up to 20min
-
- _, err = exec_mmcli("--command='AT+QGPSDEL=0'")
- assert len(err) == 0, f"GPSDEL failed: {err}"
-
- managed_processes['rawgpsd'].start()
- start_time = time.monotonic()
- glo = messaging.sub_sock("gpsLocation", timeout=0.1)
-
- timeout = 10*60*3 # 3 minute
- timedout = wait_for_location(glo, timeout)
- managed_processes['rawgpsd'].stop()
-
- assert timedout is False, "Waiting for location timed out (3min)!"
-
- duration = time.monotonic() - start_time
- assert duration < 60, f"Received GPS location {duration}!"
-
-
- def test_b_quectel_startup(self):
- managed_processes['rawgpsd'].start()
- start_time = time.monotonic()
- glo = messaging.sub_sock("gpsLocation", timeout=0.1)
-
- timeout = 10*60 # 1 minute
- timedout = wait_for_location(glo, timeout)
- managed_processes['rawgpsd'].stop()
-
- assert timedout is False, "Waiting for location timed out (3min)!"
-
- duration = time.monotonic() - start_time
- assert duration < 60, f"Received GPS location {duration}!"
-
-
-if __name__ == "__main__":
- unittest.main()
diff --git a/tools/plotjuggler/layouts/gps_vs_llk.xml b/tools/plotjuggler/layouts/gps_vs_llk.xml
new file mode 100644
index 0000000000..44980712ed
--- /dev/null
+++ b/tools/plotjuggler/layouts/gps_vs_llk.xml
@@ -0,0 +1,83 @@
+<!-- [plotjuggler layout markup stripped during extraction; the recoverable content is the
+      custom Lua transform below and the channels it reads] -->
+R = 6378.137 -- Radius of earth in KM
+-- Compute the Haversine distance between
+-- two points defined by latitude and longitude.
+-- Return the distance in meters
+lat1, lon1 = value, v1
+lat2, lon2 = v2, v3
+dLat = (lat2 - lat1) * math.pi / 180
+dLon = (lon2 - lon1) * math.pi / 180
+a = math.sin(dLat/2) * math.sin(dLat/2) +
+    math.cos(lat1 * math.pi / 180) * math.cos(lat2 * math.pi / 180) *
+    math.sin(dLon/2) * math.sin(dLon/2)
+c = 2 * math.atan(math.sqrt(a), math.sqrt(1-a))
+d = R * c
+distance = d * 1000 -- meters
+return distance
+<!-- transform inputs -->
+/gpsLocationExternal/latitude
+/gpsLocationExternal/longitude
+/liveLocationKalman/positionGeodetic/value/0
+/liveLocationKalman/positionGeodetic/value/1
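For reference (editor's addition, not part of the layout file): the Lua transform above computes the great-circle distance between the ublox fix and the liveLocationKalman position with the Haversine formula,

```latex
a = \sin^2\!\left(\tfrac{\Delta\varphi}{2}\right)
  + \cos\varphi_1\,\cos\varphi_2\,\sin^2\!\left(\tfrac{\Delta\lambda}{2}\right),
\qquad
d = 2R\,\operatorname{atan2}\!\left(\sqrt{a},\,\sqrt{1-a}\right)
```

with latitudes \(\varphi\) and longitudes \(\lambda\) in radians, \(R = 6378.137\) km, and the result scaled to meters.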
diff --git a/tools/replay/SConscript b/tools/replay/SConscript
index bce7512e44..db8447003b 100644
--- a/tools/replay/SConscript
+++ b/tools/replay/SConscript
@@ -1,25 +1,21 @@
-import os
-Import('env', 'qt_env', 'arch', 'common', 'messaging', 'visionipc',
- 'cereal', 'transformations')
+Import('env', 'qt_env', 'arch', 'common', 'messaging', 'visionipc', 'cereal')
base_frameworks = qt_env['FRAMEWORKS']
-base_libs = [common, messaging, cereal, visionipc, transformations, 'zmq',
- 'capnp', 'kj', 'm', 'ssl', 'crypto', 'pthread'] + qt_env["LIBS"]
+base_libs = [common, messaging, cereal, visionipc, 'zmq',
+ 'capnp', 'kj', 'm', 'ssl', 'crypto', 'pthread', 'qt_util'] + qt_env["LIBS"]
if arch == "Darwin":
base_frameworks.append('OpenCL')
else:
base_libs.append('OpenCL')
-qt_libs = ['qt_util'] + base_libs
qt_env['CXXFLAGS'] += ["-Wno-deprecated-declarations"]
replay_lib_src = ["replay.cc", "consoleui.cc", "camera.cc", "filereader.cc", "logreader.cc", "framereader.cc", "route.cc", "util.cc"]
-
-replay_lib = qt_env.Library("qt_replay", replay_lib_src, LIBS=qt_libs, FRAMEWORKS=base_frameworks)
+replay_lib = qt_env.Library("qt_replay", replay_lib_src, LIBS=base_libs, FRAMEWORKS=base_frameworks)
Export('replay_lib')
-replay_libs = [replay_lib, 'avutil', 'avcodec', 'avformat', 'bz2', 'curl', 'yuv', 'ncurses'] + qt_libs
+replay_libs = [replay_lib, 'avutil', 'avcodec', 'avformat', 'bz2', 'curl', 'yuv', 'ncurses'] + base_libs
qt_env.Program("replay", ["main.cc"], LIBS=replay_libs, FRAMEWORKS=base_frameworks)
if GetOption('extras'):
- qt_env.Program('tests/test_replay', ['tests/test_runner.cc', 'tests/test_replay.cc'], LIBS=[replay_libs, qt_libs])
+ qt_env.Program('tests/test_replay', ['tests/test_runner.cc', 'tests/test_replay.cc'], LIBS=[replay_libs, base_libs])
diff --git a/tools/replay/logreader.cc b/tools/replay/logreader.cc
index 74aebceae5..c92ff4753f 100644
--- a/tools/replay/logreader.cc
+++ b/tools/replay/logreader.cc
@@ -1,6 +1,7 @@
#include "tools/replay/logreader.h"
#include
+#include "tools/replay/filereader.h"
#include "tools/replay/util.h"
Event::Event(const kj::ArrayPtr<const capnp::word> &amsg, bool frame) : reader(amsg), frame(frame) {
@@ -40,9 +41,7 @@ LogReader::~LogReader() {
}
}
-bool LogReader::load(const std::string &url, std::atomic<bool> *abort,
-                     const std::set<cereal::Event::Which> &allow,
-                     bool local_cache, int chunk_size, int retries) {
+bool LogReader::load(const std::string &url, std::atomic<bool> *abort, bool local_cache, int chunk_size, int retries) {
raw_ = FileReader(local_cache, chunk_size, retries).read(url, abort);
if (raw_.empty()) return false;
@@ -50,15 +49,15 @@ bool LogReader::load(const std::string &url, std::atomic *abort,
raw_ = decompressBZ2(raw_, abort);
if (raw_.empty()) return false;
}
- return parse(allow, abort);
+ return parse(abort);
}
bool LogReader::load(const std::byte *data, size_t size, std::atomic<bool> *abort) {
raw_.assign((const char *)data, size);
- return parse({}, abort);
+ return parse(abort);
}
-bool LogReader::parse(const std::set<cereal::Event::Which> &allow, std::atomic<bool> *abort) {
+bool LogReader::parse(std::atomic<bool> *abort) {
try {
kj::ArrayPtr<const capnp::word> words((const capnp::word *)raw_.data(), raw_.size() / sizeof(capnp::word));
while (words.size() > 0 && !(abort && *abort)) {
@@ -67,12 +66,6 @@ bool LogReader::parse(const std::set<cereal::Event::Which> &allow, std::atomic<bool> *abort) {
-      if (!allow.empty() && allow.find(evt->which) == allow.end()) {
- words = kj::arrayPtr(evt->reader.getEnd(), words.end());
- delete evt;
- continue;
- }
-
// Add encodeIdx packet again as a frame packet for the video stream
if (evt->which == cereal::Event::ROAD_ENCODE_IDX ||
evt->which == cereal::Event::DRIVER_ENCODE_IDX ||
diff --git a/tools/replay/logreader.h b/tools/replay/logreader.h
index 77d751a91b..73f822d16c 100644
--- a/tools/replay/logreader.h
+++ b/tools/replay/logreader.h
@@ -6,13 +6,11 @@
#endif
#include
-#include <set>
#include
#include
#include "cereal/gen/cpp/log.capnp.h"
#include "system/camerad/cameras/camera_common.h"
-#include "tools/replay/filereader.h"
const CameraType ALL_CAMERAS[] = {RoadCam, DriverCam, WideRoadCam};
const int MAX_CAMERAS = std::size(ALL_CAMERAS);
@@ -55,13 +53,13 @@ class LogReader {
public:
LogReader(size_t memory_pool_block_size = DEFAULT_EVENT_MEMORY_POOL_BLOCK_SIZE);
~LogReader();
-  bool load(const std::string &url, std::atomic<bool> *abort = nullptr, const std::set<cereal::Event::Which> &allow = {},
+  bool load(const std::string &url, std::atomic<bool> *abort = nullptr,
            bool local_cache = false, int chunk_size = -1, int retries = 0);
bool load(const std::byte *data, size_t size, std::atomic<bool> *abort = nullptr);
std::vector<Event*> events;
private:
-  bool parse(const std::set<cereal::Event::Which> &allow, std::atomic<bool> *abort);
+  bool parse(std::atomic<bool> *abort);
std::string raw_;
#ifdef HAS_MEMORY_RESOURCE
std::unique_ptr<std::pmr::monotonic_buffer_resource> mbr_;
diff --git a/tools/replay/main.cc b/tools/replay/main.cc
index 98a0bb3333..945cb4cd09 100644
--- a/tools/replay/main.cc
+++ b/tools/replay/main.cc
@@ -13,7 +13,6 @@ int main(int argc, char *argv[]) {
QCoreApplication app(argc, argv);
- const QStringList base_blacklist = {"uiDebug", "userFlag"};
const std::tuple<QString, REPLAY_FLAGS, QString> flags[] = {
{"dcam", REPLAY_FLAG_DCAM, "load driver camera"},
{"ecam", REPLAY_FLAG_ECAM, "load wide road camera"},
@@ -22,7 +21,7 @@ int main(int argc, char *argv[]) {
{"qcam", REPLAY_FLAG_QCAMERA, "load qcamera"},
{"no-hw-decoder", REPLAY_FLAG_NO_HW_DECODER, "disable HW video decoding"},
{"no-vipc", REPLAY_FLAG_NO_VIPC, "do not output video"},
- {"all", REPLAY_FLAG_ALL_SERVICES, "do output all messages including " + base_blacklist.join(", ") +
+ {"all", REPLAY_FLAG_ALL_SERVICES, "do output all messages including uiDebug, userFlag"
". this may causes issues when used along with UI"}
};
@@ -64,7 +63,7 @@ int main(int argc, char *argv[]) {
op_prefix.reset(new OpenpilotPrefix(prefix.toStdString()));
}
- Replay *replay = new Replay(route, allow, block, base_blacklist, nullptr, replay_flags, parser.value("data_dir"), &app);
+ Replay *replay = new Replay(route, allow, block, nullptr, replay_flags, parser.value("data_dir"), &app);
if (!parser.value("c").isEmpty()) {
replay->setSegmentCacheLimit(parser.value("c").toInt());
}
diff --git a/tools/replay/replay.cc b/tools/replay/replay.cc
index b83f657e39..1ec484d677 100644
--- a/tools/replay/replay.cc
+++ b/tools/replay/replay.cc
@@ -7,41 +7,25 @@
#include "cereal/services.h"
#include "common/params.h"
#include "common/timing.h"
-#include "system/hardware/hw.h"
#include "tools/replay/util.h"
-Replay::Replay(QString route, QStringList allow, QStringList block, QStringList base_blacklist, SubMaster *sm_, uint32_t flags, QString data_dir, QObject *parent)
- : sm(sm_), flags_(flags), QObject(parent) {
-  std::vector<const char *> s;
+Replay::Replay(QString route, QStringList allow, QStringList block, SubMaster *sm_,
+ uint32_t flags, QString data_dir, QObject *parent) : sm(sm_), flags_(flags), QObject(parent) {
+ if (!(flags_ & REPLAY_FLAG_ALL_SERVICES)) {
+ block << "uiDebug" << "userFlag";
+ }
auto event_struct = capnp::Schema::from<cereal::Event>().asStruct();
sockets_.resize(event_struct.getUnionFields().size());
- for (const auto &it : services) {
- auto name = it.second.name.c_str();
- uint16_t which = event_struct.getFieldByName(name).getProto().getDiscriminantValue();
- if ((which == cereal::Event::Which::UI_DEBUG || which == cereal::Event::Which::USER_FLAG) &&
- !(flags & REPLAY_FLAG_ALL_SERVICES) &&
- !allow.contains(name)) {
- continue;
- }
-
- if ((allow.empty() || allow.contains(name)) && !block.contains(name)) {
- sockets_[which] = name;
- if (!allow.empty() || !block.empty()) {
- allow_list.insert((cereal::Event::Which)which);
- }
- s.push_back(name);
- }
- }
-
- if (!allow_list.empty()) {
- // the following events are needed for replay to work properly.
- allow_list.insert(cereal::Event::Which::INIT_DATA);
- allow_list.insert(cereal::Event::Which::CAR_PARAMS);
- if (sockets_[cereal::Event::Which::PANDA_STATES] != nullptr) {
- allow_list.insert(cereal::Event::Which::PANDA_STATE_D_E_P_R_E_C_A_T_E_D);
+ for (const auto &[name, _] : services) {
+ if (!block.contains(name.c_str()) && (allow.empty() || allow.contains(name.c_str()))) {
+ uint16_t which = event_struct.getFieldByName(name).getProto().getDiscriminantValue();
+ sockets_[which] = name.c_str();
}
}
+  std::vector<const char *> s;
+ std::copy_if(sockets_.begin(), sockets_.end(), std::back_inserter(s),
+ [](const char *name) { return name != nullptr; });
qDebug() << "services " << s;
qDebug() << "loading route " << route;
@@ -154,7 +138,7 @@ void Replay::buildTimeline() {
const auto &route_segments = route_->segments();
for (auto it = route_segments.cbegin(); it != route_segments.cend() && !exit_; ++it) {
std::shared_ptr<LogReader> log(new LogReader());
- if (!log->load(it->second.qlog.toStdString(), &exit_, {}, !hasFlag(REPLAY_FLAG_NO_FILE_CACHE), 0, 3)) continue;
+ if (!log->load(it->second.qlog.toStdString(), &exit_, !hasFlag(REPLAY_FLAG_NO_FILE_CACHE), 0, 3)) continue;
for (const Event *e : log->events) {
if (e->which == cereal::Event::Which::CONTROLS_STATE) {
@@ -237,30 +221,17 @@ void Replay::segmentLoadFinished(bool success) {
}
void Replay::queueSegment() {
- if (segments_.empty()) return;
-
- SegmentMap::iterator begin, cur;
- begin = cur = segments_.lower_bound(std::min(current_segment_.load(), segments_.rbegin()->first));
- int distance = std::max(std::ceil(segment_cache_limit / 2.0) - 1, segment_cache_limit - std::distance(cur, segments_.end()));
- for (int i = 0; begin != segments_.begin() && i < distance; ++i) {
- --begin;
- }
- auto end = begin;
- for (int i = 0; end != segments_.end() && i < segment_cache_limit; ++i) {
- ++end;
- }
+ auto cur = segments_.lower_bound(current_segment_.load());
+ if (cur == segments_.end()) return;
+ auto begin = std::prev(cur, std::min(segment_cache_limit / 2, std::distance(segments_.begin(), cur)));
+ auto end = std::next(begin, std::min(segment_cache_limit, segments_.size()));
// load one segment at a time
- for (auto it = cur; it != end; ++it) {
- auto &[n, seg] = *it;
- if ((seg && !seg->isLoaded()) || !seg) {
- if (!seg) {
- rDebug("loading segment %d...", n);
-        seg = std::make_unique<Segment>(n, route_->at(n), flags_, allow_list);
- QObject::connect(seg.get(), &Segment::loadFinished, this, &Replay::segmentLoadFinished);
- }
- break;
- }
+ auto it = std::find_if(cur, end, [](auto &it) { return !it.second || !it.second->isLoaded(); });
+ if (it != end && !it->second) {
+ rDebug("loading segment %d...", it->first);
+    it->second = std::make_unique<Segment>(it->first, route_->at(it->first), flags_);
+ QObject::connect(it->second.get(), &Segment::loadFinished, this, &Replay::segmentLoadFinished);
}
mergeSegments(begin, end);
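Editor's model of the rewritten queueSegment() window selection above (not part of the patch): keep at most segment_cache_limit segments loaded, with up to half of the cache kept behind the current playback segment. A rough Python equivalent using indices instead of map iterators:

```python
def cache_window(segment_numbers, current, cache_limit):
  """Return the contiguous run of segment numbers that should stay cached."""
  nums = sorted(segment_numbers)
  # first segment at or after the current one (lower_bound)
  cur = next((i for i, n in enumerate(nums) if n >= current), len(nums))
  if cur == len(nums):
    return []
  begin = cur - min(cache_limit // 2, cur)
  end = begin + min(cache_limit, len(nums))
  return nums[begin:end]

print(cache_window(range(10), current=6, cache_limit=3))  # [5, 6, 7]
```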
@@ -297,13 +268,11 @@ void Replay::mergeSegments(const SegmentMap::iterator &begin, const SegmentMap::iterator &end) {
new_events_->clear();
new_events_->reserve(new_events_size);
for (int n : segments_need_merge) {
- const auto &e = segments_[n]->log->events;
- if (e.size() > 0) {
- auto insert_from = e.begin();
- if (new_events_->size() > 0 && (*insert_from)->which == cereal::Event::Which::INIT_DATA) ++insert_from;
- auto middle = new_events_->insert(new_events_->end(), insert_from, e.end());
- std::inplace_merge(new_events_->begin(), middle, new_events_->end(), Event::lessThan());
- }
+ size_t size = new_events_->size();
+ const auto &events = segments_[n]->log->events;
+ std::copy_if(events.begin(), events.end(), std::back_inserter(*new_events_),
+ [this](auto e) { return e->which < sockets_.size() && sockets_[e->which] != nullptr; });
+ std::inplace_merge(new_events_->begin(), new_events_->begin() + size, new_events_->end(), Event::lessThan());
}
if (stream_thread_) {
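Editor's sketch of the new mergeSegments() body above (not part of the patch): each segment's events are already sorted, so the loop filters them down to enabled sockets and merges them into one sorted stream, which is what copy_if plus inplace_merge achieve. A compact Python analogue with illustrative event dictionaries:

```python
from heapq import merge

def merge_segment_events(segments, enabled):
  # keep only events whose socket is enabled, then merge the per-segment sorted lists
  filtered = ([e for e in events if e["which"] in enabled] for events in segments)
  return list(merge(*filtered, key=lambda e: e["mono_time"]))

seg0 = [{"which": "can", "mono_time": 1}, {"which": "uiDebug", "mono_time": 2}]
seg1 = [{"which": "can", "mono_time": 3}]
print(merge_segment_events([seg0, seg1], enabled={"can"}))
```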
@@ -418,17 +387,7 @@ void Replay::stream() {
cur_mono_time_ = evt->mono_time;
setCurrentSegment(toSeconds(cur_mono_time_) / 60);
- // migration for pandaState -> pandaStates to keep UI working for old segments
- if (cur_which == cereal::Event::Which::PANDA_STATE_D_E_P_R_E_C_A_T_E_D &&
- sockets_[cereal::Event::Which::PANDA_STATES] != nullptr) {
- MessageBuilder msg;
- auto ps = msg.initEvent().initPandaStates(1);
- ps[0].setIgnitionLine(true);
- ps[0].setPandaType(cereal::PandaState::PandaType::DOS);
- pm->send(sockets_[cereal::Event::Which::PANDA_STATES], msg);
- }
-
- if (cur_which < sockets_.size() && sockets_[cur_which] != nullptr) {
+ if (sockets_[cur_which] != nullptr) {
// keep time
long etime = (cur_mono_time_ - evt_start_ts) / speed_;
long rtime = nanos_since_boot() - loop_start_ts;
diff --git a/tools/replay/replay.h b/tools/replay/replay.h
index e26ef883b6..1144da2601 100644
--- a/tools/replay/replay.h
+++ b/tools/replay/replay.h
@@ -4,7 +4,6 @@
#include