diff --git a/.github/labeler.yaml b/.github/labeler.yaml
new file mode 100644
index 0000000000..94bc9074cd
--- /dev/null
+++ b/.github/labeler.yaml
@@ -0,0 +1,42 @@
+CI / testing:
+ - all: ['.github/**']
+ - all: ['**/test_*']
+
+car:
+ - all: ['selfdrive/car/**']
+
+body:
+ - all: ['selfdrive/car/body/*']
+chrysler:
+ - all: ['selfdrive/car/chrysler/*']
+ford:
+ - all: ['selfdrive/car/ford/*']
+gm:
+ - all: ['selfdrive/car/gm/*']
+honda:
+ - all: ['selfdrive/car/honda/*']
+hyundai:
+ - all: ['selfdrive/car/hyundai/*']
+mazda:
+ - all: ['selfdrive/car/mazda/*']
+nissan:
+ - all: ['selfdrive/car/nissan/*']
+subaru:
+ - all: ['selfdrive/car/subaru/*']
+tesla:
+ - all: ['selfdrive/car/tesla/*']
+toyota:
+ - all: ['selfdrive/car/toyota/*']
+volkswagen:
+ - all: ['selfdrive/car/volkswagen/*']
+
+simulation:
+ - all: ['tools/sim/**']
+ui:
+ - all: ['selfdrive/ui/**']
+tools:
+ - all: ['tools/**']
+
+multilanguage:
+ - all: ['selfdrive/ui/translations/**']
+
diff --git a/.github/workflows/compile-openpilot/action.yaml b/.github/workflows/compile-openpilot/action.yaml
new file mode 100644
index 0000000000..8775c96262
--- /dev/null
+++ b/.github/workflows/compile-openpilot/action.yaml
@@ -0,0 +1,27 @@
+name: 'compile openpilot'
+
+inputs:
+ cache_key_prefix:
+ description: 'Prefix for caching key'
+ required: false
+ default: 'scons'
+
+runs:
+ using: "composite"
+ steps:
+ - shell: bash
+ name: Build openpilot with all flags
+ run: |
+ ${{ env.RUN }} "scons -j$(nproc)"
+ ${{ env.RUN }} "release/check-dirty.sh"
+ - shell: bash
+ name: Cleanup scons cache and rebuild
+ run: |
+ ${{ env.RUN }} "rm -rf /tmp/scons_cache/* && \
+ scons -j$(nproc) --cache-populate"
+ - name: Save scons cache
+ uses: actions/cache/save@v3
+ if: github.ref == 'refs/heads/master'
+ with:
+ path: .ci_cache/scons_cache
+ key: ${{ inputs.cache_key_prefix }}-${{ env.CACHE_COMMIT_DATE }}-${{ github.sha }}
diff --git a/.github/workflows/labeler.yaml b/.github/workflows/labeler.yaml
new file mode 100644
index 0000000000..7c91b1591c
--- /dev/null
+++ b/.github/workflows/labeler.yaml
@@ -0,0 +1,18 @@
+name: "Pull Request Labeler"
+on:
+ pull_request_target:
+
+jobs:
+ labeler:
+ permissions:
+ contents: read
+ pull-requests: write
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v3
+ with:
+ submodules: false
+ - uses: actions/labeler@v4
+ with:
+ dot: true
+ configuration-path: .github/labeler.yaml
diff --git a/.github/workflows/selfdrive_tests.yaml b/.github/workflows/selfdrive_tests.yaml
index 3644062268..bce0d553ea 100644
--- a/.github/workflows/selfdrive_tests.yaml
+++ b/.github/workflows/selfdrive_tests.yaml
@@ -67,30 +67,22 @@ jobs:
cd $STRIPPED_DIR
${{ env.RUN }} "unset PYTHONWARNINGS && pre-commit run --all"
- build_all:
- name: build all
- runs-on: ubuntu-20.04
+ build:
+ strategy:
+ matrix:
+ arch: ${{ fromJson( (github.repository == 'commaai/openpilot') && '["x86_64", "aarch64"]' || '["x86_64"]' ) }}
+ runs-on: ${{ (matrix.arch == 'aarch64') && 'buildjet-2vcpu-ubuntu-2204-arm' || 'ubuntu-20.04' }}
steps:
- uses: actions/checkout@v3
with:
submodules: true
- uses: ./.github/workflows/setup-with-retry
- - name: Build openpilot with all flags
- timeout-minutes: ${{ ((steps.restore-scons-cache.outputs.cache-hit == 'true') && 12 || 30) }} # allow more time when we missed the scons cache
- run: |
- ${{ env.RUN }} "scons -j$(nproc)"
- ${{ env.RUN }} "release/check-dirty.sh"
- - name: Cleanup scons cache and rebuild
- timeout-minutes: ${{ ((steps.restore-scons-cache.outputs.cache-hit == 'true') && 2 || 30) }} # allow more time when we missed the scons cache
- run: |
- ${{ env.RUN }} "rm -rf /tmp/scons_cache/* && \
- scons -j$(nproc) --cache-populate"
- - name: Save scons cache
- uses: actions/cache/save@v3
- if: github.ref == 'refs/heads/master'
with:
- path: .ci_cache/scons_cache
- key: scons-${{ env.CACHE_COMMIT_DATE }}-${{ github.sha }}
+ cache_key_prefix: scons_${{ matrix.arch }}
+ - uses: ./.github/workflows/compile-openpilot
+ timeout-minutes: ${{ ((steps.restore-scons-cache.outputs.cache-hit == 'true') && 15 || 30) }} # allow more time when we missed the scons cache
+ with:
+ cache_key_prefix: scons_${{ matrix.arch }}
build_mac:
name: build macos
@@ -185,8 +177,10 @@ jobs:
sudo rm -rf /Applications/ArmGNUToolchain/*/*/.fseventsd
docker_push:
- name: docker push
- runs-on: ubuntu-20.04
+ strategy:
+ matrix:
+ arch: ${{ fromJson( (github.repository == 'commaai/openpilot') && '["x86_64", "aarch64"]' || '["x86_64"]' ) }}
+ runs-on: ${{ (matrix.arch == 'aarch64') && 'buildjet-2vcpu-ubuntu-2204-arm' || 'ubuntu-20.04' }}
if: github.ref == 'refs/heads/master' && github.event_name != 'pull_request' && github.repository == 'commaai/openpilot'
steps:
- uses: actions/checkout@v3
@@ -195,14 +189,34 @@ jobs:
- name: Setup to push to repo
run: |
echo "PUSH_IMAGE=true" >> "$GITHUB_ENV"
+ echo "TARGET_ARCHITECTURE=${{ matrix.arch }}" >> "$GITHUB_ENV"
$DOCKER_LOGIN
- uses: ./.github/workflows/setup-with-retry
with:
git-lfs: false
- name: Build and push CL Docker image
+ if: matrix.arch == 'x86_64'
run: |
+ unset TARGET_ARCHITECTURE
eval "$BUILD_CL"
+ docker_push_multiarch:
+ name: docker push multiarch tag
+ runs-on: ubuntu-20.04
+ if: github.ref == 'refs/heads/master' && github.event_name != 'pull_request' && github.repository == 'commaai/openpilot'
+ needs: [docker_push]
+ steps:
+ - uses: actions/checkout@v3
+ with:
+ submodules: false
+ - name: Setup docker
+ run: |
+ $DOCKER_LOGIN
+ - name: Merge x64 and arm64 tags
+ run: |
+ export PUSH_IMAGE=true
+ selfdrive/test/docker_tag_multiarch.sh base x86_64 aarch64
+
static_analysis:
name: static analysis
runs-on: ubuntu-20.04
diff --git a/.github/workflows/setup-with-retry/action.yaml b/.github/workflows/setup-with-retry/action.yaml
index 427b5a45ac..846d8bbaf0 100644
--- a/.github/workflows/setup-with-retry/action.yaml
+++ b/.github/workflows/setup-with-retry/action.yaml
@@ -5,6 +5,10 @@ inputs:
description: 'Whether or not to pull the git lfs'
required: false
default: 'true'
+ cache_key_prefix:
+ description: 'Prefix for caching key'
+ required: false
+ default: 'scons_x86_64'
env:
SLEEP_TIME: 30 # Time to sleep between retries
@@ -17,6 +21,7 @@ runs:
continue-on-error: true
with:
git_lfs: ${{ inputs.git_lfs }}
+ cache_key_prefix: ${{ inputs.cache_key_prefix }}
is_retried: true
- if: steps.setup1.outcome == 'failure'
shell: bash
@@ -27,6 +32,7 @@ runs:
continue-on-error: true
with:
git_lfs: ${{ inputs.git_lfs }}
+ cache_key_prefix: ${{ inputs.cache_key_prefix }}
is_retried: true
- if: steps.setup2.outcome == 'failure'
shell: bash
@@ -36,4 +42,5 @@ runs:
uses: ./.github/workflows/setup
with:
git_lfs: ${{ inputs.git_lfs }}
- is_retried: true
\ No newline at end of file
+ cache_key_prefix: ${{ inputs.cache_key_prefix }}
+ is_retried: true
diff --git a/.github/workflows/setup/action.yaml b/.github/workflows/setup/action.yaml
index 74b82f9158..eebc376346 100644
--- a/.github/workflows/setup/action.yaml
+++ b/.github/workflows/setup/action.yaml
@@ -5,6 +5,10 @@ inputs:
description: 'Whether or not to pull the git lfs'
required: false
default: 'true'
+ cache_key_prefix:
+ description: 'Prefix for caching key'
+ required: false
+ default: 'scons_x86_64'
is_retried:
description: 'A mock param that asserts that we use the setup-with-retry instead of this action directly'
required: false
@@ -35,10 +39,10 @@ runs:
uses: actions/cache/restore@v3
with:
path: .ci_cache/scons_cache
- key: scons-${{ env.CACHE_COMMIT_DATE }}-${{ github.sha }}
+ key: ${{ inputs.cache_key_prefix }}-${{ env.CACHE_COMMIT_DATE }}-${{ github.sha }}
restore-keys: |
- scons-${{ env.CACHE_COMMIT_DATE }}-
- scons-
+ ${{ inputs.cache_key_prefix }}-${{ env.CACHE_COMMIT_DATE }}-
+ ${{ inputs.cache_key_prefix }}-
# if we didn't get a cache hit, make the directory manually so it doesn't fail on future steps
- id: scons-cache-setup
shell: bash
diff --git a/Jenkinsfile b/Jenkinsfile
index 08dc33dd8c..176c82bcc9 100644
--- a/Jenkinsfile
+++ b/Jenkinsfile
@@ -1,4 +1,4 @@
-def phone(String ip, String step_label, String cmd) {
+def device(String ip, String step_label, String cmd) {
withCredentials([file(credentialsId: 'id_rsa', variable: 'key_file')]) {
def ssh_cmd = """
ssh -tt -o StrictHostKeyChecking=no -i ${key_file} 'comma@${ip}' /usr/bin/bash <<'END'
@@ -51,226 +51,186 @@ END"""
}
}
-def phone_steps(String device_type, steps) {
- lock(resource: "", label: device_type, inversePrecedence: true, variable: 'device_ip', quantity: 1) {
- timeout(time: 20, unit: 'MINUTES') {
- phone(device_ip, "git checkout", readFile("selfdrive/test/setup_device_ci.sh"),)
- steps.each { item ->
- phone(device_ip, item[0], item[1])
- }
+def deviceStage(String stageName, String deviceType, List env, def steps) {
+ stage(stageName) {
+ if (currentBuild.result != null) {
+ return
}
- }
-}
-pipeline {
- agent none
- environment {
- CI = "1"
- PYTHONWARNINGS = "error"
- TEST_DIR = "/data/openpilot"
- SOURCE_DIR = "/data/openpilot_source/"
- AZURE_TOKEN = credentials('azure_token')
- MAPBOX_TOKEN = credentials('mapbox_token')
- }
- options {
- timeout(time: 3, unit: 'HOURS')
- disableConcurrentBuilds(abortPrevious: env.BRANCH_NAME != 'master')
- }
+ def extra = env.collect { "export ${it}" }.join('\n');
- stages {
- stage('build release3-staging') {
- agent { docker { image 'ghcr.io/commaai/alpine-ssh'; args '--user=root' } }
- when {
- branch 'devel-staging'
- }
- steps {
- phone_steps("tici-needs-can", [
- ["build release3-staging & dashcam3-staging", "RELEASE_BRANCH=release3-staging DASHCAM_BRANCH=dashcam3-staging $SOURCE_DIR/release/build_release.sh"],
- ])
+ docker.image('ghcr.io/commaai/alpine-ssh').inside('--user=root') {
+ lock(resource: "", label: deviceType, inversePrecedence: true, variable: 'device_ip', quantity: 1) {
+ timeout(time: 20, unit: 'MINUTES') {
+ device(device_ip, "git checkout", extra + "\n" + readFile("selfdrive/test/setup_device_ci.sh"))
+ steps.each { item ->
+ device(device_ip, item[0], item[1])
+ }
+ }
}
}
+ }
+}
- stage('build nightly') {
- agent { docker { image 'ghcr.io/commaai/alpine-ssh'; args '--user=root' } }
- when {
- branch 'master-ci'
- }
- steps {
- phone_steps("tici-needs-can", [
- ["build nightly", "RELEASE_BRANCH=nightly $SOURCE_DIR/release/build_release.sh"],
- ])
- }
+def pcStage(String stageName, Closure body) {
+ node {
+ stage(stageName) {
+ if (currentBuild.result != null) {
+ return
}
- stage('openpilot tests') {
- when {
- not {
- anyOf {
- branch 'master-ci'; branch 'devel'; branch 'devel-staging';
- branch 'release3'; branch 'release3-staging'; branch 'dashcam3'; branch 'dashcam3-staging';
- branch 'testing-closet*'; branch 'hotfix-*'
- }
+ checkout scm
+
+ def dockerArgs = '--user=root -v /tmp/comma_download_cache:/tmp/comma_download_cache -v /tmp/scons_cache:/tmp/scons_cache';
+ docker.build("openpilot-base:build-${env.GIT_COMMIT}", "-f Dockerfile.openpilot_base .").inside(dockerArgs) {
+ timeout(time: 20, unit: 'MINUTES') {
+ try {
+ sh "git config --global --add safe.directory '*'"
+ sh "git submodule update --init --recursive"
+ sh "git lfs pull"
+ body()
+ } finally {
+ sh "rm -rf ${env.WORKSPACE}/* || true"
+ sh "rm -rf .* || true"
}
}
+ }
+ }
+ }
+}
- parallel {
-
- /*
- stage('simulator') {
- agent {
- dockerfile {
- filename 'Dockerfile.sim_nvidia'
- dir 'tools/sim'
- args '--user=root'
- }
- }
- steps {
- sh "git config --global --add safe.directory '*'"
- sh "git submodule update --init --recursive"
- sh "git lfs pull"
- lock(resource: "", label: "simulator", inversePrecedence: true, quantity: 1) {
- sh "${WORKSPACE}/tools/sim/build_container.sh"
- sh "DETACH=1 ${WORKSPACE}/tools/sim/start_carla.sh"
- sh "${WORKSPACE}/tools/sim/start_openpilot_docker.sh"
- }
- }
+def setupCredentials() {
+ withCredentials([
+ string(credentialsId: 'azure_token', variable: 'AZURE_TOKEN'),
+ string(credentialsId: 'mapbox_token', variable: 'MAPBOX_TOKEN')
+ ]) {
+ env.AZURE_TOKEN = "${AZURE_TOKEN}"
+ env.MAPBOX_TOKEN = "${MAPBOX_TOKEN}"
+ }
+}
- post {
- always {
- sh "docker kill carla_sim || true"
- sh "rm -rf ${WORKSPACE}/* || true"
- sh "rm -rf .* || true"
- }
- }
- }
- */
+node {
+ env.CI = "1"
+ env.PYTHONWARNINGS = "error"
+ env.TEST_DIR = "/data/openpilot"
+ env.SOURCE_DIR = "/data/openpilot_source/"
+ setupCredentials()
- stage('PC tests') {
- agent {
- dockerfile {
- filename 'Dockerfile.openpilot_base'
- args '--user=root -v /tmp/comma_download_cache:/tmp/comma_download_cache'
- }
- }
- steps {
- sh "git config --global --add safe.directory '*'"
- sh "git submodule update --init --depth=1 --recursive"
- sh "git lfs pull"
- // tests that our build system's dependencies are configured properly, needs a machine with lots of cores
- sh "scons --clean && scons --no-cache --random -j42"
- sh "INTERNAL_SEG_CNT=500 INTERNAL_SEG_LIST=selfdrive/car/tests/test_models_segs.txt FILEREADER_CACHE=1 \
- pytest -n42 --dist=loadscope selfdrive/car/tests/test_models.py"
- sh "MAX_EXAMPLES=100 pytest -n42 selfdrive/car/tests/test_car_interfaces.py"
- }
+ env.GIT_BRANCH = checkout(scm).GIT_BRANCH
+ env.GIT_COMMIT = checkout(scm).GIT_COMMIT
- post {
- always {
- sh "rm -rf ${WORKSPACE}/* || true"
- sh "rm -rf .* || true"
- }
- }
- }
+ def excludeBranches = ['master-ci', 'devel', 'devel-staging', 'release3', 'release3-staging',
+ 'dashcam3', 'dashcam3-staging', 'testing-closet*', 'hotfix-*']
+ def excludeRegex = excludeBranches.join('|').replaceAll('\\*', '.*')
- stage('tizi-tests') {
- agent { docker { image 'ghcr.io/commaai/alpine-ssh'; args '--user=root' } }
- steps {
- phone_steps("tizi", [
- ["build openpilot", "cd selfdrive/manager && ./build.py"],
- ["test boardd loopback", "SINGLE_PANDA=1 pytest selfdrive/boardd/tests/test_boardd_loopback.py"],
- ["test pandad", "pytest selfdrive/boardd/tests/test_pandad.py"],
- ["test sensord", "cd system/sensord/tests && pytest test_sensord.py"],
- ["test camerad", "pytest system/camerad/test/test_camerad.py"],
- ["test exposure", "pytest system/camerad/test/test_exposure.py"],
- ["test amp", "pytest system/hardware/tici/tests/test_amplifier.py"],
- ["test hw", "pytest system/hardware/tici/tests/test_hardware.py"],
- ["test rawgpsd", "pytest system/sensord/rawgps/test_rawgps.py"],
- ])
- }
- }
-
- stage('build') {
- agent { docker { image 'ghcr.io/commaai/alpine-ssh'; args '--user=root' } }
- environment {
- R3_PUSH = "${env.BRANCH_NAME == 'master' ? '1' : ' '}"
- }
- steps {
- phone_steps("tici-needs-can", [
- ["build master-ci", "cd $SOURCE_DIR/release && TARGET_DIR=$TEST_DIR ./build_devel.sh"],
- ["build openpilot", "cd selfdrive/manager && ./build.py"],
- ["check dirty", "release/check-dirty.sh"],
- ["onroad tests", "cd selfdrive/test/ && ./test_onroad.py"],
- ["time to onroad", "cd selfdrive/test/ && pytest test_time_to_onroad.py"],
- ])
- }
- }
-
- stage('loopback-tests') {
- agent { docker { image 'ghcr.io/commaai/alpine-ssh'; args '--user=root' } }
- steps {
- phone_steps("tici-loopback", [
- ["build openpilot", "cd selfdrive/manager && ./build.py"],
- ["test boardd loopback", "pytest selfdrive/boardd/tests/test_boardd_loopback.py"],
- ])
- }
- }
+ if (env.BRANCH_NAME != 'master') {
+ properties([
+ disableConcurrentBuilds(abortPrevious: true)
+ ])
+ }
- stage('HW + Unit Tests') {
- agent { docker { image 'ghcr.io/commaai/alpine-ssh'; args '--user=root' } }
- steps {
- phone_steps("tici-common", [
- ["build", "cd selfdrive/manager && ./build.py"],
- ["test pandad", "pytest selfdrive/boardd/tests/test_pandad.py"],
- ["test power draw", "./system/hardware/tici/tests/test_power_draw.py"],
- ["test encoder", "LD_LIBRARY_PATH=/usr/local/lib pytest system/loggerd/tests/test_encoder.py"],
- ["test pigeond", "pytest system/sensord/tests/test_pigeond.py"],
- ["test manager", "pytest selfdrive/manager/test/test_manager.py"],
- ["test nav", "pytest selfdrive/navd/tests/"],
- ])
- }
- }
+ try {
+ if (env.BRANCH_NAME == 'devel-staging') {
+ deviceStage("build release3-staging", "tici-needs-can", [], [
+ ["build release3-staging & dashcam3-staging", "RELEASE_BRANCH=release3-staging DASHCAM_BRANCH=dashcam3-staging $SOURCE_DIR/release/build_release.sh"],
+ ])
+ }
- stage('camerad') {
- agent { docker { image 'ghcr.io/commaai/alpine-ssh'; args '--user=root' } }
- steps {
- phone_steps("tici-ar0231", [
- ["build", "cd selfdrive/manager && ./build.py"],
- ["test camerad", "pytest system/camerad/test/test_camerad.py"],
- ["test exposure", "pytest system/camerad/test/test_exposure.py"],
- ])
- phone_steps("tici-ox03c10", [
- ["build", "cd selfdrive/manager && ./build.py"],
- ["test camerad", "pytest system/camerad/test/test_camerad.py"],
- ["test exposure", "pytest system/camerad/test/test_exposure.py"],
- ])
- }
- }
+ if (env.BRANCH_NAME == 'master-ci') {
+ deviceStage("build nightly", "tici-needs-can", [], [
+ ["build nightly", "RELEASE_BRANCH=nightly $SOURCE_DIR/release/build_release.sh"],
+ ])
+ }
- stage('sensord') {
- agent { docker { image 'ghcr.io/commaai/alpine-ssh'; args '--user=root' } }
- steps {
- phone_steps("tici-lsmc", [
- ["build", "cd selfdrive/manager && ./build.py"],
- ["test sensord", "cd system/sensord/tests && pytest test_sensord.py"],
- ])
- phone_steps("tici-bmx-lsm", [
- ["build", "cd selfdrive/manager && ./build.py"],
- ["test sensord", "cd system/sensord/tests && pytest test_sensord.py"],
- ])
- }
+ if (!env.BRANCH_NAME.matches(excludeRegex)) {
+ parallel (
+ // tici tests
+ 'onroad tests': {
+ deviceStage("onroad", "tici-needs-can", ["SKIP_COPY=1"], [
+ ["build master-ci", "cd $SOURCE_DIR/release && TARGET_DIR=$TEST_DIR ./build_devel.sh"],
+ ["build openpilot", "cd selfdrive/manager && ./build.py"],
+ ["check dirty", "release/check-dirty.sh"],
+ ["onroad tests", "cd selfdrive/test/ && ./test_onroad.py"],
+ ["time to onroad", "cd selfdrive/test/ && pytest test_time_to_onroad.py"],
+ ])
+ },
+ 'HW + Unit Tests': {
+ deviceStage("tici", "tici-common", ["UNSAFE=1"], [
+ ["build", "cd selfdrive/manager && ./build.py"],
+ ["test pandad", "pytest selfdrive/boardd/tests/test_pandad.py"],
+ ["test power draw", "./system/hardware/tici/tests/test_power_draw.py"],
+ ["test encoder", "LD_LIBRARY_PATH=/usr/local/lib pytest system/loggerd/tests/test_encoder.py"],
+ ["test pigeond", "pytest system/sensord/tests/test_pigeond.py"],
+ ["test manager", "pytest selfdrive/manager/test/test_manager.py"],
+ ["test nav", "pytest selfdrive/navd/tests/"],
+ ])
+ },
+ 'loopback': {
+ deviceStage("tici", "tici-loopback", ["UNSAFE=1"], [
+ ["build openpilot", "cd selfdrive/manager && ./build.py"],
+ ["test boardd loopback", "pytest selfdrive/boardd/tests/test_boardd_loopback.py"],
+ ])
+ },
+ 'camerad': {
+ deviceStage("AR0231", "tici-ar0231", ["UNSAFE=1"], [
+ ["build", "cd selfdrive/manager && ./build.py"],
+ ["test camerad", "pytest system/camerad/test/test_camerad.py"],
+ ["test exposure", "pytest system/camerad/test/test_exposure.py"],
+ ])
+ deviceStage("OX03C10", "tici-ox03c10", ["UNSAFE=1"], [
+ ["build", "cd selfdrive/manager && ./build.py"],
+ ["test camerad", "pytest system/camerad/test/test_camerad.py"],
+ ["test exposure", "pytest system/camerad/test/test_exposure.py"],
+ ])
+ },
+ 'sensord': {
+ deviceStage("LSM + MMC", "tici-lsmc", ["UNSAFE=1"], [
+ ["build", "cd selfdrive/manager && ./build.py"],
+ ["test sensord", "cd system/sensord/tests && pytest test_sensord.py"],
+ ])
+ deviceStage("BMX + LSM", "tici-bmx-lsm", ["UNSAFE=1"], [
+ ["build", "cd selfdrive/manager && ./build.py"],
+ ["test sensord", "cd system/sensord/tests && pytest test_sensord.py"],
+ ])
+ },
+ 'replay': {
+ deviceStage("tici", "tici-replay", ["UNSAFE=1"], [
+ ["build", "cd selfdrive/manager && ./build.py"],
+ ["model replay", "cd selfdrive/test/process_replay && ./model_replay.py"],
+ ])
+ },
+ 'tizi': {
+ deviceStage("tizi", "tizi", ["UNSAFE=1"], [
+ ["build openpilot", "cd selfdrive/manager && ./build.py"],
+ ["test boardd loopback", "SINGLE_PANDA=1 pytest selfdrive/boardd/tests/test_boardd_loopback.py"],
+ ["test pandad", "pytest selfdrive/boardd/tests/test_pandad.py"],
+ ["test amp", "pytest system/hardware/tici/tests/test_amplifier.py"],
+ ["test hw", "pytest system/hardware/tici/tests/test_hardware.py"],
+ ["test rawgpsd", "pytest system/sensord/rawgps/test_rawgps.py"],
+ ])
+ },
+
+ // *** PC tests ***
+ 'PC tests': {
+ pcStage("PC tests") {
+ // tests that our build system's dependencies are configured properly,
+ // needs a machine with lots of cores
+ sh label: "test multi-threaded build", script: "scons --no-cache --random -j42"
}
-
- stage('replay') {
- agent { docker { image 'ghcr.io/commaai/alpine-ssh'; args '--user=root' } }
- steps {
- phone_steps("tici-replay", [
- ["build", "cd selfdrive/manager && ./build.py"],
- ["model replay", "cd selfdrive/test/process_replay && ./model_replay.py"],
- ])
- }
+ },
+ 'car tests': {
+ pcStage("car tests") {
+ sh "scons -j30"
+ sh label: "test_models.py", script: "INTERNAL_SEG_CNT=250 INTERNAL_SEG_LIST=selfdrive/car/tests/test_models_segs.txt FILEREADER_CACHE=1 \
+ pytest -n42 --dist=loadscope selfdrive/car/tests/test_models.py"
+ sh label: "test_car_interfaces.py", script: "MAX_EXAMPLES=100 pytest -n42 selfdrive/car/tests/test_car_interfaces.py"
}
+ },
- }
+ )
}
-
+ } catch (Exception e) {
+ currentBuild.result = 'FAILED'
+ throw e
}
-}
+}
diff --git a/cereal b/cereal
index d469732b3b..7de568b659 160000
--- a/cereal
+++ b/cereal
@@ -1 +1 @@
-Subproject commit d469732b3b4c37aaa21fa37682a20f2aba9ab91c
+Subproject commit 7de568b65922b1b7a5cb9a9a391e3e03394500f7
diff --git a/common/prefix.py b/common/prefix.py
index c49cf603df..6b7e7e2fd7 100644
--- a/common/prefix.py
+++ b/common/prefix.py
@@ -39,3 +39,5 @@ class OpenpilotPrefix:
os.remove(symlink_path)
shutil.rmtree(self.msgq_path, ignore_errors=True)
shutil.rmtree(Paths.log_root(), ignore_errors=True)
+ shutil.rmtree(Paths.download_cache_root(), ignore_errors=True)
+ shutil.rmtree(Paths.comma_home(), ignore_errors=True)
diff --git a/common/tests/test_util.cc b/common/tests/test_util.cc
index 68fced19c2..de87fa3e06 100644
--- a/common/tests/test_util.cc
+++ b/common/tests/test_util.cc
@@ -38,7 +38,8 @@ TEST_CASE("util::read_file") {
std::string content = random_bytes(64 * 1024);
write(fd, content.c_str(), content.size());
std::string ret = util::read_file(filename);
- REQUIRE(ret == content);
+ bool equal = (ret == content);
+ REQUIRE(equal);
close(fd);
}
SECTION("read directory") {
@@ -108,7 +109,8 @@ TEST_CASE("util::safe_fwrite") {
REQUIRE(ret == 0);
ret = fclose(f);
REQUIRE(ret == 0);
- REQUIRE(dat == util::read_file(filename));
+ bool equal = (dat == util::read_file(filename));
+ REQUIRE(equal);
}
TEST_CASE("util::create_directories") {
diff --git a/docs/CARS.md b/docs/CARS.md
index 365da46dab..775418427d 100644
--- a/docs/CARS.md
+++ b/docs/CARS.md
@@ -88,7 +88,7 @@ A supported vehicle is one that just works when you install a comma device. All
|Hyundai|Ioniq Plug-in Hybrid 2019|Smart Cruise Control (SCC)|openpilot available[1](#footnotes)|0 mph|32 mph|[](##)|[](##)|Parts
- 1 Hyundai C connector
- 1 RJ45 cable (7 ft)
- 1 comma power v2
- 1 comma three
- 1 harness box
- 1 mount
- 1 right angle OBD-C cable (1.5 ft)
Buy Here ||
|Hyundai|Ioniq Plug-in Hybrid 2020-22|All|openpilot available[1](#footnotes)|0 mph|0 mph|[](##)|[](##)|Parts
- 1 Hyundai H connector
- 1 RJ45 cable (7 ft)
- 1 comma power v2
- 1 comma three
- 1 harness box
- 1 mount
- 1 right angle OBD-C cable (1.5 ft)
Buy Here ||
|Hyundai|Kona 2020|Smart Cruise Control (SCC)|openpilot available[1](#footnotes)|0 mph|0 mph|[](##)|[](##)|Parts
- 1 Hyundai B connector
- 1 RJ45 cable (7 ft)
- 1 comma power v2
- 1 comma three
- 1 harness box
- 1 mount
- 1 right angle OBD-C cable (1.5 ft)
Buy Here ||
-|Hyundai|Kona Electric 2018-21|Smart Cruise Control (SCC)|Stock|0 mph|0 mph|[](##)|[](##)|Parts
- 1 Hyundai G connector
- 1 RJ45 cable (7 ft)
- 1 comma power v2
- 1 comma three
- 1 harness box
- 1 mount
- 1 right angle OBD-C cable (1.5 ft)
Buy Here ||
+|Hyundai|Kona Electric 2018-21|Smart Cruise Control (SCC)|openpilot available[1](#footnotes)|0 mph|0 mph|[](##)|[](##)|Parts
- 1 Hyundai G connector
- 1 RJ45 cable (7 ft)
- 1 comma power v2
- 1 comma three
- 1 harness box
- 1 mount
- 1 right angle OBD-C cable (1.5 ft)
Buy Here ||
|Hyundai|Kona Electric 2022-23|Smart Cruise Control (SCC)|Stock|0 mph|0 mph|[](##)|[](##)|Parts
- 1 Hyundai O connector
- 1 RJ45 cable (7 ft)
- 1 comma power v2
- 1 comma three
- 1 harness box
- 1 mount
- 1 right angle OBD-C cable (1.5 ft)
Buy Here ||
|Hyundai|Kona Electric (with HDA II, Korea only) 2023[6](#footnotes)|Smart Cruise Control (SCC)|Stock|0 mph|0 mph|[](##)|[](##)|Parts
- 1 Hyundai R connector
- 1 RJ45 cable (7 ft)
- 1 comma power v2
- 1 comma three
- 1 harness box
- 1 mount
- 1 right angle OBD-C cable (1.5 ft)
Buy Here |
|
|Hyundai|Kona Hybrid 2020|Smart Cruise Control (SCC)|openpilot available[1](#footnotes)|0 mph|0 mph|[](##)|[](##)|Parts
- 1 Hyundai I connector
- 1 RJ45 cable (7 ft)
- 1 comma power v2
- 1 comma three
- 1 harness box
- 1 mount
- 1 right angle OBD-C cable (1.5 ft)
Buy Here |
|
@@ -109,7 +109,7 @@ A supported vehicle is one that just works when you install a comma device. All
|Hyundai|Veloster 2019-20|Smart Cruise Control (SCC)|Stock|5 mph|0 mph|[](##)|[](##)|Parts
- 1 Hyundai E connector
- 1 RJ45 cable (7 ft)
- 1 comma power v2
- 1 comma three
- 1 harness box
- 1 mount
- 1 right angle OBD-C cable (1.5 ft)
Buy Here ||
|Jeep|Grand Cherokee 2016-18|Adaptive Cruise Control (ACC)|Stock|0 mph|9 mph|[](##)|[](##)|Parts
- 1 FCA connector
- 1 RJ45 cable (7 ft)
- 1 comma power v2
- 1 comma three
- 1 harness box
- 1 mount
- 1 right angle OBD-C cable (1.5 ft)
Buy Here |
|
|Jeep|Grand Cherokee 2019-21|Adaptive Cruise Control (ACC)|Stock|0 mph|39 mph|[](##)|[](##)|Parts
- 1 FCA connector
- 1 RJ45 cable (7 ft)
- 1 comma power v2
- 1 comma three
- 1 harness box
- 1 mount
- 1 right angle OBD-C cable (1.5 ft)
Buy Here |
|
-|Kia|Carnival 2023[6](#footnotes)|Smart Cruise Control (SCC)|Stock|0 mph|0 mph|[](##)|[](##)|Parts
- 1 Hyundai A connector
- 1 RJ45 cable (7 ft)
- 1 comma power v2
- 1 comma three
- 1 harness box
- 1 mount
- 1 right angle OBD-C cable (1.5 ft)
Buy Here ||
+|Kia|Carnival 2023-24[6](#footnotes)|Smart Cruise Control (SCC)|Stock|0 mph|0 mph|[](##)|[](##)|Parts
- 1 Hyundai A connector
- 1 RJ45 cable (7 ft)
- 1 comma power v2
- 1 comma three
- 1 harness box
- 1 mount
- 1 right angle OBD-C cable (1.5 ft)
Buy Here ||
|Kia|Carnival (China only) 2023[6](#footnotes)|Smart Cruise Control (SCC)|Stock|0 mph|0 mph|[](##)|[](##)|Parts
- 1 Hyundai K connector
- 1 RJ45 cable (7 ft)
- 1 comma power v2
- 1 comma three
- 1 harness box
- 1 mount
- 1 right angle OBD-C cable (1.5 ft)
Buy Here ||
|Kia|Ceed 2019|Smart Cruise Control (SCC)|Stock|0 mph|0 mph|[](##)|[](##)|Parts
- 1 Hyundai E connector
- 1 RJ45 cable (7 ft)
- 1 comma power v2
- 1 comma three
- 1 harness box
- 1 mount
- 1 right angle OBD-C cable (1.5 ft)
Buy Here ||
|Kia|EV6 (Southeast Asia only) 2022-23[6](#footnotes)|All|openpilot available[1](#footnotes)|0 mph|0 mph|[](##)|[](##)|Parts
- 1 Hyundai P connector
- 1 RJ45 cable (7 ft)
- 1 comma power v2
- 1 comma three
- 1 harness box
- 1 mount
- 1 right angle OBD-C cable (1.5 ft)
Buy Here ||
diff --git a/panda b/panda
index e9a0987617..8e8aa5acf6 160000
--- a/panda
+++ b/panda
@@ -1 +1 @@
-Subproject commit e9a098761776896957fce61d632a221dcfb90ce6
+Subproject commit 8e8aa5acf6ab0e45d91e72beaab74e69810448cd
diff --git a/pyproject.toml b/pyproject.toml
index b4a795accc..5d4daadb15 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,6 +1,6 @@
[tool.pytest.ini_options]
minversion = "6.0"
-addopts = "--ignore=openpilot/ --ignore=cereal/ --ignore=opendbc/ --ignore=panda/ --ignore=rednose_repo/ --ignore=tinygrad_repo/ --ignore=laika_repo/ -Werror --strict-config --strict-markers"
+addopts = "--ignore=openpilot/ --ignore=cereal/ --ignore=opendbc/ --ignore=panda/ --ignore=rednose_repo/ --ignore=tinygrad_repo/ --ignore=laika_repo/ -Werror --strict-config --strict-markers --durations=10"
python_files = "test_*.py"
#timeout = "30" # you get this long by default
markers = [
diff --git a/selfdrive/car/hyundai/tests/test_hyundai.py b/selfdrive/car/hyundai/tests/test_hyundai.py
index 39a9aaf627..11268913aa 100755
--- a/selfdrive/car/hyundai/tests/test_hyundai.py
+++ b/selfdrive/car/hyundai/tests/test_hyundai.py
@@ -1,4 +1,5 @@
#!/usr/bin/env python3
+from hypothesis import settings, given, strategies as st
import unittest
from cereal import car
@@ -66,6 +67,14 @@ class TestHyundaiFingerprint(unittest.TestCase):
part = code.split(b"-")[1]
self.assertFalse(part.startswith(b'CW'), "Car has bad part number")
+ @settings(max_examples=100)
+ @given(data=st.data())
+ def test_platform_codes_fuzzy_fw(self, data):
+ """Ensure function doesn't raise an exception"""
+ fw_strategy = st.lists(st.binary())
+ fws = data.draw(fw_strategy)
+ get_platform_codes(fws)
+
# Tests for platform codes, part numbers, and FW dates which Hyundai will use to fuzzy
# fingerprint in the absence of full FW matches:
def test_platform_code_ecus_available(self):
diff --git a/selfdrive/car/hyundai/values.py b/selfdrive/car/hyundai/values.py
index d48d7f5e48..2b8bb763a2 100644
--- a/selfdrive/car/hyundai/values.py
+++ b/selfdrive/car/hyundai/values.py
@@ -260,7 +260,7 @@ CAR_INFO: Dict[str, Optional[Union[HyundaiCarInfo, List[HyundaiCarInfo]]]] = {
HyundaiCarInfo("Kia EV6 (with HDA II) 2022-23", "Highway Driving Assist II", car_parts=CarParts.common([CarHarness.hyundai_p]))
],
CAR.KIA_CARNIVAL_4TH_GEN: [
- HyundaiCarInfo("Kia Carnival 2023", car_parts=CarParts.common([CarHarness.hyundai_a])),
+ HyundaiCarInfo("Kia Carnival 2023-24", car_parts=CarParts.common([CarHarness.hyundai_a])),
HyundaiCarInfo("Kia Carnival (China only) 2023", car_parts=CarParts.common([CarHarness.hyundai_k]))
],
@@ -1924,6 +1924,7 @@ FW_VERSIONS = {
(Ecu.fwdCamera, 0x7c4, None): [
b'\xf1\x00KA4 MFC AT USA LHD 1.00 1.06 99210-R0000 220221',
b'\xf1\x00KA4CMFC AT CHN LHD 1.00 1.01 99211-I4000 210525',
+ b'\xf1\x00KA4 MFC AT USA LHD 1.00 1.00 99210-R0100 230105',
],
(Ecu.fwdRadar, 0x7d0, None): [
b'\xf1\x00KA4_ SCC FHCUP 1.00 1.03 99110-R0000 ',
@@ -1979,7 +1980,7 @@ EV_CAR = {CAR.IONIQ_EV_2020, CAR.IONIQ_EV_LTD, CAR.KONA_EV, CAR.KIA_NIRO_EV, CAR
CAR.KIA_EV6, CAR.IONIQ_5, CAR.IONIQ_6, CAR.GENESIS_GV60_EV_1ST_GEN, CAR.KONA_EV_2ND_GEN}
# these cars require a special panda safety mode due to missing counters and checksums in the messages
-LEGACY_SAFETY_MODE_CAR = {CAR.HYUNDAI_GENESIS, CAR.IONIQ_EV_LTD, CAR.KONA_EV, CAR.KIA_OPTIMA_G4,
+LEGACY_SAFETY_MODE_CAR = {CAR.HYUNDAI_GENESIS, CAR.IONIQ_EV_LTD, CAR.KIA_OPTIMA_G4,
CAR.VELOSTER, CAR.GENESIS_G70, CAR.GENESIS_G80, CAR.KIA_CEED, CAR.ELANTRA, CAR.IONIQ_HEV_2022,
CAR.KIA_OPTIMA_H}
diff --git a/selfdrive/car/tests/routes.py b/selfdrive/car/tests/routes.py
index 8299d15d52..4d99f594db 100755
--- a/selfdrive/car/tests/routes.py
+++ b/selfdrive/car/tests/routes.py
@@ -134,6 +134,7 @@ routes = [
CarTestRoute("ab59fe909f626921|2021-10-18--18-34-28", HYUNDAI.IONIQ_HEV_2022),
CarTestRoute("22d955b2cd499c22|2020-08-10--19-58-21", HYUNDAI.KONA),
CarTestRoute("efc48acf44b1e64d|2021-05-28--21-05-04", HYUNDAI.KONA_EV),
+ CarTestRoute("f90d3cd06caeb6fa|2023-09-06--17-15-47", HYUNDAI.KONA_EV), # openpilot longitudinal enabled
CarTestRoute("ff973b941a69366f|2022-07-28--22-01-19", HYUNDAI.KONA_EV_2022, segment=11),
CarTestRoute("1618132d68afc876|2023-08-27--09-32-14", HYUNDAI.KONA_EV_2ND_GEN, segment=13),
CarTestRoute("49f3c13141b6bc87|2021-07-28--08-05-13", HYUNDAI.KONA_HEV),
diff --git a/selfdrive/car/tests/test_car_interfaces.py b/selfdrive/car/tests/test_car_interfaces.py
index 0c970f5c75..787b5d6316 100755
--- a/selfdrive/car/tests/test_car_interfaces.py
+++ b/selfdrive/car/tests/test_car_interfaces.py
@@ -87,9 +87,6 @@ class TestCarInterfaces(unittest.TestCase):
self.assertTrue(not math.isnan(tune.torque.kf) and tune.torque.kf > 0)
self.assertTrue(not math.isnan(tune.torque.friction) and tune.torque.friction > 0)
- elif tune.which() == 'indi':
- self.assertTrue(len(tune.indi.outerLoopGainV))
-
cc_msg = FuzzyGenerator.get_random_msg(data.draw, car.CarControl, real_floats=True)
# Run car interface
now_nanos = 0
diff --git a/selfdrive/car/tests/test_models.py b/selfdrive/car/tests/test_models.py
index 6cebdde77b..1b9e39f2f1 100755
--- a/selfdrive/car/tests/test_models.py
+++ b/selfdrive/car/tests/test_models.py
@@ -187,8 +187,6 @@ class TestCarModelBase(unittest.TestCase):
self.assertTrue(len(self.CP.lateralTuning.pid.kpV))
elif tuning == 'torque':
self.assertTrue(self.CP.lateralTuning.torque.kf > 0)
- elif tuning == 'indi':
- self.assertTrue(len(self.CP.lateralTuning.indi.outerLoopGainV))
else:
raise Exception("unknown tuning")
diff --git a/selfdrive/car/toyota/tests/test_toyota.py b/selfdrive/car/toyota/tests/test_toyota.py
index f51e2e9ae5..d494a7129b 100755
--- a/selfdrive/car/toyota/tests/test_toyota.py
+++ b/selfdrive/car/toyota/tests/test_toyota.py
@@ -10,8 +10,11 @@ from selfdrive.car.fw_versions import build_fw_dict
from selfdrive.car.toyota.values import TSS2_CAR, ANGLE_CONTROL_CAR, FW_VERSIONS, FW_QUERY_CONFIG, EV_HYBRID_CAR, \
LONG_FW_PATTERN, FW_LEN_CODE, get_platform_codes
from openpilot.selfdrive.car.toyota.values import CAR, DBC, TSS2_CAR, ANGLE_CONTROL_CAR, RADAR_ACC_CAR, FW_VERSIONS
+from openpilot.selfdrive.car.toyota.values import CAR, DBC, TSS2_CAR, ANGLE_CONTROL_CAR, RADAR_ACC_CAR, FW_VERSIONS, \
+ get_platform_codes
Ecu = car.CarParams.Ecu
+ECU_NAME = {v: k for k, v in Ecu.schema.enumerants.items()}
ECU_NAME = {v: k for k, v in Ecu.schema.enumerants.items()}
@@ -195,6 +198,21 @@ class TestToyotaFingerprint(unittest.TestCase):
self.assertEqual(platforms_with_shared_codes, excluded_platforms, (len(platforms_with_shared_codes), len(FW_VERSIONS)))
+ @settings(max_examples=100)
+ @given(data=st.data())
+ def test_platform_codes_fuzzy_fw(self, data):
+ fw_strategy = st.lists(st.binary())
+ fws = data.draw(fw_strategy)
+ get_platform_codes(fws)
+
+ def test_fw_pattern(self):
+ """Asserts all ECUs can be parsed"""
+ for ecus in FW_VERSIONS.values():
+ for fws in ecus.values():
+ for fw in fws:
+ ret = get_platform_codes([fw])
+ self.assertTrue(len(ret))
+
if __name__ == "__main__":
unittest.main()
diff --git a/selfdrive/car/toyota/values.py b/selfdrive/car/toyota/values.py
index ecd125cda8..09d3c69c07 100644
--- a/selfdrive/car/toyota/values.py
+++ b/selfdrive/car/toyota/values.py
@@ -358,11 +358,13 @@ SHORT_FW_PATTERN = re.compile(b'(?P[A-Z0-9]{2})(?P[A-Z0
MEDIUM_FW_PATTERN = re.compile(b'(?P[A-Z0-9]{5})(?P[A-Z0-9]{2})(?P[A-Z0-9]{1})(?P[A-Z0-9]{2})')
LONG_FW_PATTERN = re.compile(b'(?P[A-Z0-9]{5})(?P[A-Z0-9]{2})(?P[A-Z0-9]{2})(?P[A-Z0-9]{3})')
FW_LEN_CODE = re.compile(b'^[\x01-\x05]') # 5 chunks max. highest seen is 3 chunks, 16 bytes each
+FW_CHUNK_LEN = 16
# List of ECUs expected to have platform codes
# TODO: use hybrid ECU, splits many similar ICE and hybrid variants
PLATFORM_CODE_ECUS = [Ecu.abs, Ecu.engine, Ecu.eps, Ecu.dsu, Ecu.fwdCamera, Ecu.fwdRadar]
+
# Some ECUs that use KWP2000 have their FW versions on non-standard data identifiers.
# Toyota diagnostic software first gets the supported data ids, then queries them one by one.
# For example, sends: 0x1a8800, receives: 0x1a8800010203, queries: 0x1a8801, 0x1a8802, 0x1a8803
diff --git a/selfdrive/controls/controlsd.py b/selfdrive/controls/controlsd.py
index bb135071f2..6cfeabefd4 100755
--- a/selfdrive/controls/controlsd.py
+++ b/selfdrive/controls/controlsd.py
@@ -807,8 +807,6 @@ class Controls:
controlsState.lateralControlState.pidState = lac_log
elif lat_tuning == 'torque':
controlsState.lateralControlState.torqueState = lac_log
- elif lat_tuning == 'indi':
- controlsState.lateralControlState.indiState = lac_log
self.pm.send('controlsState', dat)
diff --git a/selfdrive/modeld/SConscript b/selfdrive/modeld/SConscript
index 428fe7d0bf..63f3b2b97a 100644
--- a/selfdrive/modeld/SConscript
+++ b/selfdrive/modeld/SConscript
@@ -33,7 +33,7 @@ else:
libs += ['OpenCL']
# Set path definitions
-for pathdef, fn in {'TRANSFORM': 'transforms/transform.cl', 'LOADYUV': 'transforms/loadyuv.cl', 'ONNXRUNNER': 'runners/onnx_runner.py'}.items():
+for pathdef, fn in {'TRANSFORM': 'transforms/transform.cl', 'LOADYUV': 'transforms/loadyuv.cl'}.items():
for xenv in (lenv, lenvCython):
xenv['CXXFLAGS'].append(f'-D{pathdef}_PATH=\\"{File(fn).abspath}\\"')
@@ -43,13 +43,11 @@ snpe_rpath_pc = f"{Dir('#').abspath}/third_party/snpe/x86_64-linux-clang"
snpe_rpath = lenvCython['RPATH'] + [snpe_rpath_qcom if arch == "larch64" else snpe_rpath_pc]
cython_libs = envCython["LIBS"] + libs
-onnxmodel_lib = lenv.Library('onnxmodel', ['runners/onnxmodel.cc'])
snpemodel_lib = lenv.Library('snpemodel', ['runners/snpemodel.cc'])
commonmodel_lib = lenv.Library('commonmodel', common_src)
driving_lib = lenv.Library('driving', ['models/driving.cc'])
lenvCython.Program('runners/runmodel_pyx.so', 'runners/runmodel_pyx.pyx', LIBS=cython_libs, FRAMEWORKS=frameworks)
-lenvCython.Program('runners/onnxmodel_pyx.so', 'runners/onnxmodel_pyx.pyx', LIBS=[onnxmodel_lib, *cython_libs], FRAMEWORKS=frameworks)
lenvCython.Program('runners/snpemodel_pyx.so', 'runners/snpemodel_pyx.pyx', LIBS=[snpemodel_lib, snpe_lib, *cython_libs], FRAMEWORKS=frameworks, RPATH=snpe_rpath)
lenvCython.Program('models/commonmodel_pyx.so', 'models/commonmodel_pyx.pyx', LIBS=[commonmodel_lib, *cython_libs], FRAMEWORKS=frameworks)
lenvCython.Program('models/driving_pyx.so', 'models/driving_pyx.pyx', LIBS=[driving_lib, commonmodel_lib, *cython_libs], FRAMEWORKS=frameworks)
diff --git a/selfdrive/modeld/dmonitoringmodeld.py b/selfdrive/modeld/dmonitoringmodeld.py
index 0f9669258c..53c0af0ff3 100755
--- a/selfdrive/modeld/dmonitoringmodeld.py
+++ b/selfdrive/modeld/dmonitoringmodeld.py
@@ -61,11 +61,11 @@ class ModelState:
assert ctypes.sizeof(DMonitoringModelResult) == OUTPUT_SIZE * ctypes.sizeof(ctypes.c_float)
self.output = np.zeros(OUTPUT_SIZE, dtype=np.float32)
self.inputs = {
- 'input_imgs': np.zeros(MODEL_HEIGHT * MODEL_WIDTH, dtype=np.uint8),
+ 'input_img': np.zeros(MODEL_HEIGHT * MODEL_WIDTH, dtype=np.uint8),
'calib': np.zeros(CALIB_LEN, dtype=np.float32)}
self.model = ModelRunner(MODEL_PATHS, self.output, Runtime.DSP, True, None)
- self.model.addInput("input_imgs", None)
+ self.model.addInput("input_img", None)
self.model.addInput("calib", self.inputs['calib'])
def run(self, buf:VisionBuf, calib:np.ndarray) -> Tuple[np.ndarray, float]:
@@ -74,11 +74,11 @@ class ModelState:
v_offset = buf.height - MODEL_HEIGHT
h_offset = (buf.width - MODEL_WIDTH) // 2
buf_data = buf.data.reshape(-1, buf.stride)
- input_data = self.inputs['input_imgs'].reshape(MODEL_HEIGHT, MODEL_WIDTH)
+ input_data = self.inputs['input_img'].reshape(MODEL_HEIGHT, MODEL_WIDTH)
input_data[:] = buf_data[v_offset:v_offset+MODEL_HEIGHT, h_offset:h_offset+MODEL_WIDTH]
t1 = time.perf_counter()
- self.model.setInputBuffer("input_imgs", self.inputs['input_imgs'].view(np.float32))
+ self.model.setInputBuffer("input_img", self.inputs['input_img'].view(np.float32))
self.model.execute()
t2 = time.perf_counter()
return self.output, t2 - t1
diff --git a/selfdrive/modeld/modeld.py b/selfdrive/modeld/modeld.py
index ece706cfd9..3c416ce939 100755
--- a/selfdrive/modeld/modeld.py
+++ b/selfdrive/modeld/modeld.py
@@ -46,11 +46,11 @@ class ModelState:
self.prev_desire = np.zeros(DESIRE_LEN, dtype=np.float32)
self.output = np.zeros(NET_OUTPUT_SIZE, dtype=np.float32)
self.inputs = {
- 'desire_pulse': np.zeros(DESIRE_LEN * (HISTORY_BUFFER_LEN+1), dtype=np.float32),
+ 'desire': np.zeros(DESIRE_LEN * (HISTORY_BUFFER_LEN+1), dtype=np.float32),
'traffic_convention': np.zeros(TRAFFIC_CONVENTION_LEN, dtype=np.float32),
'nav_features': np.zeros(NAV_FEATURE_LEN, dtype=np.float32),
'nav_instructions': np.zeros(NAV_INSTRUCTION_LEN, dtype=np.float32),
- 'feature_buffer': np.zeros(HISTORY_BUFFER_LEN * FEATURE_LEN, dtype=np.float32),
+ 'features_buffer': np.zeros(HISTORY_BUFFER_LEN * FEATURE_LEN, dtype=np.float32),
}
self.model = ModelRunner(MODEL_PATHS, self.output, Runtime.GPU, False, context)
@@ -62,10 +62,10 @@ class ModelState:
def run(self, buf: VisionBuf, wbuf: VisionBuf, transform: np.ndarray, transform_wide: np.ndarray,
inputs: Dict[str, np.ndarray], prepare_only: bool) -> Optional[np.ndarray]:
# Model decides when action is completed, so desire input is just a pulse triggered on rising edge
- inputs['desire_pulse'][0] = 0
- self.inputs['desire_pulse'][:-DESIRE_LEN] = self.inputs['desire_pulse'][DESIRE_LEN:]
- self.inputs['desire_pulse'][-DESIRE_LEN:] = np.where(inputs['desire_pulse'] - self.prev_desire > .99, inputs['desire_pulse'], 0)
- self.prev_desire[:] = inputs['desire_pulse']
+ inputs['desire'][0] = 0
+ self.inputs['desire'][:-DESIRE_LEN] = self.inputs['desire'][DESIRE_LEN:]
+ self.inputs['desire'][-DESIRE_LEN:] = np.where(inputs['desire'] - self.prev_desire > .99, inputs['desire'], 0)
+ self.prev_desire[:] = inputs['desire']
self.inputs['traffic_convention'][:] = inputs['traffic_convention']
self.inputs['nav_features'][:] = inputs['nav_features']
@@ -81,8 +81,8 @@ class ModelState:
return None
self.model.execute()
- self.inputs['feature_buffer'][:-FEATURE_LEN] = self.inputs['feature_buffer'][FEATURE_LEN:]
- self.inputs['feature_buffer'][-FEATURE_LEN:] = self.output[OUTPUT_SIZE:OUTPUT_SIZE+FEATURE_LEN]
+ self.inputs['features_buffer'][:-FEATURE_LEN] = self.inputs['features_buffer'][FEATURE_LEN:]
+ self.inputs['features_buffer'][-FEATURE_LEN:] = self.output[OUTPUT_SIZE:OUTPUT_SIZE+FEATURE_LEN]
return self.output
@@ -232,7 +232,7 @@ def main():
cloudlog.error(f"skipping model eval. Dropped {vipc_dropped_frames} frames")
inputs:Dict[str, np.ndarray] = {
- 'desire_pulse': vec_desire,
+ 'desire': vec_desire,
'traffic_convention': traffic_convention,
'driving_style': driving_style,
'nav_features': nav_features,
diff --git a/selfdrive/modeld/navmodeld.py b/selfdrive/modeld/navmodeld.py
index 54f85f9ae8..f5dc51ed20 100755
--- a/selfdrive/modeld/navmodeld.py
+++ b/selfdrive/modeld/navmodeld.py
@@ -48,15 +48,15 @@ class ModelState:
def __init__(self):
assert ctypes.sizeof(NavModelResult) == NAV_OUTPUT_SIZE * ctypes.sizeof(ctypes.c_float)
self.output = np.zeros(NAV_OUTPUT_SIZE, dtype=np.float32)
- self.inputs = {'map': np.zeros(NAV_INPUT_SIZE, dtype=np.uint8)}
+ self.inputs = {'input_img': np.zeros(NAV_INPUT_SIZE, dtype=np.uint8)}
self.model = ModelRunner(MODEL_PATHS, self.output, Runtime.DSP, True, None)
- self.model.addInput("map", None)
+ self.model.addInput("input_img", None)
def run(self, buf:np.ndarray) -> Tuple[np.ndarray, float]:
- self.inputs['map'][:] = buf
+ self.inputs['input_img'][:] = buf
t1 = time.perf_counter()
- self.model.setInputBuffer("map", self.inputs['map'].view(np.float32))
+ self.model.setInputBuffer("input_img", self.inputs['input_img'].view(np.float32))
self.model.execute()
t2 = time.perf_counter()
return self.output, t2 - t1
diff --git a/selfdrive/modeld/runners/__init__.py b/selfdrive/modeld/runners/__init__.py
index 9193d63e2b..4c29bf3f1c 100644
--- a/selfdrive/modeld/runners/__init__.py
+++ b/selfdrive/modeld/runners/__init__.py
@@ -19,7 +19,7 @@ class ModelRunner(RunModel):
from openpilot.selfdrive.modeld.runners.snpemodel_pyx import SNPEModel as Runner
runner_type = ModelRunner.SNPE
elif ModelRunner.ONNX in paths:
- from openpilot.selfdrive.modeld.runners.onnxmodel_pyx import ONNXModel as Runner
+ from openpilot.selfdrive.modeld.runners.onnxmodel import ONNXModel as Runner
runner_type = ModelRunner.ONNX
else:
raise Exception("Couldn't select a model runner, make sure to pass at least one valid model path")
diff --git a/selfdrive/modeld/runners/onnx_runner.py b/selfdrive/modeld/runners/onnx_runner.py
deleted file mode 100755
index 9e53874578..0000000000
--- a/selfdrive/modeld/runners/onnx_runner.py
+++ /dev/null
@@ -1,78 +0,0 @@
-#!/usr/bin/env python3
-
-import os
-import sys
-import numpy as np
-from typing import Tuple, Dict, Union, Any
-
-os.environ["OMP_NUM_THREADS"] = "4"
-os.environ["OMP_WAIT_POLICY"] = "PASSIVE"
-
-import onnxruntime as ort
-
-ORT_TYPES_TO_NP_TYPES = {'tensor(float16)': np.float16, 'tensor(float)': np.float32, 'tensor(uint8)': np.uint8}
-
-def read(sz, tf8=False):
- dd = []
- gt = 0
- szof = 1 if tf8 else 4
- while gt < sz * szof:
- st = os.read(0, sz * szof - gt)
- assert(len(st) > 0)
- dd.append(st)
- gt += len(st)
- r = np.frombuffer(b''.join(dd), dtype=np.uint8 if tf8 else np.float32)
- if tf8:
- r = r / 255.
- return r
-
-def write(d):
- os.write(1, d.tobytes())
-
-def run_loop(m, tf8_input=False):
- ishapes = [[1]+ii.shape[1:] for ii in m.get_inputs()]
- keys = [x.name for x in m.get_inputs()]
- itypes = [ORT_TYPES_TO_NP_TYPES[x.type] for x in m.get_inputs()]
-
- # run once to initialize CUDA provider
- if "CUDAExecutionProvider" in m.get_providers():
- m.run(None, dict(zip(keys, [np.zeros(shp, dtype=itp) for shp, itp in zip(ishapes, itypes, strict=True)], strict=True)))
-
- print("ready to run onnx model", keys, ishapes, file=sys.stderr)
- while 1:
- inputs = []
- for k, shp, itp in zip(keys, ishapes, itypes, strict=True):
- ts = np.product(shp)
- #print("reshaping %s with offset %d" % (str(shp), offset), file=sys.stderr)
- inputs.append(read(ts, (k=='input_img' and tf8_input)).reshape(shp).astype(itp))
- ret = m.run(None, dict(zip(keys, inputs, strict=True)))
- #print(ret, file=sys.stderr)
- for r in ret:
- write(r.astype(np.float32))
-
-
-if __name__ == "__main__":
- print(sys.argv, file=sys.stderr)
- print("Onnx available providers: ", ort.get_available_providers(), file=sys.stderr)
- options = ort.SessionOptions()
- options.graph_optimization_level = ort.GraphOptimizationLevel.ORT_DISABLE_ALL
-
- provider: Union[str, Tuple[str, Dict[Any, Any]]]
- if 'OpenVINOExecutionProvider' in ort.get_available_providers() and 'ONNXCPU' not in os.environ:
- provider = 'OpenVINOExecutionProvider'
- elif 'CUDAExecutionProvider' in ort.get_available_providers() and 'ONNXCPU' not in os.environ:
- options.intra_op_num_threads = 2
- provider = ('CUDAExecutionProvider', {'cudnn_conv_algo_search': 'DEFAULT'})
- else:
- options.intra_op_num_threads = 2
- options.execution_mode = ort.ExecutionMode.ORT_SEQUENTIAL
- options.graph_optimization_level = ort.GraphOptimizationLevel.ORT_ENABLE_ALL
- provider = 'CPUExecutionProvider'
-
- try:
- print("Onnx selected provider: ", [provider], file=sys.stderr)
- ort_session = ort.InferenceSession(sys.argv[1], options, providers=[provider])
- print("Onnx using ", ort_session.get_providers(), file=sys.stderr)
- run_loop(ort_session, tf8_input=("--use_tf8" in sys.argv))
- except KeyboardInterrupt:
- pass
diff --git a/selfdrive/modeld/runners/onnxmodel.cc b/selfdrive/modeld/runners/onnxmodel.cc
deleted file mode 100644
index ebe6ad847c..0000000000
--- a/selfdrive/modeld/runners/onnxmodel.cc
+++ /dev/null
@@ -1,88 +0,0 @@
-#include "selfdrive/modeld/runners/onnxmodel.h"
-
-#include
-#include
-#include
-#include
-#include
-
-#include "common/util.h"
-
-ONNXModel::ONNXModel(const std::string path, float *_output, size_t _output_size, int runtime, bool _use_tf8, cl_context context) {
- LOGD("loading model %s", path.c_str());
-
- output = _output;
- output_size = _output_size;
- use_tf8 = _use_tf8;
-
- int err = pipe(pipein);
- assert(err == 0);
- err = pipe(pipeout);
- assert(err == 0);
-
- std::string onnx_runner = ONNXRUNNER_PATH;
- std::string tf8_arg = use_tf8 ? "--use_tf8" : "";
-
- proc_pid = fork();
- if (proc_pid == 0) {
- LOGD("spawning onnx process %s", onnx_runner.c_str());
- char *argv[] = {(char*)onnx_runner.c_str(), (char*)path.c_str(), (char*)tf8_arg.c_str(), nullptr};
- dup2(pipein[0], 0);
- dup2(pipeout[1], 1);
- close(pipein[0]);
- close(pipein[1]);
- close(pipeout[0]);
- close(pipeout[1]);
- execvp(onnx_runner.c_str(), argv);
- exit(1); // exit if the exec fails
- }
-
- // parent
- close(pipein[0]);
- close(pipeout[1]);
-}
-
-ONNXModel::~ONNXModel() {
- close(pipein[1]);
- close(pipeout[0]);
- kill(proc_pid, SIGTERM);
-}
-
-void ONNXModel::pwrite(float *buf, int size) {
- char *cbuf = (char *)buf;
- int tw = size*sizeof(float);
- while (tw > 0) {
- int err = write(pipein[1], cbuf, tw);
- //printf("host write %d\n", err);
- assert(err >= 0);
- cbuf += err;
- tw -= err;
- }
- LOGD("host write of size %d done", size);
-}
-
-void ONNXModel::pread(float *buf, int size) {
- char *cbuf = (char *)buf;
- int tr = size*sizeof(float);
- struct pollfd fds[1];
- fds[0].fd = pipeout[0];
- fds[0].events = POLLIN;
- while (tr > 0) {
- int err;
- err = poll(fds, 1, 10000); // 10 second timeout
- assert(err == 1 || (err == -1 && errno == EINTR));
- LOGD("host read remaining %d/%lu poll %d", tr, size*sizeof(float), err);
- err = read(pipeout[0], cbuf, tr);
- assert(err > 0 || (err == 0 && errno == EINTR));
- cbuf += err;
- tr -= err;
- }
- LOGD("host read done");
-}
-
-void ONNXModel::execute() {
- for (auto &input : inputs) {
- pwrite(input->buffer, input->size);
- }
- pread(output, output_size);
-}
diff --git a/selfdrive/modeld/runners/onnxmodel.h b/selfdrive/modeld/runners/onnxmodel.h
deleted file mode 100644
index f9bae1a0a9..0000000000
--- a/selfdrive/modeld/runners/onnxmodel.h
+++ /dev/null
@@ -1,23 +0,0 @@
-#pragma once
-
-#include
-
-#include "selfdrive/modeld/runners/runmodel.h"
-
-class ONNXModel : public RunModel {
-public:
- ONNXModel(const std::string path, float *output, size_t output_size, int runtime, bool _use_tf8 = false, cl_context context = NULL);
- ~ONNXModel();
- void execute();
-private:
- int proc_pid;
- float *output;
- size_t output_size;
- bool use_tf8;
-
- // pipe to communicate to onnx_runner subprocess
- void pread(float *buf, int size);
- void pwrite(float *buf, int size);
- int pipein[2];
- int pipeout[2];
-};
diff --git a/selfdrive/modeld/runners/onnxmodel.pxd b/selfdrive/modeld/runners/onnxmodel.pxd
deleted file mode 100644
index 165f043da8..0000000000
--- a/selfdrive/modeld/runners/onnxmodel.pxd
+++ /dev/null
@@ -1,9 +0,0 @@
-# distutils: language = c++
-
-from libcpp.string cimport string
-
-from cereal.visionipc.visionipc cimport cl_context
-
-cdef extern from "selfdrive/modeld/runners/onnxmodel.h":
- cdef cppclass ONNXModel:
- ONNXModel(string, float*, size_t, int, bool, cl_context)
diff --git a/selfdrive/modeld/runners/onnxmodel.py b/selfdrive/modeld/runners/onnxmodel.py
new file mode 100644
index 0000000000..3c20a39760
--- /dev/null
+++ b/selfdrive/modeld/runners/onnxmodel.py
@@ -0,0 +1,70 @@
+import os
+import sys
+import numpy as np
+from typing import Tuple, Dict, Union, Any
+
+from openpilot.selfdrive.modeld.runners.runmodel_pyx import RunModel
+
+ORT_TYPES_TO_NP_TYPES = {'tensor(float16)': np.float16, 'tensor(float)': np.float32, 'tensor(uint8)': np.uint8}
+
+def create_ort_session(path):
+ os.environ["OMP_NUM_THREADS"] = "4"
+ os.environ["OMP_WAIT_POLICY"] = "PASSIVE"
+
+ import onnxruntime as ort
+ print("Onnx available providers: ", ort.get_available_providers(), file=sys.stderr)
+ options = ort.SessionOptions()
+ options.graph_optimization_level = ort.GraphOptimizationLevel.ORT_DISABLE_ALL
+
+ provider: Union[str, Tuple[str, Dict[Any, Any]]]
+ if 'OpenVINOExecutionProvider' in ort.get_available_providers() and 'ONNXCPU' not in os.environ:
+ provider = 'OpenVINOExecutionProvider'
+ elif 'CUDAExecutionProvider' in ort.get_available_providers() and 'ONNXCPU' not in os.environ:
+ options.intra_op_num_threads = 2
+ provider = ('CUDAExecutionProvider', {'cudnn_conv_algo_search': 'DEFAULT'})
+ else:
+ options.intra_op_num_threads = 2
+ options.execution_mode = ort.ExecutionMode.ORT_SEQUENTIAL
+ options.graph_optimization_level = ort.GraphOptimizationLevel.ORT_ENABLE_ALL
+ provider = 'CPUExecutionProvider'
+
+ print("Onnx selected provider: ", [provider], file=sys.stderr)
+ ort_session = ort.InferenceSession(path, options, providers=[provider])
+ print("Onnx using ", ort_session.get_providers(), file=sys.stderr)
+ return ort_session
+
+
+class ONNXModel(RunModel):
+ def __init__(self, path, output, runtime, use_tf8, cl_context):
+ self.inputs = {}
+ self.output = output
+ self.use_tf8 = use_tf8
+
+ self.session = create_ort_session(path)
+ self.input_names = [x.name for x in self.session.get_inputs()]
+ self.input_shapes = {x.name: [1, *x.shape[1:]] for x in self.session.get_inputs()}
+ self.input_dtypes = {x.name: ORT_TYPES_TO_NP_TYPES[x.type] for x in self.session.get_inputs()}
+
+ # run once to initialize CUDA provider
+ if "CUDAExecutionProvider" in self.session.get_providers():
+ self.session.run(None, {k: np.zeros(self.input_shapes[k], dtype=self.input_dtypes[k]) for k in self.input_names})
+ print("ready to run onnx model", self.input_shapes, file=sys.stderr)
+
+ def addInput(self, name, buffer):
+ assert name in self.input_names
+ self.inputs[name] = buffer
+
+ def setInputBuffer(self, name, buffer):
+ assert name in self.inputs
+ self.inputs[name] = buffer
+
+ def getCLBuffer(self, name):
+ return None
+
+ def execute(self):
+ inputs = {k: (v.view(np.uint8) / 255. if self.use_tf8 and k == 'input_img' else v) for k,v in self.inputs.items()}
+ inputs = {k: v.reshape(self.input_shapes[k]).astype(self.input_dtypes[k]) for k,v in inputs.items()}
+ outputs = self.session.run(None, inputs)
+ assert len(outputs) == 1, "Only single model outputs are supported"
+ self.output[:] = outputs[0]
+ return self.output
diff --git a/selfdrive/modeld/runners/onnxmodel_pyx.pyx b/selfdrive/modeld/runners/onnxmodel_pyx.pyx
deleted file mode 100644
index 27aa22d1c6..0000000000
--- a/selfdrive/modeld/runners/onnxmodel_pyx.pyx
+++ /dev/null
@@ -1,14 +0,0 @@
-# distutils: language = c++
-# cython: c_string_encoding=ascii
-
-from libcpp cimport bool
-from libcpp.string cimport string
-
-from .onnxmodel cimport ONNXModel as cppONNXModel
-from selfdrive.modeld.models.commonmodel_pyx cimport CLContext
-from selfdrive.modeld.runners.runmodel_pyx cimport RunModel
-from selfdrive.modeld.runners.runmodel cimport RunModel as cppRunModel
-
-cdef class ONNXModel(RunModel):
- def __cinit__(self, string path, float[:] output, int runtime, bool use_tf8, CLContext context):
- self.model = new cppONNXModel(path, &output[0], len(output), runtime, use_tf8, context.context)
diff --git a/selfdrive/test/docker_build.sh b/selfdrive/test/docker_build.sh
index 3851e2b1e3..e0ba54f058 100755
--- a/selfdrive/test/docker_build.sh
+++ b/selfdrive/test/docker_build.sh
@@ -1,45 +1,26 @@
-#!/bin/bash
+#!/usr/bin/env bash
set -e
# To build sim and docs, you can run the following to mount the scons cache to the same place as in CI:
# mkdir -p .ci_cache/scons_cache
# sudo mount --bind /tmp/scons_cache/ .ci_cache/scons_cache
-if [ $1 = "base" ]; then
- export DOCKER_IMAGE=openpilot-base
- export DOCKER_FILE=Dockerfile.openpilot_base
-elif [ $1 = "docs" ]; then
- export DOCKER_IMAGE=openpilot-docs
- export DOCKER_FILE=docs/docker/Dockerfile
-elif [ $1 = "sim" ]; then
- export DOCKER_IMAGE=openpilot-sim
- export DOCKER_FILE=tools/sim/Dockerfile.sim
-elif [ $1 = "prebuilt" ]; then
- export DOCKER_IMAGE=openpilot-prebuilt
- export DOCKER_FILE=Dockerfile.openpilot
-elif [ $1 = "cl" ]; then
- export DOCKER_IMAGE=openpilot-base-cl
- export DOCKER_FILE=Dockerfile.openpilot_base_cl
+SCRIPT_DIR=$(dirname "$0")
+OPENPILOT_DIR=$SCRIPT_DIR/../../
+if [ -n "$TARGET_ARCHITECTURE" ]; then
+ PLATFORM="linux/$TARGET_ARCHITECTURE"
+ TAG_SUFFIX="-$TARGET_ARCHITECTURE"
else
- echo "Invalid docker build image $1"
- exit 1
+ PLATFORM="linux/$(uname -m)"
+ TAG_SUFFIX=""
fi
-export DOCKER_REGISTRY=ghcr.io/commaai
-export COMMIT_SHA=$(git rev-parse HEAD)
+source $SCRIPT_DIR/docker_common.sh $1 "$TAG_SUFFIX"
-LOCAL_TAG=$DOCKER_IMAGE
-REMOTE_TAG=$DOCKER_REGISTRY/$LOCAL_TAG
-REMOTE_SHA_TAG=$REMOTE_TAG:$COMMIT_SHA
+DOCKER_BUILDKIT=1 docker buildx build --platform $PLATFORM --load --cache-to type=inline --cache-from type=registry,ref=$REMOTE_TAG -t $REMOTE_TAG -t $LOCAL_TAG -f $OPENPILOT_DIR/$DOCKER_FILE $OPENPILOT_DIR
-SCRIPT_DIR=$(dirname "$0")
-OPENPILOT_DIR=$SCRIPT_DIR/../../
-
-DOCKER_BUILDKIT=1 docker buildx build --load --cache-to type=inline --cache-from type=registry,ref=$REMOTE_TAG -t $REMOTE_TAG -t $LOCAL_TAG -f $OPENPILOT_DIR/$DOCKER_FILE $OPENPILOT_DIR
-
-if [[ ! -z "$PUSH_IMAGE" ]];
-then
+if [ -n "$PUSH_IMAGE" ]; then
docker push $REMOTE_TAG
docker tag $REMOTE_TAG $REMOTE_SHA_TAG
docker push $REMOTE_SHA_TAG
-fi
\ No newline at end of file
+fi
diff --git a/selfdrive/test/docker_common.sh b/selfdrive/test/docker_common.sh
new file mode 100644
index 0000000000..68ea472d26
--- /dev/null
+++ b/selfdrive/test/docker_common.sh
@@ -0,0 +1,27 @@
+if [ $1 = "base" ]; then
+ export DOCKER_IMAGE=openpilot-base
+ export DOCKER_FILE=Dockerfile.openpilot_base
+elif [ $1 = "docs" ]; then
+ export DOCKER_IMAGE=openpilot-docs
+ export DOCKER_FILE=docs/docker/Dockerfile
+elif [ $1 = "sim" ]; then
+ export DOCKER_IMAGE=openpilot-sim
+ export DOCKER_FILE=tools/sim/Dockerfile.sim
+elif [ $1 = "prebuilt" ]; then
+ export DOCKER_IMAGE=openpilot-prebuilt
+ export DOCKER_FILE=Dockerfile.openpilot
+elif [ $1 = "cl" ]; then
+ export DOCKER_IMAGE=openpilot-base-cl
+ export DOCKER_FILE=Dockerfile.openpilot_base_cl
+else
+ echo "Invalid docker build image $1"
+ exit 1
+fi
+
+export DOCKER_REGISTRY=ghcr.io/commaai
+export COMMIT_SHA=$(git rev-parse HEAD)
+
+TAG_SUFFIX=$2
+LOCAL_TAG=$DOCKER_IMAGE$TAG_SUFFIX
+REMOTE_TAG=$DOCKER_REGISTRY/$LOCAL_TAG
+REMOTE_SHA_TAG=$DOCKER_REGISTRY/$LOCAL_TAG:$COMMIT_SHA
diff --git a/selfdrive/test/docker_tag_multiarch.sh b/selfdrive/test/docker_tag_multiarch.sh
new file mode 100755
index 0000000000..c1761802c7
--- /dev/null
+++ b/selfdrive/test/docker_tag_multiarch.sh
@@ -0,0 +1,25 @@
+#!/usr/bin/env bash
+set -e
+
+if [ $# -lt 2 ]; then
+ echo "Usage: $0 ..."
+ exit 1
+fi
+
+SCRIPT_DIR=$(dirname "$0")
+ARCHS=("${@:2}")
+
+source $SCRIPT_DIR/docker_common.sh $1
+
+MANIFEST_AMENDS=""
+for ARCH in ${ARCHS[@]}; do
+ MANIFEST_AMENDS="$MANIFEST_AMENDS --amend $REMOTE_TAG-$ARCH:$COMMIT_SHA"
+done
+
+docker manifest create $REMOTE_TAG $MANIFEST_AMENDS
+docker manifest create $REMOTE_SHA_TAG $MANIFEST_AMENDS
+
+if [[ -n "$PUSH_IMAGE" ]]; then
+ docker manifest push $REMOTE_TAG
+ docker manifest push $REMOTE_SHA_TAG
+fi
diff --git a/selfdrive/test/process_replay/test_processes.py b/selfdrive/test/process_replay/test_processes.py
index ab51948dd0..973c16971e 100755
--- a/selfdrive/test/process_replay/test_processes.py
+++ b/selfdrive/test/process_replay/test_processes.py
@@ -20,8 +20,8 @@ source_segments = [
("BODY", "937ccb7243511b65|2022-05-24--16-03-09--1"), # COMMA.BODY
("HYUNDAI", "02c45f73a2e5c6e9|2021-01-01--19-08-22--1"), # HYUNDAI.SONATA
("HYUNDAI2", "d545129f3ca90f28|2022-11-07--20-43-08--3"), # HYUNDAI.KIA_EV6 (+ QCOM GPS)
- ("TOYOTA", "0982d79ebb0de295|2021-01-04--17-13-21--13"), # TOYOTA.PRIUS (INDI)
- ("TOYOTA2", "0982d79ebb0de295|2021-01-03--20-03-36--6"), # TOYOTA.RAV4 (LQR)
+ ("TOYOTA", "0982d79ebb0de295|2021-01-04--17-13-21--13"), # TOYOTA.PRIUS
+ ("TOYOTA2", "0982d79ebb0de295|2021-01-03--20-03-36--6"), # TOYOTA.RAV4
("TOYOTA3", "f7d7e3538cda1a2a|2021-08-16--08-55-34--6"), # TOYOTA.COROLLA_TSS2
("HONDA", "eb140f119469d9ab|2021-06-12--10-46-24--27"), # HONDA.CIVIC (NIDEC)
("HONDA2", "7d2244f34d1bbcda|2021-06-25--12-25-37--26"), # HONDA.ACCORD (BOSCH)
diff --git a/selfdrive/test/setup_device_ci.sh b/selfdrive/test/setup_device_ci.sh
index 1b99c31038..ca458f2a79 100755
--- a/selfdrive/test/setup_device_ci.sh
+++ b/selfdrive/test/setup_device_ci.sh
@@ -56,31 +56,68 @@ sleep infinity
EOF
chmod +x $CONTINUE_PATH
-# set up environment
-if [ ! -d "$SOURCE_DIR" ]; then
- git clone https://github.com/commaai/openpilot.git $SOURCE_DIR
-fi
-cd $SOURCE_DIR
+safe_checkout() {
+ # completely clean TEST_DIR
+
+ cd $SOURCE_DIR
-# cleanup orphaned locks
-find .git -type f -name "*.lock" -exec rm {} +
+ # cleanup orphaned locks
+ find .git -type f -name "*.lock" -exec rm {} +
+
+ git reset --hard
+ git fetch --no-tags --no-recurse-submodules -j4 --verbose --depth 1 origin $GIT_COMMIT
+ find . -maxdepth 1 -not -path './.git' -not -name '.' -not -name '..' -exec rm -rf '{}' \;
+ git reset --hard $GIT_COMMIT
+ git checkout $GIT_COMMIT
+ git clean -xdff
+ git submodule sync
+ git submodule update --init --recursive
+ git submodule foreach --recursive "git reset --hard && git clean -xdff"
+
+ git lfs pull
+ (ulimit -n 65535 && git lfs prune)
+
+ echo "git checkout done, t=$SECONDS"
+ du -hs $SOURCE_DIR $SOURCE_DIR/.git
+
+ if [ -z "SKIP_COPY" ]; then
+ rsync -a --delete $SOURCE_DIR $TEST_DIR
+ fi
+}
-git reset --hard
-git fetch --no-tags --no-recurse-submodules -j4 --verbose --depth 1 origin $GIT_COMMIT
-find . -maxdepth 1 -not -path './.git' -not -name '.' -not -name '..' -exec rm -rf '{}' \;
-git reset --hard $GIT_COMMIT
-git checkout $GIT_COMMIT
-git clean -xdff
-git submodule sync
-git submodule update --init --recursive
-git submodule foreach --recursive "git reset --hard && git clean -xdff"
+unsafe_checkout() {
+ # checkout directly in test dir, leave old build products
-git lfs pull
-(ulimit -n 65535 && git lfs prune)
+ cd $TEST_DIR
-echo "git checkout done, t=$SECONDS"
-du -hs $SOURCE_DIR $SOURCE_DIR/.git
+ # cleanup orphaned locks
+ find .git -type f -name "*.lock" -exec rm {} +
-rsync -a --delete $SOURCE_DIR $TEST_DIR
+ git fetch --no-tags --no-recurse-submodules -j8 --verbose --depth 1 origin $GIT_COMMIT
+ git checkout --force --no-recurse-submodules $GIT_COMMIT
+ git reset --hard $GIT_COMMIT
+ git clean -df
+ git submodule sync
+ git submodule update --init --recursive
+ git submodule foreach --recursive "git reset --hard && git clean -df"
+
+ git lfs pull
+ (ulimit -n 65535 && git lfs prune)
+}
+
+export GIT_PACK_THREADS=8
+
+# set up environment
+if [ ! -d "$SOURCE_DIR" ]; then
+ git clone https://github.com/commaai/openpilot.git $SOURCE_DIR
+fi
+
+if [ ! -z "$UNSAFE" ]; then
+ echo "doing unsafe checkout"
+ unsafe_checkout
+else
+ echo "doing safe checkout"
+ safe_checkout
+fi
echo "$TEST_DIR synced with $GIT_COMMIT, t=$SECONDS"
diff --git a/tools/cabana/chart/chart.cc b/tools/cabana/chart/chart.cc
index 18a9adbe07..709a052d72 100644
--- a/tools/cabana/chart/chart.cc
+++ b/tools/cabana/chart/chart.cc
@@ -286,9 +286,7 @@ void ChartView::updateSeries(const cabana::Signal *sig, bool clear) {
s.vals.reserve(msgs.capacity());
s.step_vals.reserve(msgs.capacity() * 2);
- auto first = std::upper_bound(msgs.cbegin(), msgs.cend(), s.last_value_mono_time, [](uint64_t ts, auto e) {
- return ts < e->mono_time;
- });
+ auto first = std::upper_bound(msgs.cbegin(), msgs.cend(), s.last_value_mono_time, CompareCanEvent());
const double route_start_time = can->routeStartTime();
for (auto end = msgs.cend(); first != end; ++first) {
const CanEvent *e = *first;
diff --git a/tools/cabana/chart/sparkline.cc b/tools/cabana/chart/sparkline.cc
index 4692b41e4b..1ae6a1bfe0 100644
--- a/tools/cabana/chart/sparkline.cc
+++ b/tools/cabana/chart/sparkline.cc
@@ -9,12 +9,8 @@ void Sparkline::update(const MessageId &msg_id, const cabana::Signal *sig, doubl
const auto &msgs = can->events(msg_id);
uint64_t ts = (last_msg_ts + can->routeStartTime()) * 1e9;
uint64_t first_ts = (ts > range * 1e9) ? ts - range * 1e9 : 0;
- auto first = std::lower_bound(msgs.cbegin(), msgs.cend(), first_ts, [](auto e, uint64_t ts) {
- return e->mono_time < ts;
- });
- auto last = std::upper_bound(first, msgs.cend(), ts, [](uint64_t ts, auto e) {
- return ts < e->mono_time;
- });
+ auto first = std::lower_bound(msgs.cbegin(), msgs.cend(), first_ts, CompareCanEvent());
+ auto last = std::upper_bound(first, msgs.cend(), ts, CompareCanEvent());
bool update_values = last_ts != last_msg_ts || time_range != range;
last_ts = last_msg_ts;
diff --git a/tools/cabana/historylog.cc b/tools/cabana/historylog.cc
index 7381884cf3..5330549963 100644
--- a/tools/cabana/historylog.cc
+++ b/tools/cabana/historylog.cc
@@ -162,9 +162,7 @@ std::deque HistoryLogModel::fetchData(uint64_t from_ti
return msgs;
} else {
assert(min_time == 0);
- auto first = std::upper_bound(events.cbegin(), events.cend(), from_time, [](uint64_t ts, auto e) {
- return ts < e->mono_time;
- });
+ auto first = std::upper_bound(events.cbegin(), events.cend(), from_time, CompareCanEvent());
auto msgs = fetchData(first, events.cend(), 0);
if (update_colors) {
for (auto it = msgs.begin(); it != msgs.end(); ++it) {
diff --git a/tools/cabana/streams/abstractstream.cc b/tools/cabana/streams/abstractstream.cc
index 2cf65d4911..6fa479815d 100644
--- a/tools/cabana/streams/abstractstream.cc
+++ b/tools/cabana/streams/abstractstream.cc
@@ -162,18 +162,14 @@ void AbstractStream::mergeEvents(std::vector::const_iterator first, std
}
}
- auto compare = [](const CanEvent *l, const CanEvent *r) {
- return l->mono_time < r->mono_time;
- };
-
for (auto &[id, new_e] : new_events_map) {
auto &e = events_[id];
- auto insert_pos = std::upper_bound(e.cbegin(), e.cend(), new_e.front(), compare);
+ auto insert_pos = std::upper_bound(e.cbegin(), e.cend(), new_e.front()->mono_time, CompareCanEvent());
e.insert(insert_pos, new_e.cbegin(), new_e.cend());
}
if (!new_events.empty()) {
- auto insert_pos = std::upper_bound(all_events_.cbegin(), all_events_.cend(), new_events.front(), compare);
+ auto insert_pos = std::upper_bound(all_events_.cbegin(), all_events_.cend(), new_events.front()->mono_time, CompareCanEvent());
all_events_.insert(insert_pos, new_events.cbegin(), new_events.cend());
}
@@ -201,15 +197,11 @@ inline QColor blend(const QColor &a, const QColor &b) {
// Calculate the frequency of the past minute.
double calc_freq(const MessageId &msg_id, double current_sec) {
- auto compare = [](const CanEvent *e, uint64_t mono_time) {
- return e->mono_time < mono_time;
- };
-
const auto &events = can->events(msg_id);
uint64_t cur_mono_time = (can->routeStartTime() + current_sec) * 1e9;
uint64_t first_mono_time = std::max(0, cur_mono_time - 59 * 1e9);
- auto first = std::lower_bound(events.begin(), events.end(), first_mono_time, compare);
- auto second = std::lower_bound(first, events.end(), cur_mono_time, compare);
+ auto first = std::lower_bound(events.begin(), events.end(), first_mono_time, CompareCanEvent());
+ auto second = std::lower_bound(first, events.end(), cur_mono_time, CompareCanEvent());
if (first != events.end() && second != events.end()) {
double duration = ((*second)->mono_time - (*first)->mono_time) / 1e9;
uint32_t count = std::distance(first, second);
diff --git a/tools/cabana/streams/abstractstream.h b/tools/cabana/streams/abstractstream.h
index 62ab3f2f4b..eae34e2cc4 100644
--- a/tools/cabana/streams/abstractstream.h
+++ b/tools/cabana/streams/abstractstream.h
@@ -41,6 +41,11 @@ struct CanEvent {
uint8_t dat[];
};
+struct CompareCanEvent {
+ constexpr bool operator()(const CanEvent *const e, uint64_t ts) const { return e->mono_time < ts; }
+ constexpr bool operator()(uint64_t ts, const CanEvent *const e) const { return ts < e->mono_time; }
+};
+
struct BusConfig {
int can_speed_kbps = 500;
int data_speed_kbps = 2000;
diff --git a/tools/cabana/streams/livestream.cc b/tools/cabana/streams/livestream.cc
index 3f8397f56c..17805a0b66 100644
--- a/tools/cabana/streams/livestream.cc
+++ b/tools/cabana/streams/livestream.cc
@@ -102,12 +102,8 @@ void LiveStream::updateEvents() {
uint64_t last_ts = post_last_event && speed_ == 1.0
? all_events_.back()->mono_time
: first_event_ts + (nanos_since_boot() - first_update_ts) * speed_;
- auto first = std::upper_bound(all_events_.cbegin(), all_events_.cend(), current_event_ts, [](uint64_t ts, auto e) {
- return ts < e->mono_time;
- });
- auto last = std::upper_bound(first, all_events_.cend(), last_ts, [](uint64_t ts, auto e) {
- return ts < e->mono_time;
- });
+ auto first = std::upper_bound(all_events_.cbegin(), all_events_.cend(), current_event_ts, CompareCanEvent());
+ auto last = std::upper_bound(first, all_events_.cend(), last_ts, CompareCanEvent());
for (auto it = first; it != last; ++it) {
const CanEvent *e = *it;
diff --git a/tools/cabana/tools/findsignal.cc b/tools/cabana/tools/findsignal.cc
index b19babdf88..51d86f5964 100644
--- a/tools/cabana/tools/findsignal.cc
+++ b/tools/cabana/tools/findsignal.cc
@@ -37,10 +37,10 @@ void FindSignalModel::search(std::function cmp) {
filtered_signals.reserve(prev_sigs.size());
QtConcurrent::blockingMap(prev_sigs, [&](auto &s) {
const auto &events = can->events(s.id);
- auto first = std::upper_bound(events.cbegin(), events.cend(), s.mono_time, [](uint64_t ts, auto &e) { return ts < e->mono_time; });
+ auto first = std::upper_bound(events.cbegin(), events.cend(), s.mono_time, CompareCanEvent());
auto last = events.cend();
if (last_time < std::numeric_limits::max()) {
- last = std::upper_bound(events.cbegin(), events.cend(), last_time, [](uint64_t ts, auto &e) { return ts < e->mono_time; });
+ last = std::upper_bound(events.cbegin(), events.cend(), last_time, CompareCanEvent());
}
auto it = std::find_if(first, last, [&](const CanEvent *e) { return cmp(get_raw_value(e->dat, e->size, s.sig)); });
@@ -225,7 +225,7 @@ void FindSignalDlg::setInitialSignals() {
for (auto it = can->last_msgs.cbegin(); it != can->last_msgs.cend(); ++it) {
if (buses.isEmpty() || buses.contains(it.key().source) && (addresses.isEmpty() || addresses.contains(it.key().address))) {
const auto &events = can->events(it.key());
- auto e = std::lower_bound(events.cbegin(), events.cend(), first_time, [](auto e, uint64_t ts) { return e->mono_time < ts; });
+ auto e = std::lower_bound(events.cbegin(), events.cend(), first_time, CompareCanEvent());
if (e != events.cend()) {
const int total_size = it.value().dat.size() * 8;
for (int size = min_size->value(); size <= max_size->value(); ++size) {
diff --git a/tools/mac_setup.sh b/tools/mac_setup.sh
index c767131f51..712923626b 100755
--- a/tools/mac_setup.sh
+++ b/tools/mac_setup.sh
@@ -49,7 +49,7 @@ brew "pyenv"
brew "pyenv-virtualenv"
brew "qt@5"
brew "zeromq"
-brew "gcc@12"
+brew "gcc@13"
cask "gcc-arm-embedded"
brew "portaudio"
EOS