pull/2048/head
ZwX1616 5 years ago
commit ca72ba8af5
  1. 3
      .coveragerc-app
  2. 2
      .editorconfig
  3. 27
      .github/ISSUE_TEMPLATE/bug_report.md
  4. 25
      .github/ISSUE_TEMPLATE/bug_report_pc.md
  5. 8
      .github/ISSUE_TEMPLATE/config.yml
  6. 8
      .github/ISSUE_TEMPLATE/enhancement.md
  7. 17
      .github/ISSUE_TEMPLATE/question.md
  8. 15
      .github/PULL_REQUEST_TEMPLATE/bugfix.md
  9. 19
      .github/PULL_REQUEST_TEMPLATE/car_bugfix.md
  10. 14
      .github/PULL_REQUEST_TEMPLATE/car_port.md
  11. 11
      .github/PULL_REQUEST_TEMPLATE/fingerprint.md
  12. 15
      .github/PULL_REQUEST_TEMPLATE/refactor.md
  13. 8
      .github/dependabot.yml
  14. 38
      .github/pull_request_template.md
  15. 216
      .github/workflows/test.yaml
  16. 23
      .github/workflows/update-pipfile.yml
  17. 6
      .gitignore
  18. 4
      .gitmodules
  19. 47
      .pre-commit-config.yaml
  20. 118
      .pylintrc
  21. 7
      CONTRIBUTING.md
  22. 74
      Dockerfile.openpilot
  23. 146
      Jenkinsfile
  24. 31
      Pipfile
  25. 2164
      Pipfile.lock
  26. 73
      README.md
  27. 38
      RELEASES.md
  28. 4
      SAFETY.md
  29. 135
      SConstruct
  30. 4
      apk/ai.comma.plus.offroad.apk
  31. 1
      apks
  32. 10
      common/SConscript
  33. 17
      common/android.py
  34. 3
      common/api/__init__.py
  35. 2
      common/apk.py
  36. 1
      common/basedir.py
  37. 3
      common/compat.py
  38. 23
      common/file_helpers.py
  39. 6
      common/kalman/SConscript
  40. 2
      common/kalman/simple_kalman_old.py
  41. 21
      common/kalman/tests/test_simple_kalman.py
  42. 14
      common/logging_extra.py
  43. 50
      common/manager_helpers.py
  44. 5
      common/numpy_fast.py
  45. 52
      common/params.py
  46. 1
      common/profiler.py
  47. 28
      common/realtime.py
  48. 38
      common/spinner.py
  49. 4
      common/stat_live.py
  50. 2
      common/string_helpers.py
  51. 9
      common/testing.py
  52. 1
      common/tests/test_numpy_fast.py
  53. 10
      common/tests/test_params.py
  54. 51
      common/text_window.py
  55. 1
      common/timeout.py
  56. 2
      common/transformations/.gitignore
  57. 13
      common/transformations/README.md
  58. 8
      common/transformations/SConscript
  59. 32
      common/transformations/camera.py
  60. 104
      common/transformations/coordinates.cc
  61. 35
      common/transformations/coordinates.hpp
  62. 117
      common/transformations/coordinates.py
  63. 21
      common/transformations/model.py
  64. 147
      common/transformations/orientation.cc
  65. 17
      common/transformations/orientation.hpp
  66. 327
      common/transformations/orientation.py
  67. 42
      common/transformations/setup.py
  68. 23
      common/transformations/tests/test_coordinates.py
  69. 3
      common/transformations/tests/test_orientation.py
  70. 71
      common/transformations/transformations.pxd
  71. 172
      common/transformations/transformations.pyx
  72. 29
      common/window.py
  73. BIN
      external/bin/capnpc-java
  74. 3
      external/opencl/intel-opencl-devel_0r3.1-58621_amd64.deb
  75. 1
      external/opencl/intel.icd
  76. 3
      external/opencl/opencl-1.2-base-pset_6.4.0.37-2_all.deb
  77. 3
      external/opencl/opencl-1.2-base_6.4.0.37-2_amd64.deb
  78. 3
      external/opencl/opencl-1.2-intel-cpu_6.4.0.37-2_amd64.deb
  79. 9
      external/simpleperf/utils.py
  80. 9
      flake8_openpilot.sh
  81. 82
      installer/updater/test_updater.py
  82. 7
      installer/updater/update_kernel.json
  83. BIN
      installer/updater/updater
  84. 201
      installer/updater/updater.cc
  85. 2
      laika_repo
  86. 86
      launch_chffrplus.sh
  87. 17
      launch_env.sh
  88. 2
      models/dmonitoring_model.current
  89. 4
      models/dmonitoring_model.keras
  90. 4
      models/dmonitoring_model_q.dlc
  91. 4
      mypy.ini
  92. 2
      opendbc
  93. 2
      panda
  94. 1
      pyextra/logentries/__init__.py
  95. 49
      pyextra/logentries/helpers.py
  96. 57
      pyextra/logentries/metrics.py
  97. 218
      pyextra/logentries/utils.py
  98. 11
      pylint_openpilot.sh
  99. 2
      rednose_repo
  100. 79
      release/build_devel.sh
  101. Some files were not shown because too many files have changed in this diff Show More

@ -1,3 +0,0 @@
[run]
concurrency=multiprocessing

@ -5,7 +5,7 @@ end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true
[{*.py, *.pyx, *pxd}]
[{*.py, *.pyx, *.pxd}]
charset = utf-8
indent_style = space
indent_size = 2

@ -1,26 +1,31 @@
---
name: Bug report
about: Create a report to help us improve openpilot
about: For issues with running openpilot on your comma device
title: ''
labels: ''
labels: 'bug'
assignees: ''
---
**Describe the bug**
A clear and concise description of what the bug is.
<!-- A clear and concise description of what the bug is. Add the `car bug` label for vehicle/brand specific bugs and the `bug` label for all other bugs. -->
**How to reproduce or log data**
Steps to reproduce the behavior, or a explorer/cabana link to the exact drive and timestamp of when the bug occurred.
<!-- Steps to reproduce the behavior. -->
**Expected behavior**
A clear and concise description of what you expected to happen.
<!-- A clear and concise description of what you expected to happen. -->
**Device/Version information (please complete the following information):**
- Device: [e.g. EON/EON Gold]
- Dongle ID: [e.g. 77611a1fac303767, can be found in Settings -> Device -> Dongle ID]
- Version: [e.g. 0.6.4], or commit hash when on devel
- Car make/model [e.g. Toyota Prius 2016]
- Device: [e.g. EON/EON Gold/comma two]
- Dongle ID: [e.g. 77611a1fac303767, can be found in Settings -> Device -> Dongle ID or my.comma.ai/useradmin]
- Route: [e.g. 77611a1fac303767|2020-05-11--16-37-07, can be found in my.comma.ai/useradmin]
- Timestamp: [When in the route the bug occurs (e.g. 4min 30s into the drive)]
- Version: [commit hash when on a non-release branch, or version number when on devel or release2 (e.g. 0.7.6)]
- Car make/model: [e.g. Toyota Prius 2016]
**Additional context**
Add any other context about the problem here.
<!-- Add any other context about the problem here. -->

@ -0,0 +1,25 @@
---
name: PC Bug report
about: For issues with running openpilot on PC
title: ''
labels: 'PC'
assignees: ''
---
**Describe the bug**
<!-- A clear and concise description of what the bug is. Add the `simulation` label if running in an environment like CARLA. -->
**How to reproduce or log data**
<!-- Steps to reproduce the behavior. -->
**Expected behavior**
<!-- A clear and concise description of what you expected to happen. -->
**Additional context**
<!-- Add any other context about the problem here. -->
Operating system: [e.g. Ubuntu 16.04]

@ -0,0 +1,8 @@
blank_issues_enabled: false
contact_links:
- name: Community Wiki
url: https://github.com/commaai/openpilot/wiki
about: Check out our community wiki
- name: Community Discord
url: https://discord.comma.ai
about: Check out our community discord

@ -0,0 +1,8 @@
---
name: Enhancement
about: For suggestions for openpilot enhancements
title: ''
labels: 'enhancement'
assignees: ''
---

@ -0,0 +1,17 @@
---
name: Question
about: For questions about openpilot
title: ''
labels: 'question'
assignees: ''
---
<!--
Consider these options before opening an issue for a question:
- checking the FAQ at https://comma.ai/faq
- checking the wiki at https://wiki.comma.ai
- asking your question on our community discord at https://discord.comma.ai
-->

@ -0,0 +1,15 @@
---
name: Bug fix
about: For openpilot bug fixes
title: ''
labels: 'bugfix'
assignees: ''
---
**Description**
<!-- A description of the bug and the fix. Also link the issue if it exists. -->
**Verification**
<!-- Explain how you tested this bug fix. -->

@ -0,0 +1,19 @@
---
name: Car Bug fix
about: For vehicle/brand specific bug fixes
title: ''
labels: 'car bug fix'
assignees: ''
---
**Description**
<!-- A description of the bug and the fix. Also link the issue if it exists. -->
**Verification**
<!-- Explain how you tested this bug fix. -->
**Route**
Route: [a route with the bug fix]

@ -0,0 +1,14 @@
---
name: Car port
about: For new car ports
title: ''
labels: 'car port'
assignees: ''
---
**Checklist**
- [ ] added to README
- [ ] test route added to [test_car_models](../../selfdrive/test/test_car_models.py)
- [ ] route with openpilot:
- [ ] route with stock system:

@ -0,0 +1,11 @@
---
name: Fingerprint
about: For adding fingerprints to existing cars
title: ''
labels: 'fingerprint'
assignees: ''
---
Discord username: []
Route: []

@ -0,0 +1,15 @@
---
name: Refactor
about: For code refactors
title: ''
labels: 'refactor'
assignees: ''
---
**Description**
<!-- A description of the refactor, including the goals it accomplishes. -->
**Verification**
<!-- Explain how you tested the refactor for regressions. -->

@ -0,0 +1,8 @@
version: 2
updates:
- package-ecosystem: pip
directory: "/"
schedule:
interval: daily
time: '15:00'
open-pull-requests-limit: 10

@ -0,0 +1,38 @@
<!-- Please copy and paste the relevant template -->
<!--- ***** Template: Car bug fix *****
**Description** [](A description of the bug and the fix. Also link any relevant issues.)
**Verification** [](Explain how you tested this bug fix.)
**Route**
Route: [a route with the bug fix]
-->
<!--- ***** Template: Bug fix *****
**Description** [](A description of the bug and the fix. Also link any relevant issues.)
**Verification** [](Explain how you tested this bug fix.)
-->
<!--- ***** Template: Car port *****
**Checklist**
- [ ] added to README
- [ ] test route added to [test_car_models](../../selfdrive/test/test_car_models.py)
- [ ] route with openpilot:
- [ ] route with stock system:
-->
<!--- ***** Template: Refactor *****
**Description** [](A description of the refactor, including the goals it accomplishes.)
**Verification** [](Explain how you tested the refactor for regressions.)
-->

@ -1,37 +1,25 @@
name: Openpilot Tests
on: [push, pull_request]
name: openpilot tests
on:
push:
pull_request:
schedule:
- cron: '0 * * * *'
env:
RUN: docker run --shm-size 1G --rm tmppilot /bin/sh -c
PERSIST: docker run --shm-size 1G --name tmppilot tmppilot /bin/sh -c
LOAD: docker load -i tmppilot.tar.gz/tmppilot.tar.gz
CI_RUN: docker run -e GITHUB_ACTION -e GITHUB_REF -e GITHUB_HEAD_REF -e GITHUB_SHA -e GITHUB_REPOSITORY -e GITHUB_RUN_ID --rm tmppilotci /bin/bash -c
UNIT_TEST: coverage run --append -m unittest discover
BUILD: |
docker pull $(grep -ioP '(?<=^from)\s+\S+' Dockerfile.openpilot) || true
docker pull docker.io/commaai/openpilotci:latest || true
docker build --cache-from docker.io/commaai/openpilotci:latest -t tmppilot -f Dockerfile.openpilot .
jobs:
build:
name: build
runs-on: ubuntu-16.04
steps:
- uses: actions/checkout@v2
with:
submodules: true
- name: Build docker image
run: |
docker pull $(grep -ioP '(?<=^from)\s+\S+' Dockerfile.openpilot) || true
docker pull docker.io/commaai/openpilot:latest || true
docker build --cache-from docker.io/commaai/openpilot:latest -t tmppilot -f Dockerfile.openpilot .
docker save tmppilot:latest | gzip > tmppilot.tar.gz
- uses: actions/upload-artifact@v2
with:
name: tmppilot.tar.gz
path: tmppilot.tar.gz
build_release:
name: build release
runs-on: ubuntu-16.04
timeout-minutes: 30
timeout-minutes: 50
env:
TEST_DIR: tmppilot
steps:
@ -44,111 +32,151 @@ jobs:
cp -pR --parents $(cat release/files_common) $TEST_DIR
cp Dockerfile.openpilot $TEST_DIR
# copy submodules
cp -pR panda/ opendbc/ cereal/ $TEST_DIR
# need this to build on x86
cp -pR --parents phonelibs/libyuv phonelibs/snpe \
external/bin selfdrive/modeld/runners $TEST_DIR
# need these so docker copy won't fail
cp Pipfile Pipfile.lock flake8_openpilot.sh pylint_openpilot.sh .pylintrc \
.coveragerc-app $TEST_DIR
cp Pipfile Pipfile.lock .pylintrc .pre-commit-config.yaml $TEST_DIR
cd $TEST_DIR
mkdir pyextra laika laika_repo tools release
- name: Build
mkdir laika laika_repo tools
- name: Build Docker image
run: cd $TEST_DIR && eval "$BUILD"
- name: Build openpilot and run quick check
run: |
cd $TEST_DIR
docker pull $(grep -ioP '(?<=^from)\s+\S+' Dockerfile.openpilot) || true
docker pull docker.io/commaai/openpilot:latest || true
docker build --cache-from docker.io/commaai/openpilot:latest -t tmppilot -f Dockerfile.openpilot .
$RUN "cd /tmp/openpilot && \
scons -j$(nproc) && \
$UNIT_TEST selfdrive/car"
push:
name: push
build_mac:
name: build macos
runs-on: macos-10.15
timeout-minutes: 35
steps:
- uses: actions/checkout@v2
with:
submodules: true
- name: Cache dependencies
id: dependency-cache
uses: actions/cache@v2
with:
path: |
~/.pyenv
~/Library/Caches/pip
~/Library/Caches/pipenv
~/Library/Caches/Homebrew
key: ${{ hashFiles('tools/mac_setup.sh') }}
- name: Install dependencies
run: ./tools/mac_setup.sh
- name: Build openpilot
run: eval "$(pyenv init -)" && scons -j$(nproc)
- name: Brew cleanup
run: brew cleanup # keeps our cache small
docker_push:
name: docker push
runs-on: ubuntu-16.04
needs: build
timeout-minutes: 50
if: github.ref == 'refs/heads/master' && github.event_name != 'pull_request' && github.repository == 'commaai/openpilot'
needs: static_analysis # hack to ensure slow tests run first since this and static_analysis are fast
steps:
- uses: actions/download-artifact@v1
- uses: actions/checkout@v2
with:
name: tmppilot.tar.gz
- name: Load image
run: $LOAD
- name: Login to dockerhub
run: docker login -u wmelching -p ${{ secrets.DOCKERHUB_TOKEN }}
- name: Tag image
run: docker tag tmppilot docker.io/commaai/openpilot:latest
- name: Push image
run: docker push docker.io/commaai/openpilot:latest
submodules: true
- name: Build Docker image
run: eval "$BUILD"
- name: Push to dockerhub
run: |
docker login -u wmelching -p ${{ secrets.COMMA_DOCKERHUB_TOKEN}}
docker tag tmppilot docker.io/commaai/openpilotci:latest
docker push docker.io/commaai/openpilotci:latest
linter:
name: linter
docker_push_prebuilt:
name: docker push prebuilt
runs-on: ubuntu-16.04
needs: build
timeout-minutes: 50
if: github.event_name == 'schedule' && github.repository == 'commaai/openpilot'
needs: [static_analysis, unit_tests, process_replay, test_longitudinal, test_car_models]
steps:
- uses: actions/download-artifact@v1
- uses: actions/checkout@v2
with:
name: tmppilot.tar.gz
- name: Load image
run: $LOAD
- name: flake8
run: $RUN "cd /tmp/openpilot/ && ./flake8_openpilot.sh"
- name: pylint
run: $RUN "cd /tmp/openpilot/ && ./pylint_openpilot.sh"
submodules: true
- name: Build Docker image
run: echo "RUN cd /tmp/openpilot && scons -c && scons -j3" >> Dockerfile.openpilot && eval "$BUILD"
- name: Push to dockerhub
run: |
docker login -u wmelching -p ${{ secrets.COMMA_DOCKERHUB_TOKEN}}
docker tag tmppilot docker.io/commaai/openpilot:latest
docker push docker.io/commaai/openpilot:latest
static_analysis:
name: static analysis
runs-on: ubuntu-16.04
timeout-minutes: 50
steps:
- uses: actions/checkout@v2
with:
submodules: true
- name: Build Docker image
run: eval "$BUILD"
- name: pre-commit
run: $RUN "cd /tmp/openpilot/ && git init && git add -A && pre-commit run --all"
unit_tests:
name: unit tests
runs-on: ubuntu-16.04
needs: build
timeout-minutes: 50
steps:
- uses: actions/download-artifact@v1
- uses: actions/checkout@v2
with:
name: tmppilot.tar.gz
- name: Load image
run: $LOAD
submodules: true
- name: Build Docker image
run: eval "$BUILD"
- name: Run unit tests
run: |
$PERSIST "cd /tmp/openpilot && \
scons -j$(nproc) && \
coverage run selfdrive/test/test_fingerprints.py && \
$UNIT_TEST common && \
$UNIT_TEST opendbc/can && \
$UNIT_TEST selfdrive/boardd && \
$UNIT_TEST selfdrive/controls && \
$UNIT_TEST selfdrive/monitoring && \
$UNIT_TEST selfdrive/loggerd && \
$UNIT_TEST selfdrive/car && \
$UNIT_TEST selfdrive/locationd && \
$UNIT_TEST selfdrive/athena && \
$UNIT_TEST selfdrive/thermald && \
$UNIT_TEST tools/lib/tests"
- name: Upload coverage to Codecov
run: |
docker commit tmppilot tmppilotci
$CI_RUN "cd /tmp/openpilot && bash <(curl -s https://codecov.io/bash) -Z -F unit_tests"
$CI_RUN "cd /tmp/openpilot && bash <(curl -s https://codecov.io/bash) -v -F unit_tests"
process_replay:
name: process replay
runs-on: ubuntu-16.04
needs: build
timeout-minutes: 30
timeout-minutes: 50
steps:
- uses: actions/download-artifact@v1
- uses: actions/checkout@v2
with:
name: tmppilot.tar.gz
- name: Load image
run: $LOAD
submodules: true
- name: Build Docker image
run: eval "$BUILD"
- name: Run replay
run: |
$PERSIST "cd /tmp/openpilot && CI=1 coverage run selfdrive/test/process_replay/test_processes.py"
$PERSIST "cd /tmp/openpilot && \
scons -j$(nproc) && \
CI=1 coverage run selfdrive/test/process_replay/test_processes.py"
- name: Upload coverage to Codecov
run: |
docker commit tmppilot tmppilotci
$CI_RUN "cd /tmp/openpilot && bash <(curl -s https://codecov.io/bash) -Z -F process_replay"
- name: Copy diff
$CI_RUN "cd /tmp/openpilot && bash <(curl -s https://codecov.io/bash) -v -F process_replay"
- name: Print diff
if: always()
run: |
docker cp tmppilot:/tmp/openpilot/selfdrive/test/process_replay/diff.txt diff.txt
- name: Print diff
if: always()
run: cat diff.txt
cat diff.txt
- uses: actions/upload-artifact@v2
if: always()
with:
@ -158,17 +186,20 @@ jobs:
test_longitudinal:
name: longitudinal
runs-on: ubuntu-16.04
needs: build
timeout-minutes: 30
timeout-minutes: 50
steps:
- uses: actions/download-artifact@v1
- uses: actions/checkout@v2
with:
name: tmppilot.tar.gz
- name: Load image
run: $LOAD
submodules: true
- name: Build Docker image
run: eval "$BUILD"
- name: Test longitudinal
run: |
$PERSIST "cd /tmp/openpilot/selfdrive/test/longitudinal_maneuvers && OPTEST=1 ./test_longitudinal.py"
$PERSIST "mkdir -p /tmp/openpilot/selfdrive/test/out && \
cd /tmp/openpilot/ && \
scons -j$(nproc) && \
cd selfdrive/test/longitudinal_maneuvers && \
OPTEST=1 ./test_longitudinal.py"
- name: Copy artifacts
if: always()
run: |
@ -183,18 +214,21 @@ jobs:
test_car_models:
name: test car models
runs-on: ubuntu-16.04
needs: build
timeout-minutes: 30
timeout-minutes: 50
steps:
- uses: actions/download-artifact@v1
- uses: actions/checkout@v2
with:
name: tmppilot.tar.gz
- name: Load image
run: $LOAD
submodules: true
- name: Build Docker image
run: eval "$BUILD"
- name: Test car models
run: |
$PERSIST "mkdir -p /data/params && cd /tmp/openpilot && coverage run --parallel-mode --concurrency=multiprocessing --rcfile=./.coveragerc-app selfdrive/test/test_car_models.py && coverage combine"
$PERSIST "cd /tmp/openpilot && \
scons -j$(nproc) && \
coverage run --parallel-mode -m nose --processes=4 --process-timeout=60 \
selfdrive/test/test_models.py && \
coverage combine"
- name: Upload coverage to Codecov
run: |
docker commit tmppilot tmppilotci
$CI_RUN "cd /tmp/openpilot && bash <(curl -s https://codecov.io/bash) -Z -F test_car_models"
$CI_RUN "cd /tmp/openpilot && bash <(curl -s https://codecov.io/bash) -v -F test_car_models"

@ -0,0 +1,23 @@
name: "Update Pipfile.lock"
on:
schedule:
- cron: '00 15 * * 1' # Every monday on 15:00 UTC
jobs:
piplock:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- uses: actions/setup-python@v2
- run: pip install wheel
- run: pip install pipenv
- run: pipenv lock
- uses: actions/upload-artifact@v2
with:
name: "Pipfile lock"
path: Pipfile.lock
- uses: peter-evans/create-pull-request@v2
with:
title: "Update Pipfile.lock (dependencies)"
branch: update-pipfile
commit-message: "[Bot] Update Pipfile.lock dependencies"

6
.gitignore vendored

@ -54,9 +54,15 @@ openpilot
notebooks
xx
panda_jungle
apks
openpilot-apks
.coverage*
coverage.xml
htmlcov
pandaextra
.mypy_cache/
flycheck_*
cppcheck_report.txt

4
.gitmodules vendored

@ -7,13 +7,9 @@
[submodule "laika_repo"]
path = laika_repo
url = ../../commaai/laika.git
[submodule "apks"]
path = apks
url = ../../commaai/openpilot-apks.git
[submodule "cereal"]
path = cereal
url = ../../commaai/cereal.git
[submodule "rednose_repo"]
path = rednose_repo
url = ../../commaai/rednose.git

@ -0,0 +1,47 @@
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: master
hooks:
- id: check-ast
- id: check-json
- id: check-xml
- id: check-yaml
- id: check-merge-conflict
- id: check-symlinks
- repo: https://github.com/pre-commit/mirrors-mypy
rev: master
hooks:
- id: mypy
exclude: '^(pyextra)|(external)|(cereal)|(rednose)|(panda)|(laika)|(opendbc)|(laika_repo)|(rednose_repo)/'
additional_dependencies: ['git+https://github.com/numpy/numpy-stubs']
- repo: https://github.com/PyCQA/flake8
rev: master
hooks:
- id: flake8
exclude: '^(pyextra)|(external)|(cereal)|(rednose)|(panda)|(laika)|(opendbc)|(laika_repo)|(rednose_repo)|(selfdrive/debug)/'
args:
- --select=F,E112,E113,E304,E501,E502,E701,E702,E703,E71,E72,E731,W191,W6
- --max-line-length=240
- --statistics
- repo: local
hooks:
- id: pylint
name: pylint
entry: pylint
language: system
types: [python]
exclude: '^(pyextra)|(external)|(cereal)|(rednose)|(panda)|(laika)|(laika_repo)|(rednose_repo)/'
- repo: local
hooks:
- id: cppcheck
name: cppcheck
entry: cppcheck
language: system
types: [c++]
exclude: '^(phonelibs)|(external)|(cereal)|(opendbc)|(panda)|(tools)|(selfdrive/modeld/thneed/debug)|(selfdrive/modeld/test)|(selfdrive/camerad/test)/|(installer)'
args:
- --error-exitcode=1
- --language=c++
- --quiet
- --force
- -j8

@ -3,7 +3,7 @@
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code
extension-pkg-whitelist=scipy
extension-pkg-whitelist=scipy cereal.messaging.messaging_pyx
# Add files or directories to the blacklist. They should be base names, not
# paths.
@ -54,121 +54,7 @@ confidence=
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use "--disable=all --enable=classes
# --disable=W"
disable=print-statement,
parameter-unpacking,
unpacking-in-except,
old-raise-syntax,
backtick,
long-suffix,
old-ne-operator,
old-octal-literal,
import-star-module-level,
non-ascii-bytes-literal,
raw-checker-failed,
bad-inline-option,
locally-disabled,
locally-enabled,
file-ignored,
suppressed-message,
useless-suppression,
deprecated-pragma,
apply-builtin,
basestring-builtin,
buffer-builtin,
cmp-builtin,
coerce-builtin,
execfile-builtin,
file-builtin,
long-builtin,
raw_input-builtin,
reduce-builtin,
standarderror-builtin,
unicode-builtin,
xrange-builtin,
coerce-method,
delslice-method,
getslice-method,
setslice-method,
no-absolute-import,
old-division,
dict-iter-method,
dict-view-method,
next-method-called,
metaclass-assignment,
indexing-exception,
raising-string,
reload-builtin,
oct-method,
hex-method,
nonzero-method,
cmp-method,
input-builtin,
round-builtin,
intern-builtin,
unichr-builtin,
map-builtin-not-iterating,
zip-builtin-not-iterating,
range-builtin-not-iterating,
filter-builtin-not-iterating,
using-cmp-argument,
eq-without-hash,
div-method,
idiv-method,
rdiv-method,
exception-message-attribute,
invalid-str-codec,
sys-max-int,
bad-python3-import,
deprecated-string-function,
deprecated-str-translate-call,
deprecated-itertools-function,
deprecated-types-field,
next-method-defined,
dict-items-not-iterating,
dict-keys-not-iterating,
dict-values-not-iterating,
bad-indentation,
line-too-long,
missing-docstring,
multiple-statements,
bad-continuation,
invalid-name,
too-many-arguments,
too-many-locals,
superfluous-parens,
bad-whitespace,
too-many-instance-attributes,
wrong-import-position,
ungrouped-imports,
wrong-import-order,
protected-access,
trailing-whitespace,
too-many-branches,
too-few-public-methods,
too-many-statements,
trailing-newlines,
attribute-defined-outside-init,
too-many-return-statements,
too-many-public-methods,
unused-argument,
old-style-class,
no-init,
len-as-condition,
unneeded-not,
no-self-use,
multiple-imports,
no-else-return,
logging-not-lazy,
fixme,
redefined-outer-name,
unused-variable,
unsubscriptable-object,
expression-not-assigned,
too-many-boolean-expressions,
consider-using-ternary,
invalid-unary-operand-type,
relative-import,
deprecated-lambda
disable=C,R,W0613,W0511,W0212,W0201,W0311,W0106,W0603,W0621,W0703,E1136
# Enable the message, report, category or checker with the given id(s). You can

@ -2,7 +2,7 @@
Our software is open source so you can solve your own problems without needing help from others. And if you solve a problem and are so kind, you can upstream it for the rest of the world to use.
Most open source development activity is coordinated through our [Discord](https://discord.comma.ai). A lot of documentation is available on our [medium](https://medium.com/@comma_ai/)
Most open source development activity is coordinated through our [Discord](https://discord.comma.ai). A lot of documentation is available on our [medium](https://medium.com/@comma_ai/).
## Getting Started
@ -18,11 +18,11 @@ You can test your changes on your machine by running `run_docker_tests.sh`. This
### Automated Testing
All PRs are automatically checked by Github Actions. Check out `.github/workflows/` for what Github Actions runs. Any new tests sould be added to Github Actions.
All PRs and commits are automatically checked by Github Actions. Check out `.github/workflows/` for what Github Actions runs. Any new tests should be added to Github Actions.
### Code Style and Linting
Code is automatically checked for style by Github Actions as part of the automated tests. You can also run these tests yourself by running `pylint_openpilot.sh` and `flake8_openpilot.sh`.
Code is automatically checked for style by Github Actions as part of the automated tests. You can also run these tests yourself by running `pre-commit run --all`.
## Car Ports (openpilot)
@ -48,3 +48,4 @@ Modules that are in seperate repositories include:
* laika
* opendbc
* panda
* rednose

@ -1,7 +1,8 @@
FROM ubuntu:16.04
ENV PYTHONUNBUFFERED 1
ENV PYTHONPATH /tmp/openpilot:${PYTHONPATH}
RUN apt-get update && apt-get install -y \
RUN apt-get update && apt-get install -y --no-install-recommends \
autoconf \
build-essential \
bzip2 \
@ -9,9 +10,11 @@ RUN apt-get update && apt-get install -y \
libcapnp-dev \
clang \
cmake \
cppcheck \
curl \
ffmpeg \
git \
iputils-ping \
libarchive-dev \
libbz2-dev \
libcurl4-openssl-dev \
@ -22,12 +25,10 @@ RUN apt-get update && apt-get install -y \
libglfw3-dev \
libglib2.0-0 \
liblzma-dev \
libmysqlclient-dev \
libomp-dev \
libopencv-dev \
libssl-dev \
libsqlite3-dev \
libtool \
libusb-1.0-0-dev \
libczmq-dev \
libzmq3-dev \
@ -37,11 +38,10 @@ RUN apt-get update && apt-get install -y \
opencl-headers \
python-dev \
python-pip \
screen \
qt5-default \
sudo \
vim \
wget
wget \
&& rm -rf /var/lib/apt/lists/*
RUN sed -i -e 's/# en_US.UTF-8 UTF-8/en_US.UTF-8 UTF-8/' /etc/locale.gen && locale-gen
ENV LANG en_US.UTF-8
@ -49,49 +49,51 @@ ENV LANGUAGE en_US:en
ENV LC_ALL en_US.UTF-8
RUN curl -L https://github.com/pyenv/pyenv-installer/raw/master/bin/pyenv-installer | bash
ENV PATH="/root/.pyenv/bin:/root/.pyenv/shims:${PATH}"
RUN pyenv install 3.8.2
RUN pyenv global 3.8.2
RUN pyenv rehash
RUN pip install pipenv==2018.11.26
COPY Pipfile /tmp/
COPY Pipfile.lock /tmp/
RUN python --version
RUN cd /tmp && pipenv install --system --deploy
# Install subset of dev dependencies needed for CI
RUN pip install matplotlib==3.1.1 dictdiffer==0.8.0 fastcluster==1.1.25 aenum==2.2.1 lru-dict==1.1.6 scipy==1.4.1 tenacity==5.1.1 azure-common==1.1.23 azure-nspkg==3.0.2 azure-storage-blob==2.1.0 azure-storage-common==2.1.0 azure-storage-nspkg==3.1.0 pycurl==7.43.0.3 coverage==5.1
ENV PATH="/tmp/openpilot/external/bin:${PATH}"
ENV PYTHONPATH /tmp/openpilot:${PYTHONPATH}
COPY Pipfile Pipfile.lock /tmp/
RUN pyenv install 3.8.2 && \
pyenv global 3.8.2 && \
pyenv rehash && \
pip install --no-cache-dir --upgrade pip==20.1.1 && \
pip install --no-cache-dir pipenv==2018.11.26 && \
cd /tmp && \
pipenv install --system --deploy --clear && \
pip uninstall -y pipenv && \
pip install --no-cache-dir \
matplotlib==3.1.1 \
dictdiffer==0.8.0 \
fastcluster==1.1.25 \
aenum==2.2.1 \
lru-dict==1.1.6 \
scipy==1.4.1 \
tenacity==5.1.1 \
azure-common==1.1.23 \
azure-nspkg==3.0.2 \
azure-storage-blob==2.1.0 \
azure-storage-common==2.1.0 \
azure-storage-nspkg==3.1.0 \
pycurl==7.43.0.3 \
coverage==5.1 \
pre-commit==2.4.0 \
parameterized==0.7.4
RUN mkdir -p /tmp/openpilot
COPY ./flake8_openpilot.sh /tmp/openpilot/
COPY ./pylint_openpilot.sh /tmp/openpilot/
COPY ./.pylintrc /tmp/openpilot/
COPY ./.coveragerc-app /tmp/openpilot/
COPY SConstruct \
.pylintrc \
.pre-commit-config.yaml \
/tmp/openpilot/
COPY ./pyextra /tmp/openpilot/pyextra
COPY ./phonelibs /tmp/openpilot/phonelibs
COPY ./external /tmp/openpilot/external
COPY ./laika /tmp/openpilot/laika
COPY ./laika_repo /tmp/openpilot/laika_repo
COPY ./rednose /tmp/openpilot/rednose
COPY ./tools /tmp/openpilot/tools
COPY ./release /tmp/openpilot/release
COPY ./common /tmp/openpilot/common
COPY ./opendbc /tmp/openpilot/opendbc
COPY ./cereal /tmp/openpilot/cereal
COPY ./panda /tmp/openpilot/panda
COPY ./rednose /tmp/openpilot/rednose
COPY ./selfdrive /tmp/openpilot/selfdrive
COPY SConstruct /tmp/openpilot/SConstruct
RUN mkdir -p /tmp/openpilot/selfdrive/test/out
RUN cd /tmp/openpilot && scons -j$(nproc)

146
Jenkinsfile vendored

@ -1,25 +1,147 @@
pipeline {
agent {
docker {
image 'python:3.7.3'
args '--user=root'
}
def phone(String ip, String step_label, String cmd) {
def ci_env = "CI=1 TEST_DIR=${env.TEST_DIR} GIT_BRANCH=${env.GIT_BRANCH} GIT_COMMIT=${env.GIT_COMMIT}"
withCredentials([file(credentialsId: 'id_rsa_public', variable: 'key_file')]) {
sh label: step_label,
script: """
ssh -tt -o StrictHostKeyChecking=no -i ${key_file} -p 8022 root@${ip} '${ci_env} /usr/bin/bash -le' <<'EOF'
echo \$\$ > /dev/cpuset/app/tasks || true
echo \$PPID > /dev/cpuset/app/tasks || true
mkdir -p /dev/shm
chmod 777 /dev/shm
cd ${env.TEST_DIR} || true
${cmd}
exit 0
EOF"""
}
}
def phone_steps(String device_type, steps) {
lock(resource: "", label: device_type, inversePrecedence: true, variable: 'device_ip', quantity: 1) {
timeout(time: 60, unit: 'MINUTES') {
phone(device_ip, "kill old processes", "pkill -f comma || true")
phone(device_ip, "git checkout", readFile("selfdrive/test/setup_device_ci.sh"),)
steps.each { item ->
phone(device_ip, item[0], item[1])
}
}
}
}
pipeline {
agent none
environment {
COMMA_JWT = credentials('athena-test-jwt')
TEST_DIR = "/data/openpilot"
}
stages {
stage('EON Build/Test') {
stage('Release Build') {
agent {
docker {
image 'python:3.7.3'
args '--user=root'
}
}
when {
branch 'devel-staging'
}
steps {
lock(resource: "", label: 'eon', inversePrecedence: true, variable: 'eon_name', quantity: 1){
timeout(time: 30, unit: 'MINUTES') {
dir(path: 'release') {
sh 'pip install paramiko'
sh 'python remote_build.py'
phone_steps("eon-build", [
["build release2-staging and dashcam-staging", "cd release && PUSH=1 ./build_release2.sh"],
])
}
}
stage('openpilot tests') {
when {
not {
anyOf {
branch 'master-ci'; branch 'devel'; branch 'devel-staging'; branch 'release2'; branch 'release2-staging'; branch 'dashcam'; branch 'dashcam-staging'
}
}
}
stages {
/*
stage('PC tests') {
agent {
dockerfile {
filename 'Dockerfile.openpilot'
args '--privileged --shm-size=1G --user=root'
}
}
stages {
stage('Build') {
steps {
sh 'scons -j$(nproc)'
}
}
}
post {
always {
// fix permissions since docker runs as another user
sh "chmod -R 777 ."
}
}
}
*/
stage('On-device Tests') {
agent {
docker {
image 'python:3.7.3'
args '--user=root'
}
}
stages {
stage('parallel tests') {
parallel {
stage('Devel Build') {
environment {
CI_PUSH = "${env.BRANCH_NAME == 'master' ? 'master-ci' : ' '}"
}
steps {
phone_steps("eon", [
["build devel", "cd release && CI_PUSH=${env.CI_PUSH} ./build_devel.sh"],
["test openpilot", "nosetests -s selfdrive/test/test_openpilot.py"],
["test cpu usage", "cd selfdrive/test/ && ./test_cpu_usage.py"],
["test car interfaces", "cd selfdrive/car/tests/ && ./test_car_interfaces.py"],
["test spinner build", "cd selfdrive/ui/spinner && make clean && make"],
["test text window build", "cd selfdrive/ui/text && make clean && make"],
])
}
}
stage('Replay Tests') {
steps {
phone_steps("eon2", [
["camerad/modeld replay", "cd selfdrive/test/process_replay && ./camera_replay.py"],
])
}
}
stage('HW + Unit Tests') {
steps {
phone_steps("eon", [
["build cereal", "SCONS_CACHE=1 scons -j4 cereal/"],
["test sounds", "nosetests -s selfdrive/test/test_sounds.py"],
["test boardd loopback", "nosetests -s selfdrive/boardd/tests/test_boardd_loopback.py"],
//["test updater", "python installer/updater/test_updater.py"],
])
}
}
}
}
}
}
}
}
}

@ -6,20 +6,18 @@ verify_ssl = true
[dev-packages]
opencv-python= "*"
ipython = "*"
networkx = "==2.3"
networkx = "~=2.3"
azure-core = "*"
azure-common = "*"
azure-nspkg = "==3.0.2"
azure-storage-blob = "==2.1.0"
azure-storage-common = "==2.1.0"
azure-storage-nspkg = "==3.1.0"
azure-nspkg = "~=3.0"
azure-storage-blob = "~=2.1"
azure-storage-common = "~=2.1"
azure-storage-nspkg = "~=3.1"
boto = "*"
"boto3" = "*"
control = "*"
datadog = "*"
dlib = "*"
elasticsearch = "*"
pycocotools = {git = "https://github.com/cocodataset/cocoapi.git",subdirectory = "PythonAPI"}
gunicorn = "*"
"h5py" = "*"
hexdump = "*"
@ -41,9 +39,9 @@ redis = "*"
"s2sphere" = "*"
"subprocess32" = "*"
tenacity = "*"
tensorflow-gpu = "==2.2.0"
tensorflow = "==2.2"
keras_applications = "*"
PyMySQL = "==0.9.2"
PyMySQL = "~=0.9"
Werkzeug = "*"
"backports.lzma" = "*"
Flask-Cors = "*"
@ -69,9 +67,12 @@ paramiko = "*"
aiohttp = "*"
lru-dict = "*"
scikit-image = "*"
pygame = "==2.0.0.dev6"
pygame = "==2.0.0.dev8"
pprofile = "*"
pyprof2calltree = "*"
pre-commit = "*"
mypy = "*"
parameterized = "*"
[packages]
atomicwrites = "*"
@ -90,7 +91,7 @@ requests = "*"
setproctitle = "*"
six = "*"
smbus2 = "*"
sympy = "*"
sympy = "!=1.6.1"
tqdm = "*"
Cython = "*"
PyYAML = "*"
@ -107,10 +108,10 @@ pylint = "*"
pillow = "*"
scons = "*"
cysignals = "*"
pycryptodome = "*"
"Jinja2" = "*"
PyJWT = "*"
pycryptodome = "*"
"Jinja2" = "*"
PyJWT = "*"
pyserial = "*"
[requires]
python_version = "3.8.2"
python_version = "3.8"

2164
Pipfile.lock generated

File diff suppressed because it is too large Load Diff

@ -66,8 +66,8 @@ Supported Cars
| ----------| ------------------------------| ------------------| -----------------| -------------------| ------------------|
| Acura | ILX 2016-18 | AcuraWatch Plus | openpilot | 25mph<sup>1</sup> | 25mph |
| Acura | RDX 2016-18 | AcuraWatch Plus | openpilot | 25mph<sup>1</sup> | 12mph |
| Honda | Accord 2018-19 | All | Stock | 0mph | 3mph |
| Honda | Accord Hybrid 2018-19 | All | Stock | 0mph | 3mph |
| Honda | Accord 2018-20 | All | Stock | 0mph | 3mph |
| Honda | Accord Hybrid 2018-20 | All | Stock | 0mph | 3mph |
| Honda | Civic Hatchback 2017-19 | Honda Sensing | Stock | 0mph | 12mph |
| Honda | Civic Sedan/Coupe 2016-18 | Honda Sensing | openpilot | 0mph | 12mph |
| Honda | Civic Sedan/Coupe 2019-20 | Honda Sensing | Stock | 0mph | 2mph<sup>2</sup> |
@ -76,12 +76,13 @@ Supported Cars
| Honda | CR-V Hybrid 2017-2019 | Honda Sensing | Stock | 0mph | 12mph |
| Honda | Fit 2018-19 | Honda Sensing | openpilot | 25mph<sup>1</sup> | 12mph |
| Honda | HR-V 2019 | Honda Sensing | openpilot | 25mph<sup>1</sup> | 12mph |
| Honda | Insight 2019 | Honda Sensing | Stock | 0mph | 3mph |
| Honda | Insight 2019-20 | Honda Sensing | Stock | 0mph | 3mph |
| Honda | Odyssey 2018-20 | Honda Sensing | openpilot | 25mph<sup>1</sup> | 0mph |
| Honda | Passport 2019 | All | openpilot | 25mph<sup>1</sup> | 12mph |
| Honda | Pilot 2016-18 | Honda Sensing | openpilot | 25mph<sup>1</sup> | 12mph |
| Honda | Pilot 2019 | All | openpilot | 25mph<sup>1</sup> | 12mph |
| Honda | Ridgeline 2017-20 | Honda Sensing | openpilot | 25mph<sup>1</sup> | 12mph |
| Hyundai | Sonata 2020 | All | Stock | 0mph | 0mph |
| Lexus | CT Hybrid 2017-18 | All | Stock<sup>3</sup>| 0mph | 0mph |
| Lexus | ES 2019 | All | openpilot | 0mph | 0mph |
| Lexus | ES Hybrid 2019 | All | openpilot | 0mph | 0mph |
@ -91,6 +92,7 @@ Supported Cars
| Lexus | RX 2016-17 | All | Stock<sup>3</sup>| 0mph | 0mph |
| Lexus | RX 2020 | All | openpilot | 0mph | 0mph |
| Lexus | RX Hybrid 2016-19 | All | Stock<sup>3</sup>| 0mph | 0mph |
| Lexus | RX Hybrid 2020 | All | openpilot | 0mph | 0mph |
| Toyota | Avalon 2016 | TSS-P | Stock<sup>3</sup>| 20mph<sup>1</sup> | 0mph |
| Toyota | Avalon 2017-18 | All | Stock<sup>3</sup>| 20mph<sup>1</sup> | 0mph |
| Toyota | Camry 2018-20 | All | Stock | 0mph<sup>4</sup> | 0mph |
@ -106,7 +108,7 @@ Supported Cars
| Toyota | Highlander 2020 | All | openpilot | 0mph | 0mph |
| Toyota | Highlander Hybrid 2020 | All | openpilot | 0mph | 0mph |
| Toyota | Prius 2016 | TSS-P | Stock<sup>3</sup>| 0mph | 0mph |
| Toyota | Prius 2017-19 | All | Stock<sup>3</sup>| 0mph | 0mph |
| Toyota | Prius 2017-20 | All | Stock<sup>3</sup>| 0mph | 0mph |
| Toyota | Prius Prime 2017-20 | All | Stock<sup>3</sup>| 0mph | 0mph |
| Toyota | Rav4 2016 | TSS-P | Stock<sup>3</sup>| 20mph<sup>1</sup> | 0mph |
| Toyota | Rav4 2017-18 | All | Stock<sup>3</sup>| 20mph<sup>1</sup> | 0mph |
@ -116,9 +118,9 @@ Supported Cars
| Toyota | Rav4 Hybrid 2019-20 | All | openpilot | 0mph | 0mph |
| Toyota | Sienna 2018-20 | All | Stock<sup>3</sup>| 0mph | 0mph |
<sup>1</sup>[Comma Pedal](https://community.comma.ai/wiki/index.php/Comma_Pedal) is used to provide stop-and-go capability to some of the openpilot-supported cars that don't currently support stop-and-go. Here is how to [build a Comma Pedal](https://medium.com/@jfrux/comma-pedal-building-with-macrofab-6328bea791e8). ***NOTE: The Comma Pedal is not officially supported by [comma](https://comma.ai).*** <br />
<sup>1</sup>[Comma Pedal](https://github.com/commaai/openpilot/wiki/comma-pedal) is used to provide stop-and-go capability to some of the openpilot-supported cars that don't currently support stop-and-go. ***NOTE: The Comma Pedal is not officially supported by [comma](https://comma.ai).*** <br />
<sup>2</sup>2019 Honda Civic 1.6L Diesel Sedan does not have ALC below 12mph. <br />
<sup>3</sup>When disconnecting the Driver Support Unit (DSU), openpilot ACC will replace stock ACC. For DSU locations, see [Toyota Wiki page](https://community.comma.ai/wiki/index.php/Toyota). ***NOTE: disconnecting the DSU disables Automatic Emergency Braking (AEB).*** <br />
<sup>3</sup>When disconnecting the Driver Support Unit (DSU), openpilot ACC will replace stock ACC. ***NOTE: disconnecting the DSU disables Automatic Emergency Braking (AEB).*** <br />
<sup>4</sup>28mph for Camry 4CYL L, 4CYL LE and 4CYL SE which don't have Full-Speed Range Dynamic Radar Cruise Control. <br />
Community Maintained Cars and Features
@ -134,35 +136,39 @@ Community Maintained Cars and Features
| Chrysler | Pacifica 2020 | Adaptive Cruise | Stock | 0mph | 39mph |
| Chrysler | Pacifica Hybrid 2017-18 | Adaptive Cruise | Stock | 0mph | 9mph |
| Chrysler | Pacifica Hybrid 2019-20 | Adaptive Cruise | Stock | 0mph | 39mph |
| Genesis | G80 2018<sup>2</sup> | All | Stock | 0mph | 0mph |
| Genesis | G90 2018<sup>2</sup> | All | Stock | 0mph | 0mph |
| GMC | Acadia Denali 2018<sup>3</sup>| Adaptive Cruise | openpilot | 0mph | 7mph |
| Genesis | G70 2018 | All | Stock | 0mph | 0mph |
| Genesis | G80 2018 | All | Stock | 0mph | 0mph |
| Genesis | G90 2018 | All | Stock | 0mph | 0mph |
| GMC | Acadia Denali 2018<sup>2</sup>| Adaptive Cruise | openpilot | 0mph | 7mph |
| Holden | Astra 2017<sup>1</sup> | Adaptive Cruise | openpilot | 0mph | 7mph |
| Hyundai | Elantra 2017-19<sup>2</sup> | SCC + LKAS | Stock | 19mph | 34mph |
| Hyundai | Genesis 2015-16<sup>2</sup> | SCC + LKAS | Stock | 19mph | 37mph |
| Hyundai | Ioniq 2017<sup>2</sup> | SCC + LKAS | Stock | 0mph | 32mph |
| Hyundai | Ioniq 2019 EV<sup>2</sup> | SCC + LKAS | Stock | 0mph | 32mph |
| Hyundai | Kona 2017-19<sup>2</sup> | SCC + LKAS | Stock | 22mph | 0mph |
| Hyundai | Kona 2019 EV<sup>2</sup> | SCC + LKAS | Stock | 0mph | 0mph |
| Hyundai | Palisade 2020<sup>2</sup> | All | Stock | 0mph | 0mph |
| Hyundai | Santa Fe 2019<sup>2</sup> | All | Stock | 0mph | 0mph |
| Hyundai | Sonata 2020<sup>2</sup> | All | Stock | 0mph | 0mph |
| Hyundai | Elantra 2017-19 | SCC + LKAS | Stock | 19mph | 34mph |
| Hyundai | Genesis 2015-16 | SCC + LKAS | Stock | 19mph | 37mph |
| Hyundai | Ioniq Electric Premium SE 2020| SCC + LKAS | Stock | 0mph | 32mph |
| Hyundai | Ioniq Electric Limited 2019 | SCC + LKAS | Stock | 0mph | 32mph |
| Hyundai | Kona 2020 | SCC + LKAS | Stock | 0mph | 0mph |
| Hyundai | Kona EV 2019 | SCC + LKAS | Stock | 0mph | 0mph |
| Hyundai | Palisade 2020 | All | Stock | 0mph | 0mph |
| Hyundai | Santa Fe 2019 | All | Stock | 0mph | 0mph |
| Hyundai | Sonata 2019 | All | Stock | 0mph | 0mph |
| Hyundai | Veloster 2019 | SCC + LKAS | Stock | 5mph | 0mph |
| Jeep | Grand Cherokee 2016-18 | Adaptive Cruise | Stock | 0mph | 9mph |
| Jeep | Grand Cherokee 2019 | Adaptive Cruise | Stock | 0mph | 39mph |
| Kia | Forte 2018<sup>2</sup> | SCC + LKAS | Stock | 0mph | 0mph |
| Kia | Optima 2017<sup>2</sup> | SCC + LKAS/LDWS | Stock | 0mph | 32mph |
| Kia | Optima 2019<sup>2</sup> | SCC + LKAS | Stock | 0mph | 0mph |
| Kia | Sorento 2018<sup>2</sup> | SCC + LKAS | Stock | 0mph | 0mph |
| Kia | Stinger 2018<sup>2</sup> | SCC + LKAS | Stock | 0mph | 0mph |
| Nissan | Leaf 2019 | Propilot | Stock | 0mph | 0mph |
| Nissan | X-Trail 2018 | Propilot | Stock | 0mph | 0mph |
| Jeep | Grand Cherokee 2019-20 | Adaptive Cruise | Stock | 0mph | 39mph |
| Kia | Forte 2018-19 | SCC + LKAS | Stock | 0mph | 0mph |
| Kia | Optima 2017 | SCC + LKAS/LDWS | Stock | 0mph | 32mph |
| Kia | Optima 2019 | SCC + LKAS | Stock | 0mph | 0mph |
| Kia | Sorento 2018 | SCC + LKAS | Stock | 0mph | 0mph |
| Kia | Stinger 2018 | SCC + LKAS | Stock | 0mph | 0mph |
| Nissan | Leaf 2018-19 | Propilot | Stock | 0mph | 0mph |
| Nissan | Rogue 2019 | Propilot | Stock | 0mph | 0mph |
| Nissan | X-Trail 2017 | Propilot | Stock | 0mph | 0mph |
| Subaru | Ascent 2019 | EyeSight | Stock | 0mph | 0mph |
| Subaru | Crosstrek 2018-19 | EyeSight | Stock | 0mph | 0mph |
| Subaru | Impreza 2018-20 | EyeSight | Stock | 0mph | 0mph |
| Volkswagen| Golf 2016-19<sup>3</sup> | Driver Assistance | Stock | 0mph | 0mph |
| Subaru | Forester 2019 | EyeSight | Stock | 0mph | 0mph |
| Subaru | Impreza 2017-19 | EyeSight | Stock | 0mph | 0mph |
| Volkswagen| Golf 2015-19 | Driver Assistance | Stock | 0mph | 0mph |
<sup>1</sup>Requires a [panda](https://comma.ai/shop/products/panda-obd-ii-dongle) and [community built giraffe](https://zoneos.com/volt/). ***NOTE: disconnecting the ASCM disables Automatic Emergency Braking (AEB).*** <br />
<sup>2</sup>Requires a [panda](https://comma.ai/shop/products/panda-obd-ii-dongle) and open sourced [Hyundai giraffe](https://github.com/commaai/neo/tree/master/giraffe/hyundai), designed for the 2019 Santa Fe; pinout may differ for other Hyundai and Kia models. <br />
<sup>3</sup>Requires a [custom connector](https://community.comma.ai/wiki/index.php/Volkswagen#Integration_at_R242_Camera) for the [car harness](https://comma.ai/shop/products/car-harness) <br />
<sup>1</sup>Requires an [OBD-II car harness](https://comma.ai/shop/products/comma-car-harness) and [community built giraffe](https://github.com/commaai/openpilot/wiki/GM). ***NOTE: disconnecting the ASCM disables Automatic Emergency Braking (AEB).*** <br />
<sup>2</sup>Requires a custom connector for the developer [car harness](https://comma.ai/shop/products/car-harness) <br />
Although it's not upstream, there's a community of people getting openpilot to run on Tesla's [here](https://tinkla.us/)
@ -175,7 +181,7 @@ Installation Instructions
Install openpilot on an EON or comma two by entering ``https://openpilot.comma.ai`` during the installer setup.
Follow these [video instructions](https://youtu.be/3nlkomHathI) to properly mount the device on the windshield. Note: openpilot features an automatic pose calibration routine and openpilot performance should not be affected by small pitch and yaw misalignments caused by imprecise device mounting.
Follow these [video instructions](https://youtu.be/lcjqxCymins) to properly mount the device on the windshield. Note: openpilot features an automatic pose calibration routine and openpilot performance should not be affected by small pitch and yaw misalignments caused by imprecise device mounting.
Before placing the device on your windshield, check the state and local laws and ordinances where you drive. Some state laws prohibit or restrict the placement of objects on the windshield of a motor vehicle.
@ -278,7 +284,7 @@ openpilot is developed by [comma](https://comma.ai/) and by users like you. We w
You can add support for your car by following guides we have written for [Brand](https://medium.com/@comma_ai/how-to-write-a-car-port-for-openpilot-7ce0785eda84) and [Model](https://medium.com/@comma_ai/openpilot-port-guide-for-toyota-models-e5467f4b5fe6) ports. Generally, a car with adaptive cruise control and lane keep assist is a good candidate. [Join our Discord](https://discord.comma.ai) to discuss car ports: most car makes have a dedicated channel.
Want to get paid to work on openpilot? [comma is hiring](https://comma.ai/jobs/). We also have a [bounty program](https://comma.ai/bounties.html).
Want to get paid to work on openpilot? [comma is hiring](https://comma.ai/jobs/).
And [follow us on Twitter](https://twitter.com/comma_ai).
@ -328,6 +334,7 @@ NO WARRANTY EXPRESSED OR IMPLIED.**
<img src="https://d1qb2nb5cznatu.cloudfront.net/startups/i/1061157-bc7e9bf3b246ece7322e6ffe653f6af8-medium_jpg.jpg?buster=1458363130" width="75"></img> <img src="https://cdn-images-1.medium.com/max/1600/1*C87EjxGeMPrkTuVRVWVg4w.png" width="225"></img>
[![openpilot tests](https://github.com/commaai/openpilot/workflows/openpilot%20tests/badge.svg?event=push)](https://github.com/commaai/openpilot/actions)
[![Total alerts](https://img.shields.io/lgtm/alerts/g/commaai/openpilot.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/commaai/openpilot/alerts/)
[![Language grade: Python](https://img.shields.io/lgtm/grade/python/g/commaai/openpilot.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/commaai/openpilot/context:python)
[![Language grade: C/C++](https://img.shields.io/lgtm/grade/cpp/g/commaai/openpilot.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/commaai/openpilot/context:cpp)

@ -1,3 +1,40 @@
Version 0.7.9 (2020-XX-XX)
========================
* Improved car battery power management
Version 0.7.8 (2020-08-19)
========================
* New driver monitoring model: improved face detection and better compatibility with sunglasses
* Download NEOS operating system updates in the background
* Improved updater reliability and responsiveness
* Hyundai Kona 2020, Veloster 2019, and Genesis G70 2018 support thanks to xps-genesis!
Version 0.7.7 (2020-07-20)
========================
* White panda is no longer supported, upgrade to comma two or black panda
* Improved vehicle model estimation using high precision localizer
* Improved thermal management on comma two
* Improved autofocus for road-facing camera
* Improved noise performance for driver-facing camera
* Block lane change start using blindspot monitor on select Toyota, Hyundai, and Subaru
* Fix GM ignition detection
* Code cleanup and smaller release sizes
* Hyundai Sonata 2020 promoted to officially supported car
* Hyundai Ioniq Electric Limited 2019 and Ioniq SE 2020 support thanks to baldwalker!
* Subaru Forester 2019 and Ascent 2019 support thanks to martinl!
Version 0.7.6.1 (2020-06-16)
========================
* Hotfix: update kernel on some comma twos (orders #8570-#8680)
Version 0.7.6 (2020-06-05)
========================
* White panda is deprecated, upgrade to comma two or black panda
* 2017 Nissan X-Trail, 2018-19 Leaf and 2019 Rogue support thanks to avolmensky!
* 2017 Mazda CX-5 support in dashcam mode thanks to Jafaral!
* Huge CPU savings in modeld by using thneed!
* Lots of code cleanup and refactors
Version 0.7.5 (2020-05-13)
========================
* Right-Hand Drive support for both driving and driver monitoring!
@ -5,7 +42,6 @@ Version 0.7.5 (2020-05-13)
* New driver monitoring model: overall improvement on comma two
* Driver camera preview in settings to improve mounting position
* Added support for many Hyundai, Kia, Genesis models thanks to xx979xx!
* 2019 Nissan X-Trail and 2018 Nissan Leaf support thanks to avolmensky!
* Improved lateral tuning for 2020 Toyota Rav 4 (hybrid)
Version 0.7.4 (2020-03-20)

@ -1,7 +1,7 @@
openpilot Safety
======
openpilot is an Adaptive Cruise Control (ACC) and Automated Lane Centering (ALC) system.
openpilot is an Adaptive Cruise Control (ACC) and Automated Lane Centering (ALC) system.
Like other ACC and ALC systems, openpilot is a failsafe passive system and it requires the
driver to be alert and to pay attention at all times.
@ -22,7 +22,7 @@ hardware-in-the-loop and in-vehicle tests before each software release.
Following Hazard and Risk Analysis and FMEA, at a very high level, we have designed openpilot
ensuring two main safety requirements.
1. The driver must always be capable to immediately retake manual control of the vehicle,
1. The driver must always be capable to immediately retake manual control of the vehicle,
by stepping on either pedal or by pressing the cancel button.
2. The vehicle must not alter its trajectory too quickly for the driver to safely
react. This means that while the system is engaged, the actuators are constrained

@ -1,4 +1,7 @@
import Cython
import distutils
import os
import shutil
import subprocess
import sys
import platform
@ -11,6 +14,10 @@ AddOption('--asan',
action='store_true',
help='turn on ASAN')
# Rebuild cython extensions if python, distutils, or cython change
cython_dependencies = [Value(v) for v in (sys.version, distutils.__version__, Cython.__version__)]
Export('cython_dependencies')
arch = subprocess.check_output(["uname", "-m"], encoding='utf8').rstrip()
if platform.system() == "Darwin":
arch = "Darwin"
@ -18,6 +25,7 @@ if arch == "aarch64" and not os.path.isdir("/system"):
arch = "larch64"
webcam = bool(ARGUMENTS.get("use_webcam", 0))
QCOM_REPLAY = arch == "aarch64" and os.getenv("QCOM_REPLAY") is not None
if arch == "aarch64" or arch == "larch64":
lenv = {
@ -43,20 +51,31 @@ if arch == "aarch64" or arch == "larch64":
]
if arch == "larch64":
libpath += ["#phonelibs/snpe/larch64"]
libpath += ["#phonelibs/libyuv/larch64/lib"]
libpath += ["/usr/lib/aarch64-linux-gnu"]
libpath += [
"#phonelibs/snpe/larch64",
"#phonelibs/libyuv/larch64/lib",
"/usr/lib/aarch64-linux-gnu"
]
cflags = ["-DQCOM2", "-mcpu=cortex-a57"]
cxxflags = ["-DQCOM2", "-mcpu=cortex-a57"]
rpath = ["/usr/local/lib"]
else:
libpath += ["#phonelibs/snpe/aarch64"]
libpath += ["#phonelibs/libyuv/lib"]
libpath += [
"#phonelibs/snpe/aarch64",
"#phonelibs/libyuv/lib"
]
cflags = ["-DQCOM", "-mcpu=cortex-a57"]
cxxflags = ["-DQCOM", "-mcpu=cortex-a57"]
rpath = ["/system/vendor/lib64"]
if QCOM_REPLAY:
cflags += ["-DQCOM_REPLAY"]
cxxflags += ["-DQCOM_REPLAY"]
else:
cflags = []
cxxflags = []
lenv = {
"PATH": "#external/bin:" + os.environ['PATH'],
}
@ -72,6 +91,8 @@ else:
"/usr/local/lib",
"/System/Library/Frameworks/OpenGL.framework/Libraries",
]
cflags += ["-DGL_SILENCE_DEPRECATION"]
cxxflags += ["-DGL_SILENCE_DEPRECATION"]
else:
libpath = [
"#phonelibs/snpe/x86_64-linux-clang",
@ -84,18 +105,20 @@ else:
]
rpath = [
"external/tensorflow/lib",
"cereal",
"selfdrive/common"]
"external/tensorflow/lib",
"cereal",
"selfdrive/common"
]
# allows shared libraries to work globally
rpath = [os.path.join(os.getcwd(), x) for x in rpath]
cflags = []
cxxflags = []
ccflags_asan = ["-fsanitize=address", "-fno-omit-frame-pointer"] if GetOption('asan') else []
ldflags_asan = ["-fsanitize=address"] if GetOption('asan') else []
if GetOption('asan'):
ccflags_asan = ["-fsanitize=address", "-fno-omit-frame-pointer"]
ldflags_asan = ["-fsanitize=address"]
else:
ccflags_asan = []
ldflags_asan = []
# change pythonpath to this
lenv["PYTHONPATH"] = Dir("#").path
@ -106,11 +129,10 @@ env = Environment(
"-g",
"-fPIC",
"-O2",
"-Werror=implicit-function-declaration",
"-Werror=incompatible-pointer-types",
"-Werror=int-conversion",
"-Werror=return-type",
"-Werror=format-extra-args",
"-Wunused",
"-Werror",
"-Wno-deprecated-register",
"-Wno-inconsistent-missing-override",
] + cflags + ccflags_asan,
CPPPATH=cpppath + [
@ -134,6 +156,7 @@ env = Environment(
"#selfdrive/camerad/include",
"#selfdrive/loggerd/include",
"#selfdrive/modeld",
"#selfdrive/ui",
"#cereal/messaging",
"#cereal",
"#opendbc/can",
@ -147,16 +170,73 @@ env = Environment(
CFLAGS=["-std=gnu11"] + cflags,
CXXFLAGS=["-std=c++14"] + cxxflags,
LIBPATH=libpath +
[
LIBPATH=libpath + [
"#cereal",
"#selfdrive/common",
"#phonelibs",
]
)
qt_env = None
if arch in ["x86_64", "Darwin", "larch64"]:
qt_env = env.Clone()
if arch == "larch64":
qt_env['QTDIR'] = "/usr/local/Qt-5.15.0"
QT_BASE = "/usr/local/Qt-5.15.0/"
qt_dirs = [
QT_BASE + "include/",
QT_BASE + "include/QtWidgets",
QT_BASE + "include/QtGui",
QT_BASE + "include/QtCore",
QT_BASE + "include/QtDBus",
]
qt_env["RPATH"] += [QT_BASE + "lib"]
elif arch == "Darwin":
qt_env['QTDIR'] = "/usr/local/opt/qt"
QT_BASE = "/usr/local/opt/qt/"
qt_dirs = [
QT_BASE + "include/",
QT_BASE + "include/QtWidgets",
QT_BASE + "include/QtGui",
QT_BASE + "include/QtCore",
QT_BASE + "include/QtDBus",
]
qt_env["LINKFLAGS"] += ["-F" + QT_BASE + "lib"]
else:
qt_dirs = [
f"/usr/include/{arch}-linux-gnu/qt5",
f"/usr/include/{arch}-linux-gnu/qt5/QtWidgets",
f"/usr/include/{arch}-linux-gnu/qt5/QtGui",
f"/usr/include/{arch}-linux-gnu/qt5/QtCore",
f"/usr/include/{arch}-linux-gnu/qt5/QtDBus",
]
qt_env.Tool('qt')
qt_env['CPPPATH'] += qt_dirs
qt_flags = [
"-D_REENTRANT",
"-DQT_NO_DEBUG",
"-DQT_WIDGETS_LIB",
"-DQT_GUI_LIB",
"-DQT_CORE_LIB"
]
qt_env['CXXFLAGS'] += qt_flags
if os.environ.get('SCONS_CACHE'):
CacheDir('/tmp/scons_cache')
cache_dir = '/tmp/scons_cache'
if os.getenv('CI'):
branch = os.getenv('GIT_BRANCH')
if QCOM_REPLAY:
cache_dir = '/tmp/scons_cache_qcom_replay'
elif branch is not None and branch != 'master':
cache_dir_branch = '/tmp/scons_cache_' + branch
if not os.path.isdir(cache_dir_branch) and os.path.isdir(cache_dir):
shutil.copytree(cache_dir, cache_dir_branch)
cache_dir = cache_dir_branch
CacheDir(cache_dir)
node_interval = 5
node_count = 0
@ -181,7 +261,7 @@ def abspath(x):
# still needed for apks
zmq = 'zmq'
Export('env', 'arch', 'zmq', 'SHARED', 'webcam')
Export('env', 'qt_env', 'arch', 'zmq', 'SHARED', 'webcam', 'QCOM_REPLAY')
# cereal and messaging are shared with the system
SConscript(['cereal/SConscript'])
@ -209,11 +289,11 @@ SConscript(['opendbc/can/SConscript'])
SConscript(['common/SConscript'])
SConscript(['common/kalman/SConscript'])
SConscript(['common/transformations/SConscript'])
SConscript(['phonelibs/SConscript'])
if arch != "Darwin":
SConscript(['selfdrive/camerad/SConscript'])
SConscript(['selfdrive/modeld/SConscript'])
SConscript(['selfdrive/camerad/SConscript'])
SConscript(['selfdrive/modeld/SConscript'])
SConscript(['selfdrive/controls/lib/cluster/SConscript'])
SConscript(['selfdrive/controls/lib/lateral_mpc/SConscript'])
@ -223,15 +303,18 @@ SConscript(['selfdrive/controls/lib/longitudinal_mpc_model/SConscript'])
SConscript(['selfdrive/boardd/SConscript'])
SConscript(['selfdrive/proclogd/SConscript'])
#SConscript(['selfdrive/ui/SConscript'])
SConscript(['selfdrive/loggerd/SConscript'])
SConscript(['selfdrive/locationd/SConscript'])
SConscript(['selfdrive/locationd/models/SConscript'])
if arch == "aarch64":
SConscript(['selfdrive/logcatd/SConscript'])
SConscript(['selfdrive/sensord/SConscript'])
SConscript(['selfdrive/clocksd/SConscript'])
else:
SConscript(['tools/lib/index_log/SConscript'])
if arch != "larch64":
SConscript(['selfdrive/ui/SConscript'])

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d7b79038dccaa97d84bd38544573d3a52929770b76a259b25a27311464230e22
size 13732809
oid sha256:a198491887ed6029bffdf7f4dc28c4f9a6ba5f9d2235710fc11a1378893491d7
size 13702777

@ -1 +0,0 @@
Subproject commit f5d2c1715c9482d898062110ce4c612093aa5d4f

@ -1,6 +1,6 @@
Import('env')
Import('env', 'cython_dependencies')
# parser
env.Command(['common_pyx.so'],
['common_pyx_setup.py', 'clock.pyx'],
"cd common && python3 common_pyx_setup.py build_ext --inplace")
# Build cython clock module
env.Command(['common_pyx.so', 'clock.cpp'],
cython_dependencies + ['common_pyx_setup.py', 'clock.pyx'],
"cd common && python3 common_pyx_setup.py build_ext --inplace")

@ -12,6 +12,10 @@ NetworkStrength = log.ThermalData.NetworkStrength
ANDROID = os.path.isfile('/EON')
def get_sound_card_online():
  """Return True if ALSA sound card 0 reports state 'ONLINE'.

  Reads /proc/asound/card0/state; returns False when the file is absent
  (e.g. on non-Android/PC hosts).
  """
  state_path = '/proc/asound/card0/state'
  if not os.path.isfile(state_path):
    return False
  # Use a context manager so the file handle is closed deterministically;
  # the original relied on the garbage collector to close it.
  with open(state_path) as f:
    return f.read().strip() == 'ONLINE'
def getprop(key):
if not ANDROID:
return ""
@ -22,10 +26,10 @@ def get_imei(slot):
if slot not in ("0", "1"):
raise ValueError("SIM slot must be 0 or 1")
ret = parse_service_call_string(service_call(["iphonesubinfo", "3" ,"i32", str(slot)]))
ret = parse_service_call_string(service_call(["iphonesubinfo", "3" , "i32", str(slot)]))
if not ret:
# allow non android to be identified differently
ret = "%015d" % random.randint(0, 1<<32)
ret = "%015d" % random.randint(0, 1 << 32)
return ret
def get_serial():
@ -47,10 +51,10 @@ def reboot(reason=None):
reason_args = ["s16", reason]
subprocess.check_output([
"service", "call", "power", "16", # IPowerManager.reboot
"i32", "0", # no confirmation,
"service", "call", "power", "16", # IPowerManager.reboot
"i32", "0", # no confirmation,
*reason_args,
"i32", "1" # wait
"i32", "1" # wait
])
def service_call(call):
@ -71,7 +75,7 @@ def parse_service_call_unpack(r, fmt):
def parse_service_call_string(r):
try:
r = r[8:] # Cut off length field
r = r[8:] # Cut off length field
r = r.decode('utf_16_be')
# All pairs of two characters seem to be swapped. Not sure why
@ -132,6 +136,7 @@ def get_network_type():
def get_network_strength(network_type):
network_strength = NetworkStrength.unknown
# from SignalStrength.java
def get_lte_level(rsrp, rssnr):
INT_MAX = 2147483647

@ -38,5 +38,4 @@ def api_get(endpoint, method='GET', timeout=None, access_token=None, **params):
headers['User-Agent'] = "openpilot-" + version
return requests.request(method, backend+endpoint, timeout=timeout, headers = headers, params=params)
return requests.request(method, backend+endpoint, timeout=timeout, headers=headers, params=params)

@ -13,7 +13,7 @@ def get_installed_apks():
ret = {}
for x in dat:
if x.startswith("package:"):
v,k = x.split("package:")[1].split("=")
v, k = x.split("package:")[1].split("=")
ret[k] = v
return ret

@ -8,4 +8,3 @@ if ANDROID:
else:
PERSIST = os.path.join(BASEDIR, "persist")
PARAMS = os.path.join(BASEDIR, "persist", "params")

@ -1,3 +0,0 @@
# py2,3 compatibility helpers
basestring = (str, bytes)

@ -3,13 +3,17 @@ import shutil
import tempfile
from atomicwrites import AtomicWriter
def mkdirs_exists_ok(path):
  """Create `path` and any missing parent directories.

  Succeeds silently when the directory already exists. Raises ValueError
  for URL-like paths, and re-raises the OSError when the target cannot be
  created and is not already a directory.
  """
  if path.startswith(('http://', 'https://')):
    raise ValueError('URL path')
  try:
    os.makedirs(path)
  except OSError:
    if os.path.isdir(path):
      return
    raise
def rm_not_exists_ok(path):
try:
os.remove(path)
@ -17,12 +21,14 @@ def rm_not_exists_ok(path):
if os.path.exists(path):
raise
def rm_tree_or_link(path):
  """Remove `path`: unlink it if it is a symlink, recursively delete it if
  it is a directory. Anything else (e.g. a regular file) is left untouched.
  """
  if os.path.islink(path):
    os.unlink(path)
    return
  if os.path.isdir(path):
    shutil.rmtree(path)
def get_tmpdir_on_same_filesystem(path):
normpath = os.path.normpath(path)
parts = normpath.split("/")
@ -32,6 +38,7 @@ def get_tmpdir_on_same_filesystem(path):
return "/{}/runner/tmp".format(parts[1])
return "/tmp"
class AutoMoveTempdir():
def __init__(self, target_path, temp_dir=None):
self._target_path = target_path
@ -44,14 +51,16 @@ class AutoMoveTempdir():
def close(self):
os.rename(self._path, self._target_path)
def __enter__(self): return self
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
if type is None:
def __exit__(self, exc_type, exc_value, traceback):
if exc_type is None:
self.close()
else:
shutil.rmtree(self._path)
class NamedTemporaryDir():
def __init__(self, temp_dir=None):
self._path = tempfile.mkdtemp(dir=temp_dir)
@ -63,11 +72,13 @@ class NamedTemporaryDir():
def close(self):
shutil.rmtree(self._path)
def __enter__(self): return self
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
def __exit__(self, exc_type, exc_value, traceback):
self.close()
def _get_fileobject_func(writer, temp_dir):
def _get_fileobject():
file_obj = writer.get_fileobject(dir=temp_dir)
@ -75,6 +86,7 @@ def _get_fileobject_func(writer, temp_dir):
return file_obj
return _get_fileobject
def atomic_write_on_fs_tmp(path, **kwargs):
"""Creates an atomic writer using a temporary file in a temporary directory
on the same filesystem as path.
@ -92,6 +104,7 @@ def atomic_write_in_dir(path, **kwargs):
writer = AtomicWriter(path, **kwargs)
return writer._open(_get_fileobject_func(writer, os.path.dirname(path)))
def atomic_write_in_dir_neos(path, contents, mode=None):
"""
Atomically writes contents to path using a temporary file in the same directory

@ -1,6 +1,6 @@
Import('env')
Import('env', 'cython_dependencies')
env.Command(['simple_kalman_impl.so'],
['simple_kalman_impl.pyx', 'simple_kalman_impl.pxd', 'simple_kalman_setup.py'],
"cd common/kalman && python3 simple_kalman_setup.py build_ext --inplace")
cython_dependencies + ['simple_kalman_impl.pyx', 'simple_kalman_impl.pxd', 'simple_kalman_setup.py'],
"cd common/kalman && python3 simple_kalman_setup.py build_ext --inplace")

@ -8,7 +8,7 @@ class KF1D:
def __init__(self, x0, A, C, K):
self.x = x0
self.A = A
self.C = C
self.C = np.atleast_2d(C)
self.K = K
self.A_K = self.A - np.dot(self.K, self.C)

@ -21,10 +21,10 @@ class TestSimpleKalman(unittest.TestCase):
K0_0 = 0.12287673
K1_0 = 0.29666309
self.kf_old = KF1D_old(x0=np.matrix([[x0_0], [x1_0]]),
A=np.matrix([[A0_0, A0_1], [A1_0, A1_1]]),
C=np.matrix([C0_0, C0_1]),
K=np.matrix([[K0_0], [K1_0]]))
self.kf_old = KF1D_old(x0=np.array([[x0_0], [x1_0]]),
A=np.array([[A0_0, A0_1], [A1_0, A1_1]]),
C=np.array([C0_0, C0_1]),
K=np.array([[K0_0], [K1_0]]))
self.kf = KF1D(x0=[[x0_0], [x1_0]],
A=[[A0_0, A0_1], [A1_0, A1_1]],
@ -47,9 +47,8 @@ class TestSimpleKalman(unittest.TestCase):
x = self.kf.update(v_wheel)
# Compare the output x, verify that the error is less than 1e-4
self.assertAlmostEqual(x_old[0], x[0])
self.assertAlmostEqual(x_old[1], x[1])
np.testing.assert_almost_equal(x_old[0], x[0])
np.testing.assert_almost_equal(x_old[1], x[1])
def test_new_is_faster(self):
setup = """
@ -70,10 +69,10 @@ C0_1 = 0.0
K0_0 = 0.12287673
K1_0 = 0.29666309
kf_old = KF1D_old(x0=np.matrix([[x0_0], [x1_0]]),
A=np.matrix([[A0_0, A0_1], [A1_0, A1_1]]),
C=np.matrix([C0_0, C0_1]),
K=np.matrix([[K0_0], [K1_0]]))
kf_old = KF1D_old(x0=np.array([[x0_0], [x1_0]]),
A=np.array([[A0_0, A0_1], [A1_0, A1_1]]),
C=np.array([C0_0, C0_1]),
K=np.array([[K0_0], [K1_0]]))
kf = KF1D(x0=[[x0_0], [x1_0]],
A=[[A0_0, A0_1], [A1_0, A1_1]],

@ -68,8 +68,11 @@ class SwagErrorFilter(logging.Filter):
def filter(self, record):
return record.levelno < logging.ERROR
_tmpfunc = lambda: 0
_srcfile = os.path.normcase(_tmpfunc.__code__.co_filename)
def _tmpfunc():
return 0
def _srcfile():
return os.path.normcase(_tmpfunc.__code__.co_filename)
class SwagLogger(logging.Logger):
def __init__(self):
@ -112,9 +115,6 @@ class SwagLogger(logging.Logger):
if args:
evt['args'] = args
evt.update(kwargs)
ctx = self.get_ctx()
if ctx:
evt['ctx'] = self.get_ctx()
if 'error' in kwargs:
self.error(evt)
else:
@ -140,7 +140,9 @@ class SwagLogger(logging.Logger):
while hasattr(f, "f_code"):
co = f.f_code
filename = os.path.normcase(co.co_filename)
if filename == _srcfile:
# TODO: is this pylint exception correct?
if filename == _srcfile: # pylint: disable=comparison-with-callable
f = f.f_back
continue
sinfo = None

@ -1,50 +0,0 @@
def cputime_total(ct):
  """Total CPU time of a proc entry: own user+system plus children's."""
  components = (ct.cpuUser, ct.cpuSystem, ct.cpuChildrenUser, ct.cpuChildrenSystem)
  return sum(components)
def print_cpu_usage(first_proc, last_proc):
r = 0
procs = [
("selfdrive.controls.controlsd", 59.46),
("./_modeld", 48.94),
("./loggerd", 28.49),
("selfdrive.controls.plannerd", 19.77),
("selfdrive.controls.radard", 9.54),
("./_ui", 9.54),
("./camerad", 7.07),
("selfdrive.locationd.locationd", 7.13),
("./_sensord", 6.17),
("selfdrive.controls.dmonitoringd", 5.48),
("./boardd", 3.63),
("./_dmonitoringmodeld", 2.67),
("selfdrive.logmessaged", 2.71),
("selfdrive.thermald", 2.41),
("./paramsd", 2.18),
("selfdrive.locationd.calibrationd", 1.76),
("./proclogd", 1.54),
("./_gpsd", 0.09),
("./clocksd", 0.02),
("./ubloxd", 0.02),
("selfdrive.tombstoned", 0),
("./logcatd", 0),
("selfdrive.updated", 0),
]
dt = (last_proc.logMonoTime - first_proc.logMonoTime) / 1e9
print("------------------------------------------------")
for proc_name, normal_cpu_usage in procs:
try:
first = [p for p in first_proc.procLog.procs if proc_name in p.cmdline][0]
last = [p for p in last_proc.procLog.procs if proc_name in p.cmdline][0]
cpu_time = cputime_total(last) - cputime_total(first)
cpu_usage = cpu_time / dt * 100.
if cpu_usage > max(normal_cpu_usage * 1.1, normal_cpu_usage + 5.0):
print(f"Warning {proc_name} using more CPU than normal")
r = 1
print(f"{proc_name.ljust(35)} {cpu_usage:.2f}%")
except IndexError:
print(f"{proc_name.ljust(35)} NO METRICS FOUND")
print("------------------------------------------------")
return r

@ -6,6 +6,7 @@ def clip(x, lo, hi):
def interp(x, xp, fp):
N = len(xp)
def get_interp(xv):
hi = 0
while hi < N and xv > xp[hi]:
@ -14,8 +15,8 @@ def interp(x, xp, fp):
return fp[-1] if hi == N and xv > xp[low] else (
fp[0] if hi == 0 else
(xv - xp[low]) * (fp[hi] - fp[low]) / (xp[hi] - xp[low]) + fp[low])
return [get_interp(v) for v in x] if hasattr(
x, '__iter__') else get_interp(x)
return [get_interp(v) for v in x] if hasattr(x, '__iter__') else get_interp(x)
def mean(x):
return sum(x) / len(x)

@ -22,10 +22,7 @@ file in place without messing with <params_dir>/d.
"""
import time
import os
import string
import binascii
import errno
import sys
import shutil
import fcntl
import tempfile
@ -33,6 +30,7 @@ import threading
from enum import Enum
from common.basedir import PARAMS
def mkdirs_exists_ok(path):
try:
os.makedirs(path)
@ -55,6 +53,7 @@ keys = {
"AccessToken": [TxType.CLEAR_ON_MANAGER_START],
"AthenadPid": [TxType.PERSISTENT],
"CalibrationParams": [TxType.PERSISTENT],
"CarBatteryCapacity": [TxType.PERSISTENT],
"CarParams": [TxType.CLEAR_ON_MANAGER_START, TxType.CLEAR_ON_PANDA_DISCONNECT],
"CarParamsCache": [TxType.CLEAR_ON_MANAGER_START, TxType.CLEAR_ON_PANDA_DISCONNECT],
"CarVin": [TxType.CLEAR_ON_MANAGER_START, TxType.CLEAR_ON_PANDA_DISCONNECT],
@ -62,6 +61,7 @@ keys = {
"CompletedTrainingVersion": [TxType.PERSISTENT],
"ControlsParams": [TxType.PERSISTENT],
"DisablePowerDown": [TxType.PERSISTENT],
"DisableUpdates": [TxType.PERSISTENT],
"DoUninstall": [TxType.CLEAR_ON_MANAGER_START],
"DongleId": [TxType.PERSISTENT],
"GitBranch": [TxType.PERSISTENT],
@ -81,6 +81,7 @@ keys = {
"IsUploadRawEnabled": [TxType.PERSISTENT],
"LastAthenaPingTime": [TxType.PERSISTENT],
"LastUpdateTime": [TxType.PERSISTENT],
"LastUpdateException": [TxType.PERSISTENT],
"LimitSetSpeed": [TxType.PERSISTENT],
"LimitSetSpeedNeural": [TxType.PERSISTENT],
"LiveParameters": [TxType.PERSISTENT],
@ -108,6 +109,8 @@ keys = {
"Offroad_PandaFirmwareMismatch": [TxType.CLEAR_ON_MANAGER_START, TxType.CLEAR_ON_PANDA_DISCONNECT],
"Offroad_InvalidTime": [TxType.CLEAR_ON_MANAGER_START],
"Offroad_IsTakingSnapshot": [TxType.CLEAR_ON_MANAGER_START],
"Offroad_NeosUpdate": [TxType.CLEAR_ON_MANAGER_START],
"Offroad_UpdateFailed": [TxType.CLEAR_ON_MANAGER_START],
}
@ -146,6 +149,10 @@ class DBAccessor():
def get(self, key):
self._check_entered()
if self._vals is None:
return None
try:
return self._vals[key]
except KeyError:
@ -198,7 +205,8 @@ class DBReader(DBAccessor):
finally:
lock.release()
def __exit__(self, type, value, traceback): pass
def __exit__(self, exc_type, exc_value, traceback):
pass
class DBWriter(DBAccessor):
@ -223,14 +231,14 @@ class DBWriter(DBAccessor):
os.chmod(self._path, 0o777)
self._lock = self._get_lock(True)
self._vals = self._read_values_locked()
except:
except Exception:
os.umask(self._prev_umask)
self._prev_umask = None
raise
return self
def __exit__(self, type, value, traceback):
def __exit__(self, exc_type, exc_value, traceback):
self._check_entered()
try:
@ -304,34 +312,37 @@ def read_db(params_path, key):
except IOError:
return None
def write_db(params_path, key, value):
if isinstance(value, str):
value = value.encode('utf8')
prev_umask = os.umask(0)
lock = FileLock(params_path+"/.lock", True)
lock = FileLock(params_path + "/.lock", True)
lock.acquire()
try:
tmp_path = tempfile.mktemp(prefix=".tmp", dir=params_path)
with open(tmp_path, "wb") as f:
tmp_path = tempfile.NamedTemporaryFile(mode="wb", prefix=".tmp", dir=params_path, delete=False)
with tmp_path as f:
f.write(value)
f.flush()
os.fsync(f.fileno())
os.chmod(tmp_path.name, 0o666)
path = "%s/d/%s" % (params_path, key)
os.rename(tmp_path, path)
os.rename(tmp_path.name, path)
fsync_dir(os.path.dirname(path))
finally:
os.umask(prev_umask)
lock.release()
class Params():
def __init__(self, db=PARAMS):
self.db = db
# create the database if it doesn't exist...
if not os.path.exists(self.db+"/d"):
if not os.path.exists(self.db + "/d"):
with self.transaction(write=True):
pass
@ -401,22 +412,3 @@ def put_nonblocking(key, val):
t = threading.Thread(target=f, args=(key, val))
t.start()
return t
if __name__ == "__main__":
params = Params()
if len(sys.argv) > 2:
params.put(sys.argv[1], sys.argv[2])
else:
for k in keys:
pp = params.get(k)
if pp is None:
print("%s is None" % k)
elif all(chr(c) in string.printable for c in pp):
print("%s = %s" % (k, pp))
else:
print("%s = %s" % (k, binascii.hexlify(pp)))
# Test multiprocess:
# seq 0 100000 | xargs -P20 -I{} python common/params.py DongleId {} && sleep 0.05
# while python common/params.py DongleId; do sleep 0.05; done

@ -43,4 +43,3 @@ class Profiler():
else:
print("%30s: %9.2f percent: %3.0f" % (n, ms*1000.0, ms/self.tot*100))
print("Iter clock: %2.6f TOTAL: %2.2f" % (self.tot/self.iter, self.tot))

@ -6,6 +6,7 @@ import subprocess
import multiprocessing
from cffi import FFI
from common.android import ANDROID
from common.common_pyx import sec_since_boot # pylint: disable=no-name-in-module, import-error
@ -20,11 +21,7 @@ ffi = FFI()
ffi.cdef("long syscall(long number, ...);")
libc = ffi.dlopen(None)
def set_realtime_priority(level):
if os.getuid() != 0:
print("not setting priority, not root")
return
def _get_tid():
if platform.machine() == "x86_64":
NR_gettid = 186
elif platform.machine() == "aarch64":
@ -32,8 +29,25 @@ def set_realtime_priority(level):
else:
raise NotImplementedError
tid = libc.syscall(NR_gettid)
return subprocess.call(['chrt', '-f', '-p', str(level), str(tid)])
return libc.syscall(NR_gettid)
def set_realtime_priority(level):
  # Give the current thread SCHED_FIFO realtime priority `level` via chrt.
  # Requires root; otherwise prints a notice and no-ops.
  if os.getuid() != 0:
    print("not setting priority, not root")
    return
  # chrt -f = SCHED_FIFO; applies to this thread's kernel tid, not the whole process
  return subprocess.call(['chrt', '-f', '-p', str(level), str(_get_tid())])
def set_core_affinity(core):
  """Pin the current thread to CPU `core` (Android only).

  Returns the `taskset` exit code on Android, -1 elsewhere, and None
  (after printing a notice) when not running as root.
  """
  if os.getuid() != 0:
    print("not setting affinity, not root")
    return
  if not ANDROID:
    return -1
  return subprocess.call(['taskset', '-p', str(core), str(_get_tid())])
class Ratekeeper():

@ -4,14 +4,17 @@ from common.basedir import BASEDIR
class Spinner():
def __init__(self):
try:
self.spinner_proc = subprocess.Popen(["./spinner"],
stdin=subprocess.PIPE,
cwd=os.path.join(BASEDIR, "selfdrive", "ui", "spinner"),
close_fds=True)
except OSError:
self.spinner_proc = None
def __init__(self, noop=False):
# spinner is currently only implemented for android
self.spinner_proc = None
if not noop:
try:
self.spinner_proc = subprocess.Popen(["./spinner"],
stdin=subprocess.PIPE,
cwd=os.path.join(BASEDIR, "selfdrive", "ui", "spinner"),
close_fds=True)
except OSError:
self.spinner_proc = None
def __enter__(self):
return self
@ -36,27 +39,10 @@ class Spinner():
def __del__(self):
self.close()
def __exit__(self, type, value, traceback):
def __exit__(self, exc_type, exc_value, traceback):
self.close()
class FakeSpinner():
def __init__(self):
pass
def __enter__(self):
return self
def update(self, _):
pass
def close(self):
pass
def __exit__(self, type, value, traceback):
pass
if __name__ == "__main__":
import time
with Spinner() as s:

@ -32,7 +32,7 @@ class RunningStat():
self.S_last = 0.
else:
self.M = self.M_last + (new_data - self.M_last) / self.n
self.S = self.S_last + (new_data - self.M_last) * (new_data - self.M);
self.S = self.S_last + (new_data - self.M_last) * (new_data - self.M)
self.M_last = self.M
self.S_last = self.S
@ -64,7 +64,7 @@ class RunningStatFilter():
_std_last = self.raw_stat.std()
self.raw_stat.push_data(new_data)
_delta_std = self.raw_stat.std() - _std_last
if _delta_std<=0:
if _delta_std <= 0:
self.filtered_stat.push_data(new_data)
else:
pass

@ -3,4 +3,4 @@ def replace_right(s, old, new, occurrence):
# replace_right('1232425', '2', ' ', 2) -> '123 4 5'
split = s.rsplit(old, occurrence)
return new.join(split)
return new.join(split)

@ -1,9 +0,0 @@
import os
from nose.tools import nottest
def phone_only(x):
  # Run the wrapped test only on-device; on a PC, mark it so nose skips it.
  # /init.qcom.rc exists only on the Qualcomm-based device.
  return x if os.path.isfile("/init.qcom.rc") else nottest(x)

@ -1,6 +1,5 @@
import numpy as np
import unittest
import timeit
from common.numpy_fast import interp

@ -1,10 +1,12 @@
from common.params import Params, UnknownKeyName
import os
import threading
import time
import tempfile
import shutil
import stat
import unittest
from common.params import Params, UnknownKeyName
class TestParams(unittest.TestCase):
def setUp(self):
@ -58,6 +60,12 @@ class TestParams(unittest.TestCase):
with self.assertRaises(UnknownKeyName):
self.params.get("swag")
def test_params_permissions(self):
permissions = stat.S_IRUSR | stat.S_IWUSR | stat.S_IRGRP | stat.S_IWGRP | stat.S_IROTH | stat.S_IWOTH
self.params.put("DongleId", "cb38263377b873ee")
st_mode = os.stat(f"{self.tmpdir}/d/DongleId").st_mode
assert (st_mode & permissions) == permissions
if __name__ == "__main__":
unittest.main()

@ -6,20 +6,22 @@ from common.basedir import BASEDIR
class TextWindow():
def __init__(self, s):
try:
self.text_proc = subprocess.Popen(["./text", s],
stdin=subprocess.PIPE,
cwd=os.path.join(BASEDIR, "selfdrive", "ui", "text"),
close_fds=True)
except OSError:
self.text_proc = None
def __init__(self, s, noop=False):
# text window is only implemented for android currently
self.text_proc = None
if not noop:
try:
self.text_proc = subprocess.Popen(["./text", s],
stdin=subprocess.PIPE,
cwd=os.path.join(BASEDIR, "selfdrive", "ui", "text"),
close_fds=True)
except OSError:
self.text_proc = None
def get_status(self):
if self.text_proc is not None:
self.text_proc.poll()
return self.text_proc.returncode
return None
def __enter__(self):
@ -31,38 +33,19 @@ class TextWindow():
self.text_proc = None
def wait_for_exit(self):
while True:
if self.get_status() == 1:
return
time.sleep(0.1)
if self.text_proc is not None:
while True:
if self.get_status() == 1:
return
time.sleep(0.1)
def __del__(self):
self.close()
def __exit__(self, type, value, traceback):
def __exit__(self, exc_type, exc_value, traceback):
self.close()
class FakeTextWindow():
def __init__(self, s):
pass
def get_status(self):
return 1
def wait_for_exit(self):
return
def __enter__(self):
return self
def update(self, _):
pass
def __exit__(self, type, value, traceback):
pass
if __name__ == "__main__":
text = """Traceback (most recent call last):
File "./controlsd.py", line 608, in <module>

@ -25,4 +25,3 @@ class Timeout:
def __exit__(self, exc_type, exc_val, exc_tb):
signal.alarm(0)

@ -0,0 +1,2 @@
transformations
transformations.cpp

@ -25,14 +25,14 @@ by generating a rotation matrix and multiplying.
Orientation Conventions
------
Quaternions, rotation matrices and euler angles are three
Quaternions, rotation matrices and euler angles are three
equivalent representations of orientation and all three are
used throughout the code base.
For euler angles the preferred convention is [roll, pitch, yaw]
which corresponds to rotations around the [x, y, z] axes. All
euler angles should always be in radians or radians/s unless
for plotting or display purposes. For quaternions the hamilton
for plotting or display purposes. For quaternions the hamilton
notation is preferred, which is [q<sub>w</sub>, q<sub>x</sub>, q<sub>y</sub>, q<sub>z</sub>]. All quaternions
should always be normalized with a strictly positive q<sub>w</sub>. **These
quaternions are a unique representation of orientation whereas euler angles
@ -45,11 +45,16 @@ while rotating around the rotated axes, not the original axes.
Calibration
------
EONs are not all mounted in the exact same way. To compensate for the effects of this the vision model takes in an image that is "calibrated". This means the image is aligned so the direction of travel of the car when it is going straight and the road is flat is always in the same location on the image. This calibration is defined by a pitch and yaw angle that describe the direction of travel vector in device frame.
Device frame is aligned with the road-facing camera used by openpilot. However, when controlling the vehicle it makes more sense to think in a reference frame aligned with the vehicle. These two reference frames are not necessarily aligned. Calibration is defined as the roll, pitch and yaw angles that describe the orientation of the vehicle in device frame. The vehicle orientation is the orientation of the vehicle's body; the orientation of the vehicle can change relative to the road because of suspension movements.
The roll of the vehicle is defined to be 0 when the vehicle is on a flat road and not turning. Pitch and yaw are defined as the angles that describe the direction in which the vehicle travels when it is driving on a flat road and not turning.
It is important for openpilot's driving model to take in images that look as if the calibration angles were all zero. To achieve this the images input into the model are transformed with the estimated calibration angles. At the moment, roll calibration is always estimated to be zero.
Example
------
To transform global Mesh3D positions and orientations (positions_ecef, quats_ecef) into the local frame described by the
To transform global Mesh3D positions and orientations (positions_ecef, quats_ecef) into the local frame described by the
first position and orientation from Mesh3D one would do:
```
ecef_from_local = rot_from_quat(quats_ecef[0])

@ -0,0 +1,8 @@
# SCons build for the transformations cython extension.
Import('env', 'cython_dependencies')

# Current directory; the cython build is run in-place from here.
d = Dir('.')

# Rebuild transformations.so whenever the pyx/pxd sources or the C++
# coordinate/orientation implementations change.
env.Command(['transformations.so'],
            cython_dependencies + ['transformations.pxd', 'transformations.pyx',
                                   'coordinates.cc', 'orientation.cc', 'coordinates.hpp', 'orientation.hpp'],
            'cd ' + d.path + ' && python3 setup.py build_ext --inplace')

@ -52,6 +52,13 @@ def get_view_frame_from_road_frame(roll, pitch, yaw, height):
return np.hstack((view_from_road, [[0], [height], [0]]))
# aka 'extrinsic_matrix'
def get_view_frame_from_calib_frame(roll, pitch, yaw, height):
device_from_calib= orient.rot_from_euler([roll, pitch, yaw])
view_from_calib = view_frame_from_device_frame.dot(device_from_calib)
return np.hstack((view_from_calib, [[0], [height], [0]]))
def vp_from_ke(m):
"""
Computes the vanishing point from the product of the intrinsic and extrinsic
@ -59,7 +66,7 @@ def vp_from_ke(m):
The vanishing point is defined as lim x->infinity C (x, 0, 0, 1).T
"""
return (m[0, 0]/m[2,0], m[1,0]/m[2,0])
return (m[0, 0]/m[2, 0], m[1, 0]/m[2, 0])
def vp_from_rpy(rpy):
@ -81,10 +88,10 @@ def normalize(img_pts, intrinsics=eon_intrinsics):
img_pts = np.array(img_pts)
input_shape = img_pts.shape
img_pts = np.atleast_2d(img_pts)
img_pts = np.hstack((img_pts, np.ones((img_pts.shape[0],1))))
img_pts = np.hstack((img_pts, np.ones((img_pts.shape[0], 1))))
img_pts_normalized = img_pts.dot(intrinsics_inv.T)
img_pts_normalized[(img_pts < 0).any(axis=1)] = np.nan
return img_pts_normalized[:,:2].reshape(input_shape)
return img_pts_normalized[:, :2].reshape(input_shape)
def denormalize(img_pts, intrinsics=eon_intrinsics):
@ -93,13 +100,13 @@ def denormalize(img_pts, intrinsics=eon_intrinsics):
img_pts = np.array(img_pts)
input_shape = img_pts.shape
img_pts = np.atleast_2d(img_pts)
img_pts = np.hstack((img_pts, np.ones((img_pts.shape[0],1))))
img_pts = np.hstack((img_pts, np.ones((img_pts.shape[0], 1))))
img_pts_denormalized = img_pts.dot(intrinsics.T)
img_pts_denormalized[img_pts_denormalized[:,0] > W] = np.nan
img_pts_denormalized[img_pts_denormalized[:,0] < 0] = np.nan
img_pts_denormalized[img_pts_denormalized[:,1] > H] = np.nan
img_pts_denormalized[img_pts_denormalized[:,1] < 0] = np.nan
return img_pts_denormalized[:,:2].reshape(input_shape)
img_pts_denormalized[img_pts_denormalized[:, 0] > W] = np.nan
img_pts_denormalized[img_pts_denormalized[:, 0] < 0] = np.nan
img_pts_denormalized[img_pts_denormalized[:, 1] > H] = np.nan
img_pts_denormalized[img_pts_denormalized[:, 1] < 0] = np.nan
return img_pts_denormalized[:, :2].reshape(input_shape)
def device_from_ecef(pos_ecef, orientation_ecef, pt_ecef):
@ -124,10 +131,10 @@ def img_from_device(pt_device):
pt_view = np.einsum('jk,ik->ij', view_frame_from_device_frame, pt_device)
# This function should never return negative depths
pt_view[pt_view[:,2] < 0] = np.nan
pt_view[pt_view[:, 2] < 0] = np.nan
pt_img = pt_view/pt_view[:,2:3]
return pt_img.reshape(input_shape)[:,:2]
pt_img = pt_view/pt_view[:, 2:3]
return pt_img.reshape(input_shape)[:, :2]
def get_camera_frame_from_calib_frame(camera_frame_from_road_frame):
@ -145,4 +152,3 @@ def pretransform_from_calib(calib):
camera_frame_from_road_frame = np.dot(eon_intrinsics, view_frame_from_road_frame)
camera_frame_from_calib_frame = get_camera_frame_from_calib_frame(camera_frame_from_road_frame)
return np.linalg.inv(camera_frame_from_calib_frame)

@ -0,0 +1,104 @@
#define _USE_MATH_DEFINES
#include <iostream>
#include <cmath>
#include <eigen3/Eigen/Dense>
#include "coordinates.hpp"
#define DEG2RAD(x) ((x) * M_PI / 180.0)
#define RAD2DEG(x) ((x) * 180.0 / M_PI)
double a = 6378137; // lgtm [cpp/short-global-name]
double b = 6356752.3142; // lgtm [cpp/short-global-name]
double esq = 6.69437999014 * 0.001; // lgtm [cpp/short-global-name]
double e1sq = 6.73949674228 * 0.001;
// Return a copy of `g` with lat/lon converted from radians to degrees.
static Geodetic to_degrees(Geodetic g){
  g.lat = RAD2DEG(g.lat);
  g.lon = RAD2DEG(g.lon);
  return g;
}
// Return a copy of `g` with lat/lon converted from degrees to radians.
static Geodetic to_radians(Geodetic g){
  g.lat = DEG2RAD(g.lat);
  g.lon = DEG2RAD(g.lon);
  return g;
}
// WGS84 geodetic (degrees, meters altitude) -> ECEF (meters).
ECEF geodetic2ecef(Geodetic g){
  g = to_radians(g);
  // chi scales the semi-major axis into the prime vertical radius of curvature
  double chi = sqrt(1.0 - esq * pow(sin(g.lat), 2));
  double ex = (a / chi + g.alt) * cos(g.lat) * cos(g.lon);
  double ey = (a / chi + g.alt) * cos(g.lat) * sin(g.lon);
  double ez = (a / chi * (1.0 - esq) + g.alt) * sin(g.lat);
  return {ex, ey, ez};
}
// ECEF (meters) -> WGS84 geodetic (degrees, meters altitude) using Ferrari's
// closed-form solution. Intermediate names follow the Wikipedia derivation.
Geodetic ecef2geodetic(ECEF e){
  // Convert from ECEF to geodetic using Ferrari's methods
  // https://en.wikipedia.org/wiki/Geographic_coordinate_conversion#Ferrari.27s_solution
  double x = e.x;
  double y = e.y;
  double z = e.z;

  double r = sqrt(x * x + y * y);  // distance from the Earth's axis
  double Esq = a * a - b * b;
  double F = 54 * b * b * z * z;
  double G = r * r + (1 - esq) * z * z - esq * Esq;
  double C = (esq * esq * F * r * r) / (pow(G, 3));
  double S = cbrt(1 + C + sqrt(C * C + 2 * C));
  double P = F / (3 * pow((S + 1 / S + 1), 2) * G * G);
  double Q = sqrt(1 + 2 * esq * esq * P);
  double r_0 = -(P * esq * r) / (1 + Q) + sqrt(0.5 * a * a*(1 + 1.0 / Q) - P * (1 - esq) * z * z / (Q * (1 + Q)) - 0.5 * P * r * r);
  double U = sqrt(pow((r - esq * r_0), 2) + z * z);
  double V = sqrt(pow((r - esq * r_0), 2) + (1 - esq) * z * z);
  double Z_0 = b * b * z / (a * V);
  double h = U * (1 - b * b / (a * V));  // altitude above the ellipsoid
  double lat = atan((z + e1sq * Z_0) / r);
  double lon = atan2(y, x);

  return to_degrees({lat, lon, h});
}
// Build the NED<->ECEF rotation for a local frame anchored at geodetic g
// (ECEF equivalent e). Both representations of the anchor are passed in so
// neither conversion has to be recomputed.
LocalCoord::LocalCoord(Geodetic g, ECEF e){
  init_ecef << e.x, e.y, e.z;

  g = to_radians(g);

  // Columns are the ECEF unit direction vectors of the local North, East
  // and Down axes at the anchor.
  ned2ecef_matrix <<
    -sin(g.lat)*cos(g.lon), -sin(g.lon), -cos(g.lat)*cos(g.lon),
    -sin(g.lat)*sin(g.lon), cos(g.lon), -cos(g.lat)*sin(g.lon),
    cos(g.lat), 0, -sin(g.lat);
  // Rotation matrices are orthonormal, so the inverse is the transpose.
  ecef2ned_matrix = ned2ecef_matrix.transpose();
}
// Rotate the offset from the local anchor into the NED frame.
NED LocalCoord::ecef2ned(ECEF e) {
  Eigen::Vector3d pos;
  pos << e.x, e.y, e.z;
  Eigen::Vector3d local = ecef2ned_matrix * (pos - init_ecef);
  return {local[0], local[1], local[2]};
}
// Rotate a local NED offset back into ECEF and re-anchor it.
ECEF LocalCoord::ned2ecef(NED n) {
  Eigen::Vector3d local;
  local << n.n, n.e, n.d;
  Eigen::Vector3d pos = (ned2ecef_matrix * local) + init_ecef;
  return {pos[0], pos[1], pos[2]};
}
// Geodetic -> ECEF -> local NED.
NED LocalCoord::geodetic2ned(Geodetic g) {
  return ecef2ned(::geodetic2ecef(g));
}
// Local NED -> ECEF -> geodetic.
Geodetic LocalCoord::ned2geodetic(NED n){
  return ::ecef2geodetic(ned2ecef(n));
}

@ -0,0 +1,35 @@
#pragma once
// Earth-centered, earth-fixed position in meters.
struct ECEF {
  double x, y, z;
  Eigen::Vector3d to_vector(){
    return Eigen::Vector3d(x, y, z);
  }
};
// Local North-East-Down offset in meters.
struct NED {
  double n, e, d;
};
// Geodetic position: latitude/longitude (degrees unless `radians` is set)
// and altitude in meters.
struct Geodetic {
  double lat, lon, alt;
  bool radians=false;  // NOTE(review): not consulted by the coordinates.cc conversions — confirm intended use
};
ECEF geodetic2ecef(Geodetic g);
Geodetic ecef2geodetic(ECEF e);
// Converts between ECEF/geodetic positions and a local NED frame anchored
// at the position handed to the constructor.
class LocalCoord {
public:
  Eigen::Matrix3d ned2ecef_matrix;  // columns: ECEF directions of the local N, E, D axes
  Eigen::Matrix3d ecef2ned_matrix;  // transpose (== inverse) of ned2ecef_matrix
  Eigen::Vector3d init_ecef;        // anchor position, ECEF meters

  LocalCoord(Geodetic g, ECEF e);
  // Single-argument anchors derive the missing representation.
  LocalCoord(Geodetic g) : LocalCoord(g, ::geodetic2ecef(g)) {}
  LocalCoord(ECEF e) : LocalCoord(::ecef2geodetic(e), e) {}

  NED ecef2ned(ECEF e);
  ECEF ned2ecef(NED n);
  NED geodetic2ned(Geodetic g);
  Geodetic ned2geodetic(NED n);
};

@ -1,108 +1,19 @@
import numpy as np
"""
Coordinate transformation module. All methods accept arrays as input
with each row as a position.
"""
# pylint: skip-file
from common.transformations.orientation import numpy_wrap
from common.transformations.transformations import (ecef2geodetic_single,
geodetic2ecef_single)
from common.transformations.transformations import LocalCoord as LocalCoord_single
class LocalCoord(LocalCoord_single):
ecef2ned = numpy_wrap(LocalCoord_single.ecef2ned_single, (3,), (3,))
ned2ecef = numpy_wrap(LocalCoord_single.ned2ecef_single, (3,), (3,))
geodetic2ned = numpy_wrap(LocalCoord_single.geodetic2ned_single, (3,), (3,))
ned2geodetic = numpy_wrap(LocalCoord_single.ned2geodetic_single, (3,), (3,))
a = 6378137
b = 6356752.3142
esq = 6.69437999014 * 0.001
e1sq = 6.73949674228 * 0.001
geodetic2ecef = numpy_wrap(geodetic2ecef_single, (3,), (3,))
ecef2geodetic = numpy_wrap(ecef2geodetic_single, (3,), (3,))
def geodetic2ecef(geodetic, radians=False):
  """Geodetic [lat, lon, alt] -> ECEF [x, y, z] in meters.

  Accepts a single (3,) position or an (N, 3) array; the output shape
  matches the input. Lat/lon are degrees unless radians=True; altitude
  is always meters.
  """
  geodetic = np.array(geodetic)
  input_shape = geodetic.shape
  geodetic = np.atleast_2d(geodetic)

  scale = 1.0 if radians else (np.pi / 180.0)
  lat = scale * geodetic[:, 0]
  lon = scale * geodetic[:, 1]
  alt = geodetic[:, 2]

  # xi folds the ellipsoid flattening into the prime vertical radius (a / xi)
  xi = np.sqrt(1 - esq * np.sin(lat)**2)
  x = (a / xi + alt) * np.cos(lat) * np.cos(lon)
  y = (a / xi + alt) * np.cos(lat) * np.sin(lon)
  z = (a / xi * (1 - esq) + alt) * np.sin(lat)

  return np.column_stack((x, y, z)).reshape(input_shape)
def ecef2geodetic(ecef, radians=False):
  """
  Convert ECEF coordinates to geodetic using ferrari's method
  """
  # Accepts a single (3,) position or an (N, 3) array; output shape matches.
  # Save shape and export column
  ecef = np.atleast_1d(ecef)
  input_shape = ecef.shape
  ecef = np.atleast_2d(ecef)
  x, y, z = ecef[:, 0], ecef[:, 1], ecef[:, 2]

  # Output lat/lon in degrees unless radians=True; altitude is always meters.
  ratio = 1.0 if radians else (180.0 / np.pi)

  # Convert from ECEF to geodetic using Ferrari's methods
  # https://en.wikipedia.org/wiki/Geographic_coordinate_conversion#Ferrari.27s_solution
  # Intermediate names follow the Wikipedia derivation.
  r = np.sqrt(x * x + y * y)
  Esq = a * a - b * b
  F = 54 * b * b * z * z
  G = r * r + (1 - esq) * z * z - esq * Esq
  C = (esq * esq * F * r * r) / (pow(G, 3))
  S = np.cbrt(1 + C + np.sqrt(C * C + 2 * C))
  P = F / (3 * pow((S + 1 / S + 1), 2) * G * G)
  Q = np.sqrt(1 + 2 * esq * esq * P)
  r_0 = -(P * esq * r) / (1 + Q) + np.sqrt(0.5 * a * a*(1 + 1.0 / Q) - \
        P * (1 - esq) * z * z / (Q * (1 + Q)) - 0.5 * P * r * r)
  U = np.sqrt(pow((r - esq * r_0), 2) + z * z)
  V = np.sqrt(pow((r - esq * r_0), 2) + (1 - esq) * z * z)
  Z_0 = b * b * z / (a * V)
  h = U * (1 - b * b / (a * V))
  lat = ratio*np.arctan((z + e1sq * Z_0) / r)
  lon = ratio*np.arctan2(y, x)

  # stack the new columns and return to the original shape
  geodetic = np.column_stack((lat, lon, h))
  return geodetic.reshape(input_shape)
class LocalCoord():
  """
  Allows conversions to local frames. In this case NED.
  That is: North East Down from the start position in
  meters.
  """
  def __init__(self, init_geodetic, init_ecef):
    # Anchor of the local frame, in ECEF meters.
    self.init_ecef = init_ecef
    lat, lon, _ = (np.pi/180)*np.array(init_geodetic)
    # Columns are the ECEF unit direction vectors of the local N, E, D axes.
    self.ned2ecef_matrix = np.array([[-np.sin(lat)*np.cos(lon), -np.sin(lon), -np.cos(lat)*np.cos(lon)],
                                     [-np.sin(lat)*np.sin(lon), np.cos(lon), -np.cos(lat)*np.sin(lon)],
                                     [np.cos(lat), 0, -np.sin(lat)]])
    # Orthonormal rotation: inverse == transpose.
    self.ecef2ned_matrix = self.ned2ecef_matrix.T

  @classmethod
  def from_geodetic(cls, init_geodetic):
    # Build a local frame anchored at a geodetic position.
    init_ecef = geodetic2ecef(init_geodetic)
    return LocalCoord(init_geodetic, init_ecef)

  @classmethod
  def from_ecef(cls, init_ecef):
    # Build a local frame anchored at an ECEF position.
    init_geodetic = ecef2geodetic(init_ecef)
    return LocalCoord(init_geodetic, init_ecef)

  def ecef2ned(self, ecef):
    # Rotate the offset from the anchor into the NED frame.
    ecef = np.array(ecef)
    return np.dot(self.ecef2ned_matrix, (ecef - self.init_ecef).T).T

  def ned2ecef(self, ned):
    ned = np.array(ned)
    # Transpose so that init_ecef will broadcast correctly for 1d or 2d ned.
    return (np.dot(self.ned2ecef_matrix, ned.T).T + self.init_ecef)

  def geodetic2ned(self, geodetic):
    # Geodetic -> ECEF -> local NED.
    ecef = geodetic2ecef(geodetic)
    return self.ecef2ned(ecef)

  def ned2geodetic(self, ned):
    # Local NED -> ECEF -> geodetic.
    ecef = self.ned2ecef(ned)
    return ecef2geodetic(ecef)
geodetic_from_ecef = ecef2geodetic
ecef_from_geodetic = geodetic2ecef

@ -2,6 +2,7 @@ import numpy as np
from common.transformations.camera import (FULL_FRAME_SIZE, eon_focal_length,
get_view_frame_from_road_frame,
get_view_frame_from_calib_frame,
vp_from_ke)
# segnet
@ -41,6 +42,17 @@ medmodel_intrinsics = np.array(
[ 0. , eon_focal_length / medmodel_zoom, MEDMODEL_CY],
[ 0. , 0. , 1.]])
# CAL model
CALMODEL_INPUT_SIZE = (512, 256)
CALMODEL_YUV_SIZE = (CALMODEL_INPUT_SIZE[0], CALMODEL_INPUT_SIZE[1] * 3 // 2)
CALMODEL_CY = 47.6
calmodel_zoom = 1.5
calmodel_intrinsics = np.array(
[[ eon_focal_length / calmodel_zoom, 0. , 0.5 * CALMODEL_INPUT_SIZE[0]],
[ 0. , eon_focal_length / calmodel_zoom, CALMODEL_CY],
[ 0. , 0. , 1.]])
# BIG model
@ -62,6 +74,9 @@ bigmodel_frame_from_road_frame = np.dot(bigmodel_intrinsics,
medmodel_frame_from_road_frame = np.dot(medmodel_intrinsics,
get_view_frame_from_road_frame(0, 0, 0, model_height))
medmodel_frame_from_calib_frame = np.dot(medmodel_intrinsics,
get_view_frame_from_calib_frame(0, 0, 0, 0))
model_frame_from_bigmodel_frame = np.dot(model_intrinsics, np.linalg.inv(bigmodel_intrinsics))
medmodel_frame_from_bigmodel_frame = np.dot(medmodel_intrinsics, np.linalg.inv(bigmodel_intrinsics))
@ -100,7 +115,7 @@ def get_camera_frame_from_model_frame(camera_frame_from_road_frame, height=model
# This function is super slow, so skip it if height is very close to canonical
# TODO: speed it up!
if abs(height - model_height) > 0.001: #
if abs(height - model_height) > 0.001:
camera_from_model_camera = get_model_height_transform(camera_frame_from_road_frame, height)
else:
camera_from_model_camera = np.eye(3)
@ -130,9 +145,9 @@ def get_camera_frame_from_bigmodel_frame(camera_frame_from_road_frame):
def get_model_frame(snu_full, camera_frame_from_model_frame, size):
idxs = camera_frame_from_model_frame.dot(np.column_stack([np.tile(np.arange(size[0]), size[1]),
np.tile(np.arange(size[1]), (size[0],1)).T.flatten(),
np.tile(np.arange(size[1]), (size[0], 1)).T.flatten(),
np.ones(size[0] * size[1])]).T).T.astype(int)
calib_flat = snu_full[idxs[:,1], idxs[:,0]]
calib_flat = snu_full[idxs[:, 1], idxs[:, 0]]
if len(snu_full.shape) == 3:
calib = calib_flat.reshape((size[1], size[0], 3))
elif len(snu_full.shape) == 2:

@ -0,0 +1,147 @@
#define _USE_MATH_DEFINES
#include <iostream>
#include <cmath>
#include <eigen3/Eigen/Dense>
#include "orientation.hpp"
#include "coordinates.hpp"
// Canonicalize a quaternion: q and -q encode the same rotation, so keep the
// representative whose scalar part is non-negative.
Eigen::Quaterniond ensure_unique(Eigen::Quaterniond quat){
  if (quat.w() > 0){
    return quat;
  }
  return Eigen::Quaterniond(-quat.w(), -quat.x(), -quat.y(), -quat.z());
}
// Euler angles [roll, pitch, yaw] (radians) -> unit quaternion with w >= 0.
// Composition order: yaw about Z, then pitch about Y, then roll about X.
Eigen::Quaterniond euler2quat(Eigen::Vector3d euler){
  Eigen::Quaterniond q(Eigen::AngleAxisd(euler(2), Eigen::Vector3d::UnitZ())
                     * Eigen::AngleAxisd(euler(1), Eigen::Vector3d::UnitY())
                     * Eigen::AngleAxisd(euler(0), Eigen::Vector3d::UnitX()));
  return ensure_unique(q);
}
// Unit quaternion -> Euler angles [roll, pitch, yaw] in radians.
Eigen::Vector3d quat2euler(Eigen::Quaterniond quat){
  // TODO: switch to eigen implementation if the range of the Euler angles doesn't matter anymore
  // Eigen::Vector3d euler = quat.toRotationMatrix().eulerAngles(2, 1, 0);
  // return {euler(2), euler(1), euler(0)};
  double gamma = atan2(2 * (quat.w() * quat.x() + quat.y() * quat.z()), 1 - 2 * (quat.x()*quat.x() + quat.y()*quat.y()));
  // Clamp the asin argument: floating-point error can push it slightly past
  // +/-1 near gimbal lock, which would make asin() return NaN.
  double sin_theta = 2 * (quat.w() * quat.y() - quat.z() * quat.x());
  if (sin_theta > 1.0) {
    sin_theta = 1.0;
  } else if (sin_theta < -1.0) {
    sin_theta = -1.0;
  }
  double theta = asin(sin_theta);
  double psi = atan2(2 * (quat.w() * quat.z() + quat.x() * quat.y()), 1 - 2 * (quat.y()*quat.y() + quat.z()*quat.z()));
  return {gamma, theta, psi};
}
// Quaternion -> 3x3 rotation matrix.
Eigen::Matrix3d quat2rot(Eigen::Quaterniond quat){
  return quat.toRotationMatrix();
}
// Rotation matrix -> quaternion, canonicalized so w >= 0.
Eigen::Quaterniond rot2quat(Eigen::Matrix3d rot){
  return ensure_unique(Eigen::Quaterniond(rot));
}
// Euler [roll, pitch, yaw] -> rotation matrix (via the quaternion helpers).
Eigen::Matrix3d euler2rot(Eigen::Vector3d euler){
  return quat2rot(euler2quat(euler));
}
// Rotation matrix -> Euler [roll, pitch, yaw] (via the quaternion helpers).
Eigen::Vector3d rot2euler(Eigen::Matrix3d rot){
  return quat2euler(rot2quat(rot));
}
// Convenience overload: rotation matrix from individual roll/pitch/yaw angles.
Eigen::Matrix3d rot_matrix(double roll, double pitch, double yaw){
  return euler2rot({roll, pitch, yaw});
}
// Rotation matrix for rotating by `angle` radians around `axis`.
Eigen::Matrix3d rot(Eigen::Vector3d axis, double angle){
  Eigen::Quaterniond q;
  q = Eigen::AngleAxisd(angle, axis);
  return q.toRotationMatrix();
}
// Euler angles [roll, pitch, yaw] given in the local NED frame at ecef_init
// -> Euler angles of the same orientation expressed in the ECEF frame.
Eigen::Vector3d ecef_euler_from_ned(ECEF ecef_init, Eigen::Vector3d ned_pose) {
  /*
  Using Rotations to Build Aerospace Coordinate Systems
  Don Koks
  https://apps.dtic.mil/dtic/tr/fulltext/u2/a484864.pdf
  */
  LocalCoord converter = LocalCoord(ecef_init);
  Eigen::Vector3d zero = ecef_init.to_vector();

  // ECEF direction vectors of the local North, East, Down axes.
  Eigen::Vector3d x0 = converter.ned2ecef({1, 0, 0}).to_vector() - zero;
  Eigen::Vector3d y0 = converter.ned2ecef({0, 1, 0}).to_vector() - zero;
  Eigen::Vector3d z0 = converter.ned2ecef({0, 0, 1}).to_vector() - zero;

  // Apply yaw, then pitch, then roll, each about the already-rotated axes.
  Eigen::Vector3d x1 = rot(z0, ned_pose(2)) * x0;
  Eigen::Vector3d y1 = rot(z0, ned_pose(2)) * y0;
  Eigen::Vector3d z1 = rot(z0, ned_pose(2)) * z0;

  Eigen::Vector3d x2 = rot(y1, ned_pose(1)) * x1;
  Eigen::Vector3d y2 = rot(y1, ned_pose(1)) * y1;
  Eigen::Vector3d z2 = rot(y1, ned_pose(1)) * z1;

  Eigen::Vector3d x3 = rot(x2, ned_pose(0)) * x2;
  Eigen::Vector3d y3 = rot(x2, ned_pose(0)) * y2;

  // Re-read the rotated axes against the standard ECEF basis.
  x0 = Eigen::Vector3d(1, 0, 0);
  y0 = Eigen::Vector3d(0, 1, 0);
  z0 = Eigen::Vector3d(0, 0, 1);

  double psi = atan2(x3.dot(y0), x3.dot(x0));
  double theta = atan2(-x3.dot(z0), sqrt(pow(x3.dot(x0), 2) + pow(x3.dot(y0), 2)));
  y2 = rot(z0, psi) * y0;
  z2 = rot(y2, theta) * z0;
  double phi = atan2(y3.dot(z2), y3.dot(y2));

  return {phi, theta, psi};
}
// Euler angles [roll, pitch, yaw] expressed in the ECEF frame -> Euler angles
// of the same orientation in the local NED frame at ecef_init.
// Mirror image of ecef_euler_from_ned: rotate the ECEF basis, then read the
// result against the local NED axes.
Eigen::Vector3d ned_euler_from_ecef(ECEF ecef_init, Eigen::Vector3d ecef_pose){
  /*
  Using Rotations to Build Aerospace Coordinate Systems
  Don Koks
  https://apps.dtic.mil/dtic/tr/fulltext/u2/a484864.pdf
  */
  LocalCoord converter = LocalCoord(ecef_init);

  // Standard ECEF basis.
  Eigen::Vector3d x0 = Eigen::Vector3d(1, 0, 0);
  Eigen::Vector3d y0 = Eigen::Vector3d(0, 1, 0);
  Eigen::Vector3d z0 = Eigen::Vector3d(0, 0, 1);

  // Apply yaw, then pitch, then roll, each about the already-rotated axes.
  Eigen::Vector3d x1 = rot(z0, ecef_pose(2)) * x0;
  Eigen::Vector3d y1 = rot(z0, ecef_pose(2)) * y0;
  Eigen::Vector3d z1 = rot(z0, ecef_pose(2)) * z0;

  Eigen::Vector3d x2 = rot(y1, ecef_pose(1)) * x1;
  Eigen::Vector3d y2 = rot(y1, ecef_pose(1)) * y1;
  Eigen::Vector3d z2 = rot(y1, ecef_pose(1)) * z1;

  Eigen::Vector3d x3 = rot(x2, ecef_pose(0)) * x2;
  Eigen::Vector3d y3 = rot(x2, ecef_pose(0)) * y2;

  // ECEF direction vectors of the local North, East, Down axes.
  Eigen::Vector3d zero = ecef_init.to_vector();
  x0 = converter.ned2ecef({1, 0, 0}).to_vector() - zero;
  y0 = converter.ned2ecef({0, 1, 0}).to_vector() - zero;
  z0 = converter.ned2ecef({0, 0, 1}).to_vector() - zero;

  double psi = atan2(x3.dot(y0), x3.dot(x0));
  double theta = atan2(-x3.dot(z0), sqrt(pow(x3.dot(x0), 2) + pow(x3.dot(y0), 2)));
  y2 = rot(z0, psi) * y0;
  z2 = rot(y2, theta) * z0;
  double phi = atan2(y3.dot(z2), y3.dot(y2));

  return {phi, theta, psi};
}
// Placeholder entry point; this translation unit is otherwise used as a library.
int main(void){
  return 0;
}

@ -0,0 +1,17 @@
#pragma once

#include <eigen3/Eigen/Dense>
#include "coordinates.hpp"

// Orientation helpers: conversions between quaternions, euler angles
// (roll, pitch, yaw) and rotation matrices, plus ECEF<->NED euler transforms.
// Implemented in orientation.cc.

// Flip a quaternion's sign so a canonical representative is returned
// (q and -q encode the same rotation).
Eigen::Quaterniond ensure_unique(Eigen::Quaterniond quat);

// Euler angles are {roll, pitch, yaw} in radians.
Eigen::Quaterniond euler2quat(Eigen::Vector3d euler);
Eigen::Vector3d quat2euler(Eigen::Quaterniond quat);
Eigen::Matrix3d quat2rot(Eigen::Quaterniond quat);
Eigen::Quaterniond rot2quat(Eigen::Matrix3d rot);
Eigen::Matrix3d euler2rot(Eigen::Vector3d euler);
Eigen::Vector3d rot2euler(Eigen::Matrix3d rot);

// Rotation matrix from individual roll/pitch/yaw angles.
Eigen::Matrix3d rot_matrix(double roll, double pitch, double yaw);
// Rotation of `angle` radians about an arbitrary `axis` (Rodrigues).
Eigen::Matrix3d rot(Eigen::Vector3d axis, double angle);

// Convert euler orientations between the NED frame at ecef_init and ECEF.
Eigen::Vector3d ecef_euler_from_ned(ECEF ecef_init, Eigen::Vector3d ned_pose);
Eigen::Vector3d ned_euler_from_ecef(ECEF ecef_init, Eigen::Vector3d ecef_pose);

@ -1,295 +1,52 @@
# pylint: skip-file
import numpy as np
from numpy import dot, inner, array, linalg
from common.transformations.coordinates import LocalCoord
'''
Vectorized functions that transform between
rotation matrices, euler angles and quaternions.
All support lists, array or array of arrays as inputs.
Supports both x2y and y_from_x format (y_from_x preferred!).
'''
def euler2quat(eulers):
  """Convert euler angles [roll, pitch, yaw] to quaternions [w, x, y, z].

  Accepts a single (3,) orientation or an (N, 3) batch; the returned shape
  matches ((4,) or (N, 4)). Quaternions are canonicalized so w >= 0.
  """
  eulers = array(eulers)
  batched = len(eulers.shape) > 1
  output_shape = (-1, 4) if batched else (4,)
  eulers = np.atleast_2d(eulers)

  half = eulers / 2
  cr, cp, cy = np.cos(half[:, 0]), np.cos(half[:, 1]), np.cos(half[:, 2])
  sr, sp, sy = np.sin(half[:, 0]), np.sin(half[:, 1]), np.sin(half[:, 2])

  w = cr * cp * cy + sr * sp * sy
  x = sr * cp * cy - cr * sp * sy
  y = cr * sp * cy + sr * cp * sy
  z = cr * cp * sy - sr * sp * cy

  quats = array([w, x, y, z]).T
  # q and -q represent the same rotation; pick the w >= 0 representative.
  flip = quats[:, 0] < 0
  quats[flip] = -quats[flip]
  return quats.reshape(output_shape)
def quat2euler(quats):
  """Convert quaternions [w, x, y, z] to euler angles [roll, pitch, yaw].

  Accepts a single (4,) quaternion or an (N, 4) batch; the returned shape
  matches ((3,) or (N, 3)).
  """
  quats = array(quats)
  output_shape = (-1, 3) if len(quats.shape) > 1 else (3,)
  q = np.atleast_2d(quats)

  w, x, y, z = q[:, 0], q[:, 1], q[:, 2], q[:, 3]
  roll = np.arctan2(2 * (w * x + y * z), 1 - 2 * (x**2 + y**2))
  pitch = np.arcsin(2 * (w * y - z * x))
  yaw = np.arctan2(2 * (w * z + x * y), 1 - 2 * (y**2 + z**2))

  return array([roll, pitch, yaw]).T.reshape(output_shape)
def quat2rot(quats):
  """Convert quaternions [w, x, y, z] to 3x3 rotation matrices.

  A single (4,) input yields a (3, 3) matrix; an (N, 4) batch yields (N, 3, 3).
  """
  quats = array(quats)
  single = len(quats.shape) < 2
  q = np.atleast_2d(quats)

  w, x, y, z = q[:, 0], q[:, 1], q[:, 2], q[:, 3]
  ww, xx, yy, zz = w * w, x * x, y * y, z * z

  Rs = np.empty((q.shape[0], 3, 3))
  Rs[:, 0, 0] = ww + xx - yy - zz
  Rs[:, 0, 1] = 2 * (x * y - w * z)
  Rs[:, 0, 2] = 2 * (w * y + x * z)
  Rs[:, 1, 0] = 2 * (x * y + w * z)
  Rs[:, 1, 1] = ww - xx + yy - zz
  Rs[:, 1, 2] = 2 * (y * z - w * x)
  Rs[:, 2, 0] = 2 * (x * z - w * y)
  Rs[:, 2, 1] = 2 * (w * x + y * z)
  Rs[:, 2, 2] = ww - xx - yy + zz

  return Rs[0] if single else Rs
def rot2quat(rots):
  # Convert 3x3 rotation matrices to quaternions [w, x, y, z] using the
  # eigendecomposition of the symmetric 4x4 K3 matrix (Bar-Itzhack style):
  # the eigenvector for the largest eigenvalue encodes the quaternion.
  # NOTE(review): assumes `rots` is already an np.ndarray (uses .shape directly).
  # A single (3, 3) input yields a (4,) quaternion; (N, 3, 3) yields (N, 4).
  input_shape = rots.shape
  if len(input_shape) < 3:
    rots = array([rots])
  # Build the symmetric K3 matrix for every rotation in the batch.
  K3 = np.empty((len(rots), 4, 4))
  K3[:, 0, 0] = (rots[:, 0, 0] - rots[:, 1, 1] - rots[:, 2, 2]) / 3.0
  K3[:, 0, 1] = (rots[:, 1, 0] + rots[:, 0, 1]) / 3.0
  K3[:, 0, 2] = (rots[:, 2, 0] + rots[:, 0, 2]) / 3.0
  K3[:, 0, 3] = (rots[:, 1, 2] - rots[:, 2, 1]) / 3.0
  K3[:, 1, 0] = K3[:, 0, 1]
  K3[:, 1, 1] = (rots[:, 1, 1] - rots[:, 0, 0] - rots[:, 2, 2]) / 3.0
  K3[:, 1, 2] = (rots[:, 2, 1] + rots[:, 1, 2]) / 3.0
  K3[:, 1, 3] = (rots[:, 2, 0] - rots[:, 0, 2]) / 3.0
  K3[:, 2, 0] = K3[:, 0, 2]
  K3[:, 2, 1] = K3[:, 1, 2]
  K3[:, 2, 2] = (rots[:, 2, 2] - rots[:, 0, 0] - rots[:, 1, 1]) / 3.0
  K3[:, 2, 3] = (rots[:, 0, 1] - rots[:, 1, 0]) / 3.0
  K3[:, 3, 0] = K3[:, 0, 3]
  K3[:, 3, 1] = K3[:, 1, 3]
  K3[:, 3, 2] = K3[:, 2, 3]
  K3[:, 3, 3] = (rots[:, 0, 0] + rots[:, 1, 1] + rots[:, 2, 2]) / 3.0
  q = np.empty((len(rots), 4))
  for i in range(len(rots)):
    # eigh returns eigenvalues ascending, so column 3 is the largest.
    _, eigvecs = linalg.eigh(K3[i].T)
    eigvecs = eigvecs[:,3:]
    # Last component of the eigenvector is w; the first three map to -[x, y, z].
    q[i, 0] = eigvecs[-1]
    q[i, 1:] = -eigvecs[:-1].flatten()
    if q[i, 0] < 0:
      # Canonicalize: q and -q are the same rotation, keep w >= 0.
      q[i] = -q[i]
  if len(input_shape) < 3:
    return q[0]
  else:
    return q
def euler2rot(eulers):
  """Euler angles -> rotation matrices, going through the quaternion form."""
  quats = euler2quat(eulers)
  return rotations_from_quats(quats)
def rot2euler(rots):
  """Rotation matrices -> euler angles, going through the quaternion form."""
  quats = quats_from_rotations(rots)
  return quat2euler(quats)
from common.transformations.transformations import (ecef_euler_from_ned_single,
euler2quat_single,
euler2rot_single,
ned_euler_from_ecef_single,
quat2euler_single,
quat2rot_single,
rot2euler_single,
rot2quat_single)
def numpy_wrap(function, input_shape, output_shape):
  """Wrap a single-item function so it also accepts a batch of inputs.

  `function` takes one item of shape `input_shape` (plus any leading extra
  args) and returns something of shape `output_shape`. The wrapper maps it
  over a batch and reshapes the stacked result accordingly.
  """
  def wrapped(*inps):
    *extra_args, raw = inps
    arr = np.array(raw)
    batched = len(arr.shape) != len(input_shape)
    if batched:
      out_shape = (arr.shape[0],) + output_shape
    else:
      out_shape = output_shape
      arr = arr[np.newaxis]  # promote the single item to a batch of one
    result = np.asarray([function(*extra_args, item) for item in arr])
    result.shape = out_shape
    return result
  return wrapped
# Rebind the public conversion names to vectorized wrappers around the
# C++-backed *_single implementations.
euler2quat = numpy_wrap(euler2quat_single, (3,), (4,))
quat2euler = numpy_wrap(quat2euler_single, (4,), (3,))
quat2rot = numpy_wrap(quat2rot_single, (4,), (3, 3))
rot2quat = numpy_wrap(rot2quat_single, (3, 3), (4,))
euler2rot = numpy_wrap(euler2rot_single, (3,), (3, 3))
rot2euler = numpy_wrap(rot2euler_single, (3, 3), (3,))
ecef_euler_from_ned = numpy_wrap(ecef_euler_from_ned_single, (3,), (3,))
ned_euler_from_ecef = numpy_wrap(ned_euler_from_ecef_single, (3,), (3,))

# Legacy y_from_x-style aliases kept for backward compatibility.
# (Fixed: `rot_from_quat = quat2rot` was duplicated three times.)
quats_from_rotations = rot2quat
quat_from_rot = rot2quat
rotations_from_quats = quat2rot
rot_from_quat = quat2rot
euler_from_rot = rot2euler
euler_from_quat = quat2euler
rot_from_euler = euler2rot
quat_from_euler = euler2quat
'''
Random helpers below
'''
def quat_product(q, r):
  """Hamilton product of two quaternions in [w, x, y, z] order.

  Note the argument roles: the result composes `r` with `q` exactly as the
  original component formulas below (kept verbatim in value).
  """
  w = r[0] * q[0] - r[1] * q[1] - r[2] * q[2] - r[3] * q[3]
  x = r[0] * q[1] + r[1] * q[0] - r[2] * q[3] + r[3] * q[2]
  y = r[0] * q[2] + r[1] * q[3] + r[2] * q[0] - r[3] * q[1]
  z = r[0] * q[3] - r[1] * q[2] + r[2] * q[1] + r[3] * q[0]
  # float64 result, matching the original np.zeros(4)-based accumulator
  return np.array([w, x, y, z], dtype=np.float64)
def rot_matrix(roll, pitch, yaw):
  """Rotation matrix for intrinsic roll/pitch/yaw angles (R = Rz @ Ry @ Rx)."""
  cos_r, sin_r = np.cos(roll), np.sin(roll)
  cos_p, sin_p = np.cos(pitch), np.sin(pitch)
  cos_y, sin_y = np.cos(yaw), np.sin(yaw)
  roll_mat = array([[1, 0, 0],
                    [0, cos_r, -sin_r],
                    [0, sin_r, cos_r]])
  pitch_mat = array([[cos_p, 0, sin_p],
                     [0, 1, 0],
                     [-sin_p, 0, cos_p]])
  yaw_mat = array([[cos_y, -sin_y, 0],
                   [sin_y, cos_y, 0],
                   [0, 0, 1]])
  return yaw_mat @ pitch_mat @ roll_mat
def rot(axis, angle):
  """Rotation matrix of `angle` radians about an arbitrary (unit) `axis`.

  Rodrigues form: R = cos(a)*I + sin(a)*[axis]_x + (1-cos(a))*axis*axis^T.
  """
  ax, ay, az = axis[0], axis[1], axis[2]
  outer = array([[ax * ax, ax * ay, ax * az],
                 [ay * ax, ay * ay, ay * az],
                 [az * ax, az * ay, az * az]])
  cross = array([[0, -az, ay],
                 [az, 0, -ax],
                 [-ay, ax, 0]])
  return np.cos(angle) * np.eye(3) + np.sin(angle) * cross + (1 - np.cos(angle)) * outer
def ecef_euler_from_ned(ned_ecef_init, ned_pose):
  '''
  Converts a NED euler orientation [roll, pitch, yaw] at the reference ECEF
  point `ned_ecef_init` into the equivalent ECEF-frame euler angles.

  Got it from here:
  Using Rotations to Build Aerospace Coordinate Systems
  -Don Koks
  '''
  converter = LocalCoord.from_ecef(ned_ecef_init)
  # NED basis vectors expressed as ECEF direction vectors at the reference.
  x0 = converter.ned2ecef([1, 0, 0]) - converter.ned2ecef([0, 0, 0])
  y0 = converter.ned2ecef([0, 1, 0]) - converter.ned2ecef([0, 0, 0])
  z0 = converter.ned2ecef([0, 0, 1]) - converter.ned2ecef([0, 0, 0])

  # Intrinsic yaw -> pitch -> roll sequence to build the body axes.
  x1 = rot(z0, ned_pose[2]).dot(x0)
  y1 = rot(z0, ned_pose[2]).dot(y0)
  z1 = rot(z0, ned_pose[2]).dot(z0)

  x2 = rot(y1, ned_pose[1]).dot(x1)
  y2 = rot(y1, ned_pose[1]).dot(y1)
  z2 = rot(y1, ned_pose[1]).dot(z1)

  x3 = rot(x2, ned_pose[0]).dot(x2)
  y3 = rot(x2, ned_pose[0]).dot(y2)
  #z3 = rot(x2, ned_pose[0]).dot(z2)

  # Extract yaw/pitch/roll of the body axes relative to the ECEF basis.
  x0 = array([1, 0, 0])
  y0 = array([0, 1, 0])
  z0 = array([0, 0, 1])

  psi = np.arctan2(inner(x3, y0), inner(x3, x0))
  theta = np.arctan2(-inner(x3, z0), np.sqrt(inner(x3, x0)**2 + inner(x3, y0)**2))
  y2 = rot(z0, psi).dot(y0)
  z2 = rot(y2, theta).dot(z0)
  phi = np.arctan2(inner(y3, z2), inner(y3, y2))

  ret = array([phi, theta, psi])
  return ret
def ned_euler_from_ecef(ned_ecef_init, ecef_poses):
  '''
  Converts ECEF-frame euler orientations to NED-frame euler angles at the
  reference ECEF point(s) `ned_ecef_init`.

  Got the math from here:
  Using Rotations to Build Aerospace Coordinate Systems
  -Don Koks

  Also accepts array of ecef_poses and array of ned_ecef_inits.
  Where each row is a pose and an ecef_init.
  '''
  ned_ecef_init = array(ned_ecef_init)
  ecef_poses = array(ecef_poses)
  output_shape = ecef_poses.shape
  ned_ecef_init = np.atleast_2d(ned_ecef_init)
  # A single reference point is reused for every pose in the batch.
  if ned_ecef_init.shape[0] == 1:
    ned_ecef_init = np.tile(ned_ecef_init[0], (output_shape[0], 1))
  ecef_poses = np.atleast_2d(ecef_poses)

  ned_poses = np.zeros(ecef_poses.shape)
  for i, ecef_pose in enumerate(ecef_poses):
    converter = LocalCoord.from_ecef(ned_ecef_init[i])
    # Start from the ECEF basis and apply intrinsic yaw -> pitch -> roll.
    x0 = array([1, 0, 0])
    y0 = array([0, 1, 0])
    z0 = array([0, 0, 1])

    x1 = rot(z0, ecef_pose[2]).dot(x0)
    y1 = rot(z0, ecef_pose[2]).dot(y0)
    z1 = rot(z0, ecef_pose[2]).dot(z0)

    x2 = rot(y1, ecef_pose[1]).dot(x1)
    y2 = rot(y1, ecef_pose[1]).dot(y1)
    z2 = rot(y1, ecef_pose[1]).dot(z1)

    x3 = rot(x2, ecef_pose[0]).dot(x2)
    y3 = rot(x2, ecef_pose[0]).dot(y2)
    #z3 = rot(x2, ecef_pose[0]).dot(z2)

    # NED basis at the reference, as ECEF direction vectors.
    x0 = converter.ned2ecef([1, 0, 0]) - converter.ned2ecef([0, 0, 0])
    y0 = converter.ned2ecef([0, 1, 0]) - converter.ned2ecef([0, 0, 0])
    z0 = converter.ned2ecef([0, 0, 1]) - converter.ned2ecef([0, 0, 0])

    # Recover yaw (psi), pitch (theta), roll (phi) relative to NED.
    psi = np.arctan2(inner(x3, y0), inner(x3, x0))
    theta = np.arctan2(-inner(x3, z0), np.sqrt(inner(x3, x0)**2 + inner(x3, y0)**2))
    y2 = rot(z0, psi).dot(y0)
    z2 = rot(y2, theta).dot(z0)
    phi = np.arctan2(inner(y3, z2), inner(y3, y2))
    ned_poses[i] = array([phi, theta, psi])

  return ned_poses.reshape(output_shape)
def ecef2car(car_ecef, psi, theta, points_ecef, ned_converter):
  """
  TODO: add roll rotation
  Converts an array of points in ecef coordinates into
  x-forward, y-left, z-up coordinates
  Parameters
  ----------
  car_ecef: ECEF position of the car (origin of the car frame)
  psi: yaw, radian
  theta: pitch, radian
  points_ecef: iterable of ECEF points to convert
  ned_converter: LocalCoord-like object providing ecef2ned_matrix
  Returns
  -------
  [x, y, z] coordinates in car frame
  """
  # input is an array of points in ecef cocrdinates
  # output is an array of points in car's coordinate (x-front, y-left, z-up)

  # convert points to NED relative to the car position
  points_ned = []
  for p in points_ecef:
    points_ned.append(ned_converter.ecef2ned_matrix.dot(array(p) - car_ecef))

  points_ned = np.vstack(points_ned).T

  # n, e, d -> x, y, z
  # Calculate relative postions and rotate wrt to heading and pitch of car
  # invert_R flips NED (down-positive) into x-front/y-left/z-up axes.
  invert_R = array([[1., 0., 0.], [0., -1., 0.], [0., 0., -1.]])

  c, s = np.cos(psi), np.sin(psi)
  yaw_R = array([[c, s, 0.], [-s, c, 0.], [0., 0., 1.]])

  c, s = np.cos(theta), np.sin(theta)
  pitch_R = array([[c, 0., -s], [0., 1., 0.], [s, 0., c]])

  return dot(pitch_R, dot(yaw_R, dot(invert_R, points_ned)))

@ -0,0 +1,42 @@
import os
import numpy
import sysconfig
from Cython.Build import cythonize
from Cython.Distutils import build_ext
from distutils.core import Extension, setup # pylint: disable=import-error,no-name-in-module
def get_ext_filename_without_platform_suffix(filename):
  """Strip the interpreter/platform tag from an extension filename.

  e.g. 'foo.cpython-38-x86_64-linux-gnu.so' -> 'foo.so'. Returns the input
  unchanged when no platform tag is present.
  """
  root, ext = os.path.splitext(filename)
  suffix = sysconfig.get_config_var('EXT_SUFFIX')

  # EXT_SUFFIX may already be just the plain extension (no platform tag).
  if suffix == ext:
    return filename

  # The platform tag is EXT_SUFFIX with the trailing extension removed.
  tag = suffix.replace(ext, '')
  pos = root.find(tag)
  return filename if pos == -1 else root[:pos] + ext
class BuildExtWithoutPlatformSuffix(build_ext):
  # build_ext subclass whose output filenames drop the platform tag,
  # so the built module is e.g. 'transformations.so' on every platform.
  def get_ext_filename(self, ext_name):
    filename = super().get_ext_filename(ext_name)
    return get_ext_filename_without_platform_suffix(filename)
# Build the Cython wrapper for the C++ transformations code.
setup(
  name='Cython transformations wrapper',
  cmdclass={'build_ext': BuildExtWithoutPlatformSuffix},
  ext_modules=cythonize(
    Extension(
      "transformations",
      sources=["transformations.pyx"],
      language="c++",
      # the C++ sources use C++14 features
      extra_compile_args=["-std=c++14"],
      include_dirs=[numpy.get_include()],
    )
  ))

@ -11,12 +11,6 @@ geodetic_positions = np.array([[37.7610403, -122.4778699, 115],
[15.1392514, 103.6976037, 24],
[24.2302229, 44.2835412, 1650]])
geodetic_positions_radians = np.array([[0.65905448, -2.13764209, 115],
[0.47968789, -1.19706477, 2380],
[0.5670869, -1.98361593, -6],
[0.26422978, 1.80986461, 24],
[0.42289717, 0.7728936, 1650]])
ecef_positions = np.array([[-2711076.55270557, -4259167.14692758, 3884579.87669935],
[ 2068042.69652729, -5273435.40316622, 2927004.89190746],
[-2160412.60461669, -4932588.89873832, 3406542.29652851],
@ -50,7 +44,6 @@ ned_offsets_batch = np.array([[ 53.88103168, 43.83445935, -46.27488057],
[ 78.56272609, 18.53100158, -43.25290759]])
class TestNED(unittest.TestCase):
def test_small_distances(self):
start_geodetic = np.array([33.8042184, -117.888593, 0.0])
@ -72,18 +65,13 @@ class TestNED(unittest.TestCase):
def test_ecef_geodetic(self):
# testing single
np.testing.assert_allclose(ecef_positions[0], coord.geodetic2ecef(geodetic_positions[0]), rtol=1e-9)
np.testing.assert_allclose(geodetic_positions[0,:2], coord.ecef2geodetic(ecef_positions[0])[:2], rtol=1e-9)
np.testing.assert_allclose(geodetic_positions[0,2], coord.ecef2geodetic(ecef_positions[0])[2], rtol=1e-9, atol=1e-4)
np.testing.assert_allclose(geodetic_positions[0, :2], coord.ecef2geodetic(ecef_positions[0])[:2], rtol=1e-9)
np.testing.assert_allclose(geodetic_positions[0, 2], coord.ecef2geodetic(ecef_positions[0])[2], rtol=1e-9, atol=1e-4)
np.testing.assert_allclose(geodetic_positions[:,:2], coord.ecef2geodetic(ecef_positions)[:,:2], rtol=1e-9)
np.testing.assert_allclose(geodetic_positions[:,2], coord.ecef2geodetic(ecef_positions)[:,2], rtol=1e-9, atol=1e-4)
np.testing.assert_allclose(geodetic_positions[:, :2], coord.ecef2geodetic(ecef_positions)[:, :2], rtol=1e-9)
np.testing.assert_allclose(geodetic_positions[:, 2], coord.ecef2geodetic(ecef_positions)[:, 2], rtol=1e-9, atol=1e-4)
np.testing.assert_allclose(ecef_positions, coord.geodetic2ecef(geodetic_positions), rtol=1e-9)
np.testing.assert_allclose(geodetic_positions_radians[0], coord.ecef2geodetic(ecef_positions[0], radians=True), rtol=1e-5)
np.testing.assert_allclose(geodetic_positions_radians[:,:2], coord.ecef2geodetic(ecef_positions, radians=True)[:,:2], rtol=1e-7)
np.testing.assert_allclose(geodetic_positions_radians[:,2], coord.ecef2geodetic(ecef_positions, radians=True)[:,2], rtol=1e-7, atol=1e-4)
def test_ned(self):
for ecef_pos in ecef_positions:
@ -95,11 +83,10 @@ class TestNED(unittest.TestCase):
for geo_pos in geodetic_positions:
converter = coord.LocalCoord.from_geodetic(geo_pos)
geo_pos_moved = geo_pos + np.array([0, 0, 10])
geo_pos_double_converted_moved = converter.ned2geodetic(converter.geodetic2ned(geo_pos) + np.array([0,0,-10]))
geo_pos_double_converted_moved = converter.ned2geodetic(converter.geodetic2ned(geo_pos) + np.array([0, 0, -10]))
np.testing.assert_allclose(geo_pos_moved[:2], geo_pos_double_converted_moved[:2], rtol=1e-9, atol=1e-6)
np.testing.assert_allclose(geo_pos_moved[2], geo_pos_double_converted_moved[2], rtol=1e-9, atol=1e-4)
def test_ned_saved_results(self):
for i, ecef_pos in enumerate(ecef_positions):
converter = coord.LocalCoord.from_ecef(ecef_pos)

@ -61,8 +61,7 @@ class TestOrientation(unittest.TestCase):
for i in range(len(eulers)):
np.testing.assert_allclose(ned_eulers[i], ned_euler_from_ecef(ecef_positions[i], eulers[i]), rtol=1e-7)
#np.testing.assert_allclose(eulers[i], ecef_euler_from_ned(ecef_positions[i], ned_eulers[i]), rtol=1e-7)
np.testing.assert_allclose(ned_eulers, ned_euler_from_ecef(ecef_positions, eulers), rtol=1e-7)
# np.testing.assert_allclose(ned_eulers, ned_euler_from_ecef(ecef_positions, eulers), rtol=1e-7)
if __name__ == "__main__":

@ -0,0 +1,71 @@
from libcpp cimport bool

# Cython declarations for the C++ transformation helpers
# (orientation.cc / coordinates.cc), compiled directly into the module.

cdef extern from "orientation.cc":
  pass

cdef extern from "orientation.hpp":
  # Minimal views of the Eigen types used across the interface.
  cdef cppclass Quaternion "Eigen::Quaterniond":
    Quaternion()
    Quaternion(double, double, double, double)
    double w()
    double x()
    double y()
    double z()

  cdef cppclass Vector3 "Eigen::Vector3d":
    Vector3()
    Vector3(double, double, double)
    double operator()(int)

  cdef cppclass Matrix3 "Eigen::Matrix3d":
    Matrix3()
    Matrix3(double*)
    double operator()(int, int)

  # Orientation conversions implemented in orientation.cc.
  Quaternion euler2quat(Vector3)
  Vector3 quat2euler(Quaternion)
  Matrix3 quat2rot(Quaternion)
  Quaternion rot2quat(Matrix3)
  Vector3 rot2euler(Matrix3)
  Matrix3 euler2rot(Vector3)
  Matrix3 rot_matrix(double, double, double)
  Vector3 ecef_euler_from_ned(ECEF, Vector3)
  Vector3 ned_euler_from_ecef(ECEF, Vector3)

cdef extern from "coordinates.cc":
  # Plain-struct coordinate representations.
  cdef struct ECEF:
    double x
    double y
    double z

  cdef struct NED:
    double n
    double e
    double d

  cdef struct Geodetic:
    double lat
    double lon
    double alt
    bool radians

  ECEF geodetic2ecef(Geodetic)
  Geodetic ecef2geodetic(ECEF)

  # Frame converter anchored at a reference point.
  cdef cppclass LocalCoord_c "LocalCoord":
    Matrix3 ned2ecef_matrix
    Matrix3 ecef2ned_matrix
    LocalCoord_c(Geodetic, ECEF)
    LocalCoord_c(Geodetic)
    LocalCoord_c(ECEF)
    NED ecef2ned(ECEF)
    ECEF ned2ecef(NED)
    NED geodetic2ned(Geodetic)
    Geodetic ned2geodetic(NED)

cdef extern from "coordinates.hpp":
  pass

@ -0,0 +1,172 @@
from transformations cimport Matrix3, Vector3, Quaternion
from transformations cimport ECEF, NED, Geodetic
from transformations cimport euler2quat as euler2quat_c
from transformations cimport quat2euler as quat2euler_c
from transformations cimport quat2rot as quat2rot_c
from transformations cimport rot2quat as rot2quat_c
from transformations cimport euler2rot as euler2rot_c
from transformations cimport rot2euler as rot2euler_c
from transformations cimport rot_matrix as rot_matrix_c
from transformations cimport ecef_euler_from_ned as ecef_euler_from_ned_c
from transformations cimport ned_euler_from_ecef as ned_euler_from_ecef_c
from transformations cimport geodetic2ecef as geodetic2ecef_c
from transformations cimport ecef2geodetic as ecef2geodetic_c
from transformations cimport LocalCoord_c
import cython
import numpy as np
cimport numpy as np
# Conversion helpers between C++ types and Python/numpy values.

cdef np.ndarray[double, ndim=2] matrix2numpy(Matrix3 m):
  # Copy an Eigen 3x3 matrix into a numpy array, element by element.
  return np.array([
    [m(0, 0), m(0, 1), m(0, 2)],
    [m(1, 0), m(1, 1), m(1, 2)],
    [m(2, 0), m(2, 1), m(2, 2)],
  ])

cdef Matrix3 numpy2matrix (np.ndarray[double, ndim=2, mode="fortran"] m):
  # Build an Eigen matrix from a fortran-ordered (column-major, matching
  # Eigen's default) 3x3 numpy array.
  assert m.shape[0] == 3
  assert m.shape[1] == 3
  return Matrix3(<double*>m.data)

cdef ECEF list2ecef(ecef):
  # Any indexable [x, y, z] -> ECEF struct.
  cdef ECEF e;
  e.x = ecef[0]
  e.y = ecef[1]
  e.z = ecef[2]
  return e

cdef NED list2ned(ned):
  # Any indexable [n, e, d] -> NED struct.
  cdef NED n;
  n.n = ned[0]
  n.e = ned[1]
  n.d = ned[2]
  return n

cdef Geodetic list2geodetic(geodetic):
  # Any indexable [lat, lon, alt] -> Geodetic struct.
  # NOTE(review): the struct's `radians` flag is left uninitialized here —
  # presumably callers only pass degree-based values; confirm.
  cdef Geodetic g
  g.lat = geodetic[0]
  g.lon = geodetic[1]
  g.alt = geodetic[2]
  return g
# Thin single-item Python wrappers around the C++ conversions. Each takes
# plain sequences/arrays and returns Python lists (or numpy for matrices).

def euler2quat_single(euler):
  # [roll, pitch, yaw] -> quaternion [w, x, y, z]
  cdef Vector3 e = Vector3(euler[0], euler[1], euler[2])
  cdef Quaternion q = euler2quat_c(e)
  return [q.w(), q.x(), q.y(), q.z()]

def quat2euler_single(quat):
  # quaternion [w, x, y, z] -> [roll, pitch, yaw]
  cdef Quaternion q = Quaternion(quat[0], quat[1], quat[2], quat[3])
  cdef Vector3 e = quat2euler_c(q);
  return [e(0), e(1), e(2)]

def quat2rot_single(quat):
  # quaternion -> 3x3 rotation matrix (numpy)
  cdef Quaternion q = Quaternion(quat[0], quat[1], quat[2], quat[3])
  cdef Matrix3 r = quat2rot_c(q)
  return matrix2numpy(r)

def rot2quat_single(rot):
  # 3x3 rotation matrix -> quaternion [w, x, y, z]
  cdef Matrix3 r = numpy2matrix(np.asfortranarray(rot, dtype=np.double))
  cdef Quaternion q = rot2quat_c(r)
  return [q.w(), q.x(), q.y(), q.z()]

def euler2rot_single(euler):
  # [roll, pitch, yaw] -> 3x3 rotation matrix (numpy)
  cdef Vector3 e = Vector3(euler[0], euler[1], euler[2])
  cdef Matrix3 r = euler2rot_c(e)
  return matrix2numpy(r)

def rot2euler_single(rot):
  # 3x3 rotation matrix -> [roll, pitch, yaw]
  cdef Matrix3 r = numpy2matrix(np.asfortranarray(rot, dtype=np.double))
  cdef Vector3 e = rot2euler_c(r)
  return [e(0), e(1), e(2)]

def rot_matrix(roll, pitch, yaw):
  # rotation matrix from individual roll/pitch/yaw angles
  return matrix2numpy(rot_matrix_c(roll, pitch, yaw))

def ecef_euler_from_ned_single(ecef_init, ned_pose):
  # NED euler orientation at ecef_init -> ECEF euler orientation
  cdef ECEF init = list2ecef(ecef_init)
  cdef Vector3 pose = Vector3(ned_pose[0], ned_pose[1], ned_pose[2])
  cdef Vector3 e = ecef_euler_from_ned_c(init, pose)
  return [e(0), e(1), e(2)]

def ned_euler_from_ecef_single(ecef_init, ecef_pose):
  # ECEF euler orientation -> NED euler orientation at ecef_init
  cdef ECEF init = list2ecef(ecef_init)
  cdef Vector3 pose = Vector3(ecef_pose[0], ecef_pose[1], ecef_pose[2])
  cdef Vector3 e = ned_euler_from_ecef_c(init, pose)
  return [e(0), e(1), e(2)]

def geodetic2ecef_single(geodetic):
  # [lat, lon, alt] -> [x, y, z]
  cdef Geodetic g = list2geodetic(geodetic)
  cdef ECEF e = geodetic2ecef_c(g)
  return [e.x, e.y, e.z]

def ecef2geodetic_single(ecef):
  # [x, y, z] -> [lat, lon, alt]
  cdef ECEF e = list2ecef(ecef)
  cdef Geodetic g = ecef2geodetic_c(e)
  return [g.lat, g.lon, g.alt]
cdef class LocalCoord:
  # Python wrapper around the C++ LocalCoord frame converter, anchored at a
  # geodetic or ECEF reference point. Owns the underlying C++ object.
  cdef LocalCoord_c * lc

  def __init__(self, geodetic=None, ecef=None):
    # Exactly one of geodetic/ecef must be given; geodetic wins if both are.
    assert (geodetic is not None) or (ecef is not None)
    if geodetic is not None:
      self.lc = new LocalCoord_c(list2geodetic(geodetic))
    elif ecef is not None:
      self.lc = new LocalCoord_c(list2ecef(ecef))

  @property
  def ned2ecef_matrix(self):
    return matrix2numpy(self.lc.ned2ecef_matrix)

  @property
  def ecef2ned_matrix(self):
    return matrix2numpy(self.lc.ecef2ned_matrix)

  @property
  def ned_from_ecef_matrix(self):
    # alias for ecef2ned_matrix (y_from_x naming)
    return self.ecef2ned_matrix

  @property
  def ecef_from_ned_matrix(self):
    # alias for ned2ecef_matrix (y_from_x naming)
    return self.ned2ecef_matrix

  @classmethod
  def from_geodetic(cls, geodetic):
    return cls(geodetic=geodetic)

  @classmethod
  def from_ecef(cls, ecef):
    return cls(ecef=ecef)

  def ecef2ned_single(self, ecef):
    # [x, y, z] -> [n, e, d] relative to the reference point
    assert self.lc
    cdef ECEF e = list2ecef(ecef)
    cdef NED n = self.lc.ecef2ned(e)
    return [n.n, n.e, n.d]

  def ned2ecef_single(self, ned):
    # [n, e, d] -> [x, y, z]
    assert self.lc
    cdef NED n = list2ned(ned)
    cdef ECEF e = self.lc.ned2ecef(n)
    return [e.x, e.y, e.z]

  def geodetic2ned_single(self, geodetic):
    # [lat, lon, alt] -> [n, e, d]
    assert self.lc
    cdef Geodetic g = list2geodetic(geodetic)
    cdef NED n = self.lc.geodetic2ned(g)
    return [n.n, n.e, n.d]

  def ned2geodetic_single(self, ned):
    # [n, e, d] -> [lat, lon, alt]
    assert self.lc
    cdef NED n = list2ned(ned)
    cdef Geodetic g = self.lc.ned2geodetic(n)
    return [g.lat, g.lon, g.alt]

  def __dealloc__(self):
    # free the owned C++ converter
    del self.lc

@ -1,35 +1,35 @@
import sys
import pygame
import pygame # pylint: disable=import-error
import cv2 # pylint: disable=import-error
class Window():
def __init__(self, w, h, caption="window", double=False):
self.w = w
self.h = h
pygame.init()
pygame.display.init()
pygame.display.set_caption(caption)
self.double = double
if self.double:
self.screen = pygame.display.set_mode((w*2,h*2), pygame.DOUBLEBUF)
self.screen = pygame.display.set_mode((w*2, h*2))
else:
self.screen = pygame.display.set_mode((w,h), pygame.DOUBLEBUF)
self.camera_surface = pygame.surface.Surface((w,h), 0, 24).convert()
self.screen = pygame.display.set_mode((w, h))
def draw(self, out):
pygame.surfarray.blit_array(self.camera_surface, out.swapaxes(0,1))
pygame.event.pump()
if self.double:
camera_surface_2x = pygame.transform.scale2x(self.camera_surface)
self.screen.blit(camera_surface_2x, (0, 0))
out2 = cv2.resize(out, (self.w*2, self.h*2))
pygame.surfarray.blit_array(self.screen, out2.swapaxes(0, 1))
else:
self.screen.blit(self.camera_surface, (0, 0))
pygame.surfarray.blit_array(self.screen, out.swapaxes(0, 1))
pygame.display.flip()
def getkey(self):
while 1:
event = pygame.event.wait()
if event.type == QUIT:
if event.type == pygame.QUIT:
pygame.quit()
sys.exit()
if event.type == KEYDOWN:
if event.type == pygame.KEYDOWN:
return event.key
def getclick(self):
@ -40,10 +40,9 @@ class Window():
if __name__ == "__main__":
import numpy as np
win = Window(200, 200)
img = np.zeros((200,200,3), np.uint8)
win = Window(200, 200, double=True)
img = np.zeros((200, 200, 3), np.uint8)
while 1:
print("draw")
img += 1
win.draw(img)

Binary file not shown.

@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:e3c75edadf2e267be8a18870cafc911ea23d3b52dcff07fbe025edf815c06f5d
size 68732

@ -1 +0,0 @@
/opt/intel/opencl-1.2-6.4.0.37/lib64/libintelocl.so

@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:381814ea42344b895624597bf31c24ebc13f3449aaaaf65245f4a041d953b4c6
size 17276236

@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:25aaa33f5c338b6dcc33436fdebb5e6ad727cf85a9fae921be8d3b834166ab01
size 11432

@ -1,3 +0,0 @@
version https://git-lfs.github.com/spec/v1
oid sha256:f7300ebee63820b519c54a50c93847516dcaf37765a698826fde666990747459
size 20290860

@ -101,7 +101,7 @@ def get_host_binary_path(binary_name):
elif '.' not in binary_name:
binary_name += '.exe'
dir = os.path.join(dir, 'windows')
elif sys.platform == 'darwin': # OSX
elif sys.platform == 'darwin': # OSX
if binary_name.endswith('.so'):
binary_name = binary_name[0:-3] + '.dylib'
dir = os.path.join(dir, 'darwin')
@ -220,11 +220,9 @@ class AdbHelper(object):
self.adb_path = adb_path
self.enable_switch_to_root = enable_switch_to_root
def run(self, adb_args):
return self.run_and_return_output(adb_args)[0]
def run_and_return_output(self, adb_args, stdout_file=None, log_output=True):
adb_args = [self.adb_path] + adb_args
log_debug('run adb cmd: %s' % adb_args)
@ -247,14 +245,12 @@ class AdbHelper(object):
def check_run(self, adb_args):
self.check_run_and_return_output(adb_args)
def check_run_and_return_output(self, adb_args, stdout_file=None, log_output=True):
result, stdoutdata = self.run_and_return_output(adb_args, stdout_file, log_output)
if not result:
log_exit('run "adb %s" failed' % adb_args)
return stdoutdata
def _unroot(self):
result, stdoutdata = self.run_and_return_output(['shell', 'whoami'])
if not result:
@ -266,7 +262,6 @@ class AdbHelper(object):
self.run(['wait-for-device'])
time.sleep(1)
def switch_to_root(self):
if not self.enable_switch_to_root:
self._unroot()
@ -292,7 +287,6 @@ class AdbHelper(object):
def set_property(self, name, value):
return self.run(['shell', 'setprop', name, value])
def get_device_arch(self):
output = self.check_run_and_return_output(['shell', 'uname', '-m'])
if 'aarch64' in output:
@ -305,7 +299,6 @@ class AdbHelper(object):
return 'x86'
log_fatal('unsupported architecture: %s' % output.strip())
def get_android_version(self):
build_version = self.get_property('ro.build.version.release')
android_version = 0

@ -1,9 +0,0 @@
#!/usr/bin/env bash
# Run a pyflakes-only lint pass (flake8 --select=F) over the python files
# listed in the release manifests plus the cereal/opendbc trees.
# NOTE: no `set -e` — flake8 exits non-zero when it finds issues, and we
# want to print them before exiting ourselves.
set -u

RESULT=$(python3 -m flake8 --select=F $(eval echo $(cat <(find cereal) <(find opendbc) release/files_common release/files_common | tr '\n' ' ') | tr ' ' '\n' | grep "\.py$"))
if [[ -n "$RESULT" ]]; then
  # diagnostics belong on stderr
  echo "Pyflakes found errors in the code. Please fix and try again" >&2
  echo "$RESULT" >&2
  exit 1
fi

@ -0,0 +1,82 @@
#!/usr/bin/env python3
import os
import shutil
import subprocess
import tempfile
import time
import unittest
from common.basedir import BASEDIR
# Paths to the NEOS updater binary, its source directory, and the update
# manifest checked into the repo.
UPDATER_PATH = os.path.join(BASEDIR, "installer/updater")
UPDATER = os.path.join(UPDATER_PATH, "updater")
UPDATE_MANIFEST = os.path.join(UPDATER_PATH, "update.json")
class TestUpdater(unittest.TestCase):
  """On-device tests for the NEOS updater binary (download cache + UI)."""

  @classmethod
  def setUpClass(cls):
    # test that the updater builds
    # BUG fix: was `cls.assertTrue(f"cd ...", msg)`, which asserted a truthy
    # string instead of running the build.
    build_rc = os.system(f"cd {UPDATER_PATH} && make clean && make")
    assert build_rc == 0, "updater failed to build"

    # restore the checked-in binary, since that's what actually runs on devices
    # BUG fix: `git reset --hard <path>` is invalid (hard reset takes no paths);
    # checkout restores the tracked files under UPDATER_PATH.
    os.system(f"git checkout -- {UPDATER_PATH}")

  def setUp(self):
    self._clear_dir()

  def tearDown(self):
    self._clear_dir()

  def _clear_dir(self):
    # wipe the updater's download cache between tests
    if os.path.isdir("/data/neoupdate"):
      shutil.rmtree("/data/neoupdate")

  def _assert_ok(self, cmd, msg=None):
    self.assertTrue(os.system(cmd) == 0, msg)

  def _assert_fails(self, cmd):
    self.assertFalse(os.system(cmd) == 0)

  def test_background_download(self):
    self._assert_ok(f"{UPDATER} bgcache 'file://{UPDATE_MANIFEST}'")

  def test_background_download_bad_manifest(self):
    # update with bad manifest should fail
    with tempfile.NamedTemporaryFile(mode="w", suffix=".json") as f:
      f.write("{}")
      f.flush()  # ensure the updater sees the contents, not an empty file
      self._assert_fails(f"{UPDATER} bgcache 'file://{f.name}'")

  def test_cache_resume(self):
    self._assert_ok(f"{UPDATER} bgcache 'file://{UPDATE_MANIFEST}'")
    # a full download takes >1m, but resuming from fully cached should only be a few seconds
    start_time = time.monotonic()
    self._assert_ok(f"{UPDATER} bgcache 'file://{UPDATE_MANIFEST}'")
    self.assertLess(time.monotonic() - start_time, 10)

  # make sure we can recover from corrupt downloads
  def test_recover_from_corrupt(self):
    # download the whole update
    self._assert_ok(f"{UPDATER} bgcache 'file://{UPDATE_MANIFEST}'")

    # append some random bytes to corrupt every cached file
    # (fixed: loop variable previously shadowed by the file handle)
    for fn in os.listdir("/data/neoupdate"):
      with open(os.path.join("/data/neoupdate", fn), "ab") as f:
        f.write(b"\xab" * 20)

    # this attempt should fail, then it unlinks
    self._assert_fails(f"{UPDATER} bgcache 'file://{UPDATE_MANIFEST}'")

    # now it should pass
    self._assert_ok(f"{UPDATER} bgcache 'file://{UPDATE_MANIFEST}'")

  # simple test that the updater doesn't crash in UI mode
  def test_ui_init(self):
    with subprocess.Popen(UPDATER) as proc:
      time.sleep(5)
      self.assertTrue(proc.poll() is None)
      proc.terminate()

@ -0,0 +1,7 @@
{
"ota_url": "https://commadist.azureedge.net/neosupdate/ota-signed-3bd2b3bdd6a501569e00b8f12786d65e0fd2788c0dd238f8c986e3e2e504683a-kernel.zip",
"ota_hash": "3bd2b3bdd6a501569e00b8f12786d65e0fd2788c0dd238f8c986e3e2e504683a",
"recovery_url": "https://commadist.azureedge.net/neosupdate/recovery-97c27e6ed04ed6bb0608b845a2d4100912093f9380c3f2ba6b56bccd608e5f6e.img",
"recovery_len": 15861036,
"recovery_hash": "97c27e6ed04ed6bb0608b845a2d4100912093f9380c3f2ba6b56bccd608e5f6e"
}

Binary file not shown.

@ -10,6 +10,7 @@
#include <string>
#include <sstream>
#include <fstream>
#include <iostream>
#include <mutex>
#include <thread>
@ -33,10 +34,10 @@
#define USER_AGENT "NEOSUpdater-0.2"
#define MANIFEST_URL_EON_STAGING "https://github.com/commaai/eon-neos/raw/master/update.staging.json"
#define MANIFEST_URL_EON_LOCAL "http://192.168.5.1:8000/neosupdate/update.local.json"
#define MANIFEST_URL_EON "https://github.com/commaai/eon-neos/raw/master/update.json"
const char *manifest_url = MANIFEST_URL_EON;
#define MANIFEST_URL_NEOS_STAGING "https://github.com/commaai/eon-neos/raw/master/update.staging.json"
#define MANIFEST_URL_NEOS_LOCAL "http://192.168.5.1:8000/neosupdate/update.local.json"
#define MANIFEST_URL_NEOS "https://github.com/commaai/eon-neos/raw/master/update.json"
const char *manifest_url = MANIFEST_URL_NEOS;
#define RECOVERY_DEV "/dev/block/bootdevice/by-name/recovery"
#define RECOVERY_COMMAND "/cache/recovery/command"
@ -96,7 +97,7 @@ std::string download_string(CURL *curl, std::string url) {
curl_easy_setopt(curl, CURLOPT_URL, url.c_str());
curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1);
curl_easy_setopt(curl, CURLOPT_NOSIGNAL, 1);
curl_easy_setopt(curl, CURLOPT_NOSIGNAL, 0);
curl_easy_setopt(curl, CURLOPT_USERAGENT, USER_AGENT);
curl_easy_setopt(curl, CURLOPT_FAILONERROR, 1);
curl_easy_setopt(curl, CURLOPT_RESUME_FROM, 0);
@ -149,6 +150,32 @@ static void start_settings_activity(const char* name) {
system(launch_cmd);
}
// Returns true when an Android window currently has focus, by scraping
// `dumpsys window windows` for the mCurrentFocus line. Returns false on
// popen failure or when focus is null.
bool is_settings_active() {
  FILE *fp;
  char sys_output[4096];

  fp = popen("/bin/dumpsys window windows", "r");
  if (fp == NULL) {
    return false;
  }

  bool active = false;
  while (fgets(sys_output, sizeof(sys_output), fp) != NULL) {
    if (strstr(sys_output, "mCurrentFocus=null") != NULL) {
      // no focused window at all
      break;
    }

    if (strstr(sys_output, "mCurrentFocus=Window") != NULL) {
      active = true;
      break;
    }
  }

  pclose(fp);

  return active;
}
struct Updater {
bool do_exit = false;
@ -166,7 +193,6 @@ struct Updater {
std::mutex lock;
// i hate state machines give me coroutines already
enum UpdateState {
CONFIRMATION,
LOW_BATTERY,
@ -190,9 +216,15 @@ struct Updater {
int b_x, b_w, b_y, b_h;
int balt_x;
// download stage writes these for the installation stage
int recovery_len;
std::string recovery_hash;
std::string recovery_fn;
std::string ota_fn;
CURL *curl = NULL;
Updater() {
void ui_init() {
touch_init(&touch);
fb = framebuffer_init("updater", 0x00001000, false,
@ -218,7 +250,6 @@ struct Updater {
b_h = 220;
state = CONFIRMATION;
}
int download_file_xferinfo(curl_off_t dltotal, curl_off_t dlno,
@ -251,7 +282,7 @@ struct Updater {
curl_easy_setopt(curl, CURLOPT_URL, url.c_str());
curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1);
curl_easy_setopt(curl, CURLOPT_NOSIGNAL, 1);
curl_easy_setopt(curl, CURLOPT_NOSIGNAL, 0);
curl_easy_setopt(curl, CURLOPT_USERAGENT, USER_AGENT);
curl_easy_setopt(curl, CURLOPT_FAILONERROR, 1);
curl_easy_setopt(curl, CURLOPT_RESUME_FROM, resume_from);
@ -319,92 +350,78 @@ struct Updater {
state = RUNNING;
}
std::string stage_download(std::string url, std::string hash, std::string name) {
std::string download(std::string url, std::string hash, std::string name) {
std::string out_fn = UPDATE_DIR "/" + util::base_name(url);
set_progress("Downloading " + name + "...");
bool r = download_file(url, out_fn);
if (!r) {
set_error("failed to download " + name);
return "";
// start or resume downloading if hash doesn't match
std::string fn_hash = sha256_file(out_fn);
if (hash.compare(fn_hash) != 0) {
set_progress("Downloading " + name + "...");
bool r = download_file(url, out_fn);
if (!r) {
set_error("failed to download " + name);
unlink(out_fn.c_str());
return "";
}
fn_hash = sha256_file(out_fn);
}
set_progress("Verifying " + name + "...");
std::string fn_hash = sha256_file(out_fn);
printf("got %s hash: %s\n", name.c_str(), hash.c_str());
if (fn_hash != hash) {
set_error(name + " was corrupt");
unlink(out_fn.c_str());
return "";
}
return out_fn;
}
void run_stages() {
bool download_stage() {
curl = curl_easy_init();
assert(curl);
if (!check_battery()) {
set_battery_low();
int battery_cap = battery_capacity();
while(battery_cap < min_battery_cap) {
battery_cap = battery_capacity();
battery_cap_text = std::to_string(battery_cap);
usleep(1000000);
}
set_running();
}
// ** quick checks before download **
if (!check_space()) {
set_error("2GB of free space required to update");
return;
return false;
}
mkdir(UPDATE_DIR, 0777);
const int EON = (access("/EON", F_OK) != -1);
set_progress("Finding latest version...");
std::string manifest_s;
if (EON) {
manifest_s = download_string(curl, manifest_url);
} else {
// don't update NEO
exit(0);
}
std::string manifest_s = download_string(curl, manifest_url);
printf("manifest: %s\n", manifest_s.c_str());
std::string err;
auto manifest = json11::Json::parse(manifest_s, err);
if (manifest.is_null() || !err.empty()) {
set_error("failed to load update manifest");
return;
return false;
}
std::string ota_url = manifest["ota_url"].string_value();
std::string ota_hash = manifest["ota_hash"].string_value();
std::string recovery_url = manifest["recovery_url"].string_value();
std::string recovery_hash = manifest["recovery_hash"].string_value();
int recovery_len = manifest["recovery_len"].int_value();
recovery_hash = manifest["recovery_hash"].string_value();
recovery_len = manifest["recovery_len"].int_value();
// std::string installer_url = manifest["installer_url"].string_value();
// std::string installer_hash = manifest["installer_hash"].string_value();
if (ota_url.empty() || ota_hash.empty()) {
set_error("invalid update manifest");
return;
return false;
}
// std::string installer_fn = stage_download(installer_url, installer_hash, "installer");
// std::string installer_fn = download(installer_url, installer_hash, "installer");
// if (installer_fn.empty()) {
// //error'd
// return;
// }
std::string recovery_fn;
// ** handle recovery download **
if (recovery_url.empty() || recovery_hash.empty() || recovery_len == 0) {
set_progress("Skipping recovery flash...");
} else {
@ -414,20 +431,50 @@ struct Updater {
printf("existing recovery hash: %s\n", existing_recovery_hash.c_str());
if (existing_recovery_hash != recovery_hash) {
recovery_fn = stage_download(recovery_url, recovery_hash, "recovery");
recovery_fn = download(recovery_url, recovery_hash, "recovery");
if (recovery_fn.empty()) {
// error'd
return;
return false;
}
}
}
std::string ota_fn = stage_download(ota_url, ota_hash, "update");
// ** handle ota download **
ota_fn = download(ota_url, ota_hash, "update");
if (ota_fn.empty()) {
//error'd
return false;
}
// download successful
return true;
}
// thread that handles downloading and installing the update
void run_stages() {
printf("run_stages start\n");
// ** download update **
if (!check_battery()) {
set_battery_low();
int battery_cap = battery_capacity();
while(battery_cap < min_battery_cap) {
battery_cap = battery_capacity();
battery_cap_text = std::to_string(battery_cap);
usleep(1000000);
}
set_running();
}
bool sucess = download_stage();
if (!sucess) {
return;
}
// ** install update **
if (!check_battery()) {
set_battery_low();
int battery_cap = battery_capacity();
@ -601,7 +648,7 @@ struct Updater {
int powerprompt_y = 312;
nvgFontFace(vg, "opensans_regular");
nvgFontSize(vg, 64.0f);
nvgText(vg, fb_w/2, 740, "Ensure EON is connected to power.", NULL);
nvgText(vg, fb_w/2, 740, "Ensure your device remains connected to a power source.", NULL);
NVGpaint paint = nvgBoxGradient(
vg, progress_x + 1, progress_y + 1,
@ -657,9 +704,7 @@ struct Updater {
void ui_update() {
std::lock_guard<std::mutex> guard(lock);
switch (state) {
case ERROR:
case CONFIRMATION: {
if (state == ERROR || state == CONFIRMATION) {
int touch_x = -1, touch_y = -1;
int res = touch_poll(&touch, &touch_x, &touch_y, 0);
if (res == 1 && !is_settings_active()) {
@ -678,13 +723,11 @@ struct Updater {
}
}
}
default:
break;
}
}
void go() {
ui_init();
while (!do_exit) {
ui_update();
@ -718,51 +761,37 @@ struct Updater {
update_thread_handle.join();
}
// reboot
system("service call power 16 i32 0 i32 0 i32 1");
}
// Heuristic check for whether an Android window (e.g. Settings) currently
// has focus, by scraping `dumpsys window windows` output for the
// "mCurrentFocus" line. Used to ignore touches while another app is up.
// NOTE(review): depends on Android's dumpsys output format — confirm it
// still matches on the target NEOS build.
bool is_settings_active() {
FILE *fp;
char sys_output[4096];
// popen can fail (fork/exec); treat failure as "no window active"
fp = popen("/bin/dumpsys window windows", "r");
if (fp == NULL) {
return false;
}
bool active = false;
while (fgets(sys_output, sizeof(sys_output), fp) != NULL) {
// no focused window at all -> nothing is covering the updater UI
if (strstr(sys_output, "mCurrentFocus=null") != NULL) {
break;
}
// a focused Window entry means some app window is in the foreground
if (strstr(sys_output, "mCurrentFocus=Window") != NULL) {
active = true;
break;
}
}
pclose(fp);
return active;
}
};
}
int main(int argc, char *argv[]) {
bool background_cache = false;
if (argc > 1) {
if (strcmp(argv[1], "local") == 0) {
manifest_url = MANIFEST_URL_EON_LOCAL;
manifest_url = MANIFEST_URL_NEOS_LOCAL;
} else if (strcmp(argv[1], "staging") == 0) {
manifest_url = MANIFEST_URL_EON_STAGING;
manifest_url = MANIFEST_URL_NEOS_STAGING;
} else if (strcmp(argv[1], "bgcache") == 0) {
manifest_url = argv[2];
background_cache = true;
} else {
manifest_url = argv[1];
}
}
printf("updating from %s\n", manifest_url);
Updater updater;
updater.go();
return 0;
int err = 0;
if (background_cache) {
err = !updater.download_stage();
} else {
updater.go();
}
return err;
}

@ -1 +1 @@
Subproject commit d172b27f4f346e642802a4c7cbf14405a4161d35
Subproject commit 765c6584c3d7f27e1af0f1180cc29766d5319f09

@ -1,25 +1,20 @@
#!/usr/bin/bash
export OMP_NUM_THREADS=1
export MKL_NUM_THREADS=1
export NUMEXPR_NUM_THREADS=1
export OPENBLAS_NUM_THREADS=1
export VECLIB_MAXIMUM_THREADS=1
if [ -z "$BASEDIR" ]; then
BASEDIR="/data/openpilot"
fi
if [ -z "$PASSIVE" ]; then
export PASSIVE="1"
fi
source "$BASEDIR/launch_env.sh"
STAGING_ROOT="/data/safe_staging"
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
function launch {
# Wifi scan
wpa_cli IFNAME=wlan0 SCAN
# Remove orphaned git lock if it exists on boot
[ -f "$DIR/.git/index.lock" ] && rm -f $DIR/.git/index.lock
# Check to see if there's a valid overlay-based update available. Conditions
# are as follows:
#
@ -41,11 +36,15 @@ function launch {
mv $BASEDIR /data/safe_staging/old_openpilot
mv "${STAGING_ROOT}/finalized" $BASEDIR
cd $BASEDIR
# The mv changed our working directory to /data/safe_staging/old_openpilot
cd "${BASEDIR}"
# Partial mitigation for symlink-related filesystem corruption
# Ensure all files match the repo versions after update
git reset --hard
git submodule foreach --recursive git reset --hard
echo "Restarting launch script ${LAUNCHER_LOCATION}"
unset REQUIRED_NEOS_VERSION
exec "${LAUNCHER_LOCATION}"
else
echo "openpilot backup found, not updating"
@ -55,36 +54,57 @@ function launch {
fi
fi
# no cpu rationing for now
echo 0-3 > /dev/cpuset/background/cpus
echo 0-3 > /dev/cpuset/system-background/cpus
echo 0-3 > /dev/cpuset/foreground/boost/cpus
echo 0-3 > /dev/cpuset/foreground/cpus
echo 0-3 > /dev/cpuset/android/cpus
# change interrupt affinity
echo 3 > /proc/irq/6/smp_affinity_list # MDSS
echo 1 > /proc/irq/78/smp_affinity_list # Modem, can potentially lock up
echo 2 > /proc/irq/733/smp_affinity_list # USB
echo 2 > /proc/irq/736/smp_affinity_list # USB
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null && pwd )"
# Remove old NEOS update file
# TODO: move this code to the updater
if [ -d /data/neoupdate ]; then
rm -rf /data/neoupdate
fi
# Android and other system processes are not permitted to run on CPU 3
# NEOS installed app processes can run anywhere
echo 0-2 > /dev/cpuset/background/cpus
echo 0-2 > /dev/cpuset/system-background/cpus
[ -d "/dev/cpuset/foreground/boost/cpus" ] && echo 0-2 > /dev/cpuset/foreground/boost/cpus # Not present in < NEOS 15
echo 0-2 > /dev/cpuset/foreground/cpus
echo 0-2 > /dev/cpuset/android/cpus
echo 0-3 > /dev/cpuset/app/cpus
# Collect RIL and other possibly long-running I/O interrupts onto CPU 1
echo 1 > /proc/irq/78/smp_affinity_list # qcom,smd-modem (LTE radio)
echo 1 > /proc/irq/33/smp_affinity_list # ufshcd (flash storage)
echo 1 > /proc/irq/35/smp_affinity_list # wifi (wlan_pci)
# USB traffic needs realtime handling on cpu 3
[ -d "/proc/irq/733" ] && echo 3 > /proc/irq/733/smp_affinity_list # USB for LeEco
[ -d "/proc/irq/736" ] && echo 3 > /proc/irq/736/smp_affinity_list # USB for OP3T
# Check for NEOS update
if [ $(< /VERSION) != "14" ]; then
if [ $(< /VERSION) != "$REQUIRED_NEOS_VERSION" ]; then
if [ -f "$DIR/scripts/continue.sh" ]; then
cp "$DIR/scripts/continue.sh" "/data/data/com.termux/files/continue.sh"
fi
if [ ! -f "$BASEDIR/prebuilt" ]; then
# Clean old build products, but preserve the scons cache
cd $DIR
scons --clean
git clean -xdf
git submodule foreach --recursive git clean -xdf
fi
"$DIR/installer/updater/updater" "file://$DIR/installer/updater/update.json"
else
if [[ $(uname -v) == "#1 SMP PREEMPT Wed Jun 10 12:40:53 PDT 2020" ]]; then
"$DIR/installer/updater/updater" "file://$DIR/installer/updater/update_kernel.json"
fi
fi
# One-time fix for a subset of OP3T with gyro orientation offsets.
# Remove and regenerate qcom sensor registry. Only done on OP3T mainboards.
# Performed exactly once. The old registry is preserved just-in-case, and
# doubles as a flag denoting we've already done the reset.
# TODO: we should really grow per-platform detect and setup routines
if ! $(grep -q "letv" /proc/cmdline) && [ ! -f "/persist/comma/op3t-sns-reg-backup" ]; then
echo "Performing OP3T sensor registry reset"
mv /persist/sensors/sns.reg /persist/comma/op3t-sns-reg-backup &&
rm -f /persist/sensors/sensors_settings /persist/sensors/error_log /persist/sensors/gyro_sensitity_cal &&
echo "restart" > /sys/kernel/debug/msm_subsys/slpi &&
sleep 5 # Give Android sensor subsystem a moment to recover
fi
# handle pythonpath
ln -sfn $(pwd) /data/pythonpath

@ -0,0 +1,17 @@
#!/usr/bin/bash
# Shared environment for the openpilot launch scripts (meant to be sourced).

# Pin every numeric library to a single thread: openpilot manages its own
# process-level parallelism, and BLAS/OpenMP thread pools only add jitter.
for _threads_var in OMP_NUM_THREADS MKL_NUM_THREADS NUMEXPR_NUM_THREADS OPENBLAS_NUM_THREADS VECLIB_MAXIMUM_THREADS; do
  export "${_threads_var}=1"
done
unset _threads_var

# Defaults that callers may override via the environment. ${var:-default}
# applies when the variable is unset OR empty, matching the old -z checks.
export REQUIRED_NEOS_VERSION="${REQUIRED_NEOS_VERSION:-14}"
export PASSIVE="${PASSIVE:-1}"

# Root for overlay-based safe staging of updates.
export STAGING_ROOT="/data/safe_staging"

@ -1 +1 @@
43221d85-46fd-40b9-bff0-2b1b18a86b07
e96f9be6-5741-42ea-bdcd-0be6515b4230

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:5c39a2096f7058541b5339ec36bc4c468955e67285078080ed6d8802fed06c1d
size 814176
oid sha256:09aa11a17a5a8173e231071898c499f9ea632e6e64285586122828b1bbc70d41
size 4165968

@ -1,3 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:29504dfd101ba2a0b48550fac2f86f9d0b8d1245af3d2d8d658247b4a73077a2
size 230121
oid sha256:beecf140ddc5da96cbdae3b869ebb3f5453dcd8e61e09d7d079c91e006b6df98
size 1134208

@ -0,0 +1,4 @@
[mypy]
python_version = 3.8
ignore_missing_imports = True

@ -1 +1 @@
Subproject commit 45c0d9ecce255e028163539e22e9a169735de69d
Subproject commit b7cf1a67bc71b674e6793ba1f2fff5d29fee1e6b

@ -1 +1 @@
Subproject commit 6b19fa4961d5dc6e6ea77987eb3a99ce28b0f5cd
Subproject commit ecef0a19d0f72d8fd3151593b7bd1a112d5f63e2

@ -1 +0,0 @@
from .utils import LogentriesHandler

@ -1,49 +0,0 @@
""" This file contains some helpers methods in both Python2 and 3 """
import sys
import re
if sys.version < '3':
# Python2.x imports
import Queue
import codecs
else:
# Python 3.x imports
import queue
def check_token(token):
    """Validate that ``token`` is shaped like a hyphenated hex UUID.

    Returns the ``re.Match`` object (truthy) on success, ``None`` otherwise.
    """
    uuid_pattern = (
        r"^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}"
        r"-[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$"
    )
    return re.match(uuid_pattern, token)
# Py2/Py3 shims: pick implementations once at import time based on the
# running interpreter, so callers never branch on sys.version themselves.
if sys.version < '3':
# Python 2: route text handling through codecs / the unicode builtin.
def to_unicode(ch):
return codecs.unicode_escape_decode(ch)[0]
def is_unicode(ch):
return isinstance(ch, unicode)
def create_unicode(ch):
try:
return unicode(ch, 'utf-8')
except UnicodeDecodeError as e:
# best-effort: return the error text instead of raising on bad bytes
return str(e)
def create_queue(max_size):
return Queue.Queue(max_size)
else:
# Python 3: str is already unicode, so most shims are identity/thin wrappers.
def to_unicode(ch):
return ch
def is_unicode(ch):
return isinstance(ch, str)
def create_unicode(ch):
return str(ch)
def create_queue(max_size):
return queue.Queue(max_size)

@ -1,57 +0,0 @@
from logentries import LogentriesHandler
from threading import Lock
from functools import wraps
import logging
import time
import sys
import psutil
# Module-level scratch shared between Timer instances: last measured duration
# and the name of the last timed function (written under no lock — racy if
# multiple decorated functions run concurrently).
glob_time = 0
glob_name = 0
log = logging.getLogger('logentries')
log.setLevel(logging.INFO)
# Aggregates call counts/durations and ships per-call timing lines to
# Logentries via a LogentriesHandler attached to the 'logentries' logger.
class Metric(object):
def __init__(self, token):
# running totals, guarded by _lock in observe()
self._count = 0.0
self._sum = 0.0
self._lock = Lock()
self.token = token
handler = LogentriesHandler(token)
log.addHandler(handler)
def observe(self, amount):
# record one sample (thread-safe)
with self._lock:
self._count += 1
self._sum += amount
def metric(self):
'''Measure function execution time in seconds
and forward it to Logentries'''
# Timer doubles as a context manager and a decorator factory.
class Timer(object):
def __init__(self, summary):
self._summary = summary
def __enter__(self):
self._start = time.time()
def __exit__(self, typ, value, traceback):
global glob_time
# clamp to 0 in case the clock moved backwards
self._summary.observe(max(time.time() - self._start, 0))
glob_time = time.time()- self._start
log.info("function_name=" + glob_name + " " + "execution_time=" + str(glob_time) + " " + "cpu=" + str(psutil.cpu_percent(interval=None)) + " " + "cpu_count=" + str(psutil.cpu_count())+ " " + "memory=" + str(psutil.virtual_memory()) )
def __call__(self, f):
@wraps(f)
def wrapped(*args, **kwargs):
with self:
# stash the name for __exit__'s log line (see module globals)
global glob_name
glob_name = f.__name__
return f(*args, **kwargs)
return wrapped
return Timer(self)

@ -1,218 +0,0 @@
# coding: utf-8
# vim: set ts=4 sw=4 et:
""" This file contains some utils for connecting to Logentries
as well as storing logs in a queue and sending them."""
VERSION = '2.0.7'
from logentries import helpers as le_helpers
import logging
import threading
import socket
import random
import time
import sys
import certifi
# Size of the internal event queue
QUEUE_SIZE = 32768
# Logentries API server address
LE_API_DEFAULT = "data.logentries.com"
# Port number for token logging to Logentries API server
LE_PORT_DEFAULT = 80
LE_TLS_PORT_DEFAULT = 443
# Minimal delay between attempts to reconnect in seconds
MIN_DELAY = 0.1
# Maximal delay between attempts to reconnect in seconds
MAX_DELAY = 10
# Unicode Line separator character \u2028
LINE_SEP = le_helpers.to_unicode('\u2028')
# LE appender signature - used for debugging messages
LE = "LE: "
# Error message displayed when an incorrect Token has been detected
INVALID_TOKEN = ("\n\nIt appears the LOGENTRIES_TOKEN "
"parameter you entered is incorrect!\n\n")
# Print a diagnostic message prefixed with the LE signature.
def dbg(msg):
print(LE + msg)
# Background daemon thread that drains an internal queue and ships each
# entry to the Logentries API over a plain TCP socket, reconnecting with
# capped exponential backoff on failure.
class PlainTextSocketAppender(threading.Thread):
def __init__(self, verbose=True, le_api=LE_API_DEFAULT, le_port=LE_PORT_DEFAULT, le_tls_port=LE_TLS_PORT_DEFAULT):
threading.Thread.__init__(self)
# Logentries API server address
self.le_api = le_api
# Port number for token logging to Logentries API server
self.le_port = le_port
self.le_tls_port = le_tls_port
# daemon thread: does not block interpreter shutdown
self.daemon = True
self.verbose = verbose
self._conn = None
self._queue = le_helpers.create_queue(QUEUE_SIZE)
def empty(self):
# True when all queued log lines have been taken for sending
return self._queue.empty()
def open_connection(self):
# plain TCP; TLSSocketAppender overrides this with a TLS socket
self._conn = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self._conn.connect((self.le_api, self.le_port))
def reopen_connection(self):
# retry forever with exponential backoff + jitter, capped at MAX_DELAY
self.close_connection()
root_delay = MIN_DELAY
while True:
try:
self.open_connection()
return
except Exception:
if self.verbose:
dbg("Unable to connect to Logentries")
root_delay *= 2
if(root_delay > MAX_DELAY):
root_delay = MAX_DELAY
wait_for = root_delay + random.uniform(0, root_delay)
try:
time.sleep(wait_for)
except KeyboardInterrupt:
raise
def close_connection(self):
if self._conn is not None:
self._conn.close()
def run(self):
# main loop: block on the queue and forward entries until interrupted
try:
# Open connection
self.reopen_connection()
# Send data in queue
while True:
# Take data from queue
data = self._queue.get(block=True)
# Replace newlines with Unicode line separator
# for multi-line events
if not le_helpers.is_unicode(data):
multiline = le_helpers.create_unicode(data).replace(
'\n', LINE_SEP)
else:
multiline = data.replace('\n', LINE_SEP)
multiline += "\n"
# Send data, reconnect if needed
while True:
try:
self._conn.send(multiline.encode('utf-8'))
except socket.error:
self.reopen_connection()
continue
break
except KeyboardInterrupt:
if self.verbose:
dbg("Logentries asynchronous socket client interrupted")
self.close_connection()
# Default appender; replaced below by the TLS variant when ssl is available
# and the handler was constructed with use_tls=True.
SocketAppender = PlainTextSocketAppender
try:
import ssl
ssl_enabled = True
except ImportError: # for systems without TLS support.
ssl_enabled = False
dbg("Unable to import ssl module. Will send over port 80.")
else:
# Same queue/drain behavior as the plain appender, but connects over TLS
# with certificate verification against certifi's CA bundle.
class TLSSocketAppender(PlainTextSocketAppender):
def open_connection(self):
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# NOTE(review): ssl.wrap_socket is deprecated/removed in newer
# Pythons — migrate to ssl.SSLContext.wrap_socket when touching this.
sock = ssl.wrap_socket(
sock=sock,
keyfile=None,
certfile=None,
server_side=False,
cert_reqs=ssl.CERT_REQUIRED,
# prefer TLSv1.2 when the ssl build supports it, else fall back
ssl_version=getattr(
ssl,
'PROTOCOL_TLSv1_2',
ssl.PROTOCOL_TLSv1
),
ca_certs=certifi.where(),
do_handshake_on_connect=True,
suppress_ragged_eofs=True,
)
sock.connect((self.le_api, self.le_tls_port))
self._conn = sock
# logging.Handler that forwards records to Logentries through a background
# socket appender thread. Records are prefixed with the account token and
# queued; the appender thread does the actual network I/O.
class LogentriesHandler(logging.Handler):
def __init__(self, token, use_tls=True, verbose=True, format=None, le_api=LE_API_DEFAULT, le_port=LE_PORT_DEFAULT, le_tls_port=LE_TLS_PORT_DEFAULT):
logging.Handler.__init__(self)
self.token = token
self.good_config = True
self.verbose = verbose
# give the socket 10 seconds to flush,
# otherwise drop logs
self.timeout = 10
# a malformed token disables sending (good_config gates emit_raw)
if not le_helpers.check_token(token):
if self.verbose:
dbg(INVALID_TOKEN)
self.good_config = False
if format is None:
format = logging.Formatter('%(asctime)s : %(levelname)s, %(message)s',
'%a %b %d %H:%M:%S %Z %Y')
self.setFormatter(format)
self.setLevel(logging.DEBUG)
# TLS when available and requested, otherwise plain TCP
if use_tls and ssl_enabled:
self._thread = TLSSocketAppender(verbose=verbose, le_api=le_api, le_port=le_port, le_tls_port=le_tls_port)
else:
self._thread = SocketAppender(verbose=verbose, le_api=le_api, le_port=le_port, le_tls_port=le_tls_port)
def flush(self):
# wait for all queued logs to be sent, up to self.timeout seconds
now = time.time()
while not self._thread.empty():
time.sleep(0.2)
if time.time() - now > self.timeout:
break
def emit_raw(self, msg):
# lazily start the appender thread on first emit
if self.good_config and not self._thread.is_alive():
try:
self._thread.start()
if self.verbose:
dbg("Starting Logentries Asynchronous Socket Appender")
except RuntimeError: # It's already started.
pass
msg = self.token + msg
try:
self._thread._queue.put_nowait(msg)
except Exception:
# Queue is full, try to remove the oldest message and put again
try:
self._thread._queue.get_nowait()
self._thread._queue.put_nowait(msg)
except Exception:
# Race condition, no need for any action here
pass
def emit(self, record):
# format via the configured Formatter; strip trailing newline since the
# appender adds its own line termination
msg = self.format(record).rstrip('\n')
self.emit_raw(msg)
def close(self):
logging.Handler.close(self)

@ -1,11 +0,0 @@
#!/usr/bin/env bash
# Run pylint (errors only: Refactor/Convention/Warning checks disabled) over
# every .py file listed by cereal/, opendbc/, and the release file lists.
# NOTE(review): release/files_common appears twice in the cat — looks like a
# duplicate; harmless (same entries twice) but confirm intent.
python3 -m pylint --disable=R,C,W $(eval echo <(find cereal) <(find opendbc) $(cat release/files_common release/files_common | tr '\n' ' ') | tr ' ' '\n' | grep "\.py$")
exit_status=$?
# pylint's exit status is a bitmask; bits 1 (fatal) and 2 (error) are the
# only ones we treat as failures here.
(( res = exit_status & 3 ))
if [[ $res != 0 ]]; then
echo "Pylint found errors in the code. Please fix and try again"
exit 1
fi

@ -1 +1 @@
Subproject commit d3a79c6a421b4eec952eeb8d1546a4c3c3ff030e
Subproject commit 2e556b8219185708ed974a4b6502796607d7ce0d

@ -1,30 +1,4 @@
#!/usr/bin/env bash
set -e
mkdir -p /dev/shm
chmod 777 /dev/shm
# Write cpuset
echo $$ > /dev/cpuset/app/tasks
echo $PPID > /dev/cpuset/app/tasks
add_subtree() {
echo "[-] adding $2 subtree T=$SECONDS"
if [ -d "$2" ]; then
if git subtree pull --prefix "$2" https://github.com/commaai/"$1".git "$3" --squash -m "Merge $2 subtree"; then
echo "git subtree pull succeeds"
else
echo "git subtree pull failed, fixing"
git merge --abort || true
git rm -r $2
git commit -m "Remove old $2 subtree"
git subtree add --prefix "$2" https://github.com/commaai/"$1".git "$3" --squash
fi
else
git subtree add --prefix "$2" https://github.com/commaai/"$1".git "$3" --squash
fi
}
#!/usr/bin/bash -e
SOURCE_DIR=/data/openpilot_source
TARGET_DIR=/data/openpilot
@ -35,17 +9,16 @@ export GIT_COMMITTER_NAME="Vehicle Researcher"
export GIT_COMMITTER_EMAIL="user@comma.ai"
export GIT_AUTHOR_NAME="Vehicle Researcher"
export GIT_AUTHOR_EMAIL="user@comma.ai"
export GIT_SSH_COMMAND="ssh -i /tmp/deploy_key"
export GIT_SSH_COMMAND="ssh -i /data/gitkey"
echo "[-] Setting up repo T=$SECONDS"
if [ ! -d "$TARGET_DIR" ]; then
mkdir -p $TARGET_DIR
cd $TARGET_DIR
git init
git remote add origin git@github.com:commaai/openpilot.git
mkdir -p $TARGET_DIR
cd $TARGET_DIR
git init
git remote add origin git@github.com:commaai/openpilot.git
fi
echo "[-] fetching public T=$SECONDS"
cd $TARGET_DIR
git prune || true
@ -55,27 +28,15 @@ echo "[-] bringing master-ci and devel in sync T=$SECONDS"
git fetch origin master-ci
git fetch origin devel
git checkout --track origin/master-ci || true
git checkout -f --track origin/master-ci
git reset --hard master-ci
git checkout master-ci
git reset --hard origin/devel
git clean -xdf
# subtrees to make updates more reliable. updating them needs a clean tree
add_subtree "cereal" "cereal" master
add_subtree "panda" "panda" master
add_subtree "opendbc" "opendbc" master
add_subtree "openpilot-pyextra" "pyextra" master
# leave .git alone
# remove everything except .git
echo "[-] erasing old openpilot T=$SECONDS"
rm -rf $TARGET_DIR/* $TARGET_DIR/.gitmodules
# delete dotfiles in root
find . -maxdepth 1 -type f -delete
# dont delete our subtrees
git checkout -- cereal panda opendbc pyextra
find . -maxdepth 1 -not -path './.git' -not -name '.' -not -name '..' -exec rm -rf '{}' \;
# reset tree and get version
cd $SOURCE_DIR
@ -103,15 +64,6 @@ git commit -a -m "openpilot v$VERSION release"
# Run build
SCONS_CACHE=1 scons -j3
echo "[-] testing openpilot T=$SECONDS"
echo -n "0" > /data/params/d/Passive
echo -n "0.2.0" > /data/params/d/CompletedTrainingVersion
echo -n "1" > /data/params/d/HasCompletedSetup
echo -n "1" > /data/params/d/CommunityFeaturesToggle
PYTHONPATH="$TARGET_DIR:$TARGET_DIR/pyextra" nosetests -s selfdrive/test/test_openpilot.py
PYTHONPATH="$TARGET_DIR:$TARGET_DIR/pyextra" GET_CPU_USAGE=1 selfdrive/manager.py
echo "[-] testing panda build T=$SECONDS"
pushd panda/board/
make bin
@ -122,15 +74,10 @@ pushd panda/board/pedal
make obj/comma.bin
popd
if [ ! -z "$PUSH" ]; then
echo "[-] Pushing to $PUSH T=$SECONDS"
git push -f origin master-ci:$PUSH
if [ ! -z "$CI_PUSH" ]; then
echo "[-] Pushing to $CI_PUSH T=$SECONDS"
git remote set-url origin git@github.com:commaai/openpilot.git
git push -f origin master-ci:$CI_PUSH
fi
echo "[-] done pushing T=$SECONDS"
# reset version
cd $SOURCE_DIR
git checkout -- selfdrive/common/version.h
echo "[-] done T=$SECONDS"

Some files were not shown because too many files have changed in this diff Show More

Loading…
Cancel
Save