Merge remote-tracking branch 'upstream/master' into toyota-fuzzy-v2

Branch: pull/28641/head
Author: Shane Smiskol, 2 years ago
Commit: 8a1e91a00b
10 changed files:

  1. Jenkinsfile (6 changes)
  2. codecov.yml (2 changes)
  3. selfdrive/car/hyundai/tests/test_hyundai.py (8 changes)
  4. selfdrive/car/tests/test_fw_fingerprint.py (6 changes)
  5. selfdrive/car/toyota/tests/test_toyota.py (6 changes)
  6. selfdrive/sentry.py (6 changes)
  7. selfdrive/tombstoned.py (31 changes)
  8. tools/cabana/messageswidget.cc (2 changes)
  9. tools/lib/vidindex.py (29 changes)
  10. tools/sim/launch_openpilot.sh (2 changes)

Jenkinsfile (vendored, 6 changes)

@@ -81,10 +81,14 @@ def pcStage(String stageName, Closure body) {
    checkout scm

-    def dockerArgs = '--user=root -v /tmp/comma_download_cache:/tmp/comma_download_cache -v /tmp/scons_cache:/tmp/scons_cache';
+    def dockerArgs = '--user=batman -v /tmp/comma_download_cache:/tmp/comma_download_cache -v /tmp/scons_cache:/tmp/scons_cache';
    docker.build("openpilot-base:build-${env.GIT_COMMIT}", "-f Dockerfile.openpilot_base .").inside(dockerArgs) {
      timeout(time: 20, unit: 'MINUTES') {
        try {
+          // TODO: remove these after all jenkins jobs are running as batman (merged with master)
+          sh "sudo chown -R batman:batman /tmp/scons_cache"
+          sh "sudo chown -R batman:batman /tmp/comma_download_cache"
          sh "git config --global --add safe.directory '*'"
          sh "git submodule update --init --recursive"
          sh "git lfs pull"

codecov.yml (2 changes)

@@ -6,3 +6,5 @@ coverage:
  informational: true
  patch: off
+ignore:
+  - "**/test_*.py"

selfdrive/car/hyundai/tests/test_hyundai.py (8 changes)

@@ -62,7 +62,7 @@ class TestHyundaiFingerprint(unittest.TestCase):
    # Asserts no ECUs known to be shared across platforms exist in the database.
    # Tucson having Santa Cruz camera and EPS for example
    for car_model, ecus in FW_VERSIONS.items():
-      with self.subTest(car_model=car_model):
+      with self.subTest(car_model=car_model.value):
        if car_model == CAR.SANTA_CRUZ_1ST_GEN:
          raise unittest.SkipTest("Skip checking Santa Cruz for its parts")

@@ -79,7 +79,7 @@ class TestHyundaiFingerprint(unittest.TestCase):
    """
    expected_fw_prefix = HYUNDAI_VERSION_REQUEST_LONG[1:]
    for car_model, ecus in FW_VERSIONS.items():
-      with self.subTest(car_model=car_model):
+      with self.subTest(car_model=car_model.value):
        for ecu, fws in ecus.items():
          # TODO: enable for Ecu.fwdRadar, Ecu.abs, Ecu.eps, Ecu.transmission
          if ecu[0] in (Ecu.fwdCamera,):

@@ -103,7 +103,7 @@ class TestHyundaiFingerprint(unittest.TestCase):
    # Asserts ECU keys essential for fuzzy fingerprinting are available on all platforms
    for car_model, ecus in FW_VERSIONS.items():
-      with self.subTest(car_model=car_model):
+      with self.subTest(car_model=car_model.value):
        for platform_code_ecu in PLATFORM_CODE_ECUS:
          if platform_code_ecu in (Ecu.fwdRadar, Ecu.eps) and car_model == CAR.HYUNDAI_GENESIS:
            continue

@@ -118,7 +118,7 @@ class TestHyundaiFingerprint(unittest.TestCase):
    # - expected parsing of ECU FW dates
    for car_model, ecus in FW_VERSIONS.items():
-      with self.subTest(car_model=car_model):
+      with self.subTest(car_model=car_model.value):
        for ecu, fws in ecus.items():
          if ecu[0] not in PLATFORM_CODE_ECUS:
            continue
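Side note (not part of the diff): switching subTest(car_model=car_model) to car_model.value only changes how a failing subtest is labeled; with an enum member the label carries the full repr, with .value it is just the platform string. A minimal sketch, assuming a hypothetical enum in place of the real CAR class:

    import unittest
    from enum import Enum

    class CAR(Enum):  # hypothetical stand-in for the real platform enum
      TUCSON_4TH_GEN = "HYUNDAI TUCSON 4TH GEN"

    class TestSubTestLabels(unittest.TestCase):
      def test_labels(self):
        for car_model in CAR:
          # subTest(car_model=car_model) would label a failure as <CAR.TUCSON_4TH_GEN: 'HYUNDAI TUCSON 4TH GEN'>
          # subTest(car_model=car_model.value) labels it as 'HYUNDAI TUCSON 4TH GEN'
          with self.subTest(car_model=car_model.value):
            self.assertIn("HYUNDAI", car_model.value)

    if __name__ == "__main__":
      unittest.main()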

selfdrive/car/tests/test_fw_fingerprint.py (6 changes)

@@ -97,7 +97,7 @@ class TestFwFingerprint(unittest.TestCase):
  def test_fw_version_lists(self):
    for car_model, ecus in FW_VERSIONS.items():
-      with self.subTest(car_model=car_model):
+      with self.subTest(car_model=car_model.value):
        for ecu, ecu_fw in ecus.items():
          with self.subTest(ecu):
            duplicates = {fw for fw in ecu_fw if ecu_fw.count(fw) > 1}

@@ -119,13 +119,13 @@ class TestFwFingerprint(unittest.TestCase):
    for brand, config in FW_QUERY_CONFIGS.items():
      for car_model, ecus in VERSIONS[brand].items():
        bad_ecus = set(ecus).intersection(config.extra_ecus)
-        with self.subTest(car_model=car_model):
+        with self.subTest(car_model=car_model.value):
          self.assertFalse(len(bad_ecus), f'{car_model}: Fingerprints contain ECUs added for data collection: {bad_ecus}')

  def test_blacklisted_ecus(self):
    blacklisted_addrs = (0x7c4, 0x7d0) # includes A/C ecu and an unknown ecu
    for car_model, ecus in FW_VERSIONS.items():
-      with self.subTest(car_model=car_model):
+      with self.subTest(car_model=car_model.value):
        CP = interfaces[car_model][0].get_non_essential_params(car_model)
        if CP.carName == 'subaru':
          for ecu in ecus.keys():
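Side note (not part of the diff): the duplicates set comprehension in the context above rescans the list once per element; a Counter gives the same set in a single pass. Illustrative values below are made up:

    from collections import Counter

    # made-up firmware version bytes, only to illustrate the check
    ecu_fw = [b"\x018965B0R01", b"\x018965B0R01", b"\x018965B0R02"]

    # form used in the test (count() walks the list for every element)
    duplicates = {fw for fw in ecu_fw if ecu_fw.count(fw) > 1}

    # equivalent single-pass form
    duplicates_counter = {fw for fw, n in Counter(ecu_fw).items() if n > 1}

    assert duplicates == duplicates_counter == {b"\x018965B0R01"}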

selfdrive/car/toyota/tests/test_toyota.py (6 changes)

@@ -28,7 +28,7 @@ class TestToyotaInterfaces(unittest.TestCase):
    # Asserts standard ECUs exist for each platform
    common_ecus = {Ecu.fwdRadar, Ecu.fwdCamera}
    for car_model, ecus in FW_VERSIONS.items():
-      with self.subTest(car_model=car_model):
+      with self.subTest(car_model=car_model.value):
        present_ecus = {ecu[0] for ecu in ecus}
        missing_ecus = common_ecus - present_ecus
        self.assertEqual(len(missing_ecus), 0)

@@ -57,7 +57,7 @@ class TestToyotaFingerprint(unittest.TestCase):
  def test_platform_code_ecus_available(self):
    # Asserts ECU keys essential for fuzzy fingerprinting are available on all platforms
    for car_model, ecus in FW_VERSIONS.items():
-      with self.subTest(car_model=car_model):
+      with self.subTest(car_model=car_model.value):
        for platform_code_ecu in PLATFORM_CODE_ECUS:
          if platform_code_ecu == Ecu.eps and car_model in (CAR.PRIUS_V, CAR.LEXUS_CTH,):
            continue

@@ -72,7 +72,7 @@ class TestToyotaFingerprint(unittest.TestCase):
    # - expected parsing of ECU sub-versions
    for car_model, ecus in FW_VERSIONS.items():
-      with self.subTest(car_model=car_model):
+      with self.subTest(car_model=car_model.value):
        for ecu, fws in ecus.items():
          if ecu[0] not in PLATFORM_CODE_ECUS:
            continue

selfdrive/sentry.py (6 changes)

@@ -42,11 +42,11 @@ def set_tag(key: str, value: str) -> None:
  sentry_sdk.set_tag(key, value)

-def init(project: SentryProject) -> None:
+def init(project: SentryProject) -> bool:
  # forks like to mess with this, so double check
  comma_remote = is_comma_remote() and "commaai" in get_origin(default="")
  if not comma_remote or not is_registered_device() or PC:
-    return
+    return False

  env = "release" if is_tested_branch() else "master"
  dongle_id = Params().get("DongleId", encoding='utf-8')

@@ -73,3 +73,5 @@ def init(project: SentryProject) -> None:

  if project == SentryProject.SELFDRIVE:
    sentry_sdk.Hub.current.start_session()
+
+  return True
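Side note (not part of the diff): init() now reports whether Sentry was actually configured, so callers can decide what to do with crash data they would otherwise upload. A minimal caller sketch assuming the new signature; report_if_enabled is a hypothetical helper, not openpilot code, and the module path is taken from the file list above:

    import sentry_sdk

    from openpilot.selfdrive import sentry

    def report_if_enabled(exc: Exception) -> None:
      # hypothetical helper: only forward exceptions when Sentry actually initialized
      should_report = sentry.init(sentry.SentryProject.SELFDRIVE)
      if not should_report:
        return  # fork, unregistered device, or PC: nothing is sent
      sentry_sdk.capture_exception(exc)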

selfdrive/tombstoned.py (31 changes)

@@ -46,19 +46,16 @@ def get_apport_stacktrace(fn):

def get_tombstones():
-  """Returns list of (filename, ctime) for all tombstones in /data/tombstones
-  and apport crashlogs in /var/crash"""
+  """Returns list of (filename, ctime) for all crashlogs"""
  files = []
-  for folder in [TOMBSTONE_DIR, APPORT_DIR]:
-    if os.path.exists(folder):
-      with os.scandir(folder) as d:
-
-        # Loop over first 1000 directory entries
-        for _, f in zip(range(1000), d, strict=False):
-          if f.name.startswith("tombstone"):
-            files.append((f.path, int(f.stat().st_ctime)))
-          elif f.name.endswith(".crash") and f.stat().st_mode == 0o100640:
-            files.append((f.path, int(f.stat().st_ctime)))
+  if os.path.exists(APPORT_DIR):
+    with os.scandir(APPORT_DIR) as d:
+      # Loop over first 1000 directory entries
+      for _, f in zip(range(1000), d, strict=False):
+        if f.name.startswith("tombstone"):
+          files.append((f.path, int(f.stat().st_ctime)))
+        elif f.name.endswith(".crash") and f.stat().st_mode == 0o100640:
+          files.append((f.path, int(f.stat().st_ctime)))
  return files

@@ -143,7 +140,7 @@ def report_tombstone_apport(fn):

def main() -> NoReturn:
-  sentry.init(sentry.SentryProject.SELFDRIVE_NATIVE)
+  should_report = sentry.init(sentry.SentryProject.SELFDRIVE_NATIVE)

  # Clear apport folder on start, otherwise duplicate crashes won't register
  clear_apport_folder()

@@ -153,6 +150,14 @@ def main() -> NoReturn:
    now_tombstones = set(get_tombstones())

    for fn, _ in (now_tombstones - initial_tombstones):
+      # clear logs if we're not interested in them
+      if not should_report:
+        try:
+          os.remove(fn)
+        except Exception:
+          pass
+        continue
+
      try:
        cloudlog.info(f"reporting new tombstone {fn}")
        if fn.endswith(".crash"):

tools/cabana/messageswidget.cc (2 changes)

@@ -169,6 +169,8 @@ QVariant MessageListModel::headerData(int section, Qt::Orientation orientation,
}

QVariant MessageListModel::data(const QModelIndex &index, int role) const {
+  if (!index.isValid() || index.row() >= msgs.size()) return {};
+
  const auto &id = msgs[index.row()];
  auto &can_data = can->lastMessage(id);

tools/lib/vidindex.py (29 changes)

@@ -1,11 +1,17 @@
#!/usr/bin/env python3
import argparse
+import os
import struct
from enum import IntEnum
from typing import Tuple

from openpilot.tools.lib.filereader import FileReader

+DEBUG = int(os.getenv("DEBUG", "0"))
+
+# compare to ffmpeg parsing
+# ffmpeg -i <input.hevc> -c copy -bsf:v trace_headers -f null - 2>&1 | grep -B4 -A32 '] 0 '
+
# H.265 specification
# https://www.itu.int/rec/dologin_pub.asp?lang=e&id=T-REC-H.265-201802-S!!PDF-E&type=items
@@ -143,15 +149,9 @@ def get_ue(dat: bytes, start_idx: int, skip_bits: int) -> Tuple[int, int]:
    raise VideoFileInvalid("invalid exponential-golomb code")

def require_nal_unit_start(dat: bytes, nal_unit_start: int) -> None:
-  if nal_unit_start >= len(dat):
-    raise ValueError("start index must be less than data length")
-
  if nal_unit_start < 1:
    raise ValueError("start index must be greater than zero")

-  if dat[nal_unit_start-1] != 0x00:
-    raise VideoFileInvalid("start code must be preceded by 0x00")
-
  if dat[nal_unit_start:nal_unit_start + NAL_UNIT_START_CODE_SIZE] != NAL_UNIT_START_CODE:
    raise VideoFileInvalid("data must begin with start code")
@@ -163,6 +163,8 @@ def get_hevc_nal_unit_length(dat: bytes, nal_unit_start: int) -> int:

  # length of NAL unit is byte count up to next NAL unit start index
  nal_unit_len = (pos if pos != -1 else len(dat)) - nal_unit_start
+  if DEBUG:
+    print(" nal_unit_len:", nal_unit_len)
  return nal_unit_len

def get_hevc_nal_unit_type(dat: bytes, nal_unit_start: int) -> HevcNalUnitType:
@@ -177,7 +179,10 @@ def get_hevc_nal_unit_type(dat: bytes, nal_unit_start: int) -> HevcNalUnitType:
  nal_unit_header = dat[header_start:header_start + NAL_UNIT_HEADER_SIZE]
  if len(nal_unit_header) != 2:
    raise VideoFileInvalid("data to short to contain nal unit header")
-  return HevcNalUnitType((nal_unit_header[0] >> 1) & 0x3F)
+  nal_unit_type = HevcNalUnitType((nal_unit_header[0] >> 1) & 0x3F)
+  if DEBUG:
+    print(" nal_unit_type:", nal_unit_type.name, f"({nal_unit_type.value})")
+  return nal_unit_type

def get_hevc_slice_type(dat: bytes, nal_unit_start: int, nal_unit_type: HevcNalUnitType) -> Tuple[int, bool]:
  # 7.3.2.9 Slice segment layer RBSP syntax
@@ -248,6 +253,8 @@ def get_hevc_slice_type(dat: bytes, nal_unit_start: int, nal_unit_type: HevcNalU
  # 2 | I (I slice)
  # unsigned integer 0-th order Exp-Golomb-coded syntax element with the left bit first
  slice_type, _ = get_ue(dat, rbsp_start, skip_bits)
+  if DEBUG:
+    print(" slice_type:", slice_type, f"(first slice: {is_first_slice})")
  if slice_type > 2:
    raise VideoFileInvalid("slice_type must be 0, 1, or 2")
  return slice_type, is_first_slice
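Side note (not part of the diff): slice_type above is a ue(v) field, an unsigned 0th-order Exp-Golomb code, which is what get_ue decodes. A standalone sketch of the decoding rule, kept separate from this file's byte and bit offset bookkeeping:

    def decode_ue(bits: str) -> int:
      # unsigned 0th-order Exp-Golomb: n leading zeros, a 1, then an n-bit suffix;
      # value = 2**n - 1 + suffix
      n = 0
      while bits[n] == "0":
        n += 1
      suffix = int(bits[n + 1:n + 1 + n], 2) if n else 0
      return (1 << n) - 1 + suffix

    # '1' -> 0, '010' -> 1, '011' -> 2, '00100' -> 3 (slice_type 0/1/2 fit in the first three codes)
    assert [decode_ue(b) for b in ("1", "010", "011", "00100")] == [0, 1, 2, 3]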
@@ -259,11 +266,14 @@ def hevc_index(hevc_file_name: str, allow_corrupt: bool=False) -> Tuple[list, in
  if len(dat) < NAL_UNIT_START_CODE_SIZE + 1:
    raise VideoFileInvalid("data is too short")

+  if dat[0] != 0x00:
+    raise VideoFileInvalid("first byte must be 0x00")
+
  prefix_dat = b""
  frame_types = list()

+  i = 1 # skip past first byte 0x00
  try:
-    i = 1 # skip past first byte 0x00 (verified by get_hevc_nal_info)
    while i < len(dat):
      require_nal_unit_start(dat, i)
      nal_unit_len = get_hevc_nal_unit_length(dat, i)
@@ -275,9 +285,10 @@ def hevc_index(hevc_file_name: str, allow_corrupt: bool=False) -> Tuple[list, in
      if is_first_slice:
        frame_types.append((slice_type, i))
      i += nal_unit_len
-  except Exception:
+  except Exception as e:
    if not allow_corrupt:
      raise
+    print(f"ERROR: NAL unit skipped @ {i}\n", str(e))

  return frame_types, len(dat), prefix_dat
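Side note (not part of the diff): a quick way to exercise the new DEBUG output end to end. The env var has to be set before the import because DEBUG is read once at module load, and fcamera.hevc is a placeholder path:

    import os
    os.environ["DEBUG"] = "1"  # must be set before the import below (read once at module load)

    from openpilot.tools.lib.vidindex import hevc_index

    # "fcamera.hevc" is a placeholder for any openpilot HEVC segment file
    frame_types, video_len, prefix_dat = hevc_index("fcamera.hevc", allow_corrupt=True)
    iframes = [offset for slice_type, offset in frame_types if slice_type == 2]  # 2 = I slice
    print(f"{len(frame_types)} slices, {len(iframes)} I-frames, {video_len} bytes")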

tools/sim/launch_openpilot.sh (2 changes)

@@ -12,6 +12,8 @@ if [[ "$CI" ]]; then
  export BLOCK="${BLOCK},ui"
fi

+python -c "from openpilot.selfdrive.test.helpers import set_params_enabled; set_params_enabled()"
+
SCRIPT_DIR=$(dirname "$0")
OPENPILOT_DIR=$SCRIPT_DIR/../../
