commit
73a0861bcc
94 changed files with 7039 additions and 1068 deletions
File diff suppressed because it is too large
Load Diff
@ -1 +1 @@ |
||||
Subproject commit df08568318da97ed6f87747caee0a5b2c30086c4 |
||||
Subproject commit cda60ec9652c05de4ccfcad1fae7936e708434a3 |
@ -1,12 +1,9 @@ |
||||
#include "common/watchdog.h" |
||||
#include "common/timing.h" |
||||
#include "common/util.h" |
||||
|
||||
const std::string watchdog_fn_prefix = "/dev/shm/wd_"; // + <pid>
|
||||
|
||||
bool watchdog_kick() { |
||||
bool watchdog_kick(uint64_t ts) { |
||||
static std::string fn = watchdog_fn_prefix + std::to_string(getpid()); |
||||
|
||||
uint64_t ts = nanos_since_boot(); |
||||
return util::write_file(fn.c_str(), &ts, sizeof(ts), O_WRONLY | O_CREAT) > 0; |
||||
} |
||||
|
@ -1,3 +1,5 @@ |
||||
#pragma once |
||||
|
||||
bool watchdog_kick(); |
||||
#include <cstdint> |
||||
|
||||
bool watchdog_kick(uint64_t ts); |
||||
|
@ -1 +1 @@ |
||||
Subproject commit 6e87f536dbe8cf80040f724c89798e66ca17cf9d |
||||
Subproject commit 828612e1b8848ccf70072d5513c0b7977f1707da |
@ -1 +1 @@ |
||||
Subproject commit 225dbacbaac312f85eaaee0b97a3acc31f9c6b47 |
||||
Subproject commit 3b6bd703b7a7667e4f82d0b81ef9a454819b94bd |
@ -0,0 +1,11 @@ |
||||
#!/usr/bin/bash
set -e

# Resolve the directory containing this script so it can be run from anywhere.
DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null && pwd)"
# Quote the path: an unquoted `cd $DIR` breaks on paths containing spaces.
cd "$DIR"

# Fail the build if it left modified or untracked files behind.
if [ -n "$(git status --porcelain)" ]; then
  echo "Dirty working tree after build:"
  git status --porcelain
  exit 1
fi
@ -1,56 +1,70 @@ |
||||
from cereal import car |
||||
from selfdrive.car import make_can_msg |
||||
|
||||
from selfdrive.car.chrysler.values import RAM_CARS |
||||
|
||||
GearShifter = car.CarState.GearShifter |
||||
VisualAlert = car.CarControl.HUDControl.VisualAlert |
||||
|
||||
def create_lkas_hud(packer, gear, lkas_active, hud_alert, hud_count, lkas_car_model): |
||||
# LKAS_HUD 0x2a6 (678) Controls what lane-keeping icon is displayed. |
||||
def create_lkas_hud(packer, CP, lkas_active, hud_alert, hud_count, car_model, auto_high_beam): |
||||
# LKAS_HUD - Controls what lane-keeping icon is displayed |
||||
|
||||
# == Color == |
||||
# 0 hidden? |
||||
# 1 white |
||||
# 2 green |
||||
# 3 ldw |
||||
|
||||
# == Lines == |
||||
# 03 white Lines |
||||
# 04 grey lines |
||||
# 09 left lane close |
||||
# 0A right lane close |
||||
# 0B left Lane very close |
||||
# 0C right Lane very close |
||||
# 0D left cross cross |
||||
# 0E right lane cross |
||||
|
||||
if hud_alert in (VisualAlert.steerRequired, VisualAlert.ldw): |
||||
msg = b'\x00\x00\x00\x03\x00\x00\x00\x00' |
||||
return make_can_msg(0x2a6, msg, 0) |
||||
# == Alerts == |
||||
# 7 Normal |
||||
# 6 lane departure place hands on wheel |
||||
|
||||
color = 1 # default values are for park or neutral in 2017 are 0 0, but trying 1 1 for 2019 |
||||
lines = 1 |
||||
alerts = 0 |
||||
color = 2 if lkas_active else 1 |
||||
lines = 3 if lkas_active else 0 |
||||
alerts = 7 if lkas_active else 0 |
||||
|
||||
if hud_count < (1 * 4): # first 3 seconds, 4Hz |
||||
alerts = 1 |
||||
# CAR.PACIFICA_2018_HYBRID and CAR.PACIFICA_2019_HYBRID |
||||
# had color = 1 and lines = 1 but trying 2017 hybrid style for now. |
||||
if gear in (GearShifter.drive, GearShifter.reverse, GearShifter.low): |
||||
if lkas_active: |
||||
color = 2 # control active, display green. |
||||
lines = 6 |
||||
else: |
||||
color = 1 # control off, display white. |
||||
lines = 1 |
||||
|
||||
if hud_alert in (VisualAlert.ldw, VisualAlert.steerRequired): |
||||
color = 4 |
||||
lines = 0 |
||||
alerts = 6 |
||||
|
||||
values = { |
||||
"LKAS_ICON_COLOR": color, # byte 0, last 2 bits |
||||
"CAR_MODEL": lkas_car_model, # byte 1 |
||||
"LKAS_LANE_LINES": lines, # byte 2, last 4 bits |
||||
"LKAS_ALERTS": alerts, # byte 3, last 4 bits |
||||
"LKAS_ICON_COLOR": color, |
||||
"CAR_MODEL": car_model, |
||||
"LKAS_LANE_LINES": lines, |
||||
"LKAS_ALERTS": alerts, |
||||
} |
||||
|
||||
return packer.make_can_msg("LKAS_HUD", 0, values) # 0x2a6 |
||||
if CP.carFingerprint in RAM_CARS: |
||||
values['AUTO_HIGH_BEAM_ON'] = auto_high_beam |
||||
|
||||
return packer.make_can_msg("DAS_6", 0, values) |
||||
|
||||
|
||||
def create_lkas_command(packer, apply_steer, moving_fast, frame): |
||||
# LKAS_COMMAND 0x292 (658) Lane-keeping signal to turn the wheel. |
||||
def create_lkas_command(packer, CP, apply_steer, lat_active, frame): |
||||
# LKAS_COMMAND Lane-keeping signal to turn the wheel |
||||
enabled_val = 2 if CP.carFingerprint in RAM_CARS else 1 |
||||
values = { |
||||
"LKAS_STEERING_TORQUE": apply_steer, |
||||
"LKAS_HIGH_TORQUE": int(moving_fast), |
||||
"COUNTER": frame % 0x10, |
||||
"STEERING_TORQUE": apply_steer, |
||||
"LKAS_CONTROL_BIT": enabled_val if lat_active else 0, |
||||
} |
||||
return packer.make_can_msg("LKAS_COMMAND", 0, values) |
||||
return packer.make_can_msg("LKAS_COMMAND", 0, values, frame % 0x10) |
||||
|
||||
|
||||
def create_cruise_buttons(packer, frame, cancel=False): |
||||
def create_cruise_buttons(packer, frame, bus, cancel=False, resume=False): |
||||
values = { |
||||
"ACC_Cancel": cancel, |
||||
"COUNTER": frame % 0x10, |
||||
"ACC_Resume": resume, |
||||
} |
||||
return packer.make_can_msg("CRUISE_BUTTONS", 0, values) |
||||
return packer.make_can_msg("CRUISE_BUTTONS", bus, values, frame % 0x10) |
||||
|
@ -1,3 +1,4 @@ |
||||
loggerd |
||||
encoderd |
||||
bootlog |
||||
tests/test_logger |
||||
|
@ -1 +1 @@ |
||||
806984d4206056fb132625c5dad6c0ca1835a2d6 |
||||
825acfae98543c915c18d3b19a9c5d2503e431a6 |
@ -1,3 +1,6 @@ |
||||
{ |
||||
"English": "" |
||||
"English": "", |
||||
"中文(繁體)": "main_zh-CHT", |
||||
"中文(简体)": "main_zh-CHS", |
||||
"한국어": "main_ko" |
||||
} |
||||
|
Binary file not shown.
File diff suppressed because it is too large
Load Diff
Binary file not shown.
File diff suppressed because it is too large
Load Diff
Binary file not shown.
File diff suppressed because it is too large
Load Diff
Binary file not shown.
File diff suppressed because it is too large
Load Diff
@ -0,0 +1,198 @@ |
||||
#!/usr/bin/env python3 |
||||
import io |
||||
import lzma |
||||
import os |
||||
import struct |
||||
import sys |
||||
import time |
||||
from abc import ABC, abstractmethod |
||||
from collections import defaultdict, namedtuple |
||||
from typing import Callable, Dict, List, Optional, Tuple |
||||
|
||||
import requests |
||||
from Crypto.Hash import SHA512 |
||||
|
||||
CA_FORMAT_INDEX = 0x96824d9c7b129ff9 |
||||
CA_FORMAT_TABLE = 0xe75b9e112f17417d |
||||
CA_FORMAT_TABLE_TAIL_MARKER = 0xe75b9e112f17417 |
||||
FLAGS = 0xb000000000000000 |
||||
|
||||
CA_HEADER_LEN = 48 |
||||
CA_TABLE_HEADER_LEN = 16 |
||||
CA_TABLE_ENTRY_LEN = 40 |
||||
CA_TABLE_MIN_LEN = CA_TABLE_HEADER_LEN + CA_TABLE_ENTRY_LEN |
||||
|
||||
CHUNK_DOWNLOAD_TIMEOUT = 60 |
||||
CHUNK_DOWNLOAD_RETRIES = 3 |
||||
|
||||
CAIBX_DOWNLOAD_TIMEOUT = 120 |
||||
|
||||
Chunk = namedtuple('Chunk', ['sha', 'offset', 'length']) |
||||
ChunkDict = Dict[bytes, Chunk] |
||||
|
||||
|
||||
class ChunkReader(ABC):
  """Interface for fetching a single chunk's raw bytes from some backing store."""
  @abstractmethod
  def read(self, chunk: Chunk) -> bytes:
    ...
||||
|
||||
|
||||
class FileChunkReader(ChunkReader):
  """Reads chunks from a local file."""
  def __init__(self, fn: str) -> None:
    super().__init__()
    # Keep the handle open across reads; chunks are fetched by seeking.
    self.f = open(fn, 'rb')

  def __del__(self):
    # The original leaked the file handle; close it when the reader goes away.
    self.f.close()

  def read(self, chunk: Chunk) -> bytes:
    """Return the bytes for *chunk* by seeking to its offset in the file."""
    self.f.seek(chunk.offset)
    return self.f.read(chunk.length)
||||
|
||||
|
||||
class RemoteChunkReader(ChunkReader):
  """Reads lzma compressed chunks from a remote store."""

  def __init__(self, url: str) -> None:
    super().__init__()
    self.url = url
    # One session so connections are reused across chunk downloads.
    self.session = requests.Session()

  def read(self, chunk: Chunk) -> bytes:
    """Download, decompress, and return the bytes for *chunk*."""
    digest = chunk.sha.hex()
    # Store layout shards chunks into directories named by the first 4 hex chars.
    chunk_url = os.path.join(self.url, digest[:4], digest + ".cacnk")

    for attempt in range(CHUNK_DOWNLOAD_RETRIES):
      try:
        resp = self.session.get(chunk_url, timeout=CHUNK_DOWNLOAD_TIMEOUT)
      except Exception:
        # Out of retries: surface the original error to the caller.
        if attempt == CHUNK_DOWNLOAD_RETRIES - 1:
          raise
        time.sleep(CHUNK_DOWNLOAD_TIMEOUT)
      else:
        break

    resp.raise_for_status()

    # FORMAT_AUTO accepts both .xz container and raw lzma streams.
    decompressor = lzma.LZMADecompressor(format=lzma.FORMAT_AUTO)
    return decompressor.decompress(resp.content)
||||
|
||||
|
||||
def parse_caibx(caibx_path: str) -> List[Chunk]:
  """Parses the chunks from a caibx file. Can handle both local and remote files.

  Returns a list of Chunk(sha, offset, length) entries describing how the
  target is assembled.
  """
  if os.path.isfile(caibx_path):
    caibx = open(caibx_path, 'rb')
  else:
    resp = requests.get(caibx_path, timeout=CAIBX_DOWNLOAD_TIMEOUT)
    resp.raise_for_status()
    caibx = io.BytesIO(resp.content)

  # `with` closes the handle, which the original version leaked.
  with caibx:
    caibx.seek(0, os.SEEK_END)
    caibx_len = caibx.tell()
    caibx.seek(0, os.SEEK_SET)

    # Parse index header
    length, magic, flags, min_size, _, max_size = struct.unpack("<QQQQQQ", caibx.read(CA_HEADER_LEN))
    # Was `assert flags == flags`, a tautology; compare against the expected
    # feature-flag constant instead.
    assert flags == FLAGS
    assert length == CA_HEADER_LEN
    assert magic == CA_FORMAT_INDEX

    # Parse table header
    length, magic = struct.unpack("<QQ", caibx.read(CA_TABLE_HEADER_LEN))
    assert magic == CA_FORMAT_TABLE

    # Parse chunk entries. The table ends with a tail-marker entry, hence
    # CA_TABLE_MIN_LEN (header + one entry) in the count below.
    num_chunks = (caibx_len - CA_HEADER_LEN - CA_TABLE_MIN_LEN) // CA_TABLE_ENTRY_LEN
    chunks = []

    offset = 0
    for i in range(num_chunks):
      # Each entry stores the chunk's *end* offset followed by its 32-byte hash.
      new_offset = struct.unpack("<Q", caibx.read(8))[0]

      sha = caibx.read(32)
      length = new_offset - offset

      assert length <= max_size

      # Last chunk can be smaller
      if i < num_chunks - 1:
        assert length >= min_size

      chunks.append(Chunk(sha, offset, length))
      offset = new_offset

  return chunks
||||
|
||||
|
||||
def build_chunk_dict(chunks: List[Chunk]) -> ChunkDict:
  """Index chunks by hash for fast lookups.

  The first chunk seen for each hash wins, since it is the most likely one
  to be already downloaded.
  """
  by_sha: ChunkDict = {}
  for chunk in chunks:
    by_sha.setdefault(chunk.sha, chunk)
  return by_sha
||||
|
||||
|
||||
def extract(target: List[Chunk],
            sources: List[Tuple[str, ChunkReader, ChunkDict]],
            out_path: str,
            progress: Optional[Callable[[int], None]] = None):
  """Reassemble *target* into *out_path* from the given chunk sources.

  Each source is a (name, reader, chunk_dict) triple; sources are tried in
  order and the first one that yields a verified copy of a chunk is used.
  Returns a per-source mapping of bytes written, for reporting.
  """
  stats: Dict[str, int] = defaultdict(int)

  with open(out_path, 'wb') as out:
    for want in target:
      found = False

      # Try each source in priority order for the desired chunk.
      for src_name, reader, available in sources:
        if want.sha not in available:
          continue

        data = reader.read(available[want.sha])

        # Reject a source whose copy has the wrong length...
        if len(data) != want.length:
          continue

        # ...or whose SHA512/256 digest does not match the index.
        if SHA512.new(data, truncate="256").digest() != want.sha:
          continue

        # Verified: write the chunk at its position in the output.
        out.seek(want.offset)
        out.write(data)

        stats[src_name] += want.length

        if progress is not None:
          progress(sum(stats.values()))

        found = True
        break

      if not found:
        raise RuntimeError("Desired chunk not found in provided stores")

  return stats
||||
|
||||
|
||||
def print_stats(stats: Dict[str, int]):
  """Print a human-readable summary of how many bytes came from each source."""
  total_bytes = sum(stats.values())
  mb = 1024 * 1024
  print(f"Total size: {total_bytes / mb:.2f} MB")
  for source, count in stats.items():
    share = count / total_bytes * 100
    print(f"  {source}: {count / mb:.2f} MB ({share:.1f}%)")
||||
|
||||
|
||||
def extract_simple(caibx_path, out_path, store_path):
  """Extract the target described by *caibx_path* using one local chunk store."""
  target = parse_caibx(caibx_path)
  # Single-source setup: every chunk must be available in the local store.
  store_source = (store_path, FileChunkReader(store_path), build_chunk_dict(target))
  return extract(target, [store_source], out_path)
||||
|
||||
|
||||
if __name__ == "__main__":
  # Usage: casync.py <caibx> <out> <store>
  caibx_arg = sys.argv[1]
  out_arg = sys.argv[2]
  store_arg = sys.argv[3]

  print_stats(extract_simple(caibx_arg, out_arg, store_arg))
Loading…
Reference in new issue