commit
c1b39bb9bd
174 changed files with 3686 additions and 2253 deletions
@ -1,69 +1,79 @@ |
||||
CI / testing: |
||||
- all: |
||||
- changed-files: ['.github/**', '**/test_*', 'Jenkinsfile'] |
||||
- changed-files: |
||||
- any-glob-to-all-files: "{.github/**,**/test_*,Jenkinsfile}" |
||||
|
||||
car: |
||||
- all: |
||||
- changed-files: ['selfdrive/car/**'] |
||||
car: |
||||
- changed-files: |
||||
- any-glob-to-all-files: 'selfdrive/car/**' |
||||
|
||||
body: |
||||
- all: |
||||
- changed-files: ['selfdrive/car/body/*'] |
||||
- changed-files: |
||||
- any-glob-to-all-files: 'selfdrive/car/body/*' |
||||
|
||||
chrysler: |
||||
- all: |
||||
- changed-files: ['selfdrive/car/chrysler/*'] |
||||
ford: |
||||
- all: |
||||
- changed-files: ['selfdrive/car/ford/*'] |
||||
gm: |
||||
- all: |
||||
- changed-files: ['selfdrive/car/gm/*'] |
||||
honda: |
||||
- all: |
||||
- changed-files: ['selfdrive/car/honda/*'] |
||||
hyundai: |
||||
- all: |
||||
- changed-files: ['selfdrive/car/hyundai/*'] |
||||
mazda: |
||||
- all: |
||||
- changed-files: ['selfdrive/car/mazda/*'] |
||||
nissan: |
||||
- all: |
||||
- changed-files: ['selfdrive/car/nissan/*'] |
||||
subaru: |
||||
- all: |
||||
- changed-files: ['selfdrive/car/subaru/*'] |
||||
tesla: |
||||
- all: |
||||
- changed-files: ['selfdrive/car/tesla/*'] |
||||
toyota: |
||||
- all: |
||||
- changed-files: ['selfdrive/car/toyota/*'] |
||||
volkswagen: |
||||
- all: |
||||
- changed-files: ['selfdrive/car/volkswagen/*'] |
||||
- changed-files: |
||||
- any-glob-to-all-files: 'selfdrive/car/chrysler/*' |
||||
|
||||
ford: |
||||
- changed-files: |
||||
- any-glob-to-all-files: 'selfdrive/car/ford/*' |
||||
|
||||
gm: |
||||
- changed-files: |
||||
- any-glob-to-all-files: 'selfdrive/car/gm/*' |
||||
|
||||
honda: |
||||
- changed-files: |
||||
- any-glob-to-all-files: 'selfdrive/car/honda/*' |
||||
|
||||
hyundai: |
||||
- changed-files: |
||||
- any-glob-to-all-files: 'selfdrive/car/hyundai/*' |
||||
|
||||
mazda: |
||||
- changed-files: |
||||
- any-glob-to-all-files: 'selfdrive/car/mazda/*' |
||||
|
||||
nissan: |
||||
- changed-files: |
||||
- any-glob-to-all-files: 'selfdrive/car/nissan/*' |
||||
|
||||
subaru: |
||||
- changed-files: |
||||
- any-glob-to-all-files: 'selfdrive/car/subaru/*' |
||||
|
||||
tesla: |
||||
- changed-files: |
||||
- any-glob-to-all-files: 'selfdrive/car/tesla/*' |
||||
|
||||
toyota: |
||||
- changed-files: |
||||
- any-glob-to-all-files: 'selfdrive/car/toyota/*' |
||||
|
||||
volkswagen: |
||||
- changed-files: |
||||
- any-glob-to-all-files: 'selfdrive/car/volkswagen/*' |
||||
|
||||
fingerprint: |
||||
- all: |
||||
- changed-files: ['selfdrive/car/*/fingerprints.py'] |
||||
- changed-files: |
||||
- any-glob-to-all-files: 'selfdrive/car/*/fingerprints.py' |
||||
|
||||
simulation: |
||||
- all: |
||||
- changed-files: ['tools/sim/**'] |
||||
- changed-files: |
||||
- any-glob-to-all-files: 'tools/sim/**' |
||||
|
||||
ui: |
||||
- all: |
||||
- changed-files: ['selfdrive/ui/**'] |
||||
tools: |
||||
- all: |
||||
- changed-files: ['tools/**'] |
||||
- changed-files: |
||||
- any-glob-to-all-files: 'selfdrive/ui/**' |
||||
|
||||
tools: |
||||
- changed-files: |
||||
- any-glob-to-all-files: 'tools/**' |
||||
|
||||
multilanguage: |
||||
- all: |
||||
- changed-files: ['selfdrive/ui/translations/**'] |
||||
- changed-files: |
||||
- any-glob-to-all-files: 'selfdrive/ui/translations/**' |
||||
|
||||
research: |
||||
- all: |
||||
- changed-files: [ |
||||
'selfdrive/modeld/models/**', |
||||
'selfdrive/test/process_replay/model_replay_ref_commit', |
||||
] |
||||
- changed-files: |
||||
- any-glob-to-all-files: "{selfdrive/modeld/models/**,selfdrive/test/process_replay/model_replay_ref_commit}" |
||||
|
@ -1 +1 @@ |
||||
Subproject commit 3aa61382b7ea9328cab7f1a2fe1ec701dffd018f |
||||
Subproject commit 61ace31efad27ae0d6d86888842f82bc92545e72 |
@ -1 +1 @@ |
||||
Subproject commit bceb8b942d3e622c2476e197102950efc4fe0bfd |
||||
Subproject commit d81d86e7cd83d1eb40314964a4d194231381d557 |
@ -1,45 +0,0 @@ |
||||
import time |
||||
|
||||
class Profiler:
  """Accumulates wall-clock time between named checkpoints and prints a summary.

  Checkpoints recorded with ignore=True are still timed per-name but excluded
  from the running total (useful when benchmarking threads that block on a
  ratekeeper, where the wait time would dominate the percentages).
  """

  def __init__(self, enabled=False):
    self.enabled = enabled
    self.cp = {}           # checkpoint name -> accumulated seconds
    self.cp_ignored = []   # names excluded from the tracked total
    self.iter = 0          # display() call count, used for per-iteration averages
    self.start_time = time.time()
    self.last_time = self.start_time
    self.tot = 0.          # total non-ignored seconds across all checkpoints

  def reset(self, enabled=False):
    """Clear all accumulated state; optionally (re)enable profiling."""
    self.enabled = enabled
    self.cp = {}
    self.cp_ignored = []
    self.iter = 0
    self.start_time = time.time()
    self.last_time = self.start_time
    # bug fix: the original reset() left self.tot at its old value, so every
    # percentage printed after a reset was computed against stale time
    self.tot = 0.

  def checkpoint(self, name, ignore=False):
    # ignore flag needed when benchmarking threads with ratekeeper
    if not self.enabled:
      return
    tt = time.time()
    if name not in self.cp:
      self.cp[name] = 0.
      if ignore:
        self.cp_ignored.append(name)
    self.cp[name] += tt - self.last_time
    if not ignore:
      self.tot += tt - self.last_time
    self.last_time = tt

  def display(self):
    """Print per-checkpoint totals, averages, and share of the tracked total."""
    if not self.enabled:
      return
    self.iter += 1
    # bug fix: guard the percentage divisor — if every checkpoint was ignored
    # (or none fired) self.tot is 0 and the original raised ZeroDivisionError
    tot = self.tot if self.tot > 0. else 1e-9
    print("******* Profiling %d *******" % self.iter)
    for n, ms in sorted(self.cp.items(), key=lambda x: -x[1]):
      if n in self.cp_ignored:
        print("%30s: %9.2f avg: %7.2f percent: %3.0f IGNORED" % (n, ms*1000.0, ms*1000.0/self.iter, ms/tot*100))
      else:
        print("%30s: %9.2f avg: %7.2f percent: %3.0f" % (n, ms*1000.0, ms*1000.0/self.iter, ms/tot*100))
    print(f"Iter clock: {self.tot / self.iter:2.6f} TOTAL: {self.tot:2.2f}")
@ -1 +1 @@ |
||||
Subproject commit ff235b706b46c01ca34661e91cbbf769fe782ec9 |
||||
Subproject commit d47ab8751ffa64fe15ce5c1767e04193b06bd189 |
@ -1 +1 @@ |
||||
Subproject commit 114b85a649341d55d6beb36d7414eda5e6d324a2 |
||||
Subproject commit d66161966d8468223b645c8eba1324e9a49de916 |
File diff suppressed because it is too large
Load Diff
@ -1 +1 @@ |
||||
Subproject commit 44e8a891a2810f274a1fa980775155d9463e87b9 |
||||
Subproject commit 18b91458fd396530d43e1a2fe9a3ac9055fa9109 |
@ -0,0 +1,20 @@ |
||||
from cereal import car |
||||
import unittest |
||||
from openpilot.selfdrive.car.subaru.fingerprints import FW_VERSIONS |
||||
|
||||
Ecu = car.CarParams.Ecu |
||||
|
||||
ECU_NAME = {v: k for k, v in Ecu.schema.enumerants.items()} |
||||
|
||||
|
||||
class TestSubaruFingerprint(unittest.TestCase):
  """Sanity checks on the Subaru firmware fingerprint database."""

  def test_fw_version_format(self):
    # every FW version listed for a given ECU must have the same byte length
    for platform, ecus in FW_VERSIONS.items():
      for (ecu, _, _), versions in ecus.items():
        expected_len = len(versions[0])
        for version in versions:
          self.assertEqual(len(version), expected_len, f"{platform} {ecu}: {len(version)} {expected_len}")
||||
|
||||
|
||||
if __name__ == "__main__": |
||||
unittest.main() |
@ -1,111 +0,0 @@ |
||||
#!/usr/bin/env python3 |
||||
''' |
||||
printing the gap between interrupts in a histogram to check if the |
||||
frequency is what we expect, the bmx is not interrupt driven for as we |
||||
get interrupts in a 2kHz rate. |
||||
''' |
||||
|
||||
import argparse |
||||
import sys |
||||
import numpy as np |
||||
from collections import defaultdict |
||||
|
||||
from openpilot.tools.lib.logreader import LogReader |
||||
from openpilot.tools.lib.route import Route |
||||
|
||||
import matplotlib.pyplot as plt |
||||
|
||||
SRC_BMX = "bmx055" |
||||
SRC_LSM = "lsm6ds3" |
||||
|
||||
|
||||
def parseEvents(log_reader):
  """Bucket accelerometer/gyroscope event timestamps (in seconds) by sensor source.

  Returns (bmx_data, lsm_data), each a defaultdict with "accel" and "gyro"
  lists of timestamps converted from nanoseconds.
  """
  data_by_source = {SRC_BMX: defaultdict(list), SRC_LSM: defaultdict(list)}

  for msg in log_reader:
    if msg.which() not in ('accelerometer', 'gyroscope'):
      continue

    event = getattr(msg, msg.which()).to_dict()

    buckets = data_by_source.get(event["source"])
    if buckets is None:
      # event came from a sensor we are not comparing
      continue

    # the presence of the "acceleration"/"gyroUncalibrated" key identifies the
    # measurement type carried by this event
    if "acceleration" in event:
      buckets["accel"].append(event["timestamp"] / 1e9)
    if "gyroUncalibrated" in event:
      buckets["gyro"].append(event["timestamp"] / 1e9)

  return data_by_source[SRC_BMX], data_by_source[SRC_LSM]
||||
|
||||
|
||||
def cleanData(data):
  """Sort timestamps in place and return them with their consecutive differences.

  Returns ([], []) for empty input; otherwise the (same, now-sorted) list and
  an np.diff array of the gaps between consecutive entries.
  """
  if not data:
    return [], []

  data.sort()  # in-place on purpose: the caller's list ends up sorted too
  return data, np.diff(data)
||||
|
||||
|
||||
def logAvgValues(data, sensor):
  """Print the mean interval and the implied rate in Hz for a list of time diffs."""
  if not data:
    print(f"{sensor}: no data to average")
    return

  mean = sum(data) / len(data)
  print(f"{sensor}: data_points: {len(data)} avg [ns]: {mean} avg [Hz]: {1 / mean}")
||||
|
||||
|
||||
if __name__ == "__main__":
  parser = argparse.ArgumentParser()
  parser.add_argument("route", type=str, help="route name")
  parser.add_argument("segment", type=int, help="segment number")
  args = parser.parse_args()

  # resolve the route and bail out early if the requested segment is missing
  r = Route(args.route)
  logs = r.log_paths()

  if len(logs) == 0:
    print("NO data routes")
    sys.exit(0)

  if args.segment >= len(logs):
    print(f"RouteID: {args.segment} out of range, max: {len(logs) -1}")
    sys.exit(0)

  lr = LogReader(logs[args.segment])
  bmx_data, lsm_data = parseEvents(lr)

  # sort bmx accel data, and then cal all the diffs, and to a histogram of those
  bmx_accel, bmx_accel_diffs = cleanData(bmx_data["accel"])
  bmx_gyro, bmx_gyro_diffs = cleanData(bmx_data["gyro"])
  lsm_accel, lsm_accel_diffs = cleanData(lsm_data["accel"])
  lsm_gyro, lsm_gyro_diffs = cleanData(lsm_data["gyro"])

  # get out the averages (mean gap between interrupts and the implied Hz)
  logAvgValues(bmx_accel_diffs, "bmx accel")
  logAvgValues(bmx_gyro_diffs, "bmx gyro ")
  logAvgValues(lsm_accel_diffs, "lsm accel")
  logAvgValues(lsm_gyro_diffs, "lsm gyro ")

  # histogram of interrupt gaps, one figure per sensor chip
  fig, axs = plt.subplots(1, 2, tight_layout=True)
  axs[0].hist(bmx_accel_diffs, bins=50)
  axs[0].set_title("bmx_accel")
  axs[1].hist(bmx_gyro_diffs, bins=50)
  axs[1].set_title("bmx_gyro")

  figl, axsl = plt.subplots(1, 2, tight_layout=True)
  axsl[0].hist(lsm_accel_diffs, bins=50)
  axsl[0].set_title("lsm_accel")
  axsl[1].hist(lsm_gyro_diffs, bins=50)
  axsl[1].set_title("lsm_gyro")

  print("check plot...")
  plt.show()  # blocks until the plot windows are closed
@ -1,3 +1,3 @@ |
||||
version https://git-lfs.github.com/spec/v1 |
||||
oid sha256:adc5aca6753b6ae0a1469f3e5bcb943d00cc9de75218489f2e4c3d960e7af048 |
||||
size 14138061 |
||||
oid sha256:4971931accb5ba2e534bb3e0c591826ee507e2988df2eccf1fe862c303ddf9c5 |
||||
size 14221074 |
||||
|
@ -1,3 +1,3 @@ |
||||
version https://git-lfs.github.com/spec/v1 |
||||
oid sha256:c808717d073a0bb347f9ba929953c0b2b792ce9997f343f7e44a0b2b0e139132 |
||||
oid sha256:fa346ada6f8c6326a5ee5fcd27e45e3e710049358079413c6a4624b20c6e1e47 |
||||
size 3630942 |
||||
|
@ -1,3 +1,3 @@ |
||||
version https://git-lfs.github.com/spec/v1 |
||||
oid sha256:cf6133c5bff295a3ee69eeb01297ba77adb6b83dbc1d774442a48117dbaf4626 |
||||
size 48457192 |
||||
oid sha256:ae44fe832fe48b89998f09cebb1bcd129864a8f51497b636cd38e66e46d69a89 |
||||
size 48457850 |
||||
|
@ -0,0 +1,61 @@ |
||||
#!/usr/bin/env python3 |
||||
import json |
||||
import random |
||||
import unittest |
||||
import numpy as np |
||||
|
||||
import cereal.messaging as messaging |
||||
from openpilot.common.params import Params |
||||
from openpilot.selfdrive.manager.process_config import managed_processes |
||||
|
||||
|
||||
class TestNavd(unittest.TestCase):
  """Integration test that runs the navd process and checks it produces routes."""

  def setUp(self):
    self.params = Params()
    self.sm = messaging.SubMaster(['navRoute', 'navInstruction'])

  def tearDown(self):
    # make sure the daemon is stopped even if a test raised mid-run
    managed_processes['navd'].stop()

  def _check_route(self, start, end, check_coords=True):
    # navd reads its destination and current position from Params
    self.params.put("NavDestination", json.dumps(end))
    self.params.put("LastGPSPosition", json.dumps(start))

    managed_processes['navd'].start()
    # wait up to 30 SubMaster updates (1s timeout each) for both
    # navRoute and navInstruction to publish at least one frame
    for _ in range(30):
      self.sm.update(1000)
      if all(f > 0 for f in self.sm.rcv_frame.values()):
        break
    else:
      raise Exception("didn't get a route")

    assert managed_processes['navd'].proc.is_alive()
    managed_processes['navd'].stop()

    # ensure start and end match up
    if check_coords:
      coords = self.sm['navRoute'].coordinates
      assert np.allclose([start['latitude'], start['longitude'], end['latitude'], end['longitude']],
                         [coords[0].latitude, coords[0].longitude, coords[-1].latitude, coords[-1].longitude],
                         rtol=1e-3)

  def test_simple(self):
    # fixed, routable coordinate pair; endpoints are verified against the route
    start = {
      "latitude": 32.7427228,
      "longitude": -117.2321177,
    }
    end = {
      "latitude": 32.7557004,
      "longitude": -117.268002,
    }
    self._check_route(start, end)

  def test_random(self):
    # random endpoints may be far from any road, so only require that a route
    # arrives — skip the coordinate match
    for _ in range(10):
      start = {"latitude": random.uniform(-90, 90), "longitude": random.uniform(-180, 180)}
      end = {"latitude": random.uniform(-90, 90), "longitude": random.uniform(-180, 180)}
      self._check_route(start, end, check_coords=False)
||||
|
||||
|
||||
if __name__ == "__main__": |
||||
unittest.main() |
@ -1 +1 @@ |
||||
91cd2bf71771c2770c0effc26c0bb23d27208138 |
||||
ad64b6f38c1362e9d184f3fc95299284eacb56d4 |
||||
|
@ -1 +1 @@ |
||||
ea96f935a7a16c53623c3b03e70c0fbfa6b249e7 |
||||
1b981ce7f817974d4a7a28b06f01f727a5a7ea7b |
@ -0,0 +1,138 @@ |
||||
#!/usr/bin/env python3 |
||||
|
||||
import argparse |
||||
import json |
||||
import os |
||||
import pathlib |
||||
import xml.etree.ElementTree as ET |
||||
from typing import cast |
||||
|
||||
import requests |
||||
|
||||
TRANSLATIONS_DIR = pathlib.Path(__file__).resolve().parent |
||||
TRANSLATIONS_LANGUAGES = TRANSLATIONS_DIR / "languages.json" |
||||
|
||||
OPENAI_MODEL = "gpt-4" |
||||
OPENAI_API_KEY = os.environ.get("OPENAI_API_KEY") |
||||
OPENAI_PROMPT = "You are a professional translator from English to {language} (ISO 639 language code). " + \ |
||||
"The following sentence or word is in the GUI of a software called openpilot, translate it accordingly." |
||||
|
||||
|
||||
def get_language_files(languages: list[str] | None = None) -> dict[str, pathlib.Path]:
  """Map language codes to their Qt .ts translation files.

  Args:
    languages: restrict the result to these language codes; None selects
      every language listed in languages.json.

  Returns:
    dict of language code (e.g. "fr") -> path to its main_<code>.ts file.
  """
  files = {}

  with open(TRANSLATIONS_LANGUAGES) as fp:
    language_dict = json.load(fp)

  for filename in language_dict.values():
    # bug fix: build the path from each entry's filename — the previous code
    # used a fixed literal, so every language resolved to the same file and
    # the loop variable was never used. Entries are named like "main_fr",
    # which the split("main_") below relies on.
    path = TRANSLATIONS_DIR / f"{filename}.ts"
    language = path.stem.split("main_")[1]

    if languages is None or language in languages:
      files[language] = path

  return files
||||
|
||||
|
||||
def translate_phrase(text: str, language: str) -> str:
  """Translate a single UI string into *language* via the OpenAI chat API.

  Raises requests.HTTPError on any 4xx/5xx response.
  """
  payload = {
    "model": OPENAI_MODEL,
    "messages": [
      {"role": "system", "content": OPENAI_PROMPT.format(language=language)},
      {"role": "user", "content": text},
    ],
    "temperature": 0.8,
    "max_tokens": 1024,
    "top_p": 1,
  }
  headers = {
    "Authorization": f"Bearer {OPENAI_API_KEY}",
    "Content-Type": "application/json",
  }

  resp = requests.post("https://api.openai.com/v1/chat/completions", json=payload, headers=headers)

  # like raise_for_status(), but the error message carries the response body
  if 400 <= resp.status_code < 600:
    raise requests.HTTPError(f'Error {resp.status_code}: {resp.json()}', response=resp)

  return cast(str, resp.json()["choices"][0]["message"]["content"])
||||
|
||||
|
||||
def translate_file(path: pathlib.Path, language: str, all_: bool) -> None:
  """Translate a Qt .ts file in place.

  Only messages whose translation is marked "unfinished" are translated
  unless all_ is True, in which case every message is re-translated.
  """
  tree = ET.parse(path)
  root = tree.getroot()

  for context in root.findall("./context"):
    name = context.find("name")
    if name is None:
      raise ValueError("name not found")

    print(f"Context: {name.text}")

    for message in context.findall("./message"):
      source = message.find("source")
      translation = message.find("translation")
      if source is None or translation is None:
        raise ValueError("source or translation not found")

      # skip already-finished entries unless a full re-translation was requested
      if not all_ and translation.attrib.get("type") != "unfinished":
        continue

      llm_translation = translate_phrase(cast(str, source.text), language)

      print(f"Source: {source.text}\n"
            f"Current translation: {translation.text}\n"
            f"LLM translation: {llm_translation}")

      translation.text = llm_translation

  # re-add the XML declaration and DOCTYPE that ElementTree drops on serialize
  with path.open("w", encoding="utf-8") as fp:
    fp.write('<?xml version="1.0" encoding="utf-8"?>\n'
             '<!DOCTYPE TS>\n'
             + ET.tostring(root, encoding="utf-8").decode())
||||
|
||||
|
||||
def main():
  """CLI entry point: parse arguments, validate the API key, translate files."""
  parser = argparse.ArgumentParser("Auto translate")

  group = parser.add_mutually_exclusive_group(required=True)
  group.add_argument("-a", "--all-files", action="store_true", help="Translate all files")
  group.add_argument("-f", "--file", nargs="+", help="Translate the selected files. (Example: -f fr de)")
  parser.add_argument("-t", "--all-translations", action="store_true", default=False,
                      help="Translate all sections. (Default: only unfinished)")

  args = parser.parse_args()

  # refuse to run without credentials
  if OPENAI_API_KEY is None:
    print("OpenAI API key is missing. (Hint: use `export OPENAI_API_KEY=YOUR-KEY` before you run the script).\n" +
          "If you don't have one go to: https://beta.openai.com/account/api-keys.")
    exit(1)

  files = get_language_files(None if args.all_files else args.file)

  # report any explicitly requested language that has no translation file
  if args.file:
    missing_files = set(args.file) - set(files)
    if len(missing_files):
      print(f"No language files found: {missing_files}")
      exit(1)

  print(f"Translation mode: {'all' if args.all_translations else 'only unfinished'}. Files: {list(files)}")

  for lang, path in files.items():
    print(f"Translate {lang} ({path})")
    translate_file(path, lang, args.all_translations)
||||
|
||||
|
||||
if __name__ == "__main__": |
||||
main() |
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in new issue