Merge remote-tracking branch 'origin/master' into online-lag

pull/34531/head
Kacper Rączy 6 months ago
commit a725fb8462
  1. 106
      common/clutil.cc
  2. 2
      common/clutil.h
  3. 19
      common/numpy_fast.py
  4. 2
      opendbc_repo
  5. 2
      panda
  6. 7
      release/build_release.sh
  7. 44
      selfdrive/assets/offroad/tc.html
  8. 4
      selfdrive/car/CARS_template.md
  9. 3
      selfdrive/car/cruise.py
  10. 5
      selfdrive/debug/test_fw_query_on_routes.py
  11. 2
      selfdrive/locationd/locationd.py
  12. 29
      selfdrive/locationd/paramsd.py
  13. 2
      selfdrive/test/process_replay/ref_commit
  14. 49
      selfdrive/ui/qt/offroad/onboarding.cc
  15. 3
      selfdrive/ui/qt/offroad/onboarding.h
  16. 5
      selfdrive/ui/qt/onroad/hud.cc
  17. 1
      selfdrive/ui/qt/onroad/hud.h
  18. 13
      selfdrive/ui/soundd.py
  19. 16
      selfdrive/ui/translations/main_ar.ts
  20. 16
      selfdrive/ui/translations/main_de.ts
  21. 16
      selfdrive/ui/translations/main_es.ts
  22. 16
      selfdrive/ui/translations/main_fr.ts
  23. 16
      selfdrive/ui/translations/main_ja.ts
  24. 16
      selfdrive/ui/translations/main_ko.ts
  25. 16
      selfdrive/ui/translations/main_pt-BR.ts
  26. 16
      selfdrive/ui/translations/main_th.ts
  27. 16
      selfdrive/ui/translations/main_tr.ts
  28. 16
      selfdrive/ui/translations/main_zh-CHS.ts
  29. 16
      selfdrive/ui/translations/main_zh-CHT.ts
  30. 2
      system/camerad/cameras/camera_common.cc
  31. 4
      system/camerad/cameras/camera_qcom2.cc
  32. 11
      system/camerad/cameras/hw.h
  33. 270
      system/camerad/cameras/spectra.cc
  34. 30
      system/camerad/cameras/spectra.h
  35. 3
      system/camerad/sensors/os04c10.cc
  36. 36
      system/camerad/test/test_camerad.py
  37. 6
      system/loggerd/deleter.py
  38. 8
      system/loggerd/tests/test_zstd_writer.cc
  39. 2
      tools/adb_shell.sh
  40. 2
      tools/plotjuggler/layouts/max-torque-debug.xml
  41. 8
      tools/plotjuggler/layouts/torque-controller.xml
  42. 5
      tools/replay/ui.py
  43. 24
      uv.lock

@ -96,109 +96,3 @@ cl_program cl_program_from_source(cl_context ctx, cl_device_id device_id, const
}
return prg;
}
cl_program cl_program_from_binary(cl_context ctx, cl_device_id device_id, const uint8_t* binary, size_t length, const char* args) {
cl_program prg = CL_CHECK_ERR(clCreateProgramWithBinary(ctx, 1, &device_id, &length, &binary, NULL, &err));
if (int err = clBuildProgram(prg, 1, &device_id, args, NULL, NULL); err != 0) {
cl_print_build_errors(prg, device_id);
assert(0);
}
return prg;
}
// Given a cl code and return a string representation
#define CL_ERR_TO_STR(err) case err: return #err
const char* cl_get_error_string(int err) {
switch (err) {
CL_ERR_TO_STR(CL_SUCCESS);
CL_ERR_TO_STR(CL_DEVICE_NOT_FOUND);
CL_ERR_TO_STR(CL_DEVICE_NOT_AVAILABLE);
CL_ERR_TO_STR(CL_COMPILER_NOT_AVAILABLE);
CL_ERR_TO_STR(CL_MEM_OBJECT_ALLOCATION_FAILURE);
CL_ERR_TO_STR(CL_OUT_OF_RESOURCES);
CL_ERR_TO_STR(CL_OUT_OF_HOST_MEMORY);
CL_ERR_TO_STR(CL_PROFILING_INFO_NOT_AVAILABLE);
CL_ERR_TO_STR(CL_MEM_COPY_OVERLAP);
CL_ERR_TO_STR(CL_IMAGE_FORMAT_MISMATCH);
CL_ERR_TO_STR(CL_IMAGE_FORMAT_NOT_SUPPORTED);
CL_ERR_TO_STR(CL_MAP_FAILURE);
CL_ERR_TO_STR(CL_MISALIGNED_SUB_BUFFER_OFFSET);
CL_ERR_TO_STR(CL_EXEC_STATUS_ERROR_FOR_EVENTS_IN_WAIT_LIST);
CL_ERR_TO_STR(CL_COMPILE_PROGRAM_FAILURE);
CL_ERR_TO_STR(CL_LINKER_NOT_AVAILABLE);
CL_ERR_TO_STR(CL_LINK_PROGRAM_FAILURE);
CL_ERR_TO_STR(CL_DEVICE_PARTITION_FAILED);
CL_ERR_TO_STR(CL_KERNEL_ARG_INFO_NOT_AVAILABLE);
CL_ERR_TO_STR(CL_INVALID_VALUE);
CL_ERR_TO_STR(CL_INVALID_DEVICE_TYPE);
CL_ERR_TO_STR(CL_INVALID_PLATFORM);
CL_ERR_TO_STR(CL_INVALID_DEVICE);
CL_ERR_TO_STR(CL_INVALID_CONTEXT);
CL_ERR_TO_STR(CL_INVALID_QUEUE_PROPERTIES);
CL_ERR_TO_STR(CL_INVALID_COMMAND_QUEUE);
CL_ERR_TO_STR(CL_INVALID_HOST_PTR);
CL_ERR_TO_STR(CL_INVALID_MEM_OBJECT);
CL_ERR_TO_STR(CL_INVALID_IMAGE_FORMAT_DESCRIPTOR);
CL_ERR_TO_STR(CL_INVALID_IMAGE_SIZE);
CL_ERR_TO_STR(CL_INVALID_SAMPLER);
CL_ERR_TO_STR(CL_INVALID_BINARY);
CL_ERR_TO_STR(CL_INVALID_BUILD_OPTIONS);
CL_ERR_TO_STR(CL_INVALID_PROGRAM);
CL_ERR_TO_STR(CL_INVALID_PROGRAM_EXECUTABLE);
CL_ERR_TO_STR(CL_INVALID_KERNEL_NAME);
CL_ERR_TO_STR(CL_INVALID_KERNEL_DEFINITION);
CL_ERR_TO_STR(CL_INVALID_KERNEL);
CL_ERR_TO_STR(CL_INVALID_ARG_INDEX);
CL_ERR_TO_STR(CL_INVALID_ARG_VALUE);
CL_ERR_TO_STR(CL_INVALID_ARG_SIZE);
CL_ERR_TO_STR(CL_INVALID_KERNEL_ARGS);
CL_ERR_TO_STR(CL_INVALID_WORK_DIMENSION);
CL_ERR_TO_STR(CL_INVALID_WORK_GROUP_SIZE);
CL_ERR_TO_STR(CL_INVALID_WORK_ITEM_SIZE);
CL_ERR_TO_STR(CL_INVALID_GLOBAL_OFFSET);
CL_ERR_TO_STR(CL_INVALID_EVENT_WAIT_LIST);
CL_ERR_TO_STR(CL_INVALID_EVENT);
CL_ERR_TO_STR(CL_INVALID_OPERATION);
CL_ERR_TO_STR(CL_INVALID_GL_OBJECT);
CL_ERR_TO_STR(CL_INVALID_BUFFER_SIZE);
CL_ERR_TO_STR(CL_INVALID_MIP_LEVEL);
CL_ERR_TO_STR(CL_INVALID_GLOBAL_WORK_SIZE);
CL_ERR_TO_STR(CL_INVALID_PROPERTY);
CL_ERR_TO_STR(CL_INVALID_IMAGE_DESCRIPTOR);
CL_ERR_TO_STR(CL_INVALID_COMPILER_OPTIONS);
CL_ERR_TO_STR(CL_INVALID_LINKER_OPTIONS);
CL_ERR_TO_STR(CL_INVALID_DEVICE_PARTITION_COUNT);
case -69: return "CL_INVALID_PIPE_SIZE";
case -70: return "CL_INVALID_DEVICE_QUEUE";
case -71: return "CL_INVALID_SPEC_ID";
case -72: return "CL_MAX_SIZE_RESTRICTION_EXCEEDED";
case -1002: return "CL_INVALID_D3D10_DEVICE_KHR";
case -1003: return "CL_INVALID_D3D10_RESOURCE_KHR";
case -1004: return "CL_D3D10_RESOURCE_ALREADY_ACQUIRED_KHR";
case -1005: return "CL_D3D10_RESOURCE_NOT_ACQUIRED_KHR";
case -1006: return "CL_INVALID_D3D11_DEVICE_KHR";
case -1007: return "CL_INVALID_D3D11_RESOURCE_KHR";
case -1008: return "CL_D3D11_RESOURCE_ALREADY_ACQUIRED_KHR";
case -1009: return "CL_D3D11_RESOURCE_NOT_ACQUIRED_KHR";
case -1010: return "CL_INVALID_DX9_MEDIA_ADAPTER_KHR";
case -1011: return "CL_INVALID_DX9_MEDIA_SURFACE_KHR";
case -1012: return "CL_DX9_MEDIA_SURFACE_ALREADY_ACQUIRED_KHR";
case -1013: return "CL_DX9_MEDIA_SURFACE_NOT_ACQUIRED_KHR";
case -1093: return "CL_INVALID_EGL_OBJECT_KHR";
case -1092: return "CL_EGL_RESOURCE_NOT_ACQUIRED_KHR";
case -1001: return "CL_PLATFORM_NOT_FOUND_KHR";
case -1057: return "CL_DEVICE_PARTITION_FAILED_EXT";
case -1058: return "CL_INVALID_PARTITION_COUNT_EXT";
case -1059: return "CL_INVALID_PARTITION_NAME_EXT";
case -1094: return "CL_INVALID_ACCELERATOR_INTEL";
case -1095: return "CL_INVALID_ACCELERATOR_TYPE_INTEL";
case -1096: return "CL_INVALID_ACCELERATOR_DESCRIPTOR_INTEL";
case -1097: return "CL_ACCELERATOR_TYPE_NOT_SUPPORTED_INTEL";
case -1000: return "CL_INVALID_GL_SHAREGROUP_REFERENCE_KHR";
case -1098: return "CL_INVALID_VA_API_MEDIA_ADAPTER_INTEL";
case -1099: return "CL_INVALID_VA_API_MEDIA_SURFACE_INTEL";
case -1100: return "CL_VA_API_MEDIA_SURFACE_ALREADY_ACQUIRED_INTEL";
case -1101: return "CL_VA_API_MEDIA_SURFACE_NOT_ACQUIRED_INTEL";
default: return "CL_UNKNOWN_ERROR";
}
}

@ -25,6 +25,4 @@ cl_device_id cl_get_device_id(cl_device_type device_type);
cl_context cl_create_context(cl_device_id device_id);
void cl_release_context(cl_context context);
cl_program cl_program_from_source(cl_context ctx, cl_device_id device_id, const std::string& src, const char* args = nullptr);
cl_program cl_program_from_binary(cl_context ctx, cl_device_id device_id, const uint8_t* binary, size_t length, const char* args = nullptr);
cl_program cl_program_from_file(cl_context ctx, cl_device_id device_id, const char* path, const char* args);
const char* cl_get_error_string(int err);

@ -1,19 +0,0 @@
def clip(x, lo, hi):
return max(lo, min(hi, x))
def interp(x, xp, fp):
N = len(xp)
def get_interp(xv):
hi = 0
while hi < N and xv > xp[hi]:
hi += 1
low = hi - 1
return fp[-1] if hi == N and xv > xp[low] else (
fp[0] if hi == 0 else
(xv - xp[low]) * (fp[hi] - fp[low]) / (xp[hi] - xp[low]) + fp[low])
return [get_interp(v) for v in x] if hasattr(x, '__iter__') else get_interp(x)
def mean(x):
return sum(x) / len(x)

@ -1 +1 @@
Subproject commit e7d08ac4794c859e604d260da010d4685c164e5b
Subproject commit 866beab8faec5acd47bd042bc12183dccfa11c39

@ -1 +1 @@
Subproject commit 3bf1f8e871f3c577fccba07f57958f30508a2187
Subproject commit 1d5b89956b32bbda2940724ce70c5166e44668c5

@ -92,14 +92,9 @@ git add -f .
git commit --amend -m "openpilot v$VERSION"
# Run tests
TEST_FILES="tools/"
cd $SOURCE_DIR
cp -pR -n --parents $TEST_FILES $BUILD_DIR/
cd $BUILD_DIR
RELEASE=1 pytest -n0 -s selfdrive/test/test_onroad.py
#system/manager/test/test_manager.py
pytest selfdrive/car/tests/test_car_interfaces.py
rm -rf $TEST_FILES
#pytest selfdrive/car/tests/test_car_interfaces.py
if [ ! -z "$RELEASE_BRANCH" ]; then
echo "[-] pushing release T=$SECONDS"

@ -1,44 +0,0 @@
<!DOCTYPE html>
<head>
<meta charset="utf-8" />
<title>openpilot Terms of Service</title>
<style type="text/css">
p {
margin: 45px 0;
}
</style>
</head>
<body>
<p>The Terms and Conditions below are effective for all users</p>
<p>Last Updated on October 18, 2019</p>
<p>Please read these Terms of Use (“Terms”) carefully before using openpilot which is open-sourced software developed by Comma.ai, Inc., a corporation organized under the laws of Delaware (“comma,” “us,” “we,” or “our”).</p>
<p><strong>Before using and by accessing openpilot, you indicate that you have read, understood, and agree to these Terms.</strong> These Terms apply to all users and others who access or use openpilot. If others use openpilot through your user account or vehicle, you are responsible to ensure that they only use openpilot when it is safe to do so, and in compliance with these Terms and with applicable law. If you disagree with any part of the Terms, you should not access or use openpilot.</p>
<h3 id="communications">Communications</h3>
<p>You agree that comma may contact you by email or telephone in connection with openpilot or for other business purposes. You may opt out of receiving email messages at any time by contacting us at support@comma.ai.</p>
<p>We collect, use, and share information from and about you and your vehicle in connection with openpilot. You consent to comma accessing the systems associated with openpilot, without additional notice or consent, for the purposes of providing openpilot, data collection, software updates, safety and cybersecurity, suspension or removal of your account, and as disclosed in the Privacy Policy (available at https://connect.comma.ai/privacy).</p>
<h3 id="safety">Safety</h3>
<p>openpilot performs the functions of Adaptive Cruise Control (ACC) and Lane Keeping Assist System (LKAS) designed for use in compatible motor vehicles. While using openpilot, it is your responsibility to obey all laws, traffic rules, and traffic regulations governing your vehicle and its operation. Access to and use of openpilot is at your own risk and responsibility, and openpilot should be accessed and/or used only when you can do so safely.</p>
<p>openpilot does not make your vehicle “autonomous” or capable of operation without the active monitoring of a licensed driver. It is designed to assist a licensed driver. A licensed driver must pay attention to the road, remain aware of navigation at all times, and be prepared to take immediate action. Failure to do so can cause damage, injury, or death.</p>
<h3 id="supported-locations-and-models">Supported Locations and Models</h3>
<p>openpilot is compatible only with particular makes and models of vehicles. For a complete list of currently supported vehicles, visit https://comma.ai. openpilot will not function properly when installed in an incompatible vehicle. openpilot is compatible only within the geographical boundaries of the United States of America.</p>
<h3 id="indemnification">Indemnification</h3>
<p><strong>To the maximum extent allowable by law, you agree to defend, indemnify and hold harmless comma, and its employees, partners, suppliers, contractors, investors, agents, officers, directors, and affiliates, from and against any and all claims, damages, causes of action, penalties, interest, demands, obligations, losses, liabilities, costs or debt, additional taxes, and expenses (including but not limited to attorneys’ fees), resulting from or arising out of (i) your use and access of, or inability to use or access, openpilot, (ii) your breach of these Terms, (iii) the inaccuracy of any information, representation or warranty made by you, (iv) activities of anyone other than you in connection with openpilot conducted through your comma device or account, (v) any other of your activities under or in connection with these Terms or openpilot.</strong></p>
<h3 id="limitation-of-liability">Limitation of Liability</h3>
<p>In no event shall comma, nor its directors, employees, partners, agents, suppliers, or affiliates, be liable for any indirect, incidental, special, consequential or punitive damages, including without limitation, loss of profits, data, use, goodwill, or other intangible losses, resulting from (i) your access to or use of or inability to access or use of the Software; or (ii) any conduct or content of any third party on the Software whether based on warranty, contract, tort (including negligence) or any other legal theory, whether or not we have been informed of the possibility of such damage, and even if a remedy set forth herein is found to have failed of its essential purpose.</p>
<h3 id="no-warranty-or-obligations-to-maintain-or-service">No Warranty or Obligations to Maintain or Service</h3>
<p>comma provides openpilot without representations, conditions, or warranties of any kind. openpilot is provided on an “AS IS” and “AS AVAILABLE” basis, including with all faults and errors as may occur. To the extent permitted by law and unless prohibited by law, comma on behalf of itself and all persons and parties acting by, through, or for comma, explicitly disclaims all warranties or conditions, express, implied, or collateral, including any implied warranties of merchantability, satisfactory quality, and fitness for a particular purpose in respect of openpilot.</p>
<p>To the extent permitted by law, comma does not warrant the operation, performance, or availability of openpilot under all conditions. comma is not responsible for any failures caused by server errors, misdirected or redirected transmissions, failed internet connections, interruptions or failures in the transmission of data, any computer virus, or any acts or omissions of third parties that damage the network or impair wireless service.</p>
<p>We undertake reasonable measures to preserve and secure information collected through our openpilot. However, no data collection, transmission or storage system is 100% secure, and there is always a risk that your information may be intercepted without our consent. <em>In using openpilot, you acknowledge that comma is not responsible for intercepted information, and you hereby release us from any and all claims arising out of or related to the use of intercepted information in any unauthorized manner.</em></p>
<p>By providing openpilot, comma does not transfer or license its intellectual property or grant rights in its brand names, nor does comma make representations with respect to third-party intellectual property rights.</p>
<p>We are not obligated to provide any maintenance or support for openpilot, technical or otherwise. If we voluntarily provide any maintenance or support for openpilot, we may stop any such maintenance, support, or services at any time in our sole discretion.</p>
<h3 id="modification-of-software">Modification of Software</h3>
<p>In no event shall comma, nor its directors, employees, partners, agents, suppliers, or affiliates, be liable if you choose to modify the software.</p>
<h3 id="changes">Changes</h3>
<p>We reserve the right, at our sole discretion, to modify or replace these Terms at any time. If a revision is material we will provide at least 15 days’ notice prior to any new terms taking effect. What constitutes a material change will be determined at our sole discretion.</p>
<p>By continuing to access or use our Software after any revisions become effective, you agree to be bound by the revised terms. If you do not agree to the new terms, you are no longer authorized to use the Software.</p>
<h3 id="contact-us">Contact Us</h3>
<p>If you have any questions about these Terms, please contact us at support@comma.ai.</p>
</body>
</html>

@ -12,11 +12,11 @@
A supported vehicle is one that just works when you install a comma device. All supported cars provide a better experience than any stock system. Supported vehicles reference the US market unless otherwise specified.
# {{all_car_docs | length}} Supported Cars
# {{all_car_docs | selectattr('support_type', 'eq', SupportType.UPSTREAM) | list | length}} Supported Cars
|{{Column | map(attribute='value') | join('|') | replace(hardware_col_name, wide_hardware_col_name)}}|
|---|---|---|{% for _ in range((Column | length) - 3) %}{{':---:|'}}{% endfor +%}
{% for car_docs in all_car_docs %}
{% for car_docs in all_car_docs | selectattr('support_type', 'eq', SupportType.UPSTREAM) %}
|{% for column in Column %}{{car_docs.get_column(column, star_icon, video_icon, footnote_tag)}}|{% endfor %}
{% endfor %}

@ -56,6 +56,9 @@ class VCruiseHelper:
if CS.cruiseState.speed == 0:
self.v_cruise_kph = V_CRUISE_UNSET
self.v_cruise_cluster_kph = V_CRUISE_UNSET
elif CS.cruiseState.speed == -1:
self.v_cruise_kph = -1
self.v_cruise_cluster_kph = -1
else:
self.v_cruise_kph = V_CRUISE_UNSET
self.v_cruise_cluster_kph = V_CRUISE_UNSET

@ -65,8 +65,7 @@ if __name__ == "__main__":
CP = msg.carParams
car_fw = [fw for fw in CP.carFw if not fw.logging]
if len(car_fw) == 0:
print("no fw")
break
print("WARNING: no fw")
live_fingerprint = CP.carFingerprint
live_fingerprint = MIGRATION.get(live_fingerprint, live_fingerprint)
@ -98,7 +97,7 @@ if __name__ == "__main__":
print("New style (exact):", exact_matches)
print("New style (fuzzy):", fuzzy_matches)
padding = max([len(fw.brand or UNKNOWN_BRAND) for fw in car_fw])
padding = max([len(fw.brand or UNKNOWN_BRAND) for fw in car_fw] + [0])
for version in sorted(car_fw, key=lambda fw: fw.brand):
subaddr = None if version.subAddress == 0 else hex(version.subAddress)
print(f" Brand: {version.brand or UNKNOWN_BRAND:{padding}}, bus: {version.bus} - " +

@ -147,13 +147,13 @@ class LocationEstimator:
self.car_speed = abs(msg.vEgo)
elif which == "liveCalibration":
# Note that we use this message during calibration
if len(msg.rpyCalib) > 0:
calib = np.array(msg.rpyCalib)
if calib.min() < -CALIB_RPY_SANITY_CHECK or calib.max() > CALIB_RPY_SANITY_CHECK:
return HandleLogResult.INPUT_INVALID
self.device_from_calib = rot_from_euler(calib)
self.calibrated = msg.calStatus == log.LiveCalibrationData.Status.calibrated
elif which == "cameraOdometry":
if not self._validate_timestamp(t):

@ -46,18 +46,25 @@ class ParamsLearner:
self.yaw_rate_std = 0.0
self.roll = 0.0
self.steering_angle = 0.0
self.roll_valid = False
def handle_log(self, t, which, msg):
if which == 'livePose':
device_pose = Pose.from_live_pose(msg)
calibrated_pose = self.calibrator.build_calibrated_pose(device_pose)
self.yaw_rate, self.yaw_rate_std = calibrated_pose.angular_velocity.z, calibrated_pose.angular_velocity.z_std
yaw_rate_valid = msg.angularVelocityDevice.valid and self.calibrator.calib_valid
yaw_rate_valid = yaw_rate_valid and 0 < self.yaw_rate_std < 10 # rad/s
yaw_rate_valid = yaw_rate_valid and abs(self.yaw_rate) < 1 # rad/s
if yaw_rate_valid:
self.yaw_rate, self.yaw_rate_std = calibrated_pose.angular_velocity.z, calibrated_pose.angular_velocity.z_std
else:
# This is done to bound the yaw rate estimate when localizer values are invalid or calibrating
self.yaw_rate, self.yaw_rate_std = 0.0, np.radians(1)
localizer_roll, localizer_roll_std = device_pose.orientation.x, device_pose.orientation.x_std
localizer_roll_std = np.radians(1) if np.isnan(localizer_roll_std) else localizer_roll_std
self.roll_valid = (localizer_roll_std < ROLL_STD_MAX) and (ROLL_MIN < localizer_roll < ROLL_MAX) and msg.sensorsOK
if self.roll_valid:
roll_valid = (localizer_roll_std < ROLL_STD_MAX) and (ROLL_MIN < localizer_roll < ROLL_MAX) and msg.sensorsOK
if roll_valid:
roll = localizer_roll
# Experimentally found multiplier of 2 to be best trade-off between stability and accuracy or similar?
roll_std = 2 * localizer_roll_std
@ -67,18 +74,12 @@ class ParamsLearner:
roll_std = np.radians(10.0)
self.roll = np.clip(roll, self.roll - ROLL_MAX_DELTA, self.roll + ROLL_MAX_DELTA)
yaw_rate_valid = msg.angularVelocityDevice.valid and self.calibrator.calib_valid
yaw_rate_valid = yaw_rate_valid and 0 < self.yaw_rate_std < 10 # rad/s
yaw_rate_valid = yaw_rate_valid and abs(self.yaw_rate) < 1 # rad/s
if self.active:
if msg.posenetOK:
if yaw_rate_valid:
self.kf.predict_and_observe(t,
ObservationKind.ROAD_FRAME_YAW_RATE,
np.array([[-self.yaw_rate]]),
np.array([np.atleast_2d(self.yaw_rate_std**2)]))
self.kf.predict_and_observe(t,
ObservationKind.ROAD_FRAME_YAW_RATE,
np.array([[-self.yaw_rate]]),
np.array([np.atleast_2d(self.yaw_rate_std**2)]))
self.kf.predict_and_observe(t,
ObservationKind.ROAD_ROLL,

@ -1 +1 @@
a9487c6a5c2f0bb83c9d629ff64c8f64a4e4ca13
465979047295dad5da3a552db9227ed776a26a79

@ -4,7 +4,6 @@
#include <QLabel>
#include <QPainter>
#include <QScrollBar>
#include <QTransform>
#include <QVBoxLayout>
@ -12,7 +11,6 @@
#include "common/params.h"
#include "selfdrive/ui/qt/util.h"
#include "selfdrive/ui/qt/widgets/input.h"
#include "selfdrive/ui/qt/widgets/scrollview.h"
TrainingGuide::TrainingGuide(QWidget *parent) : QFrame(parent) {
setAttribute(Qt::WA_OpaquePaintEvent);
@ -85,29 +83,25 @@ void TrainingGuide::paintEvent(QPaintEvent *event) {
}
void TermsPage::showEvent(QShowEvent *event) {
// late init, building QML widget takes 200ms
if (layout()) {
return;
}
QVBoxLayout *main_layout = new QVBoxLayout(this);
main_layout->setContentsMargins(45, 35, 45, 45);
main_layout->setSpacing(0);
QLabel *title = new QLabel(tr("Terms & Conditions"));
title->setStyleSheet("font-size: 90px; font-weight: 600;");
main_layout->addWidget(title);
QVBoxLayout *vlayout = new QVBoxLayout();
vlayout->setContentsMargins(165, 165, 165, 0);
main_layout->addLayout(vlayout);
QLabel *text = new QLabel(this);
text->setTextFormat(Qt::RichText);
text->setWordWrap(true);
text->setText(QString::fromStdString(util::read_file("../assets/offroad/tc.html")));
text->setStyleSheet("font-size:50px; font-weight: 200; color: #C9C9C9; background-color:#1B1B1B; padding:50px 50px;");
ScrollView *scroll = new ScrollView(text, this);
QLabel *title = new QLabel(tr("Welcome to openpilot"));
title->setStyleSheet("font-size: 90px; font-weight: 500;");
vlayout->addWidget(title, 0, Qt::AlignTop | Qt::AlignLeft);
vlayout->addSpacing(90);
QLabel *desc = new QLabel(tr("You must accept the Terms and Conditions to use openpilot. Read the latest terms at <span style='color: #465BEA;'>https://comma.ai/terms</span> before continuing."));
desc->setWordWrap(true);
desc->setStyleSheet("font-size: 80px; font-weight: 300;");
vlayout->addWidget(desc, 0);
main_layout->addSpacing(30);
main_layout->addWidget(scroll);
main_layout->addSpacing(50);
vlayout->addStretch();
QHBoxLayout* buttons = new QHBoxLayout;
buttons->setMargin(0);
@ -118,8 +112,7 @@ void TermsPage::showEvent(QShowEvent *event) {
buttons->addWidget(decline_btn);
QObject::connect(decline_btn, &QPushButton::clicked, this, &TermsPage::declinedTerms);
accept_btn = new QPushButton(tr("Scroll to accept"));
accept_btn->setEnabled(false);
accept_btn = new QPushButton(tr("Agree"));
accept_btn->setStyleSheet(R"(
QPushButton {
background-color: #465BEA;
@ -127,23 +120,9 @@ void TermsPage::showEvent(QShowEvent *event) {
QPushButton:pressed {
background-color: #3049F4;
}
QPushButton:disabled {
background-color: #4F4F4F;
}
)");
buttons->addWidget(accept_btn);
QObject::connect(accept_btn, &QPushButton::clicked, this, &TermsPage::acceptedTerms);
QScrollBar *scroll_bar = scroll->verticalScrollBar();
connect(scroll_bar, &QScrollBar::valueChanged, this, [this, scroll_bar](int value) {
if (value == scroll_bar->maximum()) {
enableAccept();
}
});
}
void TermsPage::enableAccept() {
accept_btn->setText(tr("Agree"));
accept_btn->setEnabled(true);
}
void DeclinePage::showEvent(QShowEvent *event) {

@ -65,9 +65,6 @@ class TermsPage : public QFrame {
public:
explicit TermsPage(QWidget *parent = 0) : QFrame(parent) {}
public slots:
void enableAccept();
private:
void showEvent(QShowEvent *event) override;

@ -26,6 +26,7 @@ void HudRenderer::updateState(const UIState &s) {
// Handle older routes where vCruiseCluster is not set
set_speed = car_state.getVCruiseCluster() == 0.0 ? controls_state.getVCruiseDEPRECATED() : car_state.getVCruiseCluster();
is_cruise_set = set_speed > 0 && set_speed != SET_SPEED_NA;
is_cruise_available = set_speed != -1;
if (is_cruise_set && !is_metric) {
set_speed *= KM_TO_MILE;
@ -47,7 +48,9 @@ void HudRenderer::draw(QPainter &p, const QRect &surface_rect) {
p.fillRect(0, 0, surface_rect.width(), UI_HEADER_HEIGHT, bg);
drawSetSpeed(p, surface_rect);
if (is_cruise_available) {
drawSetSpeed(p, surface_rect);
}
drawCurrentSpeed(p, surface_rect);
p.restore();

@ -19,6 +19,7 @@ private:
float speed = 0;
float set_speed = 0;
bool is_cruise_set = false;
bool is_cruise_available = true;
bool is_metric = false;
bool v_ego_cluster_seen = false;
int status = STATUS_DISENGAGED;

@ -69,14 +69,13 @@ class Soundd:
for sound in sound_list:
filename, play_count, volume = sound_list[sound]
wavefile = wave.open(BASEDIR + "/selfdrive/assets/sounds/" + filename, 'r')
with wave.open(BASEDIR + "/selfdrive/assets/sounds/" + filename, 'r') as wavefile:
assert wavefile.getnchannels() == 1
assert wavefile.getsampwidth() == 2
assert wavefile.getframerate() == SAMPLE_RATE
assert wavefile.getnchannels() == 1
assert wavefile.getsampwidth() == 2
assert wavefile.getframerate() == SAMPLE_RATE
length = wavefile.getnframes()
self.loaded_sounds[sound] = np.frombuffer(wavefile.readframes(length), dtype=np.int16).astype(np.float32) / (2**16/2)
length = wavefile.getnframes()
self.loaded_sounds[sound] = np.frombuffer(wavefile.readframes(length), dtype=np.int16).astype(np.float32) / (2**16/2)
def get_sound_data(self, frames): # get "frames" worth of data from the current alert sound, looping when required

@ -1003,22 +1003,22 @@ This may take up to a minute.</source>
</context>
<context>
<name>TermsPage</name>
<message>
<source>Terms &amp; Conditions</source>
<translation>الشروط والأحكام</translation>
</message>
<message>
<source>Decline</source>
<translation>رفض</translation>
</message>
<message>
<source>Scroll to accept</source>
<translation>قم بالتمرير للقبول</translation>
</message>
<message>
<source>Agree</source>
<translation>أوافق</translation>
</message>
<message>
<source>Welcome to openpilot</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>You must accept the Terms and Conditions to use openpilot. Read the latest terms at &lt;span style=&apos;color: #465BEA;&apos;&gt;https://comma.ai/terms&lt;/span&gt; before continuing.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>TogglesPanel</name>

@ -987,22 +987,22 @@ This may take up to a minute.</source>
</context>
<context>
<name>TermsPage</name>
<message>
<source>Terms &amp; Conditions</source>
<translation>Benutzungsbedingungen</translation>
</message>
<message>
<source>Decline</source>
<translation>Ablehnen</translation>
</message>
<message>
<source>Scroll to accept</source>
<translation>Scrolle herunter um zu akzeptieren</translation>
</message>
<message>
<source>Agree</source>
<translation>Zustimmen</translation>
</message>
<message>
<source>Welcome to openpilot</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>You must accept the Terms and Conditions to use openpilot. Read the latest terms at &lt;span style=&apos;color: #465BEA;&apos;&gt;https://comma.ai/terms&lt;/span&gt; before continuing.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>TogglesPanel</name>

@ -987,22 +987,22 @@ Esto puede tardar un minuto.</translation>
</context>
<context>
<name>TermsPage</name>
<message>
<source>Terms &amp; Conditions</source>
<translation>Términos &amp; Condiciones</translation>
</message>
<message>
<source>Decline</source>
<translation>Rechazar</translation>
</message>
<message>
<source>Scroll to accept</source>
<translation>Desliza para aceptar</translation>
</message>
<message>
<source>Agree</source>
<translation>Aceptar</translation>
</message>
<message>
<source>Welcome to openpilot</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>You must accept the Terms and Conditions to use openpilot. Read the latest terms at &lt;span style=&apos;color: #465BEA;&apos;&gt;https://comma.ai/terms&lt;/span&gt; before continuing.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>TogglesPanel</name>

@ -987,22 +987,22 @@ Cela peut prendre jusqu&apos;à une minute.</translation>
</context>
<context>
<name>TermsPage</name>
<message>
<source>Terms &amp; Conditions</source>
<translation>Termes &amp; Conditions</translation>
</message>
<message>
<source>Decline</source>
<translation>Refuser</translation>
</message>
<message>
<source>Scroll to accept</source>
<translation>Faire défiler pour accepter</translation>
</message>
<message>
<source>Agree</source>
<translation>Accepter</translation>
</message>
<message>
<source>Welcome to openpilot</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>You must accept the Terms and Conditions to use openpilot. Read the latest terms at &lt;span style=&apos;color: #465BEA;&apos;&gt;https://comma.ai/terms&lt;/span&gt; before continuing.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>TogglesPanel</name>

@ -983,22 +983,22 @@ This may take up to a minute.</source>
</context>
<context>
<name>TermsPage</name>
<message>
<source>Terms &amp; Conditions</source>
<translation></translation>
</message>
<message>
<source>Decline</source>
<translation></translation>
</message>
<message>
<source>Scroll to accept</source>
<translation></translation>
</message>
<message>
<source>Agree</source>
<translation></translation>
</message>
<message>
<source>Welcome to openpilot</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>You must accept the Terms and Conditions to use openpilot. Read the latest terms at &lt;span style=&apos;color: #465BEA;&apos;&gt;https://comma.ai/terms&lt;/span&gt; before continuing.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>TogglesPanel</name>

@ -983,22 +983,22 @@ This may take up to a minute.</source>
</context>
<context>
<name>TermsPage</name>
<message>
<source>Terms &amp; Conditions</source>
<translation></translation>
</message>
<message>
<source>Decline</source>
<translation></translation>
</message>
<message>
<source>Scroll to accept</source>
<translation> </translation>
</message>
<message>
<source>Agree</source>
<translation></translation>
</message>
<message>
<source>Welcome to openpilot</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>You must accept the Terms and Conditions to use openpilot. Read the latest terms at &lt;span style=&apos;color: #465BEA;&apos;&gt;https://comma.ai/terms&lt;/span&gt; before continuing.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>TogglesPanel</name>

@ -987,22 +987,22 @@ Isso pode levar até um minuto.</translation>
</context>
<context>
<name>TermsPage</name>
<message>
<source>Terms &amp; Conditions</source>
<translation>Termos &amp; Condições</translation>
</message>
<message>
<source>Decline</source>
<translation>Declinar</translation>
</message>
<message>
<source>Scroll to accept</source>
<translation>Role a tela para aceitar</translation>
</message>
<message>
<source>Agree</source>
<translation>Concordo</translation>
</message>
<message>
<source>Welcome to openpilot</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>You must accept the Terms and Conditions to use openpilot. Read the latest terms at &lt;span style=&apos;color: #465BEA;&apos;&gt;https://comma.ai/terms&lt;/span&gt; before continuing.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>TogglesPanel</name>

@ -983,22 +983,22 @@ This may take up to a minute.</source>
</context>
<context>
<name>TermsPage</name>
<message>
<source>Terms &amp; Conditions</source>
<translation></translation>
</message>
<message>
<source>Decline</source>
<translation></translation>
</message>
<message>
<source>Scroll to accept</source>
<translation></translation>
</message>
<message>
<source>Agree</source>
<translation></translation>
</message>
<message>
<source>Welcome to openpilot</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>You must accept the Terms and Conditions to use openpilot. Read the latest terms at &lt;span style=&apos;color: #465BEA;&apos;&gt;https://comma.ai/terms&lt;/span&gt; before continuing.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>TogglesPanel</name>

@ -981,22 +981,22 @@ This may take up to a minute.</source>
</context>
<context>
<name>TermsPage</name>
<message>
<source>Terms &amp; Conditions</source>
<translation>Şartlar ve Koşullar</translation>
</message>
<message>
<source>Decline</source>
<translation>Reddet</translation>
</message>
<message>
<source>Scroll to accept</source>
<translation>Kabul etmek için kaydırın</translation>
</message>
<message>
<source>Agree</source>
<translation>Kabul et</translation>
</message>
<message>
<source>Welcome to openpilot</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>You must accept the Terms and Conditions to use openpilot. Read the latest terms at &lt;span style=&apos;color: #465BEA;&apos;&gt;https://comma.ai/terms&lt;/span&gt; before continuing.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>TogglesPanel</name>

@ -983,22 +983,22 @@ This may take up to a minute.</source>
</context>
<context>
<name>TermsPage</name>
<message>
<source>Terms &amp; Conditions</source>
<translation></translation>
</message>
<message>
<source>Decline</source>
<translation></translation>
</message>
<message>
<source>Scroll to accept</source>
<translation></translation>
</message>
<message>
<source>Agree</source>
<translation></translation>
</message>
<message>
<source>Welcome to openpilot</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>You must accept the Terms and Conditions to use openpilot. Read the latest terms at &lt;span style=&apos;color: #465BEA;&apos;&gt;https://comma.ai/terms&lt;/span&gt; before continuing.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>TogglesPanel</name>

@ -983,22 +983,22 @@ This may take up to a minute.</source>
</context>
<context>
<name>TermsPage</name>
<message>
<source>Terms &amp; Conditions</source>
<translation></translation>
</message>
<message>
<source>Decline</source>
<translation></translation>
</message>
<message>
<source>Scroll to accept</source>
<translation></translation>
</message>
<message>
<source>Agree</source>
<translation></translation>
</message>
<message>
<source>Welcome to openpilot</source>
<translation type="unfinished"></translation>
</message>
<message>
<source>You must accept the Terms and Conditions to use openpilot. Read the latest terms at &lt;span style=&apos;color: #465BEA;&apos;&gt;https://comma.ai/terms&lt;/span&gt; before continuing.</source>
<translation type="unfinished"></translation>
</message>
</context>
<context>
<name>TogglesPanel</name>

@ -15,7 +15,7 @@ void CameraBuf::init(cl_device_id device_id, cl_context context, SpectraCamera *
const SensorInfo *sensor = cam->sensor.get();
// RAW frames from ISP
if (cam->output_type != ISP_IFE_PROCESSED) {
if (cam->cc.output_type != ISP_IFE_PROCESSED) {
camera_bufs_raw = std::make_unique<VisionBuf[]>(frame_buf_count);
const int raw_frame_size = (sensor->frame_height + sensor->extra_height) * sensor->frame_stride;

@ -55,7 +55,7 @@ public:
float fl_pix = 0;
std::unique_ptr<PubMaster> pm;
CameraState(SpectraMaster *master, const CameraConfig &config) : camera(master, config, config.stream_type == VISION_STREAM_DRIVER ? ISP_BPS_PROCESSED : ISP_IFE_PROCESSED) {};
CameraState(SpectraMaster *master, const CameraConfig &config) : camera(master, config) {};
~CameraState();
void init(VisionIpcServer *v, cl_device_id device_id, cl_context ctx);
void update_exposure_score(float desired_ev, int exp_t, int exp_g_idx, float exp_gain);
@ -268,7 +268,7 @@ void camerad_thread() {
// *** per-cam init ***
std::vector<std::unique_ptr<CameraState>> cams;
for (const auto &config : {WIDE_ROAD_CAMERA_CONFIG, ROAD_CAMERA_CONFIG, DRIVER_CAMERA_CONFIG}) {
for (const auto &config : ALL_CAMERA_CONFIGS) {
auto cam = std::make_unique<CameraState>(&m, config);
cam->init(&v, device_id, ctx);
cams.emplace_back(std::move(cam));

@ -6,6 +6,13 @@
#include "media/cam_isp_ife.h"
typedef enum {
ISP_RAW_OUTPUT, // raw frame from sensor
ISP_IFE_PROCESSED, // fully processed image through the IFE
ISP_BPS_PROCESSED, // fully processed image through the BPS
} SpectraOutputType;
// For the comma 3/3X three camera platform
struct CameraConfig {
@ -17,6 +24,7 @@ struct CameraConfig {
bool enabled;
uint32_t phy;
bool vignetting_correction;
SpectraOutputType output_type;
};
// NOTE: to be able to disable road and wide road, we still have to configure the sensor over i2c
@ -30,6 +38,7 @@ const CameraConfig WIDE_ROAD_CAMERA_CONFIG = {
.enabled = !getenv("DISABLE_WIDE_ROAD"),
.phy = CAM_ISP_IFE_IN_RES_PHY_0,
.vignetting_correction = false,
.output_type = ISP_IFE_PROCESSED,
};
const CameraConfig ROAD_CAMERA_CONFIG = {
@ -41,6 +50,7 @@ const CameraConfig ROAD_CAMERA_CONFIG = {
.enabled = !getenv("DISABLE_ROAD"),
.phy = CAM_ISP_IFE_IN_RES_PHY_1,
.vignetting_correction = true,
.output_type = ISP_IFE_PROCESSED,
};
const CameraConfig DRIVER_CAMERA_CONFIG = {
@ -52,6 +62,7 @@ const CameraConfig DRIVER_CAMERA_CONFIG = {
.enabled = !getenv("DISABLE_DRIVER"),
.phy = CAM_ISP_IFE_IN_RES_PHY_2,
.vignetting_correction = false,
.output_type = ISP_BPS_PROCESSED,
};
const CameraConfig ALL_CAMERA_CONFIGS[] = {WIDE_ROAD_CAMERA_CONFIG, ROAD_CAMERA_CONFIG, DRIVER_CAMERA_CONFIG};

@ -233,12 +233,11 @@ void SpectraMaster::init() {
// *** SpectraCamera ***
SpectraCamera::SpectraCamera(SpectraMaster *master, const CameraConfig &config, SpectraOutputType out)
SpectraCamera::SpectraCamera(SpectraMaster *master, const CameraConfig &config)
: m(master),
enabled(config.enabled),
cc(config),
output_type(out) {
ife_buf_depth = (out == ISP_RAW_OUTPUT) ? 4 : VIPC_BUFFER_COUNT;
cc(config) {
ife_buf_depth = (cc.output_type == ISP_RAW_OUTPUT) ? 4 : VIPC_BUFFER_COUNT;
assert(ife_buf_depth < MAX_IFE_BUFS);
}
@ -249,6 +248,7 @@ SpectraCamera::~SpectraCamera() {
}
int SpectraCamera::clear_req_queue() {
// for "non-realtime" BPS
if (icp_dev_handle > 0) {
struct cam_flush_dev_cmd cmd = {
.session_handle = session_handle,
@ -260,6 +260,7 @@ int SpectraCamera::clear_req_queue() {
LOGD("flushed bps: %d", err);
}
// for "realtime" devices
struct cam_req_mgr_flush_info req_mgr_flush_request = {0};
req_mgr_flush_request.session_hdl = session_handle;
req_mgr_flush_request.link_hdl = link_handle;
@ -288,7 +289,7 @@ void SpectraCamera::camera_open(VisionIpcServer *v, cl_device_id device_id, cl_c
uv_height = VENUS_UV_SCANLINES(COLOR_FMT_NV12, sensor->frame_height);
uv_offset = stride*y_height;
yuv_size = uv_offset + stride*uv_height;
if (output_type != ISP_RAW_OUTPUT) {
if (cc.output_type != ISP_RAW_OUTPUT) {
uv_offset = ALIGNED_SIZE(uv_offset, 0x1000);
yuv_size = uv_offset + ALIGNED_SIZE(stride*uv_height, 0x1000);
}
@ -297,21 +298,14 @@ void SpectraCamera::camera_open(VisionIpcServer *v, cl_device_id device_id, cl_c
open = true;
configISP();
if (output_type == ISP_BPS_PROCESSED) configICP();
if (cc.output_type == ISP_BPS_PROCESSED) configICP();
configCSIPHY();
linkDevices();
LOGD("camera init %d", cc.camera_num);
buf.init(device_id, ctx, this, v, ife_buf_depth, cc.stream_type);
camera_map_bufs();
enqueue_req_multi(1, ife_buf_depth);
}
void SpectraCamera::enqueue_req_multi(uint64_t start, int n) {
for (uint64_t request_id = start; request_id < start + n; ++request_id) {
uint64_t idx = (request_id - 1) % ife_buf_depth;
enqueue_buffer(idx, request_id);
}
clearAndRequeue(1);
}
void SpectraCamera::sensors_start() {
@ -741,7 +735,7 @@ void SpectraCamera::config_ife(int idx, int request_id, bool init) {
buf_desc[0].offset = ife_cmd.aligned_size()*idx;
// stream of IFE register writes
bool is_raw = output_type != ISP_IFE_PROCESSED;
bool is_raw = cc.output_type != ISP_IFE_PROCESSED;
if (!is_raw) {
if (init) {
buf_desc[0].length = build_initial_config((unsigned char*)ife_cmd.ptr + buf_desc[0].offset, cc, sensor.get(), patches);
@ -830,7 +824,7 @@ void SpectraCamera::config_ife(int idx, int request_id, bool init) {
pkt->io_configs_offset = sizeof(struct cam_cmd_buf_desc)*pkt->num_cmd_buf;
struct cam_buf_io_cfg *io_cfg = (struct cam_buf_io_cfg *)((char*)&pkt->payload + pkt->io_configs_offset);
if (output_type != ISP_IFE_PROCESSED) {
if (cc.output_type != ISP_IFE_PROCESSED) {
io_cfg[0].mem_handle[0] = buf_handle_raw[idx];
io_cfg[0].planes[0] = (struct cam_plane_cfg){
.width = sensor->frame_width,
@ -901,59 +895,14 @@ void SpectraCamera::config_ife(int idx, int request_id, bool init) {
assert(ret == 0);
}
// Enqueue buffer for the given index and return true if the frame is ready
bool SpectraCamera::enqueue_buffer(int i, uint64_t request_id) {
int ret;
bool frame_ready = false;
// Before queuing up a new frame, wait for the
// previous one in this slot (index) to come in.
if (sync_objs_ife[i]) {
// TODO: write a test to stress test w/ a low timeout and check camera frame ids match
struct cam_sync_wait sync_wait = {0};
// *** Wait for IFE ***
// in RAW_OUTPUT mode, this is just the frame readout from the sensor
// in IFE_PROCESSED mode, this is both frame readout and image processing (~1ms)
sync_wait.sync_obj = sync_objs_ife[i];
sync_wait.timeout_ms = 100;
if (stress_test("IFE sync")) {
sync_wait.timeout_ms = 1;
}
ret = do_sync_control(m->cam_sync_fd, CAM_SYNC_WAIT, &sync_wait, sizeof(sync_wait));
if (ret != 0) {
LOGE("failed to wait for IFE sync: %d %d", ret, sync_wait.sync_obj);
}
// *** Wait for BPS ***
if (ret == 0 && sync_objs_bps[i]) {
sync_wait.sync_obj = sync_objs_bps[i];
sync_wait.timeout_ms = 50; // typically 7ms
if (stress_test("BPS sync")) {
sync_wait.timeout_ms = 1;
}
ret = do_sync_control(m->cam_sync_fd, CAM_SYNC_WAIT, &sync_wait, sizeof(sync_wait));
if (ret != 0) {
LOGE("failed to wait for BPS sync: %d %d", ret, sync_wait.sync_obj);
}
}
if (ret == 0) {
// all good, hand off frame
frame_ready = true;
destroySyncObjectAt(i);
} else {
// need to start over on sync failures,
// otherwise future frames will tear
clear_req_queue();
}
}
void SpectraCamera::enqueue_frame(uint64_t request_id) {
int i = request_id % ife_buf_depth;
assert(sync_objs_ife[i] == 0);
// create output fences
struct cam_sync_info sync_create = {0};
strcpy(sync_create.name, "NodeOutputPortFence");
ret = do_sync_control(m->cam_sync_fd, CAM_SYNC_CREATE, &sync_create, sizeof(sync_create));
int ret = do_sync_control(m->cam_sync_fd, CAM_SYNC_CREATE, &sync_create, sizeof(sync_create));
if (ret != 0) {
LOGE("failed to create fence: %d %d", ret, sync_create.sync_obj);
} else {
@ -984,9 +933,7 @@ bool SpectraCamera::enqueue_buffer(int i, uint64_t request_id) {
// submit request to IFE and BPS
config_ife(i, request_id);
if (output_type == ISP_BPS_PROCESSED) config_bps(i, request_id);
return frame_ready;
if (cc.output_type == ISP_BPS_PROCESSED) config_bps(i, request_id);
}
void SpectraCamera::destroySyncObjectAt(int index) {
@ -1019,7 +966,7 @@ void SpectraCamera::camera_map_bufs() {
mem_mgr_map_cmd.mmu_hdls[1] = m->icp_device_iommu;
}
if (output_type != ISP_IFE_PROCESSED) {
if (cc.output_type != ISP_IFE_PROCESSED) {
// RAW bayer images
mem_mgr_map_cmd.fd = buf.camera_bufs_raw[i].fd;
ret = do_cam_control(m->video0_fd, CAM_REQ_MGR_MAP_BUF, &mem_mgr_map_cmd, sizeof(mem_mgr_map_cmd));
@ -1028,7 +975,7 @@ void SpectraCamera::camera_map_bufs() {
buf_handle_raw[i] = mem_mgr_map_cmd.out.buf_handle;
}
if (output_type != ISP_RAW_OUTPUT) {
if (cc.output_type != ISP_RAW_OUTPUT) {
// final processed images
VisionBuf *vb = buf.vipc_server->get_buffer(buf.stream_type, i);
mem_mgr_map_cmd.fd = vb->fd;
@ -1125,7 +1072,7 @@ void SpectraCamera::configISP() {
},
};
if (output_type != ISP_IFE_PROCESSED) {
if (cc.output_type != ISP_IFE_PROCESSED) {
in_port_info.line_start = 0;
in_port_info.line_stop = sensor->frame_height + sensor->extra_height - 1;
in_port_info.height = sensor->frame_height + sensor->extra_height;
@ -1148,7 +1095,7 @@ void SpectraCamera::configISP() {
// allocate IFE memory, then configure it
ife_cmd.init(m, 67984, 0x20, false, m->device_iommu, m->cdm_iommu, ife_buf_depth);
if (output_type == ISP_IFE_PROCESSED) {
if (cc.output_type == ISP_IFE_PROCESSED) {
assert(sensor->gamma_lut_rgb.size() == 64);
ife_gamma_lut.init(m, sensor->gamma_lut_rgb.size()*sizeof(uint32_t), 0x20, false, m->device_iommu, m->cdm_iommu, 3); // 3 for RGB
for (int i = 0; i < 3; i++) {
@ -1300,7 +1247,7 @@ void SpectraCamera::linkDevices() {
ret = device_control(m->isp_fd, CAM_START_DEV, session_handle, isp_dev_handle);
LOGD("start isp: %d", ret);
assert(ret == 0);
if (output_type == ISP_BPS_PROCESSED) {
if (cc.output_type == ISP_BPS_PROCESSED) {
ret = device_control(m->icp_fd, CAM_START_DEV, session_handle, icp_dev_handle);
LOGD("start icp: %d", ret);
assert(ret == 0);
@ -1315,7 +1262,7 @@ void SpectraCamera::camera_close() {
// LOGD("stop sensor: %d", ret);
int ret = device_control(m->isp_fd, CAM_STOP_DEV, session_handle, isp_dev_handle);
LOGD("stop isp: %d", ret);
if (output_type == ISP_BPS_PROCESSED) {
if (cc.output_type == ISP_BPS_PROCESSED) {
ret = device_control(m->icp_fd, CAM_STOP_DEV, session_handle, icp_dev_handle);
LOGD("stop icp: %d", ret);
}
@ -1344,7 +1291,7 @@ void SpectraCamera::camera_close() {
LOGD("-- Release devices");
ret = device_control(m->isp_fd, CAM_RELEASE_DEV, session_handle, isp_dev_handle);
LOGD("release isp: %d", ret);
if (output_type == ISP_BPS_PROCESSED) {
if (cc.output_type == ISP_BPS_PROCESSED) {
ret = device_control(m->icp_fd, CAM_RELEASE_DEV, session_handle, icp_dev_handle);
LOGD("release icp: %d", ret);
}
@ -1371,90 +1318,129 @@ void SpectraCamera::camera_close() {
LOGD("destroyed session %d: %d", cc.camera_num, ret);
}
// Processes camera events and returns true if the frame is ready for further processing
bool SpectraCamera::handle_camera_event(const cam_req_mgr_message *event_data) {
if (stress_test("skipping handling camera event")) {
LOGW("skipping event");
/*
Handles camera SOF event. Returns true if the frame is valid for publishing.
*/
uint64_t request_id = event_data->u.frame_msg.request_id; // ID from the camera request manager
uint64_t frame_id_raw = event_data->u.frame_msg.frame_id; // raw as opposed to our re-indexed frame ID
uint64_t timestamp = event_data->u.frame_msg.timestamp; // timestamped in the kernel's SOF IRQ callback
//LOGD("handle cam %d ts %lu req id %lu frame id %lu", cc.camera_num, timestamp, request_id, frame_id_raw);
if (stress_test("skipping SOF event")) return false;
if (!validateEvent(request_id, frame_id_raw)) {
return false;
}
// ID from the qcom camera request manager
uint64_t request_id = event_data->u.frame_msg.request_id;
// raw as opposed to our re-indexed frame ID
uint64_t frame_id_raw = event_data->u.frame_msg.frame_id;
// Update tracking variables
if (request_id == request_id_last + 1) {
skip_expected = false;
}
frame_id_raw_last = frame_id_raw;
request_id_last = request_id;
// Wait until frame's fully read out and processed
if (!waitForFrameReady(request_id)) {
// Reset queue on sync failure to prevent frame tearing
LOGE("camera %d sync failure %ld %ld ", cc.camera_num, request_id, frame_id_raw);
clearAndRequeue(request_id + 1);
return false;
}
//LOGD("handle cam %d, request id %lu -> %lu, frame id raw %lu", cc.camera_num, request_id_last, request_id, frame_id_raw);
int buf_idx = request_id % ife_buf_depth;
bool ret = processFrame(buf_idx, request_id, frame_id_raw, timestamp);
destroySyncObjectAt(buf_idx);
enqueue_frame(request_id + ife_buf_depth); // request next frame for this slot
return ret;
}
if (request_id != 0) { // next ready
// check for skipped_last frames
if (frame_id_raw > frame_id_raw_last + 1 && !skipped_last) {
LOGE("camera %d realign", cc.camera_num);
clear_req_queue();
enqueue_req_multi(request_id + 1, ife_buf_depth - 1);
skipped_last = true;
} else if (frame_id_raw == frame_id_raw_last + 1) {
skipped_last = false;
bool SpectraCamera::validateEvent(uint64_t request_id, uint64_t frame_id_raw) {
// check if the request ID is even valid. this happens after queued
// requests are cleared. unclear if it happens any other time.
if (request_id == 0) {
if (invalid_request_count++ > ife_buf_depth+2) {
LOGE("camera %d reset after half second of invalid requests", cc.camera_num);
clearAndRequeue(request_id_last + 1);
invalid_request_count = 0;
}
// check for dropped requests
if (request_id > request_id_last + 1) {
LOGE("camera %d dropped requests %ld %ld", cc.camera_num, request_id, request_id_last);
enqueue_req_multi(request_id_last + 1 + ife_buf_depth, request_id - (request_id_last + 1));
return false;
}
invalid_request_count = 0;
// check for skips in frame_id or request_id
if (!skip_expected) {
if (frame_id_raw != frame_id_raw_last + 1) {
LOGE("camera %d frame ID skipped, %lu -> %lu", cc.camera_num, frame_id_raw_last, frame_id_raw);
clearAndRequeue(request_id + 1);
return false;
}
// metas
frame_id_raw_last = frame_id_raw;
request_id_last = request_id;
int buf_idx = (request_id - 1) % ife_buf_depth;
uint64_t timestamp = event_data->u.frame_msg.timestamp; // this is timestamped in the kernel's SOF IRQ callback
if (syncFirstFrame(cc.camera_num, request_id, frame_id_raw, timestamp)) {
// wait for this frame's EOF, then queue up the next one
if (enqueue_buffer(buf_idx, request_id + ife_buf_depth)) {
// Frame is ready
// in IFE_PROCESSED mode, we can't know the true EOF, so recover it with sensor readout time
uint64_t timestamp_eof = timestamp + sensor->readout_time_ns;
// Update buffer and frame data
buf.cur_buf_idx = buf_idx;
buf.cur_frame_data = {
.frame_id = (uint32_t)(frame_id_raw - camera_sync_data[cc.camera_num].frame_id_offset),
.request_id = (uint32_t)request_id,
.timestamp_sof = timestamp,
.timestamp_eof = timestamp_eof,
.processing_time = float((nanos_since_boot() - timestamp_eof) * 1e-9)
};
return true;
}
// LOGW("camerad %d synced req %d fid %d, publishing ts %.2f cereal_frame_id %d", cc.camera_num, (int)request_id, (int)frame_id_raw, (double)(timestamp)*1e-6, meta_data.frame_id);
} else {
// Frames not yet synced
enqueue_req_multi(request_id + ife_buf_depth, 1);
// LOGW("camerad %d not synced req %d fid %d", cc.camera_num, (int)request_id, (int)frame_id_raw);
}
} else { // not ready
if (frame_id_raw > frame_id_raw_last + 10) {
LOGE("camera %d reset after half second of no response", cc.camera_num);
clear_req_queue();
enqueue_req_multi(request_id_last + 1, ife_buf_depth);
frame_id_raw_last = frame_id_raw;
skipped_last = true;
if (request_id != request_id_last + 1) {
LOGE("camera %d requests skipped %ld -> %ld", cc.camera_num, request_id_last, request_id);
clearAndRequeue(request_id_last + 1);
return false;
}
}
return true;
}
return false;
void SpectraCamera::clearAndRequeue(uint64_t from_request_id) {
  // Flush every pending request, then refill the whole pipeline with a
  // fresh run of ife_buf_depth requests starting at from_request_id.
  LOGW("clearing and requeuing camera %d from %lu", cc.camera_num, from_request_id);
  clear_req_queue();
  uint64_t next_id = from_request_id;
  for (int n = 0; n < ife_buf_depth; ++n) {
    enqueue_frame(next_id++);
  }
  // request IDs will jump after a flush; suppress the skip checks once
  skip_expected = true;
}
bool SpectraCamera::syncFirstFrame(int camera_id, uint64_t request_id, uint64_t raw_id, uint64_t timestamp) {
if (first_frame_synced) return true;
bool SpectraCamera::waitForFrameReady(uint64_t request_id) {
  // Block until the frame for this request has fully landed in its buffer slot.
  int slot = request_id % ife_buf_depth;
  assert(sync_objs_ife[slot]);

  // wait on one kernel sync object; stress mode shrinks the timeout to force failures
  auto wait_fence = [this](uint32_t sync_obj, int timeout_ms, const char *label) -> bool {
    struct cam_sync_wait w = {};
    w.sync_obj = sync_obj;
    w.timeout_ms = stress_test(label) ? 1 : timeout_ms;
    return do_sync_control(m->cam_sync_fd, CAM_SYNC_WAIT, &w, sizeof(w)) == 0;
  };

  // IFE fence:
  // - RAW_OUTPUT mode: just the sensor frame readout
  // - IFE_PROCESSED mode: readout plus image processing (~1ms)
  if (!wait_fence(sync_objs_ife[slot], 100, "IFE sync")) {
    return false;
  }
  // BPS fence (only present in BPS mode): typically ~7ms
  if (sync_objs_bps[slot] && !wait_fence(sync_objs_bps[slot], 50, "BPS sync")) {
    return false;
  }
  return true;
}
bool SpectraCamera::processFrame(int buf_idx, uint64_t request_id, uint64_t frame_id_raw, uint64_t timestamp) {
if (!syncFirstFrame(cc.camera_num, request_id, frame_id_raw, timestamp)) {
return false;
}
// in IFE_PROCESSED mode, we can't know the true EOF, so recover it with sensor readout time
uint64_t timestamp_eof = timestamp + sensor->readout_time_ns;
// Update buffer and frame data
buf.cur_buf_idx = buf_idx;
buf.cur_frame_data = {
.frame_id = (uint32_t)(frame_id_raw - camera_sync_data[cc.camera_num].frame_id_offset),
.request_id = (uint32_t)request_id,
.timestamp_sof = timestamp,
.timestamp_eof = timestamp_eof,
.processing_time = float((nanos_since_boot() - timestamp_eof) * 1e-9)
};
return true;
}
bool SpectraCamera::syncFirstFrame(int camera_id, uint64_t request_id, uint64_t raw_id, uint64_t timestamp) {
if (first_frame_synced) return true;
// Store the frame data for this camera
camera_sync_data[camera_id] = SyncData{timestamp, raw_id + 1};
@ -1468,7 +1454,7 @@ bool SpectraCamera::syncFirstFrame(int camera_id, uint64_t request_id, uint64_t
for (const auto &[_, sync_data] : camera_sync_data) {
uint64_t diff = std::max(timestamp, sync_data.timestamp) -
std::min(timestamp, sync_data.timestamp);
if (diff > 0.5*1e6) { // within 0.5ms
if (diff > 0.2*1e6) { // milliseconds
all_cams_synced = false;
}
}

@ -22,7 +22,6 @@ const int MIPI_SETTLE_CNT = 33; // Calculated by camera_freqs.py
// For use with the Titan 170 ISP in the SDM845
// https://github.com/commaai/agnos-kernel-sdm845
// CSLDeviceType/CSLPacketOpcodesIFE from camx
// cam_packet_header.op_code = (device << 24) | (opcode);
#define CSLDeviceTypeImageSensor (0x01 << 24)
@ -31,12 +30,6 @@ const int MIPI_SETTLE_CNT = 33; // Calculated by camera_freqs.py
#define OpcodesIFEInitialConfig 0x0
#define OpcodesIFEUpdate 0x1
typedef enum {
ISP_RAW_OUTPUT, // raw frame from sensor
ISP_IFE_PROCESSED, // fully processed image through the IFE
ISP_BPS_PROCESSED, // fully processed image through the BPS
} SpectraOutputType;
std::optional<int32_t> device_acquire(int fd, int32_t session_handle, void *data, uint32_t num_resources=1);
int device_config(int fd, int32_t session_handle, int32_t dev_handle, uint64_t packet_handle);
int device_control(int fd, int op_code, int session_handle, int dev_handle);
@ -117,7 +110,7 @@ public:
class SpectraCamera {
public:
SpectraCamera(SpectraMaster *master, const CameraConfig &config, SpectraOutputType out);
SpectraCamera(SpectraMaster *master, const CameraConfig &config);
~SpectraCamera();
void camera_open(VisionIpcServer *v, cl_device_id device_id, cl_context ctx);
@ -128,8 +121,7 @@ public:
void config_ife(int idx, int request_id, bool init=false);
int clear_req_queue();
bool enqueue_buffer(int i, uint64_t request_id);
void enqueue_req_multi(uint64_t start, int n);
void enqueue_frame(uint64_t request_id);
int sensors_init();
void sensors_start();
@ -190,15 +182,17 @@ public:
int sync_objs_bps[MAX_IFE_BUFS] = {};
uint64_t request_id_last = 0;
uint64_t frame_id_raw_last = 0;
int64_t frame_id_offset = 0;
bool skipped_last = true;
SpectraOutputType output_type;
int invalid_request_count = 0;
bool skip_expected = true;
CameraBuf buf;
SpectraMaster *m;
private:
void clearAndRequeue(uint64_t from_request_id);
bool validateEvent(uint64_t request_id, uint64_t frame_id_raw);
bool waitForFrameReady(uint64_t request_id);
bool processFrame(int buf_idx, uint64_t request_id, uint64_t frame_id_raw, uint64_t timestamp);
static bool syncFirstFrame(int camera_id, uint64_t request_id, uint64_t raw_id, uint64_t timestamp);
struct SyncData {
uint64_t timestamp;
@ -208,11 +202,11 @@ private:
inline static bool first_frame_synced = false;
// a mode for stressing edge cases: realignment, sync failures, etc.
inline bool stress_test(const char* log, float prob=0.01) {
static bool enable = getenv("SPECTRA_STRESS_TEST") != nullptr;
bool triggered = enable && ((static_cast<double>(rand()) / RAND_MAX) < prob);
inline bool stress_test(const char* log) {
  // Injects random failures for stress testing (realignment, sync failures, ...).
  // Probability comes from the environment; <= 0 (the default) disables it.
  static double prob = std::stod(util::getenv("SPECTRA_ERROR_PROB", "-1"));
  bool triggered = (prob > 0) && ((static_cast<double>(rand()) / RAND_MAX) < prob);
  if (triggered) {
    LOGE("stress test (cam %d): %s", cc.camera_num, log);
  }
  return triggered;
}

@ -43,6 +43,9 @@ OS04C10::OS04C10() {
frame_data_type = 0x2c;
mclk_frequency = 24000000; // Hz
// TODO: this was set from logs. actually calculate it out
readout_time_ns = 11000000;
ev_scale = 150.0;
dc_gain_factor = 1;
dc_gain_min_weight = 1; // always on is fine

@ -64,7 +64,35 @@ class TestCamerad:
laggy_frames = {k: v for k, v in diffs.items() if v > 1.1}
assert len(laggy_frames) == 0, f"Frames not synced properly: {laggy_frames=}"
@pytest.mark.skip("TODO: enable this")
def test_stress_test(self, logs):
os.environ['SPECTRA_STRESS_TEST'] = '1'
run_and_log(["camerad", ], CAMERAS, 5)
def test_sanity_checks(self, logs):
self._sanity_checks(logs)
def _sanity_checks(self, ts):
for c in CAMERAS:
assert c in ts
assert len(ts[c]['t']) > 20
# not a valid request id
assert 0 not in ts[c]['requestId']
# should monotonically increase
assert np.all(np.diff(ts[c]['frameId']) >= 1)
assert np.all(np.diff(ts[c]['requestId']) >= 1)
# EOF > SOF
assert np.all((ts[c]['timestampEof'] - ts[c]['timestampSof']) > 0)
# logMonoTime > SOF
assert np.all((ts[c]['t'] - ts[c]['timestampSof']/1e9) > 1e-7)
assert np.all((ts[c]['t'] - ts[c]['timestampEof']/1e9) > 1e-7)
def test_stress_test(self):
os.environ['SPECTRA_ERROR_PROB'] = '0.008'
logs = run_and_log(["camerad", ], CAMERAS, 10)
ts = msgs_to_time_series(logs)
# we should see some jumps from introduced errors
assert np.max([ np.max(np.diff(ts[c]['frameId'])) for c in CAMERAS ]) > 1
assert np.max([ np.max(np.diff(ts[c]['requestId'])) for c in CAMERAS ]) > 1
self._sanity_checks(ts)

@ -22,9 +22,9 @@ def has_preserve_xattr(d: str) -> bool:
return getxattr(os.path.join(Paths.log_root(), d), PRESERVE_ATTR_NAME) == PRESERVE_ATTR_VALUE
def get_preserved_segments(dirs_by_creation: list[str]) -> list[str]:
def get_preserved_segments(dirs_by_creation: list[str]) -> set[str]:
# skip deleting most recent N preserved segments (and their prior segment)
preserved = []
preserved = set()
for n, d in enumerate(filter(has_preserve_xattr, reversed(dirs_by_creation))):
if n == PRESERVE_COUNT:
break
@ -40,7 +40,7 @@ def get_preserved_segments(dirs_by_creation: list[str]) -> list[str]:
# preserve segment and two prior
for _seg_num in range(max(0, seg_num - 2), seg_num + 1):
preserved.append(f"{date_str}--{_seg_num}")
preserved.add(f"{date_str}--{_seg_num}")
return preserved

@ -18,15 +18,21 @@ TEST_CASE("ZstdFileWriter writes and compresses data correctly in loops", "[Zstd
// Step 1: Write compressed data to file in a loop
{
ZstdFileWriter writer(filename, LOG_COMPRESSION_LEVEL);
// Write various data sizes including edge cases
std::vector<size_t> testSizes = {dataSize, 1, 0, dataSize * 2}; // Normal, minimal, empty, large
for (int i = 0; i < iterations; ++i) {
std::string testData = util::random_string(dataSize);
size_t currentSize = testSizes[i % testSizes.size()];
std::string testData = util::random_string(currentSize);
totalTestData.append(testData);
writer.write((void *)testData.c_str(), testData.size());
}
}
// Step 2: Decompress the file and verify the data
auto compressedContent = util::read_file(filename);
REQUIRE(compressedContent.size() > 0);
REQUIRE(compressedContent.size() < totalTestData.size());
std::string decompressedData = zstd_decompress(compressedContent);
// Step 3: Verify that the decompressed data matches the original accumulated data

@ -1,7 +1,7 @@
#!/usr/bin/env expect
spawn adb shell
expect "#"
send "cd usr/comma\r"
send "cd data/openpilot\r"
send "export TERM=xterm-256color\r"
send "su comma\r"
send "clear\r"

@ -24,7 +24,7 @@
<range left="0.000450" top="1.050000" right="2483.624998" bottom="-1.050000"/>
<limitY/>
<curve color="#0097ff" name="/carState/steeringPressed"/>
<curve color="#d62728" name="/carOutput/actuatorsOutput/steer"/>
<curve color="#d62728" name="/carOutput/actuatorsOutput/torque"/>
</plot>
</DockArea>
<DockArea name="...">

@ -24,8 +24,8 @@
<plot flip_x="false" style="Lines" flip_y="false" mode="TimeSeries">
<range top="1.082031" right="269.643117" left="0.000140" bottom="-1.050781"/>
<limitY/>
<curve color="#9467bd" name="/carOutput/actuatorsOutput/steer">
<transform alias="/carOutput/actuatorsOutput/steer[Scale/Offset]" name="Scale/Offset">
<curve color="#9467bd" name="/carOutput/actuatorsOutput/torque">
<transform alias="/carOutput/actuatorsOutput/torque[Scale/Offset]" name="Scale/Offset">
<options value_scale="-1" time_offset="0" value_offset="0"/>
</transform>
</curve>
@ -114,8 +114,8 @@
<plot flip_x="false" style="Lines" flip_y="false" mode="TimeSeries">
<range top="1.082031" right="269.643117" left="0.000140" bottom="-1.050781"/>
<limitY/>
<curve color="#9467bd" name="/carOutput/actuatorsOutput/steer">
<transform alias="/carOutput/actuatorsOutput/steer[Scale/Offset]" name="Scale/Offset">
<curve color="#9467bd" name="/carOutput/actuatorsOutput/torque">
<transform alias="/carOutput/actuatorsOutput/torque[Scale/Offset]" name="Scale/Offset">
<options value_scale="-1.0" time_offset="0" value_offset="0"/>
</transform>
</curve>

@ -8,7 +8,6 @@ import numpy as np
import pygame
import cereal.messaging as messaging
from openpilot.common.numpy_fast import clip
from openpilot.common.basedir import BASEDIR
from openpilot.common.transformations.camera import DEVICE_CAMERAS
from openpilot.tools.replay.lib.ui_helpers import (UP,
@ -152,11 +151,11 @@ def ui_thread(addr):
plot_arr[-1, name_to_arr_idx['angle_steers_k']] = angle_steers_k
plot_arr[-1, name_to_arr_idx['gas']] = sm['carState'].gas
# TODO gas is deprecated
plot_arr[-1, name_to_arr_idx['computer_gas']] = clip(sm['carControl'].actuators.accel/4.0, 0.0, 1.0)
plot_arr[-1, name_to_arr_idx['computer_gas']] = np.clip(sm['carControl'].actuators.accel/4.0, 0.0, 1.0)
plot_arr[-1, name_to_arr_idx['user_brake']] = sm['carState'].brake
plot_arr[-1, name_to_arr_idx['steer_torque']] = sm['carControl'].actuators.torque * ANGLE_SCALE
# TODO brake is deprecated
plot_arr[-1, name_to_arr_idx['computer_brake']] = clip(-sm['carControl'].actuators.accel/4.0, 0.0, 1.0)
plot_arr[-1, name_to_arr_idx['computer_brake']] = np.clip(-sm['carControl'].actuators.accel/4.0, 0.0, 1.0)
plot_arr[-1, name_to_arr_idx['v_ego']] = sm['carState'].vEgo
plot_arr[-1, name_to_arr_idx['v_cruise']] = sm['carState'].cruiseState.speed
plot_arr[-1, name_to_arr_idx['a_ego']] = sm['carState'].aEgo

@ -12,11 +12,11 @@ resolution-markers = [
[[package]]
name = "aiohappyeyeballs"
version = "2.4.8"
version = "2.5.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/de/7c/79a15272e88d2563c9d63599fa59f05778975f35b255bf8f90c8b12b4ada/aiohappyeyeballs-2.4.8.tar.gz", hash = "sha256:19728772cb12263077982d2f55453babd8bec6a052a926cd5c0c42796da8bf62", size = 22337 }
sdist = { url = "https://files.pythonhosted.org/packages/a2/0c/458958007041f4b4de2d307e6b75d9e7554dad0baf26fe7a48b741aac126/aiohappyeyeballs-2.5.0.tar.gz", hash = "sha256:18fde6204a76deeabc97c48bdd01d5801cfda5d6b9c8bbeb1aaaee9d648ca191", size = 22494 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/52/0e/b187e2bb3eeb2644515109657c4474d65a84e7123de249bf1e8467d04a65/aiohappyeyeballs-2.4.8-py3-none-any.whl", hash = "sha256:6cac4f5dd6e34a9644e69cf9021ef679e4394f54e58a183056d12009e42ea9e3", size = 15005 },
{ url = "https://files.pythonhosted.org/packages/1b/9a/e4886864ce06e1579bd428208127fbdc0d62049c751e4e9e3b509c0059dc/aiohappyeyeballs-2.5.0-py3-none-any.whl", hash = "sha256:0850b580748c7071db98bffff6d4c94028d0d3035acc20fd721a0ce7e8cac35d", size = 15128 },
]
[[package]]
@ -628,7 +628,7 @@ wheels = [
[[package]]
name = "gymnasium"
version = "1.1.0"
version = "1.1.1"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "cloudpickle" },
@ -636,9 +636,9 @@ dependencies = [
{ name = "numpy" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/3c/74/db400493fbb86ff6b20ab46e798ac79a366a7db31ece785499e5e5bfd873/gymnasium-1.1.0.tar.gz", hash = "sha256:dedb5c8c83047d3927ef8b841fb4ebadaeaa43ab954e2e3aca7eadcf4226c5f2", size = 827655 }
sdist = { url = "https://files.pythonhosted.org/packages/90/69/70cd29e9fc4953d013b15981ee71d4c9ef4d8b2183e6ef2fe89756746dce/gymnasium-1.1.1.tar.gz", hash = "sha256:8bd9ea9bdef32c950a444ff36afc785e1d81051ec32d30435058953c20d2456d", size = 829326 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c1/2a/f931b9b7515c16e5285ecb0e6c2d773266a4f781332f24456e7c0d516c8d/gymnasium-1.1.0-py3-none-any.whl", hash = "sha256:8632bbb860512e565a46cd30fa8325ce8fbdac09b9c4f04171a772d6e95b232b", size = 965463 },
{ url = "https://files.pythonhosted.org/packages/f9/68/2bdc7b46b5f543dd865575f9d19716866bdb76e50dd33b71ed1a3dd8bb42/gymnasium-1.1.1-py3-none-any.whl", hash = "sha256:9c167ec0a2b388666e37f63b2849cd2552f7f5b71938574c637bb36487eb928a", size = 965410 },
]
[[package]]
@ -701,14 +701,14 @@ wheels = [
[[package]]
name = "jinja2"
version = "3.1.5"
version = "3.1.6"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "markupsafe" },
]
sdist = { url = "https://files.pythonhosted.org/packages/af/92/b3130cbbf5591acf9ade8708c365f3238046ac7cb8ccba6e81abccb0ccff/jinja2-3.1.5.tar.gz", hash = "sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb", size = 244674 }
sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/bd/0f/2ba5fbcd631e3e88689309dbe978c5769e883e4b84ebfe7da30b43275c5a/jinja2-3.1.5-py3-none-any.whl", hash = "sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb", size = 134596 },
{ url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899 },
]
[[package]]
@ -4919,14 +4919,14 @@ wheels = [
[[package]]
name = "types-requests"
version = "2.32.0.20250301"
version = "2.32.0.20250306"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "urllib3" },
]
sdist = { url = "https://files.pythonhosted.org/packages/87/88/365d6b46f1088ddeccbc89c26190c3180088ef6e7c8d162fc619496aab96/types_requests-2.32.0.20250301.tar.gz", hash = "sha256:3d909dc4eaab159c0d964ebe8bfa326a7afb4578d8706408d417e17d61b0c500", size = 22977 }
sdist = { url = "https://files.pythonhosted.org/packages/09/1a/beaeff79ef9efd186566ba5f0d95b44ae21f6d31e9413bcfbef3489b6ae3/types_requests-2.32.0.20250306.tar.gz", hash = "sha256:0962352694ec5b2f95fda877ee60a159abdf84a0fc6fdace599f20acb41a03d1", size = 23012 }
wheels = [
{ url = "https://files.pythonhosted.org/packages/b9/c2/e44564e8995dbc1738c2acacb8009d59c8cb19327da95a1b5c5d9cb68364/types_requests-2.32.0.20250301-py3-none-any.whl", hash = "sha256:0003e0124e2cbefefb88222ff822b48616af40c74df83350f599a650c8de483b", size = 20671 },
{ url = "https://files.pythonhosted.org/packages/99/26/645d89f56004aa0ba3b96fec27793e3c7e62b40982ee069e52568922b6db/types_requests-2.32.0.20250306-py3-none-any.whl", hash = "sha256:25f2cbb5c8710b2022f8bbee7b2b66f319ef14aeea2f35d80f18c9dbf3b60a0b", size = 20673 },
]
[[package]]

Loading…
Cancel
Save