Merge branch 'master' into mqb-fpv2

pull/20271/head
Jason Young 5 years ago committed by GitHub
commit ff64620371
41 changed files (changed-line counts in parentheses):

1. .github/workflows/sim_tests.yaml (26)
2. .github/workflows/tools_tests.yaml (26)
3. Dockerfile.openpilot_base (1)
4. opendbc (2)
5. release/files_common (2)
6. selfdrive/boardd/boardd.cc (59)
7. selfdrive/camerad/cameras/camera_common.cc (13)
8. selfdrive/camerad/cameras/camera_qcom.cc (79)
9. selfdrive/camerad/cameras/camera_qcom.h (6)
10. selfdrive/car/honda/values.py (5)
11. selfdrive/car/volkswagen/carstate.py (34)
12. selfdrive/car/volkswagen/values.py (4)
13. selfdrive/common/SConscript (9)
14. selfdrive/common/modeldata.h (14)
15. selfdrive/common/util.cc (12)
16. selfdrive/common/util.h (10)
17. selfdrive/common/watchdog.cc (17)
18. selfdrive/common/watchdog.h (3)
19. selfdrive/locationd/ubloxd_main.cc (12)
20. selfdrive/logcatd/tests/test_logcatd_android.py (13)
21. selfdrive/loggerd/raw_logger.cc (20)
22. selfdrive/loggerd/raw_logger.h (4)
23. selfdrive/manager/manager.py (4)
24. selfdrive/manager/process.py (60)
25. selfdrive/manager/process_config.py (35)
26. selfdrive/modeld/dmonitoringmodeld.cc (65)
27. selfdrive/modeld/modeld.cc (190)
28. selfdrive/modeld/runners/onnx_runner.py (6)
29. selfdrive/thermald/thermald.py (3)
30. selfdrive/ui/android/ui.cc (2)
31. selfdrive/ui/qt/home.cc (2)
32. selfdrive/ui/qt/qt_sound.cc (2)
33. selfdrive/ui/ui.cc (27)
34. selfdrive/updated.py (2)
35. tools/plotjuggler/juggle.py (2)
36. tools/scripts/save_ubloxraw_stream.py (0)
37. tools/sim/Dockerfile.sim (37)
38. tools/sim/bridge.py (18)
39. tools/sim/install_carla.sh (7)
40. tools/sim/start_carla.sh (26)
41. tools/sim/start_openpilot_docker.sh (6)

@ -1,26 +0,0 @@
name: simulator
on:
schedule:
- cron: '0 * * * *'
jobs:
docker_build:
name: build container
runs-on: ubuntu-20.04
timeout-minutes: 50
if: github.repository == 'commaai/openpilot'
steps:
- uses: actions/checkout@v2
with:
submodules: true
lfs: true
- name: Docker build
run: |
docker pull commaai/openpilot-sim:latest || true
tools/sim/build_container.sh
- name: Push to dockerhub
if: github.ref == 'refs/heads/master'
run: |
docker login -u wmelching -p ${{ secrets.COMMA_DOCKERHUB_TOKEN }}
docker tag commaai/openpilot-sim docker.io/commaai/openpilot-sim:latest
docker push docker.io/commaai/openpilot-sim:latest

@ -6,6 +6,7 @@ on:
env:
BASE_IMAGE: openpilot-base
DOCKER_REGISTRY: ghcr.io/commaai
DOCKER_LOGIN: docker login ghcr.io -u adeebshihadeh -p ${{ secrets.CONTAINER_TOKEN }}
BUILD: |
docker pull $(grep -iohP '(?<=^from)\s+\S+' Dockerfile.openpilot_base) || true
@ -23,7 +24,7 @@ jobs:
- uses: actions/checkout@v2
with:
submodules: true
- name: Build docker image
- name: Build Docker image
run: eval "$BUILD"
- name: Unit test
run: |
@ -32,3 +33,26 @@ jobs:
apt-get install -y libdw-dev libqt5svg5-dev libqt5x11extras5-dev && \
cd /tmp/openpilot/tools/plotjuggler && \
./test_plotjuggler.py"
simulator:
name: simulator
runs-on: ubuntu-20.04
timeout-minutes: 50
env:
IMAGE_NAME: openpilot-sim
if: github.repository == 'commaai/openpilot'
steps:
- uses: actions/checkout@v2
with:
submodules: true
lfs: true
- name: Build Docker image
run: |
eval "$BUILD"
docker pull $DOCKER_REGISTRY/$IMAGE_NAME:latest || true
docker build --cache-from $DOCKER_REGISTRY/$IMAGE_NAME:latest -t $DOCKER_REGISTRY/$IMAGE_NAME:latest -f tools/sim/Dockerfile.sim .
- name: Push to container registry
if: github.ref == 'refs/heads/master' && github.repository == 'commaai/openpilot'
run: |
$DOCKER_LOGIN
docker push $DOCKER_REGISTRY/$IMAGE_NAME:latest

@ -61,5 +61,4 @@ RUN pyenv install 3.8.2 && \
pip install --no-cache-dir pipenv==2020.8.13 && \
cd /tmp && \
pipenv install --system --deploy --dev --clear && \
pip uninstall torch tensorflow -y && \
pip uninstall -y pipenv

@ -1 +1 @@
Subproject commit 5a92a64b4fd4d51cade6b756a700f54017d9190c
Subproject commit f13370845507528dfb728246dd9fef348f766618

@ -196,6 +196,8 @@ selfdrive/common/clutil.cc
selfdrive/common/clutil.h
selfdrive/common/params.h
selfdrive/common/params.cc
selfdrive/common/watchdog.cc
selfdrive/common/watchdog.h
selfdrive/common/modeldata.h
selfdrive/common/mat.h

@ -15,6 +15,7 @@
#include <bitset>
#include <thread>
#include <atomic>
#include <unordered_map>
#include <libusb-1.0/libusb.h>
@ -25,6 +26,7 @@
#include "common/swaglog.h"
#include "common/timing.h"
#include "messaging.hpp"
#include "locationd/ublox_msg.h"
#include "panda.h"
#include "pigeon.h"
@ -199,6 +201,7 @@ void can_recv(PubMaster &pm) {
void can_send_thread(bool fake_send) {
LOGD("start send thread");
kj::Array<capnp::word> buf = kj::heapArray<capnp::word>(1024);
Context * context = Context::create();
SubSocket * subscriber = SubSocket::create(context, "sendcan");
assert(subscriber != NULL);
@ -214,11 +217,13 @@ void can_send_thread(bool fake_send) {
}
continue;
}
const size_t size = (msg->getSize() / sizeof(capnp::word)) + 1;
if (buf.size() < size) {
buf = kj::heapArray<capnp::word>(size);
}
memcpy(buf.begin(), msg->getData(), msg->getSize());
auto amsg = kj::heapArray<capnp::word>((msg->getSize() / sizeof(capnp::word)) + 1);
memcpy(amsg.begin(), msg->getData(), msg->getSize());
capnp::FlatArrayMessageReader cmsg(amsg);
capnp::FlatArrayMessageReader cmsg(buf.slice(0, size));
cereal::Event::Reader event = cmsg.getRoot<cereal::Event>();
// Don't send if older than 1 second
@ -479,24 +484,56 @@ void pigeon_thread() {
Pigeon * pigeon = Pigeon::connect(panda);
#endif
std::unordered_map<char, uint64_t> last_recv_time;
std::unordered_map<char, int64_t> cls_max_dt = {
{(char)ublox::CLASS_NAV, int64_t(500000000ULL)}, // 0.5s
{(char)ublox::CLASS_RXM, int64_t(1000000000ULL)}, // 1.0s
};
while (!do_exit && panda->connected) {
bool need_reset = false;
std::string recv = pigeon->receive();
if (recv.length() > 0) {
if (recv[0] == (char)0x00){
if (ignition) {
LOGW("received invalid ublox message while onroad, resetting panda GPS");
need_reset = true;
// Parse message header
if (ignition && recv.length() >= 3) {
if (recv[0] == (char)ublox::PREAMBLE1 && recv[1] == (char)ublox::PREAMBLE2){
const char msg_cls = recv[2];
uint64_t t = nanos_since_boot();
if (t > last_recv_time[msg_cls]){
last_recv_time[msg_cls] = t;
}
} else {
pigeon_publish_raw(pm, recv);
}
}
// Check based on message frequency
for (const auto& [msg_cls, max_dt] : cls_max_dt) {
int64_t dt = (int64_t)nanos_since_boot() - (int64_t)last_recv_time[msg_cls];
if (ignition_last && ignition && dt > max_dt) {
LOGE("ublox receive timeout, msg class: 0x%02x, dt %llu, resetting panda GPS", msg_cls, dt);
need_reset = true;
}
}
// Check based on null bytes
if (ignition && recv.length() > 0 && recv[0] == (char)0x00){
need_reset = true;
LOGW("received invalid ublox message while onroad, resetting panda GPS");
}
if (recv.length() > 0){
pigeon_publish_raw(pm, recv);
}
// init pigeon on rising ignition edge
// since it was turned off in low power mode
if((ignition && !ignition_last) || need_reset) {
pigeon->init();
// Set receive times to current time
uint64_t t = nanos_since_boot() + 10000000000ULL; // Give ublox 10 seconds to start
for (const auto& [msg_cls, dt] : cls_max_dt) {
last_recv_time[msg_cls] = t;
}
}
ignition_last = ignition;
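
For context, here is a minimal Python sketch of the per-message-class receive timeout the new pigeon_thread logic adds. The class-to-timeout table and the 10-second grace period mirror the diff; nanos_since_boot is stood in for by time.monotonic_ns, NAV=0x01/RXM=0x02 are the standard UBX class IDs, and note_message/needs_reset are illustrative helper names, not boardd's API (the ignition gating from the diff is omitted for brevity).

```python
import time

# UBX sync bytes and the message classes boardd now tracks (timeouts from the diff)
PREAMBLE1, PREAMBLE2 = 0xB5, 0x62
CLS_MAX_DT_NS = {0x01: 500_000_000, 0x02: 1_000_000_000}  # NAV: 0.5s, RXM: 1.0s

def nanos_since_boot() -> int:
    return time.monotonic_ns()

# on init/reset, give the ublox 10 seconds to start before the timeout can fire
last_recv_time = {cls: nanos_since_boot() + 10_000_000_000 for cls in CLS_MAX_DT_NS}

def note_message(recv: bytes) -> None:
    # Record the receive time for a recognised UBX frame's class byte.
    if len(recv) >= 3 and recv[0] == PREAMBLE1 and recv[1] == PREAMBLE2:
        cls = recv[2]
        if cls in last_recv_time:
            last_recv_time[cls] = max(last_recv_time[cls], nanos_since_boot())

def needs_reset() -> bool:
    # Reset the GPS if any tracked class has been silent longer than its budget.
    now = nanos_since_boot()
    return any(now - last_recv_time[cls] > max_dt for cls, max_dt in CLS_MAX_DT_NS.items())
```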

@ -22,6 +22,7 @@
#include "common/params.h"
#include "common/swaglog.h"
#include "common/util.h"
#include "modeldata.h"
#include "imgproc/utils.h"
static cl_program build_debayer_program(cl_device_id device_id, cl_context context, const CameraInfo *ci, const CameraBuf *b, const CameraState *s) {
@ -69,16 +70,8 @@ void CameraBuf::init(cl_device_id device_id, cl_context context, CameraState *s,
rgb_width = ci->frame_width / 2;
rgb_height = ci->frame_height / 2;
}
float db_s = 0.5;
#else
float db_s = 1.0;
#endif
const mat3 transform = (mat3){{
1.0, 0.0, 0.0,
0.0, 1.0, 0.0,
0.0, 0.0, 1.0
}};
yuv_transform = ci->bayer ? transform_scale_buffer(transform, db_s) : transform;
yuv_transform = get_model_yuv_transform(ci->bayer);
vipc_server->create_buffers(rgb_type, UI_BUF_COUNT, true, rgb_width, rgb_height);
rgb_stride = vipc_server->get_buffer(rgb_type)->stride;
@ -325,7 +318,7 @@ void set_exposure_target(CameraState *c, int x_start, int x_end, int x_skip, int
break;
}
}
lum_med = lum_med_alt>0 ? lum_med + lum_med*lum_cur*(lum_med_alt - lum_med)/lum_total/32:lum_med;
lum_med = lum_med_alt>0 ? lum_med + lum_med/32*lum_cur*(lum_med_alt - lum_med)/lum_total:lum_med;
camera_autoexposure(c, lum_med / 256.0);
}

@ -117,12 +117,7 @@ static int imx298_apply_exposure(CameraState *s, int gain, int integ_lines, int
// REG_HOLD
{0x104,0x0,0},
};
int err = sensor_write_regs(s, reg_array, ARRAYSIZE(reg_array), MSM_CAMERA_I2C_BYTE_DATA);
if (err != 0) {
LOGE("apply_exposure err %d", err);
}
return err;
return sensor_write_regs(s, reg_array, ARRAYSIZE(reg_array), MSM_CAMERA_I2C_BYTE_DATA);
}
static int ov8865_apply_exposure(CameraState *s, int gain, int integ_lines, int frame_length) {
@ -157,16 +152,12 @@ static int ov8865_apply_exposure(CameraState *s, int gain, int integ_lines, int
//{0x104,0x0,0},
};
int err = sensor_write_regs(s, reg_array, ARRAYSIZE(reg_array), MSM_CAMERA_I2C_BYTE_DATA);
if (err != 0) {
LOGE("apply_exposure err %d", err);
}
return err;
return sensor_write_regs(s, reg_array, ARRAYSIZE(reg_array), MSM_CAMERA_I2C_BYTE_DATA);
}
static void camera_init(VisionIpcServer *v, CameraState *s, int camera_id, int camera_num,
uint32_t pixel_clock, uint32_t line_length_pclk,
unsigned int max_gain, unsigned int fps, cl_device_id device_id, cl_context ctx,
uint32_t max_gain, uint32_t fps, cl_device_id device_id, cl_context ctx,
VisionStreamType rgb_type, VisionStreamType yuv_type) {
s->camera_num = camera_num;
s->camera_id = camera_id;
@ -235,10 +226,10 @@ void cameras_init(VisionIpcServer *v, MultiCameraState *s, cl_device_id device_i
static void set_exposure(CameraState *s, float exposure_frac, float gain_frac) {
int err = 0;
unsigned int frame_length = s->pixel_clock / s->line_length_pclk / s->fps;
uint32_t frame_length = s->pixel_clock / s->line_length_pclk / s->fps;
unsigned int gain = s->cur_gain;
unsigned int integ_lines = s->cur_integ_lines;
uint32_t gain = s->cur_gain;
uint32_t integ_lines = s->cur_integ_lines;
if (exposure_frac >= 0) {
exposure_frac = std::clamp(exposure_frac, 2.0f / frame_length, 1.0f);
@ -275,6 +266,8 @@ static void set_exposure(CameraState *s, float exposure_frac, float gain_frac) {
s->cur_gain = gain;
s->cur_integ_lines = integ_lines;
s->cur_frame_length = frame_length;
} else {
LOGE("camera %d apply_exposure err: %d", s->camera_num, err);
}
}
@ -294,9 +287,9 @@ static void do_autoexposure(CameraState *s, float grey_frac) {
const float gain_frac_min = 0.015625;
const float gain_frac_max = 1.0;
// exposure time limits
unsigned int frame_length = s->pixel_clock / s->line_length_pclk / s->fps;
const unsigned int exposure_time_min = 16;
const unsigned int exposure_time_max = frame_length - 11; // copied from set_exposure()
uint32_t frame_length = s->pixel_clock / s->line_length_pclk / s->fps;
const uint32_t exposure_time_min = 16;
const uint32_t exposure_time_max = frame_length - 11; // copied from set_exposure()
float cur_gain_frac = s->cur_gain_frac;
float exposure_factor = pow(1.05, (target_grey - grey_frac) / 0.05);
@ -328,28 +321,6 @@ static void do_autoexposure(CameraState *s, float grey_frac) {
}
}
static uint8_t* get_eeprom(int eeprom_fd, size_t *out_len) {
msm_eeprom_cfg_data cfg = {.cfgtype = CFG_EEPROM_GET_CAL_DATA};
int err = ioctl(eeprom_fd, VIDIOC_MSM_EEPROM_CFG, &cfg);
assert(err >= 0);
uint32_t num_bytes = cfg.cfg.get_data.num_bytes;
assert(num_bytes > 100);
uint8_t* buffer = (uint8_t*)malloc(num_bytes);
assert(buffer);
memset(buffer, 0, num_bytes);
cfg.cfgtype = CFG_EEPROM_READ_CAL_DATA;
cfg.cfg.read_data.num_bytes = num_bytes;
cfg.cfg.read_data.dbuffer = buffer;
err = ioctl(eeprom_fd, VIDIOC_MSM_EEPROM_CFG, &cfg);
assert(err >= 0);
*out_len = num_bytes;
return buffer;
}
static void sensors_init(MultiCameraState *s) {
unique_fd sensorinit_fd;
sensorinit_fd = open("/dev/v4l-subdev11", O_RDWR | O_NONBLOCK);
@ -461,9 +432,6 @@ static void camera_open(CameraState *s, bool is_road_cam) {
sensor_dev = "/dev/v4l-subdev17";
s->isp_fd = open("/dev/v4l-subdev13", O_RDWR | O_NONBLOCK);
assert(s->isp_fd >= 0);
s->eeprom_fd = open("/dev/v4l-subdev8", O_RDWR | O_NONBLOCK);
assert(s->eeprom_fd >= 0);
s->actuator_fd = open("/dev/v4l-subdev7", O_RDWR | O_NONBLOCK);
assert(s->actuator_fd >= 0);
} else {
@ -474,8 +442,6 @@ static void camera_open(CameraState *s, bool is_road_cam) {
sensor_dev = "/dev/v4l-subdev18";
s->isp_fd = open("/dev/v4l-subdev14", O_RDWR | O_NONBLOCK);
assert(s->isp_fd >= 0);
s->eeprom_fd = open("/dev/v4l-subdev9", O_RDWR | O_NONBLOCK);
assert(s->eeprom_fd >= 0);
}
// wait for sensor device
@ -537,14 +503,6 @@ static void camera_open(CameraState *s, bool is_road_cam) {
// **** GO GO GO ****
LOG("******************** GO GO GO ************************");
s->eeprom = get_eeprom(s->eeprom_fd, &s->eeprom_size);
// printf("eeprom:\n");
// for (int i=0; i<s->eeprom_size; i++) {
// printf("%02x", s->eeprom[i]);
// }
// printf("\n");
// CSID: init csid
csid_cfg_data.cfgtype = CSID_INIT;
err = ioctl(s->csid_fd, VIDIOC_MSM_CSID_IO_CFG, &csid_cfg_data);
@ -856,7 +814,6 @@ static void road_camera_start(CameraState *s) {
s->cur_step_pos = inf_step;
actuator_move(s, s->cur_lens_pos);
LOG("init lens pos: %d", s->cur_lens_pos);
}
@ -870,8 +827,8 @@ void actuator_move(CameraState *s, uint16_t target) {
struct msm_actuator_cfg_data actuator_cfg_data = {0};
actuator_cfg_data.cfgtype = CFG_MOVE_FOCUS;
actuator_cfg_data.cfg.move = (struct msm_actuator_move_params_t){
.dir = (int8_t)((step > 0) ? 0 : 1),
.sign_dir = (int8_t)((step > 0) ? 1 : -1),
.dir = (int8_t)((step > 0) ? MOVE_NEAR : MOVE_FAR),
.sign_dir = (int8_t)((step > 0) ? MSM_ACTUATOR_MOVE_SIGNED_NEAR : MSM_ACTUATOR_MOVE_SIGNED_FAR),
.dest_step_pos = (int16_t)dest_step_pos,
.num_steps = abs(step),
.curr_lens_pos = s->cur_lens_pos,
@ -1083,11 +1040,9 @@ static void camera_close(CameraState *s) {
LOG("isp release stream: %d", err);
}
}
free(s->eeprom);
}
const char* get_isp_event_name(unsigned int type) {
const char* get_isp_event_name(uint32_t type) {
switch (type) {
case ISP_EVENT_REG_UPDATE: return "ISP_EVENT_REG_UPDATE";
case ISP_EVENT_EPOCH_0: return "ISP_EVENT_EPOCH_0";
@ -1255,9 +1210,9 @@ void cameras_run(MultiCameraState *s) {
c->frame_metadata[c->frame_metadata_idx] = (FrameMetadata){
.frame_id = isp_event_data->frame_id,
.timestamp_eof = timestamp,
.frame_length = (unsigned int)c->cur_frame_length,
.integ_lines = (unsigned int)c->cur_integ_lines,
.global_gain = (unsigned int)c->cur_gain,
.frame_length = (uint32_t)c->cur_frame_length,
.integ_lines = (uint32_t)c->cur_integ_lines,
.global_gain = (uint32_t)c->cur_gain,
.lens_pos = c->cur_lens_pos,
.lens_sag = c->last_sag_acc_z,
.lens_err = c->focus_err,

@ -67,7 +67,7 @@ typedef struct CameraState {
// exposure
uint32_t pixel_clock, line_length_pclk;
unsigned int max_gain;
uint32_t max_gain;
float cur_exposure_frac, cur_gain_frac;
int cur_gain, cur_integ_lines;
int cur_frame_length;
@ -75,7 +75,7 @@ typedef struct CameraState {
camera_apply_exposure_func apply_exposure;
// rear camera only,used for focusing
unique_fd actuator_fd, eeprom_fd;
unique_fd actuator_fd;
std::atomic<float> focus_err;
std::atomic<float> last_sag_acc_z;
std::atomic<float> lens_true_pos;
@ -85,8 +85,6 @@ typedef struct CameraState {
int16_t focus[NUM_FOCUS];
uint8_t confidence[NUM_FOCUS];
uint16_t infinity_dac;
size_t eeprom_size;
uint8_t *eeprom;
} CameraState;

@ -930,6 +930,7 @@ FW_VERSIONS = {
},
CAR.ACURA_RDX_3G: {
(Ecu.programmedFuelInjection, 0x18da10f1, None): [
b'37805-5YF-C210\x00\x00',
b'37805-5YF-A230\x00\x00',
b'37805-5YF-A420\x00\x00',
b'37805-5YF-A430\x00\x00',
@ -951,11 +952,13 @@ FW_VERSIONS = {
b'28102-5YK-A711\x00\x00',
],
(Ecu.combinationMeter, 0x18da60f1, None): [
b'78109-TJB-AW10\x00\x00',
b'78109-TJB-AB10\x00\x00',
b'78109-TJB-AD10\x00\x00',
b'78109-TJB-AF10\x00\x00',
],
(Ecu.srs, 0x18da53f1, None): [
b'77959-TJB-A210\x00\x00',
b'77959-TJB-A040\x00\x00',
],
(Ecu.electricBrakeBooster, 0x18da2bf1, None): [
@ -963,9 +966,11 @@ FW_VERSIONS = {
b'46114-TJB-A060\x00\x00',
],
(Ecu.gateway, 0x18daeff1, None): [
b'38897-TJB-A120\x00\x00',
b'38897-TJB-A110\x00\x00',
],
(Ecu.eps, 0x18da30f1, None): [
b'39990-TJB-A040\x00\x00',
b'39990-TJB-A030\x00\x00',
],
},

@ -4,16 +4,19 @@ from selfdrive.config import Conversions as CV
from selfdrive.car.interfaces import CarStateBase
from opendbc.can.parser import CANParser
from opendbc.can.can_define import CANDefine
from selfdrive.car.volkswagen.values import DBC, CANBUS, BUTTON_STATES, CarControllerParams
from selfdrive.car.volkswagen.values import DBC, CANBUS, TransmissionType, GearShifter, BUTTON_STATES, CarControllerParams
class CarState(CarStateBase):
def __init__(self, CP):
super().__init__(CP)
can_define = CANDefine(DBC[CP.carFingerprint]['pt'])
self.shifter_values = can_define.dv["Getriebe_11"]['GE_Fahrstufe']
if CP.transmissionType == TransmissionType.automatic:
self.shifter_values = can_define.dv["Getriebe_11"]['GE_Fahrstufe']
elif CP.transmissionType == TransmissionType.direct:
self.shifter_values = can_define.dv["EV_Gearshift"]['GearPosition']
self.buttonStates = BUTTON_STATES.copy()
def update(self, pt_cp):
def update(self, pt_cp, trans_type):
ret = car.CarState.new_message()
# Update vehicle speed and acceleration from ABS wheel speeds.
ret.wheelSpeeds.fl = pt_cp.vl["ESP_19"]['ESP_VL_Radgeschw_02'] * CV.KPH_TO_MS
@ -42,8 +45,16 @@ class CarState(CarStateBase):
ret.brakeLights = bool(pt_cp.vl["ESP_05"]['ESP_Status_Bremsdruck'])
# Update gear and/or clutch position data.
can_gear_shifter = int(pt_cp.vl["Getriebe_11"]['GE_Fahrstufe'])
ret.gearShifter = self.parse_gear_shifter(self.shifter_values.get(can_gear_shifter, None))
if trans_type == TransmissionType.automatic:
ret.gearShifter = self.parse_gear_shifter(self.shifter_values.get(pt_cp.vl["Getriebe_11"]['GE_Fahrstufe'], None))
elif trans_type == TransmissionType.direct:
ret.gearShifter = self.parse_gear_shifter(self.shifter_values.get(pt_cp.vl["EV_Gearshift"]['GearPosition'], None))
elif trans_type == TransmissionType.manual:
ret.clutchPressed = not pt_cp.vl["Motor_14"]['MO_Kuppl_schalter']
if bool(pt_cp.vl["Gateway_72"]['BCM1_Rueckfahrlicht_Schalter']):
ret.gearShifter = GearShifter.reverse
else:
ret.gearShifter = GearShifter.drive
# Update door and trunk/hatch lid open status.
ret.doorOpen = any([pt_cp.vl["Gateway_72"]['ZV_FT_offen'],
@ -154,7 +165,6 @@ class CarState(CarStateBase):
("ZV_HD_offen", "Gateway_72", 0), # Trunk or hatch open
("BH_Blinker_li", "Gateway_72", 0), # Left turn signal on
("BH_Blinker_re", "Gateway_72", 0), # Right turn signal on
("GE_Fahrstufe", "Getriebe_11", 0), # Auto trans gear selector position
("AB_Gurtschloss_FA", "Airbag_02", 0), # Seatbelt status, driver
("AB_Gurtschloss_BF", "Airbag_02", 0), # Seatbelt status, passenger
("ESP_Fahrer_bremst", "ESP_05", 0), # Brake pedal pressed
@ -206,7 +216,6 @@ class CarState(CarStateBase):
("TSK_06", 50), # From J623 Engine control module
("GRA_ACC_01", 33), # From J??? steering wheel control buttons
("ACC_02", 17), # From J428 ACC radar control module
("Getriebe_11", 20), # From J743 Auto transmission control module
("Gateway_72", 10), # From J533 CAN gateway (aggregated data)
("Motor_14", 10), # From J623 Engine control module
("Airbag_02", 5), # From J234 Airbag control module
@ -215,6 +224,17 @@ class CarState(CarStateBase):
("Einheiten_01", 1), # From J??? not known if gateway, cluster, or BCM
]
if CP.transmissionType == TransmissionType.automatic:
signals += [("GE_Fahrstufe", "Getriebe_11", 0)] # Auto trans gear selector position
checks += [("Getriebe_11", 20)] # From J743 Auto transmission control module
elif CP.transmissionType == TransmissionType.direct:
signals += [("GearPosition", "EV_Gearshift", 0)] # EV gear selector position
checks += [("EV_Gearshift", 10)] # From J??? unknown EV control module
elif CP.transmissionType == TransmissionType.manual:
signals += [("MO_Kuppl_schalter", "Motor_14", 0), # Clutch switch
("BCM1_Rueckfahrlicht_Schalter", "Gateway_72", 0)] # Reverse light from BCM
checks += [("Motor_14", 10)] # From J623 Engine control module
return CANParser(DBC[CP.carFingerprint]['pt'], signals, checks, CANBUS.pt)
# A single signal is monitored from the camera CAN bus, and then ignored,

@ -1,5 +1,6 @@
# flake8: noqa
from cereal import car
from selfdrive.car import dbc_dict
from cereal import car
Ecu = car.CarParams.Ecu
@ -26,6 +27,9 @@ class CANBUS:
pt = 0
cam = 2
TransmissionType = car.CarParams.TransmissionType
GearShifter = car.CarState.GearShifter
BUTTON_STATES = {
"accelCruise": False,
"decelCruise": False,

@ -5,7 +5,14 @@ if SHARED:
else:
fxn = env.Library
common_libs = ['params.cc', 'swaglog.cc', 'util.cc', 'gpio.cc', 'i2c.cc']
common_libs = [
'params.cc',
'swaglog.cc',
'util.cc',
'gpio.cc',
'i2c.cc',
'watchdog.cc',
]
_common = fxn('common', common_libs, LIBS="json11")

@ -34,5 +34,19 @@ const mat3 fcam_intrinsic_matrix = (mat3){{
}};
#endif
static inline mat3 get_model_yuv_transform(bool bayer = true) {
#ifndef QCOM2
float db_s = 0.5; // debayering does a 2x downscale
#else
float db_s = 1.0;
#endif
const mat3 transform = (mat3){{
1.0, 0.0, 0.0,
0.0, 1.0, 0.0,
0.0, 0.0, 1.0
}};
return bayer ? transform_scale_buffer(transform, db_s) : transform;
}
#endif

@ -1,11 +1,7 @@
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <assert.h>
#include <unistd.h>
#include <fcntl.h>
#include <errno.h>
#include "common/util.h"
#ifdef __linux__
#include <sys/prctl.h>
#include <sys/syscall.h>
@ -45,8 +41,8 @@ void* read_file(const char* path, size_t* out_len) {
return buf;
}
int write_file(const char* path, const void* data, size_t size) {
int fd = open(path, O_WRONLY);
int write_file(const char* path, const void* data, size_t size, int flags, mode_t mode) {
int fd = open(path, flags, mode);
if (fd == -1) {
return -1;
}

@ -1,8 +1,9 @@
#pragma once
#include <stdio.h>
#include <unistd.h>
#include <cstdio>
#include <csignal>
#include <cstring>
#include <cstdlib>
#include <string>
#include <memory>
#include <atomic>
@ -10,6 +11,9 @@
#include <fstream>
#include <thread>
#include <chrono>
#include <cassert>
#include <unistd.h>
#include <fcntl.h>
#ifndef sighandler_t
typedef void (*sighandler_t)(int sig);
@ -25,7 +29,7 @@ typedef void (*sighandler_t)(int sig);
// Returns NULL on failure, otherwise the NULL-terminated file contents.
// The result must be freed by the caller.
void* read_file(const char* path, size_t* out_len);
int write_file(const char* path, const void* data, size_t size);
int write_file(const char* path, const void* data, size_t size, int flags=O_WRONLY, mode_t mode=0777);
void set_thread_name(const char* name);
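
A rough Python analogue of the extended write_file() signature above, to show why the flags/mode parameters were added: the new watchdog passes O_WRONLY | O_CREAT so its kick file is created on first write. Names and return semantics follow the diff (0 on success, -1 on failure); this is a sketch, not the C implementation.

```python
import os

def write_file(path: str, data: bytes, flags: int = os.O_WRONLY, mode: int = 0o777) -> int:
    # Open with caller-supplied flags/mode; default mirrors the old O_WRONLY-only behaviour.
    try:
        fd = os.open(path, flags, mode)
    except OSError:
        return -1
    try:
        return 0 if os.write(fd, data) == len(data) else -1
    finally:
        os.close(fd)
```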

@ -0,0 +1,17 @@
#include <string>
#include <cstdint>
#include <unistd.h>
#include "common/timing.h"
#include "common/util.h"
#include "common/watchdog.h"
const std::string watchdog_fn_prefix = "/dev/shm/wd_"; // + <pid>
bool watchdog_kick(){
std::string fn = watchdog_fn_prefix + std::to_string(getpid());
std::string cur_t = std::to_string(nanos_since_boot());
int r = write_file(fn.c_str(), cur_t.data(), cur_t.length(), O_WRONLY | O_CREAT);
return r == 0;
}

@ -0,0 +1,3 @@
#pragma once
bool watchdog_kick();
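
A hedged Python sketch of the file-based watchdog protocol these two new files (together with the manager changes below) implement: each kicked process writes its current monotonic time in nanoseconds to /dev/shm/wd_<pid>, and the manager reads that file back and compares the age against the process's watchdog_max_dt. time.monotonic_ns stands in for nanos_since_boot/sec_since_boot, and watchdog_expired is an illustrative helper rather than the manager's actual method.

```python
import os
import time

WATCHDOG_FN = "/dev/shm/wd_"  # same prefix used by watchdog.cc and manager/process.py

def watchdog_kick(pid=None) -> bool:
    # Each process periodically writes its current monotonic time (ns) to /dev/shm/wd_<pid>.
    pid = os.getpid() if pid is None else pid
    try:
        with open(f"{WATCHDOG_FN}{pid}", "w") as f:
            f.write(str(time.monotonic_ns()))
        return True
    except OSError:
        return False

def watchdog_expired(pid: int, max_dt_s: float) -> bool:
    # The manager reads the kick file back and restarts the process if the kick is stale.
    try:
        last_kick_ns = int(open(f"{WATCHDOG_FN}{pid}").read())
    except (OSError, ValueError):
        return False  # no kick seen yet; the manager only acts once watchdog_seen is set
    return time.monotonic_ns() - last_kick_ns > max_dt_s * 1e9
```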

@ -23,7 +23,7 @@ ExitHandler do_exit;
using namespace ublox;
int ubloxd_main(poll_ubloxraw_msg_func poll_func, send_gps_event_func send_func) {
LOGW("starting ubloxd");
kj::Array<capnp::word> buf = kj::heapArray<capnp::word>(1024);
UbloxMsgParser parser;
Context * context = Context::create();
@ -41,11 +41,13 @@ int ubloxd_main(poll_ubloxraw_msg_func poll_func, send_gps_event_func send_func)
}
continue;
}
const size_t size = (msg->getSize() / sizeof(capnp::word)) + 1;
if (buf.size() < size) {
buf = kj::heapArray<capnp::word>(size);
}
memcpy(buf.begin(), msg->getData(), msg->getSize());
auto amsg = kj::heapArray<capnp::word>((msg->getSize() / sizeof(capnp::word)) + 1);
memcpy(amsg.begin(), msg->getData(), msg->getSize());
capnp::FlatArrayMessageReader cmsg(amsg);
capnp::FlatArrayMessageReader cmsg(buf.slice(0, size));
cereal::Event::Reader event = cmsg.getRoot<cereal::Event>();
auto ubloxRaw = event.getUbloxRaw();

@ -22,9 +22,11 @@ class TestLogcatdAndroid(unittest.TestCase):
# write some log messages
sent_msgs = {}
for __ in range(random.randint(5, 50)):
msg = ''.join([random.choice(string.ascii_letters) for _ in range(random.randrange(2, 200))])
msg = ''.join([random.choice(string.ascii_letters) for _ in range(random.randrange(2, 50))])
if msg in sent_msgs:
continue
sent_msgs[msg] = ''.join([random.choice(string.ascii_letters) for _ in range(random.randrange(2, 20))])
os.system(f"log -t {sent_msgs[msg]} {msg}")
os.system(f"log -t '{sent_msgs[msg]}' '{msg}'")
time.sleep(1)
msgs = messaging.drain_sock(sock)
@ -36,12 +38,13 @@ class TestLogcatdAndroid(unittest.TestCase):
if recv_msg not in sent_msgs:
continue
self.assertEqual(m.androidLog.tag, sent_msgs[recv_msg])
del sent_msgs[recv_msg]
# see https://android.googlesource.com/platform/system/core/+/android-2.1_r1/liblog/logd_write.c#144
radio_msg = m.androidLog.id == 1 and m.androidLog.tag.startswith("use-Rlog/RLOG-")
if m.androidLog.tag == sent_msgs[recv_msg] or radio_msg:
del sent_msgs[recv_msg]
# ensure we received all the logs we sent
self.assertEqual(len(sent_msgs), 0)
time.sleep(1)
if __name__ == "__main__":

@ -25,8 +25,6 @@ RawLogger::RawLogger(const char* filename, int width, int height, int fps,
: filename(filename),
fps(fps) {
int err = 0;
av_register_all();
codec = avcodec_find_encoder(AV_CODEC_ID_FFVHUFF);
// codec = avcodec_find_encoder(AV_CODEC_ID_FFV1);
@ -45,7 +43,7 @@ RawLogger::RawLogger(const char* filename, int width, int height, int fps,
codec_ctx->time_base = (AVRational){ 1, fps };
err = avcodec_open2(codec_ctx, codec, NULL);
int err = avcodec_open2(codec_ctx, codec, NULL);
assert(err >= 0);
frame = av_frame_alloc();
@ -65,10 +63,6 @@ RawLogger::~RawLogger() {
}
void RawLogger::encoder_open(const char* path) {
int err = 0;
std::lock_guard<std::recursive_mutex> guard(lock);
vid_path = util::string_format("%s/%s.mkv", path, filename);
// create camera lock file
@ -91,7 +85,7 @@ void RawLogger::encoder_open(const char* path) {
stream->time_base = (AVRational){ 1, fps };
// codec_ctx->time_base = stream->time_base;
err = avcodec_parameters_from_context(stream->codecpar, codec_ctx);
int err = avcodec_parameters_from_context(stream->codecpar, codec_ctx);
assert(err >= 0);
err = avio_open(&format_ctx->pb, vid_path.c_str(), AVIO_FLAG_WRITE);
@ -105,13 +99,9 @@ void RawLogger::encoder_open(const char* path) {
}
void RawLogger::encoder_close() {
int err = 0;
std::lock_guard<std::recursive_mutex> guard(lock);
if (!is_open) return;
err = av_write_trailer(format_ctx);
int err = av_write_trailer(format_ctx);
assert(err == 0);
avcodec_close(stream->codec);
@ -128,8 +118,6 @@ void RawLogger::encoder_close() {
int RawLogger::encode_frame(const uint8_t *y_ptr, const uint8_t *u_ptr, const uint8_t *v_ptr,
int in_width, int in_height, uint64_t ts) {
int err = 0;
AVPacket pkt;
av_init_packet(&pkt);
pkt.data = NULL;
@ -143,7 +131,7 @@ int RawLogger::encode_frame(const uint8_t *y_ptr, const uint8_t *u_ptr, const ui
int ret = counter;
int got_output = 0;
err = avcodec_encode_video2(codec_ctx, &pkt, frame, &got_output);
int err = avcodec_encode_video2(codec_ctx, &pkt, frame, &got_output);
if (err) {
LOGE("encoding error\n");
ret = -1;

@ -5,8 +5,6 @@
#include <string>
#include <vector>
#include <mutex>
#include <condition_variable>
extern "C" {
#include <libavutil/imgutils.h>
@ -34,8 +32,6 @@ private:
std::string vid_path, lock_path;
std::recursive_mutex lock;
AVCodec *codec = NULL;
AVCodecContext *codec_ctx = NULL;

@ -153,7 +153,7 @@ def manager_thread():
ensure_running(managed_processes.values(), started, driverview, not_run)
# trigger an update after going offroad
if started_prev and not started:
if started_prev and not started and 'updated' in managed_processes:
os.sync()
managed_processes['updated'].signal(signal.SIGHUP)
@ -194,7 +194,7 @@ def main(spinner=None):
finally:
manager_cleanup()
if Params().params.get("DoUninstall", encoding='utf8') == "1":
if Params().get("DoUninstall", encoding='utf8') == "1":
cloudlog.warning("uninstalling")
HARDWARE.uninstall()

@ -12,10 +12,14 @@ import cereal.messaging as messaging
import selfdrive.crash as crash
from common.basedir import BASEDIR
from common.params import Params
from common.realtime import sec_since_boot
from selfdrive.swaglog import cloudlog
from selfdrive.hardware import HARDWARE
from cereal import log
WATCHDOG_FN = "/dev/shm/wd_"
ENABLE_WATCHDOG = os.getenv("NO_WATCHDOG") is None
def launcher(proc):
try:
@ -58,8 +62,13 @@ class ManagerProcess(ABC):
daemon = False
sigkill = False
proc = None
enabled = True
name = ""
last_watchdog_time = 0
watchdog_max_dt = None
watchdog_seen = False
@abstractmethod
def prepare(self):
pass
@ -68,6 +77,30 @@ class ManagerProcess(ABC):
def start(self):
pass
def restart(self):
self.stop()
self.start()
def check_watchdog(self, started):
if self.watchdog_max_dt is None or self.proc is None:
return
try:
fn = WATCHDOG_FN + str(self.proc.pid)
self.last_watchdog_time = int(open(fn).read())
except Exception:
pass
dt = sec_since_boot() - self.last_watchdog_time / 1e9
if dt > self.watchdog_max_dt:
# Only restart while offroad for now
if self.watchdog_seen and ENABLE_WATCHDOG and (not started):
cloudlog.error(f"Watchdog timeout for {self.name} (exitcode {self.proc.exitcode}) restarting")
self.restart()
else:
self.watchdog_seen = True
def stop(self, retry=True):
if self.proc is None:
return
@ -106,6 +139,10 @@ class ManagerProcess(ABC):
return ret
def signal(self, sig):
if self.proc is None:
return
# Don't signal if already exited
if self.proc.exitcode is not None and self.proc.pid is not None:
return
@ -123,14 +160,16 @@ class ManagerProcess(ABC):
class NativeProcess(ManagerProcess):
def __init__(self, name, cwd, cmdline, persistent=False, driverview=False, unkillable=False, sigkill=False):
def __init__(self, name, cwd, cmdline, enabled=True, persistent=False, driverview=False, unkillable=False, sigkill=False, watchdog_max_dt=None):
self.name = name
self.cwd = cwd
self.cmdline = cmdline
self.enabled = enabled
self.persistent = persistent
self.driverview = driverview
self.unkillable = unkillable
self.sigkill = sigkill
self.watchdog_max_dt = watchdog_max_dt
def prepare(self):
pass
@ -143,20 +182,24 @@ class NativeProcess(ManagerProcess):
cloudlog.info("starting process %s" % self.name)
self.proc = Process(name=self.name, target=nativelauncher, args=(self.cmdline, cwd))
self.proc.start()
self.watchdog_seen = False
class PythonProcess(ManagerProcess):
def __init__(self, name, module, persistent=False, driverview=False, unkillable=False, sigkill=False):
def __init__(self, name, module, enabled=True, persistent=False, driverview=False, unkillable=False, sigkill=False, watchdog_max_dt=None):
self.name = name
self.module = module
self.enabled = enabled
self.persistent = persistent
self.driverview = driverview
self.unkillable = unkillable
self.sigkill = sigkill
self.watchdog_max_dt = watchdog_max_dt
def prepare(self):
cloudlog.info("preimporting %s" % self.module)
importlib.import_module(self.module)
if self.enabled:
cloudlog.info("preimporting %s" % self.module)
importlib.import_module(self.module)
def start(self):
if self.proc is not None:
@ -165,15 +208,17 @@ class PythonProcess(ManagerProcess):
cloudlog.info("starting python %s" % self.module)
self.proc = Process(name=self.name, target=launcher, args=(self.module,))
self.proc.start()
self.watchdog_seen = False
class DaemonProcess(ManagerProcess):
"""Python process that has to stay running accross manager restart.
This is used for athena so you don't lose SSH access when restarting manager."""
def __init__(self, name, module, param_name):
def __init__(self, name, module, param_name, enabled=True):
self.name = name
self.module = module
self.param_name = param_name
self.enabled = enabled
self.persistent = True
def prepare(self):
@ -215,6 +260,8 @@ def ensure_running(procs, started, driverview=False, not_run=None):
for p in procs:
if p.name in not_run:
p.stop()
elif not p.enabled:
p.stop()
elif p.persistent:
p.start()
elif p.driverview and driverview:
@ -223,3 +270,6 @@ def ensure_running(procs, started, driverview=False, not_run=None):
p.start()
else:
p.stop()
p.check_watchdog(started)
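
Piecing the two ensure_running hunks together, the start/stop decision now runs roughly as below. This is a sketch: the "elif started" branch is not visible in the diff and is reconstructed from the surrounding manager logic, so treat it as an assumption.

```python
def ensure_running(procs, started, driverview=False, not_run=None):
    not_run = not_run or []
    for p in procs:
        if p.name in not_run:
            p.stop()
        elif not p.enabled:                # new: disabled processes are always stopped
            p.stop()
        elif p.persistent:
            p.start()
        elif p.driverview and driverview:
            p.start()
        elif started:                      # assumption: pre-existing branch elided between hunks
            p.start()
        else:
            p.stop()
        p.check_watchdog(started)          # new: every managed process gets its watchdog checked
```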

@ -1,48 +1,39 @@
import os
from selfdrive.manager.process import PythonProcess, NativeProcess, DaemonProcess
from selfdrive.hardware import EON, TICI, PC
WEBCAM = os.getenv("WEBCAM") is not None
procs = [
DaemonProcess("manage_athenad", "selfdrive.athena.manage_athenad", "AthenadPid"),
# due to qualcomm kernel bugs SIGKILLing camerad sometimes causes page table corruption
NativeProcess("camerad", "selfdrive/camerad", ["./camerad"], unkillable=True, driverview=True),
NativeProcess("clocksd", "selfdrive/clocksd", ["./clocksd"]),
NativeProcess("dmonitoringmodeld", "selfdrive/modeld", ["./dmonitoringmodeld"], driverview=True),
NativeProcess("dmonitoringmodeld", "selfdrive/modeld", ["./dmonitoringmodeld"], enabled=(not PC or WEBCAM), driverview=True),
NativeProcess("logcatd", "selfdrive/logcatd", ["./logcatd"]),
NativeProcess("loggerd", "selfdrive/loggerd", ["./loggerd"]),
NativeProcess("modeld", "selfdrive/modeld", ["./modeld"]),
NativeProcess("proclogd", "selfdrive/proclogd", ["./proclogd"]),
NativeProcess("ubloxd", "selfdrive/locationd", ["./ubloxd"]),
NativeProcess("ui", "selfdrive/ui", ["./ui"], persistent=True),
NativeProcess("sensord", "selfdrive/sensord", ["./sensord"], enabled=not PC, persistent=EON, sigkill=EON),
NativeProcess("ubloxd", "selfdrive/locationd", ["./ubloxd"], enabled=(not PC or WEBCAM)),
NativeProcess("ui", "selfdrive/ui", ["./ui"], persistent=True, watchdog_max_dt=10),
PythonProcess("calibrationd", "selfdrive.locationd.calibrationd"),
PythonProcess("controlsd", "selfdrive.controls.controlsd"),
PythonProcess("deleter", "selfdrive.loggerd.deleter", persistent=True),
PythonProcess("dmonitoringd", "selfdrive.monitoring.dmonitoringd", driverview=True),
PythonProcess("dmonitoringd", "selfdrive.monitoring.dmonitoringd", enabled=(not PC or WEBCAM), driverview=True),
PythonProcess("locationd", "selfdrive.locationd.locationd"),
PythonProcess("logmessaged", "selfdrive.logmessaged", persistent=True),
PythonProcess("pandad", "selfdrive.pandad", persistent=True),
PythonProcess("paramsd", "selfdrive.locationd.paramsd"),
PythonProcess("plannerd", "selfdrive.controls.plannerd"),
PythonProcess("radard", "selfdrive.controls.radard"),
PythonProcess("rtshield", "selfdrive.rtshield", enabled=EON),
PythonProcess("thermald", "selfdrive.thermald.thermald", persistent=True),
PythonProcess("timezoned", "selfdrive.timezoned", enabled=TICI, persistent=True),
PythonProcess("tombstoned", "selfdrive.tombstoned", enabled=not PC, persistent=True),
PythonProcess("updated", "selfdrive.updated", enabled=not PC, persistent=True),
PythonProcess("uploader", "selfdrive.loggerd.uploader", persistent=True),
]
if not PC:
procs += [
NativeProcess("sensord", "selfdrive/sensord", ["./sensord"], persistent=EON, sigkill=EON),
PythonProcess("tombstoned", "selfdrive.tombstoned", persistent=True),
PythonProcess("updated", "selfdrive.updated", persistent=True),
]
if TICI:
procs += [
PythonProcess("timezoned", "selfdrive.timezoned", persistent=True),
]
if EON:
procs += [
PythonProcess("rtshield", "selfdrive.rtshield"),
]
managed_processes = {p.name: p for p in procs}

@ -1,10 +1,7 @@
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <cassert>
#include <sys/resource.h>
#include "visionbuf.h"
#include "visionipc_client.h"
#include "common/swaglog.h"
#include "common/util.h"
@ -15,51 +12,47 @@
#include <linux/limits.h>
#endif
ExitHandler do_exit;
int main(int argc, char **argv) {
setpriority(PRIO_PROCESS, 0, -15);
void run_model(DMonitoringModelState &model, VisionIpcClient &vipc_client) {
PubMaster pm({"driverState"});
double last = 0;
// init the models
DMonitoringModelState dmonitoringmodel;
dmonitoring_init(&dmonitoringmodel);
while (!do_exit) {
VisionIpcBufExtra extra = {};
VisionBuf *buf = vipc_client.recv(&extra);
if (buf == nullptr) continue;
VisionIpcClient vipc_client = VisionIpcClient("camerad", VISION_STREAM_YUV_FRONT, true);
while (!do_exit){
if (!vipc_client.connect(false)){
util::sleep_for(100);
continue;
}
break;
}
double t1 = millis_since_boot();
DMonitoringResult res = dmonitoring_eval_frame(&model, buf->addr, buf->width, buf->height);
double t2 = millis_since_boot();
while (!do_exit) {
LOGW("connected with buffer size: %d", vipc_client.buffers[0].len);
// send dm packet
dmonitoring_publish(pm, extra.frame_id, res, (t2 - t1) / 1000.0, model.output);
double last = 0;
while (!do_exit) {
VisionIpcBufExtra extra = {0};
VisionBuf *buf = vipc_client.recv(&extra);
if (buf == nullptr){
continue;
}
LOGD("dmonitoring process: %.2fms, from last %.2fms", t2 - t1, t1 - last);
last = t1;
}
}
double t1 = millis_since_boot();
DMonitoringResult res = dmonitoring_eval_frame(&dmonitoringmodel, buf->addr, buf->width, buf->height);
double t2 = millis_since_boot();
int main(int argc, char **argv) {
setpriority(PRIO_PROCESS, 0, -15);
// send dm packet
dmonitoring_publish(pm, extra.frame_id, res, (t2-t1)/1000.0, dmonitoringmodel.output);
// init the models
DMonitoringModelState model;
dmonitoring_init(&model);
LOGD("dmonitoring process: %.2fms, from last %.2fms", t2-t1, t1-last);
last = t1;
}
VisionIpcClient vipc_client = VisionIpcClient("camerad", VISION_STREAM_YUV_FRONT, true);
while (!do_exit && !vipc_client.connect(false)) {
util::sleep_for(100);
}
dmonitoring_free(&dmonitoringmodel);
// run the models
if (vipc_client.connected) {
LOGW("connected with buffer size: %d", vipc_client.buffers[0].len);
run_model(model, vipc_client);
}
dmonitoring_free(&model);
return 0;
}

@ -1,9 +1,8 @@
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <mutex>
#include <eigen3/Eigen/Dense>
#include "visionbuf.h"
#include "visionipc_client.h"
#include "common/swaglog.h"
#include "common/clutil.h"
@ -14,12 +13,12 @@
ExitHandler do_exit;
// globals
bool run_model;
bool live_calib_seen;
mat3 cur_transform;
pthread_mutex_t transform_lock;
std::mutex transform_lock;
void* live_thread(void *arg) {
set_thread_name("live");
void calibration_thread() {
set_thread_name("calibration");
set_realtime_priority(50);
SubMaster sm({"liveCalibration"});
@ -37,17 +36,7 @@ void* live_thread(void *arg) {
-1.84808520e-20, 9.00738606e-04,-4.28751576e-02;
Eigen::Matrix<float, 3, 3> fcam_intrinsics = Eigen::Matrix<float, 3, 3, Eigen::RowMajor>(fcam_intrinsic_matrix.v);
#ifndef QCOM2
float db_s = 0.5; // debayering does a 2x downscale
#else
float db_s = 1.0;
#endif
mat3 yuv_transform = transform_scale_buffer((mat3){{
1.0, 0.0, 0.0,
0.0, 1.0, 0.0,
0.0, 0.0, 1.0,
}}, db_s);
const mat3 yuv_transform = get_model_yuv_transform();
while (!do_exit) {
if (sm.update(100) > 0){
@ -70,17 +59,77 @@ void* live_thread(void *arg) {
transform.v[i] = warp_matrix(i / 3, i % 3);
}
mat3 model_transform = matmul3(yuv_transform, transform);
pthread_mutex_lock(&transform_lock);
std::lock_guard lk(transform_lock);
cur_transform = model_transform;
run_model = true;
pthread_mutex_unlock(&transform_lock);
live_calib_seen = true;
}
}
}
void run_model(ModelState &model, VisionIpcClient &vipc_client) {
// messaging
PubMaster pm({"modelV2", "cameraOdometry"});
SubMaster sm({"lateralPlan", "roadCameraState"});
// setup filter to track dropped frames
const float dt = 1. / MODEL_FREQ;
const float ts = 10.0; // filter time constant (s)
const float frame_filter_k = (dt / ts) / (1. + dt / ts);
float frames_dropped = 0;
uint32_t frame_id = 0, last_vipc_frame_id = 0;
double last = 0;
int desire = -1;
uint32_t run_count = 0;
while (!do_exit) {
VisionIpcBufExtra extra = {};
VisionBuf *buf = vipc_client.recv(&extra);
if (buf == nullptr) continue;
transform_lock.lock();
mat3 model_transform = cur_transform;
const bool run_model_this_iter = live_calib_seen;
transform_lock.unlock();
if (sm.update(0) > 0) {
// TODO: path planner timeout?
desire = ((int)sm["lateralPlan"].getLateralPlan().getDesire());
frame_id = sm["roadCameraState"].getRoadCameraState().getFrameId();
}
if (run_model_this_iter) {
run_count++;
float vec_desire[DESIRE_LEN] = {0};
if (desire >= 0 && desire < DESIRE_LEN) {
vec_desire[desire] = 1.0;
}
double mt1 = millis_since_boot();
ModelDataRaw model_buf = model_eval_frame(&model, buf->buf_cl, buf->width, buf->height,
model_transform, vec_desire);
double mt2 = millis_since_boot();
float model_execution_time = (mt2 - mt1) / 1000.0;
// tracked dropped frames
uint32_t vipc_dropped_frames = extra.frame_id - last_vipc_frame_id - 1;
frames_dropped = (1. - frame_filter_k) * frames_dropped + frame_filter_k * (float)std::min(vipc_dropped_frames, 10U);
if (run_count < 10) frames_dropped = 0; // let frame drops warm up
float frame_drop_ratio = frames_dropped / (1 + frames_dropped);
model_publish(pm, extra.frame_id, frame_id, frame_drop_ratio, model_buf, extra.timestamp_eof, model_execution_time,
kj::ArrayPtr<const float>(model.output.data(), model.output.size()));
posenet_publish(pm, extra.frame_id, vipc_dropped_frames, model_buf, extra.timestamp_eof);
LOGD("model process: %.2fms, from last %.2fms, vipc_frame_id %u, frame_id, %u, frame_drop %.3f", mt2 - mt1, mt1 - last, extra.frame_id, frame_id, frame_drop_ratio);
last = mt1;
last_vipc_frame_id = extra.frame_id;
}
}
return NULL;
}
int main(int argc, char **argv) {
int err;
set_realtime_priority(54);
#ifdef QCOM
@ -90,16 +139,8 @@ int main(int argc, char **argv) {
set_core_affinity(4);
#endif
pthread_mutex_init(&transform_lock, NULL);
// start calibration thread
pthread_t live_thread_handle;
err = pthread_create(&live_thread_handle, NULL, live_thread, NULL);
assert(err == 0);
// messaging
PubMaster pm({"modelV2", "cameraOdometry"});
SubMaster sm({"lateralPlan", "roadCameraState"});
std::thread thread = std::thread(calibration_thread);
// cl init
cl_device_id device_id = cl_get_device_id(CL_DEVICE_TYPE_DEFAULT);
@ -111,90 +152,21 @@ int main(int argc, char **argv) {
LOGW("models loaded, modeld starting");
VisionIpcClient vipc_client = VisionIpcClient("camerad", VISION_STREAM_YUV_BACK, true, device_id, context);
while (!do_exit){
if (!vipc_client.connect(false)){
util::sleep_for(100);
continue;
}
break;
while (!do_exit && !vipc_client.connect(false)) {
util::sleep_for(100);
}
// loop
while (!do_exit) {
VisionBuf *b = &vipc_client.buffers[0];
// run the models
// vipc_client.connected is false only when do_exit is true
if (vipc_client.connected) {
const VisionBuf *b = &vipc_client.buffers[0];
LOGW("connected with buffer size: %d (%d x %d)", b->len, b->width, b->height);
// setup filter to track dropped frames
const float dt = 1. / MODEL_FREQ;
const float ts = 10.0; // filter time constant (s)
const float frame_filter_k = (dt / ts) / (1. + dt / ts);
float frames_dropped = 0;
uint32_t frame_id = 0, last_vipc_frame_id = 0;
double last = 0;
int desire = -1;
uint32_t run_count = 0;
while (!do_exit) {
VisionIpcBufExtra extra;
VisionBuf *buf = vipc_client.recv(&extra);
if (buf == nullptr){
continue;
}
pthread_mutex_lock(&transform_lock);
mat3 model_transform = cur_transform;
const bool run_model_this_iter = run_model;
pthread_mutex_unlock(&transform_lock);
if (sm.update(0) > 0){
// TODO: path planner timeout?
desire = ((int)sm["lateralPlan"].getLateralPlan().getDesire());
frame_id = sm["roadCameraState"].getRoadCameraState().getFrameId();
}
double mt1 = 0, mt2 = 0;
if (run_model_this_iter) {
run_count++;
float vec_desire[DESIRE_LEN] = {0};
if (desire >= 0 && desire < DESIRE_LEN) {
vec_desire[desire] = 1.0;
}
mt1 = millis_since_boot();
ModelDataRaw model_buf =
model_eval_frame(&model, buf->buf_cl, buf->width, buf->height,
model_transform, vec_desire);
mt2 = millis_since_boot();
float model_execution_time = (mt2 - mt1) / 1000.0;
// tracked dropped frames
uint32_t vipc_dropped_frames = extra.frame_id - last_vipc_frame_id - 1;
frames_dropped = (1. - frame_filter_k) * frames_dropped + frame_filter_k * (float)std::min(vipc_dropped_frames, 10U);
if (run_count < 10) frames_dropped = 0; // let frame drops warm up
float frame_drop_ratio = frames_dropped / (1 + frames_dropped);
model_publish(pm, extra.frame_id, frame_id, frame_drop_ratio, model_buf, extra.timestamp_eof, model_execution_time,
kj::ArrayPtr<const float>(model.output.data(), model.output.size()));
posenet_publish(pm, extra.frame_id, vipc_dropped_frames, model_buf, extra.timestamp_eof);
LOGD("model process: %.2fms, from last %.2fms, vipc_frame_id %u, frame_id, %u, frame_drop %.3f", mt2-mt1, mt1-last, extra.frame_id, frame_id, frame_drop_ratio);
last = mt1;
last_vipc_frame_id = extra.frame_id;
}
}
run_model(model, vipc_client);
}
model_free(&model);
LOG("joining live_thread");
err = pthread_join(live_thread_handle, NULL);
assert(err == 0);
LOG("joining calibration thread");
thread.join();
CL_CHECK(clReleaseContext(context));
pthread_mutex_destroy(&transform_lock);
return 0;
}
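
The relocated run_model() keeps modeld's dropped-frame bookkeeping. Below is a standalone Python sketch of that first-order low-pass filter and the published frame_drop_ratio; the constants come from the diff, MODEL_FREQ = 20 Hz is an assumption, and update_frame_drop is an illustrative helper rather than modeld's API.

```python
MODEL_FREQ = 20.0          # assumption: the road model runs at 20 Hz
dt = 1.0 / MODEL_FREQ
ts = 10.0                  # filter time constant (s)
frame_filter_k = (dt / ts) / (1.0 + dt / ts)

frames_dropped = 0.0
run_count = 0
last_vipc_frame_id = 0

def update_frame_drop(vipc_frame_id: int) -> float:
    """Returns the smoothed frame_drop_ratio published with each modelV2 packet."""
    global frames_dropped, run_count, last_vipc_frame_id
    run_count += 1
    vipc_dropped_frames = max(vipc_frame_id - last_vipc_frame_id - 1, 0)
    frames_dropped = (1.0 - frame_filter_k) * frames_dropped + frame_filter_k * min(vipc_dropped_frames, 10)
    if run_count < 10:
        frames_dropped = 0.0  # let frame drops warm up
    last_vipc_frame_id = vipc_frame_id
    return frames_dropped / (1.0 + frames_dropped)
```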

@ -1,10 +1,11 @@
#!/usr/bin/env python3
# TODO: why are the keras models saved with python 2?
from __future__ import print_function
import os
import sys
import numpy as np
os.environ["OMP_NUM_THREADS"] = "1"
import onnxruntime as ort
def read(sz):
@ -53,4 +54,3 @@ if __name__ == "__main__":
ort_session = ort.InferenceSession(sys.argv[1], options)
ort_session.set_providers([provider], None)
run_loop(ort_session)

@ -406,7 +406,8 @@ def thermald_thread():
msg.deviceState.thermalStatus = thermal_status
pm.send("deviceState", msg)
set_offroad_alert_if_changed("Offroad_ChargeDisabled", (not usb_power))
if EON and not is_uno:
set_offroad_alert_if_changed("Offroad_ChargeDisabled", (not usb_power))
should_start_prev = should_start
startup_conditions_prev = startup_conditions.copy()

@ -9,6 +9,7 @@
#include "common/params.h"
#include "common/touch.h"
#include "common/swaglog.h"
#include "common/watchdog.h"
#include "ui.hpp"
#include "paint.hpp"
@ -139,6 +140,7 @@ int main(int argc, char* argv[]) {
s->sound->setVolume(MIN_VOLUME);
while (!do_exit) {
watchdog_kick();
if (!s->scene.started) {
util::sleep_for(50);
}

@ -16,6 +16,7 @@
#include "common/params.h"
#include "common/timing.h"
#include "common/swaglog.h"
#include "common/watchdog.h"
#include "home.hpp"
#include "paint.hpp"
@ -276,6 +277,7 @@ void GLWindow::timerUpdate() {
ui_update(&ui_state);
repaint();
watchdog_kick();
}
void GLWindow::resizeGL(int w, int h) {

@ -11,7 +11,7 @@ QtSound::QtSound() {
bool QtSound::play(AudibleAlert alert) {
int loops = sound_map[alert].second > -1 ? sound_map[alert].second : QSoundEffect::Infinite;
sounds[alert].setLoopCount(loops);
sounds[alert].setVolume(0.7);
sounds[alert].setVolume(0.45);
sounds[alert].play();
return true;
}

@ -91,13 +91,11 @@ static void update_lead(UIState *s, const cereal::RadarState::Reader &radar_stat
}
static void update_line_data(const UIState *s, const cereal::ModelDataV2::XYZTData::Reader &line,
float y_off, float z_off, line_vertices_data *pvd, float max_distance) {
float y_off, float z_off, line_vertices_data *pvd, int max_idx) {
const auto line_x = line.getX(), line_y = line.getY(), line_z = line.getZ();
int max_idx = -1;
vertex_data *v = &pvd->v[0];
for (int i = 0; ((i < TRAJECTORY_SIZE) and (line_x[i] < fmax(MIN_DRAW_DISTANCE, max_distance))); i++) {
for (int i = 0; i <= max_idx; i++) {
v += calib_frame_to_full_frame(s, line_x[i], line_y[i] - y_off, line_z[i] + z_off, v);
max_idx = i;
}
for (int i = max_idx; i >= 0; i--) {
v += calib_frame_to_full_frame(s, line_x[i], line_y[i] + y_off, line_z[i] + z_off, v);
@ -108,13 +106,17 @@ static void update_line_data(const UIState *s, const cereal::ModelDataV2::XYZTDa
static void update_model(UIState *s, const cereal::ModelDataV2::Reader &model) {
UIScene &scene = s->scene;
const float max_distance = fmin(model.getPosition().getX()[TRAJECTORY_SIZE - 1], MAX_DRAW_DISTANCE);
auto model_position = model.getPosition();
const float max_distance = std::clamp(model_position.getX()[TRAJECTORY_SIZE - 1],
MIN_DRAW_DISTANCE, MAX_DRAW_DISTANCE);
int max_idx = get_path_length_idx(model_position, max_distance);
// update lane lines
const auto lane_lines = model.getLaneLines();
const auto lane_line_probs = model.getLaneLineProbs();
for (int i = 0; i < std::size(scene.lane_line_vertices); i++) {
scene.lane_line_probs[i] = lane_line_probs[i];
update_line_data(s, lane_lines[i], 0.025 * scene.lane_line_probs[i], 0, &scene.lane_line_vertices[i], max_distance);
update_line_data(s, lane_lines[i], 0.025 * scene.lane_line_probs[i], 0, &scene.lane_line_vertices[i], max_idx);
}
// update road edges
@ -122,14 +124,17 @@ static void update_model(UIState *s, const cereal::ModelDataV2::Reader &model) {
const auto road_edge_stds = model.getRoadEdgeStds();
for (int i = 0; i < std::size(scene.road_edge_vertices); i++) {
scene.road_edge_stds[i] = road_edge_stds[i];
update_line_data(s, road_edges[i], 0.025, 0, &scene.road_edge_vertices[i], max_distance);
update_line_data(s, road_edges[i], 0.025, 0, &scene.road_edge_vertices[i], max_idx);
}
// update path
const float lead_d = scene.lead_data[0].getStatus() ? scene.lead_data[0].getDRel() * 2. : MAX_DRAW_DISTANCE;
float path_length = (lead_d > 0.) ? lead_d - fmin(lead_d * 0.35, 10.) : MAX_DRAW_DISTANCE;
path_length = fmin(path_length, max_distance);
update_line_data(s, model.getPosition(), 0.5, 1.22, &scene.track_vertices, path_length);
if (scene.lead_data[0].getStatus()) {
const float lead_d = scene.lead_data[0].getDRel() * 2.;
const float path_length = std::clamp((float)(lead_d - fmin(lead_d * 0.35, 10.)),
0.0f, max_distance);
max_idx = get_path_length_idx(model_position, path_length);
}
update_line_data(s, model_position, 0.5, 1.22, &scene.track_vertices, max_idx);
}
static void update_sockets(UIState *s) {
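
A Python sketch of the new index-based drawing in update_model(): get_path_length_idx is assumed to return the last trajectory index still within max_distance, and TRAJECTORY_SIZE = 33 with draw-distance bounds of 10/100 m are assumptions; the clamping itself mirrors the diff.

```python
TRAJECTORY_SIZE = 33                                 # assumption: model trajectory length
MIN_DRAW_DISTANCE, MAX_DRAW_DISTANCE = 10.0, 100.0   # assumption: draw-distance bounds (m)

def get_path_length_idx(position_x, max_distance: float) -> int:
    # Last index whose longitudinal distance is still within max_distance.
    max_idx = 0
    for i in range(1, TRAJECTORY_SIZE):
        if position_x[i] > max_distance:
            break
        max_idx = i
    return max_idx

def path_draw_idx(position_x, lead_status: bool, lead_d_rel: float) -> int:
    # All lines are clipped to max_distance; the driving path is additionally
    # shortened to just behind the lead car when one is tracked.
    max_distance = min(max(position_x[TRAJECTORY_SIZE - 1], MIN_DRAW_DISTANCE), MAX_DRAW_DISTANCE)
    max_idx = get_path_length_idx(position_x, max_distance)
    if lead_status:
        lead_d = lead_d_rel * 2.0
        path_length = min(max(lead_d - min(lead_d * 0.35, 10.0), 0.0), max_distance)
        max_idx = get_path_length_idx(position_x, path_length)
    return max_idx
```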

@ -227,9 +227,11 @@ def handle_agnos_update(wait_helper):
set_consistent_flag(False)
cloudlog.info(f"Beginning background installation for AGNOS {updated_version}")
set_offroad_alert("Offroad_NeosUpdate", True)
manifest_path = os.path.join(OVERLAY_MERGED, "selfdrive/hardware/tici/agnos.json")
flash_agnos_update(manifest_path, cloudlog)
set_offroad_alert("Offroad_NeosUpdate", False)
def handle_neos_update(wait_helper: WaitTimeHelper) -> None:

@ -31,7 +31,7 @@ def juggle_file(fn, dbc=None, layout=None):
if dbc:
env["DBC_NAME"] = dbc
pj = os.getenv("PLOTJUGGLER_PATH", "plotjuggler")
pj = os.getenv("PLOTJUGGLER_PATH", "bin/plotjuggler")
extra_args = ""
if layout is not None:
extra_args += f'-l {layout}'

@ -6,40 +6,33 @@ RUN apt-get update && apt-get install -y \
tar \
curl \
xz-utils \
beignet-opencl-icd \
libglvnd-dev \
alien \
dbus \
gcc-arm-none-eabi \
tmux \
vim \
lsb-core \
libx11-6 \
&& rm -rf /var/lib/apt/lists/*
# Intel OpenCL driver
ARG INTEL_DRIVER=opencl_runtime_16.1.1_x64_ubuntu_6.4.0.25.tgz
ARG INTEL_DRIVER_URL=http://registrationcenter-download.intel.com/akdlm/irc_nas/9019
ARG INTEL_DRIVER=l_opencl_p_18.1.0.015.tgz
ARG INTEL_DRIVER_URL=http://registrationcenter-download.intel.com/akdlm/irc_nas/vcp/15532
RUN mkdir -p /tmp/opencl-driver-intel
WORKDIR /tmp/opencl-driver-intel
RUN echo INTEL_DRIVER is $INTEL_DRIVER; \
curl -O $INTEL_DRIVER_URL/$INTEL_DRIVER; \
tar -xzf $INTEL_DRIVER; \
for i in $(basename $INTEL_DRIVER .tgz)/rpm/*.rpm; do alien --to-deb $i; done; \
dpkg -i *.deb; \
rm -rf $INTEL_DRIVER $(basename $INTEL_DRIVER .tgz) *.deb; \
mkdir -p /etc/OpenCL/vendors; \
echo /opt/intel/*/lib64/libintelocl.so > /etc/OpenCL/vendors/intel.icd; \
rm -rf /tmp/opencl-driver-intel;
RUN echo INTEL_DRIVER is $INTEL_DRIVER && \
curl -O $INTEL_DRIVER_URL/$INTEL_DRIVER && \
tar -xzf $INTEL_DRIVER && \
for i in $(basename $INTEL_DRIVER .tgz)/rpm/*.rpm; do alien --to-deb $i; done && \
dpkg -i *.deb && \
rm -rf $INTEL_DRIVER $(basename $INTEL_DRIVER .tgz) *.deb && \
mkdir -p /etc/OpenCL/vendors && \
echo /opt/intel/opencl_compilers_and_libraries_18.1.0.015/linux/compiler/lib/intel64_lin/libintelocl.so > /etc/OpenCL/vendors/intel.icd && \
rm -rf /tmp/opencl-driver-intel
# Open[GL,CL] for gpu
ENV NVIDIA_DRIVER_CAPABILITIES ${NVIDIA_DRIVER_CAPABILITIES},display
RUN apt-get update && apt-get install -y --no-install-recommends \
mesa-utils \
ocl-icd-libopencl1 \
&& rm -rf /var/lib/apt/lists/*
RUN mkdir -p /etc/OpenCL/vendors && \
echo "libnvidia-opencl.so.1" > /etc/OpenCL/vendors/nvidia.icd
ENV NVIDIA_VISIBLE_DEVICES all
ENV NVIDIA_DRIVER_CAPABILITIES compute,utility,display
ENV NVIDIA_DRIVER_CAPABILITIES graphics,utility,compute
ENV QTWEBENGINE_DISABLE_SANDBOX 1
RUN dbus-uuidgen > /etc/machine-id

@ -19,7 +19,7 @@ parser.add_argument('--joystick', action='store_true')
parser.add_argument('--town', type=str, default='Town04')
parser.add_argument('--spawn_point', dest='num_selected_spawn_point',
type=int, default=16)
parser.add_argument('--cloudyness', default=0.1, type=float)
parser.add_argument('--cloudiness', default=0.1, type=float)
parser.add_argument('--precipitation', default=0.0, type=float)
parser.add_argument('--precipitation_deposits', default=0.0, type=float)
parser.add_argument('--wind_intensity', default=0.0, type=float)
@ -62,7 +62,7 @@ def cam_callback(image):
dat = messaging.new_message('roadCameraState')
dat.roadCameraState = {
"frameId": image.frame,
"image": img.tostring(),
"image": img.tobytes(),
"transform": [1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0]
}
pm.send('roadCameraState', dat)
@ -118,7 +118,6 @@ def fake_driver_monitoring():
"faceDetected": True,
"isDistracted": False,
"awarenessStatus": 1.,
"isRHD": False,
}
pm.send('driverMonitoringState', dat)
@ -143,7 +142,7 @@ def go(q):
world_map = world.get_map()
vehicle_bp = blueprint_library.filter('vehicle.tesla.*')[0]
vehicle_bp = blueprint_library.filter('vehicle.tesla.*')[1]
spawn_points = world_map.get_spawn_points()
assert len(spawn_points) > args.num_selected_spawn_point, \
f'''No spawn point {args.num_selected_spawn_point}, try a value between 0 and
@ -167,12 +166,12 @@ def go(q):
blueprint.set_attribute('image_size_y', str(H))
blueprint.set_attribute('fov', '70')
blueprint.set_attribute('sensor_tick', '0.05')
transform = carla.Transform(carla.Location(x=0.8, z=1.45))
transform = carla.Transform(carla.Location(x=0.8, z=1.13))
camera = world.spawn_actor(blueprint, transform, attach_to=vehicle)
camera.listen(cam_callback)
world.set_weather(carla.WeatherParameters(
cloudyness=args.cloudyness,
cloudiness=args.cloudiness,
precipitation=args.precipitation,
precipitation_deposits=args.precipitation_deposits,
wind_intensity=args.wind_intensity,
@ -278,7 +277,7 @@ def go(q):
sm.update(0)
throttle_op = sm['carControl'].actuators.gas #[0,1]
brake_op = sm['carControl'].actuators.brake #[0,1]
steer_op = sm['controlsState'].angleSteersDes # degrees [-180,180]
steer_op = sm['controlsState'].steeringAngleDesiredDeg # degrees [-180,180]
throttle_out = throttle_op
steer_out = steer_op
@ -350,11 +349,10 @@ def go(q):
rk.keep_time()
if __name__ == "__main__":
# make sure params are in a good state
params = Params()
params.clear_all()
set_params_enabled()
params = Params()
params.delete("Offroad_ConnectivityNeeded")
params.put("CalibrationParams", '{"calib_radians": [0,0,0], "valid_blocks": 20}')

@ -1,15 +1,16 @@
#!/usr/bin/env bash
cd /tmp
FILE=CARLA_0.9.7.tar.gz
FILE=CARLA_0.9.11.tar.gz
rm -f $FILE
curl -O http://carla-assets-internal.s3.amazonaws.com/Releases/Linux/$FILE
curl -O https://carla-releases.s3.eu-west-3.amazonaws.com/Linux/$FILE
rm -rf carla_tmp
mkdir -p carla_tmp
cd carla_tmp
tar xvf ../$FILE PythonAPI/
easy_install PythonAPI/carla/dist/carla-0.9.7-py3.5-linux-x86_64.egg || true
easy_install PythonAPI/carla/dist/carla-0.9.11-py3.7-linux-x86_64.egg || true
cd ..
rm -rf /tmp/$FILE
rm -rf carla_tmp

@ -15,20 +15,14 @@ if ! $(apt list --installed | grep -q nvidia-container-toolkit); then
fi
fi
docker pull carlasim/carla:0.9.7
if [ -z "$WINDOW" ]; then
docker run -it --net=host --gpus all carlasim/carla:0.9.7
else
docker run --name openpilot_carla \
--privileged --rm \
--net=host \
-e SDL_VIDEODRIVER=x11 \
-e DISPLAY=$DISPLAY \
-v /tmp/.X11-unix:/tmp/.X11-unix \
-it \
--gpus all \
carlasim/carla:0.9.7 \
./CarlaUE4.sh -quality-level=Medium
fi
docker pull carlasim/carla:0.9.11
docker run \
--rm \
--net=host \
-e DISPLAY= \
-e SDL_VIDEODRIVER=offscreen \
-it \
--gpus all \
carlasim/carla:0.9.11 \
./CarlaUE4.sh -opengl -nosound -quality-level=High

@ -2,6 +2,7 @@
# expose X to the container
xhost +local:root
docker pull commaai/openpilot-sim:latest
docker run --net=host\
@ -9,9 +10,10 @@ docker run --net=host\
--rm \
-it \
--gpus all \
--device=/dev/dri/ \
--device=/dev/dri \
-v /tmp/.X11-unix:/tmp/.X11-unix \
--shm-size 1G \
-e DISPLAY=$DISPLAY \
-e QT_X11_NO_MITSHM=1 \
commaai/openpilot-sim:latest \
/bin/bash -c "cd tools && cd sim && sh tmux_script.sh $*"
/bin/bash -c "cd /openpilot/tools/sim && ./tmux_script.sh $*"
