diff --git a/selfdrive/ui/qt/onroad.cc b/selfdrive/ui/qt/onroad.cc
index 346cb3ab96..d3bc931b67 100644
--- a/selfdrive/ui/qt/onroad.cc
+++ b/selfdrive/ui/qt/onroad.cc
@@ -227,14 +227,6 @@ void AnnotatedCameraWidget::updateState(const UIState &s) {
     setProperty("dmActive", sm["driverMonitoringState"].getDriverMonitoringState().getIsActiveMode());
     setProperty("rightHandDM", sm["driverMonitoringState"].getDriverMonitoringState().getIsRHD());
   }
-
-  setStreamType(s.scene.wide_cam ? VISION_STREAM_WIDE_ROAD : VISION_STREAM_ROAD);
-  if (s.scene.calibration_valid) {
-    auto calib = s.scene.wide_cam ? s.scene.view_from_wide_calib : s.scene.view_from_calib;
-    CameraWidget::updateCalibration(calib);
-  } else {
-    CameraWidget::updateCalibration(DEFAULT_CALIBRATION);
-  }
 }
 
 void AnnotatedCameraWidget::drawHud(QPainter &p) {
@@ -545,18 +537,62 @@ void AnnotatedCameraWidget::drawLead(QPainter &painter, const cereal::ModelDataV
 }
 
 void AnnotatedCameraWidget::paintGL() {
+  UIState *s = uiState();
+  SubMaster &sm = *(s->sm);
   const double start_draw_t = millis_since_boot();
+  const cereal::ModelDataV2::Reader &model = sm["modelV2"].getModelV2();
 
-  UIState *s = uiState();
-  const cereal::ModelDataV2::Reader &model = (*s->sm)["modelV2"].getModelV2();
-  CameraWidget::setFrameId(model.getFrameId());
-  CameraWidget::paintGL();
+  // draw camera frame
+  {
+    std::lock_guard lk(frame_lock);
+
+    if (frames.empty()) {
+      if (skip_frame_count > 0) {
+        skip_frame_count--;
+        qDebug() << "skipping frame, not ready";
+        return;
+      }
+    } else {
+      // skip drawing up to this many frames if we're
+      // missing camera frames. this smooths out the
+      // transitions from the narrow and wide cameras
+      skip_frame_count = 5;
+    }
+
+    // Wide or narrow cam dependent on speed
+    float v_ego = sm["carState"].getCarState().getVEgo();
+    if ((v_ego < 10) || s->wide_cam_only) {
+      wide_cam_requested = true;
+    } else if (v_ego > 15) {
+      wide_cam_requested = false;
+    }
+    // TODO: also detect when ecam vision stream isn't available
+    // for replay of old routes, never go to widecam
+    wide_cam_requested = wide_cam_requested && s->scene.calibration_wide_valid;
+    CameraWidget::setStreamType(wide_cam_requested ? VISION_STREAM_WIDE_ROAD : VISION_STREAM_ROAD);
+
+    s->scene.wide_cam = CameraWidget::getStreamType() == VISION_STREAM_WIDE_ROAD;
+    if (s->scene.calibration_valid) {
+      auto calib = s->scene.wide_cam ? s->scene.view_from_wide_calib : s->scene.view_from_calib;
+      CameraWidget::updateCalibration(calib);
+    } else {
+      CameraWidget::updateCalibration(DEFAULT_CALIBRATION);
+    }
+    CameraWidget::setFrameId(model.getFrameId());
+    CameraWidget::paintGL();
+  }
 
   QPainter painter(this);
   painter.setRenderHint(QPainter::Antialiasing);
   painter.setPen(Qt::NoPen);
 
   if (s->worldObjectsVisible()) {
+    if (sm.rcv_frame("modelV2") > s->scene.started_frame) {
+      update_model(s, sm["modelV2"].getModelV2());
+      if (sm.rcv_frame("radarState") > s->scene.started_frame) {
+        update_leads(s, sm["radarState"].getRadarState(), sm["modelV2"].getModelV2().getPosition());
+      }
+    }
     drawLaneLines(painter, s);
diff --git a/selfdrive/ui/qt/onroad.h b/selfdrive/ui/qt/onroad.h
index 2a663185f4..7edca6b3d5 100644
--- a/selfdrive/ui/qt/onroad.h
+++ b/selfdrive/ui/qt/onroad.h
@@ -70,6 +70,9 @@ private:
   int status = STATUS_DISENGAGED;
   std::unique_ptr<PubMaster> pm;
 
+  int skip_frame_count = 0;
+  bool wide_cam_requested = false;
+
 protected:
   void paintGL() override;
   void initializeGL() override;
diff --git a/selfdrive/ui/qt/widgets/cameraview.cc b/selfdrive/ui/qt/widgets/cameraview.cc
index d7591633c9..a606d6893e 100644
--- a/selfdrive/ui/qt/widgets/cameraview.cc
+++ b/selfdrive/ui/qt/widgets/cameraview.cc
@@ -94,7 +94,7 @@ mat4 get_fit_view_transform(float widget_aspect_ratio, float frame_aspect_ratio)
 }  // namespace
 
 CameraWidget::CameraWidget(std::string stream_name, VisionStreamType type, bool zoom, QWidget* parent) :
-  stream_name(stream_name), stream_type(type), zoomed_view(zoom), QOpenGLWidget(parent) {
+  stream_name(stream_name), requested_stream_type(type), zoomed_view(zoom), QOpenGLWidget(parent) {
   setAttribute(Qt::WA_OpaquePaintEvent);
   QObject::connect(this, &CameraWidget::vipcThreadConnected, this, &CameraWidget::vipcConnected, Qt::BlockingQueuedConnection);
   QObject::connect(this, &CameraWidget::vipcThreadFrameReceived, this, &CameraWidget::vipcFrameReceived, Qt::QueuedConnection);
@@ -124,7 +124,7 @@ void CameraWidget::initializeGL() {
   GLint frame_pos_loc = program->attributeLocation("aPosition");
   GLint frame_texcoord_loc = program->attributeLocation("aTexCoord");
 
-  auto [x1, x2, y1, y2] = stream_type == VISION_STREAM_DRIVER ? std::tuple(0.f, 1.f, 1.f, 0.f) : std::tuple(1.f, 0.f, 1.f, 0.f);
+  auto [x1, x2, y1, y2] = requested_stream_type == VISION_STREAM_DRIVER ? std::tuple(0.f, 1.f, 1.f, 0.f) : std::tuple(1.f, 0.f, 1.f, 0.f);
   const uint8_t frame_indicies[] = {0, 1, 2, 0, 2, 3};
   const float frame_coords[4][4] = {
     {-1.0, -1.0, x2, y1},  // bl
@@ -163,36 +163,26 @@ void CameraWidget::initializeGL() {
 void CameraWidget::showEvent(QShowEvent *event) {
   if (!vipc_thread) {
+    clearFrames();
     vipc_thread = new QThread();
     connect(vipc_thread, &QThread::started, [=]() { vipcThread(); });
     connect(vipc_thread, &QThread::finished, vipc_thread, &QObject::deleteLater);
     vipc_thread->start();
   }
-  clearFrames();
-}
-
-void CameraWidget::hideEvent(QHideEvent *event) {
-  if (vipc_thread) {
-    vipc_thread->requestInterruption();
-    vipc_thread->quit();
-    vipc_thread->wait();
-    vipc_thread = nullptr;
-  }
-  clearFrames();
 }
 
 void CameraWidget::updateFrameMat() {
   int w = width(), h = height();
 
   if (zoomed_view) {
-    if (stream_type == VISION_STREAM_DRIVER) {
+    if (active_stream_type == VISION_STREAM_DRIVER) {
       frame_mat = get_driver_view_transform(w, h, stream_width, stream_height);
     } else {
       // Project point at "infinity" to compute x and y offsets
       // to ensure this ends up in the middle of the screen
       // for narrow come and a little lower for wide cam.
      // TODO: use proper perspective transform?
-      if (stream_type == VISION_STREAM_WIDE_ROAD) {
+      if (active_stream_type == VISION_STREAM_WIDE_ROAD) {
         intrinsic_matrix = ecam_intrinsic_matrix;
         zoom = 2.0;
       } else {
@@ -212,11 +202,11 @@
       x_offset = std::clamp(x_offset_, -max_x_offset, max_x_offset);
       y_offset = std::clamp(y_offset_, -max_y_offset, max_y_offset);
 
-      float zx = zoom * 2 * intrinsic_matrix.v[2] / width();
-      float zy = zoom * 2 * intrinsic_matrix.v[5] / height();
+      float zx = zoom * 2 * intrinsic_matrix.v[2] / w;
+      float zy = zoom * 2 * intrinsic_matrix.v[5] / h;
       const mat4 frame_transform = {{
-        zx, 0.0, 0.0, -x_offset / width() * 2,
-        0.0, zy, 0.0, y_offset / height() * 2,
+        zx, 0.0, 0.0, -x_offset / w * 2,
+        0.0, zy, 0.0, y_offset / h * 2,
         0.0, 0.0, 1.0, 0.0,
         0.0, 0.0, 0.0, 1.0,
       }};
@@ -224,7 +214,7 @@
     }
   } else if (stream_width > 0 && stream_height > 0) {
     // fit frame to widget size
-    float widget_aspect_ratio = (float)width() / height();
+    float widget_aspect_ratio = (float)w / h;
     float frame_aspect_ratio = (float)stream_width / stream_height;
     frame_mat = get_fit_view_transform(widget_aspect_ratio, frame_aspect_ratio);
   }
@@ -232,7 +222,6 @@
 
 void CameraWidget::updateCalibration(const mat3 &calib) {
   calibration = calib;
-  updateFrameMat();
 }
 
 void CameraWidget::paintGL() {
@@ -240,13 +229,26 @@
   glClear(GL_STENCIL_BUFFER_BIT | GL_COLOR_BUFFER_BIT);
 
   std::lock_guard lk(frame_lock);
+  if (frames.empty()) return;
+
+  int frame_idx = frames.size() - 1;
+
+  // Always draw latest frame until sync logic is more stable
+  // for (frame_idx = 0; frame_idx < frames.size() - 1; frame_idx++) {
+  //   if (frames[frame_idx].first == draw_frame_id) break;
+  // }
 
-  // use previous texture if update() is called without new frame.
-  VisionBuf *frame = nullptr;
-  if (!frames.empty()) {
-    frame = frames.front().second;
-    frames.pop_front();
+  // Log duplicate/dropped frames
+  if (frames[frame_idx].first == prev_frame_id) {
+    qDebug() << "Drawing same frame twice" << frames[frame_idx].first;
+  } else if (frames[frame_idx].first != prev_frame_id + 1) {
+    qDebug() << "Skipped frame" << frames[frame_idx].first;
   }
+  prev_frame_id = frames[frame_idx].first;
+  VisionBuf *frame = frames[frame_idx].second;
+  assert(frame != nullptr);
+
+  updateFrameMat();
 
   glViewport(0, 0, width(), height());
   glBindVertexArray(frame_vao);
@@ -254,22 +256,22 @@
   glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
 
 #ifdef QCOM2
+  // no frame copy
   glActiveTexture(GL_TEXTURE0);
-  if (frame) {
-    glEGLImageTargetTexture2DOES(GL_TEXTURE_EXTERNAL_OES, egl_images[frame->idx]);
-    assert(glGetError() == GL_NO_ERROR);
-  }
+  glEGLImageTargetTexture2DOES(GL_TEXTURE_EXTERNAL_OES, egl_images[frame->idx]);
+  assert(glGetError() == GL_NO_ERROR);
#else
+  // fallback to copy
   glPixelStorei(GL_UNPACK_ROW_LENGTH, stream_stride);
   glActiveTexture(GL_TEXTURE0);
   glBindTexture(GL_TEXTURE_2D, textures[0]);
-  if (frame) glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, stream_width, stream_height, GL_RED, GL_UNSIGNED_BYTE, frame->y);
+  glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, stream_width, stream_height, GL_RED, GL_UNSIGNED_BYTE, frame->y);
   assert(glGetError() == GL_NO_ERROR);
   glPixelStorei(GL_UNPACK_ROW_LENGTH, stream_stride/2);
   glActiveTexture(GL_TEXTURE0 + 1);
   glBindTexture(GL_TEXTURE_2D, textures[1]);
-  if (frame) glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, stream_width/2, stream_height/2, GL_RG, GL_UNSIGNED_BYTE, frame->uv);
+  glTexSubImage2D(GL_TEXTURE_2D, 0, 0, 0, stream_width/2, stream_height/2, GL_RG, GL_UNSIGNED_BYTE, frame->uv);
   assert(glGetError() == GL_NO_ERROR);
 #endif
 
@@ -332,8 +334,6 @@ void CameraWidget::vipcConnected(VisionIpcClient *vipc_client) {
     glTexImage2D(GL_TEXTURE_2D, 0, GL_RG8, stream_width/2, stream_height/2, 0, GL_RG, GL_UNSIGNED_BYTE, nullptr);
     assert(glGetError() == GL_NO_ERROR);
 #endif
-
-  updateFrameMat();
 }
 
 void CameraWidget::vipcFrameReceived() {
@@ -341,16 +341,18 @@
 }
 
 void CameraWidget::vipcThread() {
-  VisionStreamType cur_stream_type = stream_type;
+  VisionStreamType cur_stream = requested_stream_type;
   std::unique_ptr<VisionIpcClient> vipc_client;
   VisionIpcBufExtra meta_main = {0};
 
   while (!QThread::currentThread()->isInterruptionRequested()) {
-    if (!vipc_client || cur_stream_type != stream_type) {
+    if (!vipc_client || cur_stream != requested_stream_type) {
       clearFrames();
-      cur_stream_type = stream_type;
-      vipc_client.reset(new VisionIpcClient(stream_name, cur_stream_type, false));
+      qDebug() << "connecting to stream " << requested_stream_type << ", was connected to " << cur_stream;
+      cur_stream = requested_stream_type;
+      vipc_client.reset(new VisionIpcClient(stream_name, cur_stream, false));
     }
+    active_stream_type = cur_stream;
 
     if (!vipc_client->connected) {
       clearFrames();
@@ -366,7 +368,6 @@ void CameraWidget::vipcThread() {
       std::lock_guard lk(frame_lock);
       frames.push_back(std::make_pair(meta_main.frame_id, buf));
       while (frames.size() > FRAME_BUFFER_SIZE) {
-        qDebug() << "Skipped frame" << frames.front().first;
         frames.pop_front();
       }
     }
diff --git a/selfdrive/ui/qt/widgets/cameraview.h b/selfdrive/ui/qt/widgets/cameraview.h
index aff3abc836..0698d1fb9a 100644
--- a/selfdrive/ui/qt/widgets/cameraview.h
+++ b/selfdrive/ui/qt/widgets/cameraview.h
@@ -31,9 +31,10 @@ public:
   using QOpenGLWidget::QOpenGLWidget;
   explicit CameraWidget(std::string stream_name, VisionStreamType stream_type, bool zoom, QWidget* parent = nullptr);
   ~CameraWidget();
-  void setStreamType(VisionStreamType type) { stream_type = type; }
   void setBackgroundColor(const QColor &color) { bg = color; }
   void setFrameId(int frame_id) { draw_frame_id = frame_id; }
+  void setStreamType(VisionStreamType type) { requested_stream_type = type; }
+  VisionStreamType getStreamType() { return active_stream_type; }
 
 signals:
   void clicked();
@@ -45,7 +46,6 @@ protected:
   void initializeGL() override;
   void resizeGL(int w, int h) override { updateFrameMat(); }
   void showEvent(QShowEvent *event) override;
-  void hideEvent(QHideEvent *event) override;
   void mouseReleaseEvent(QMouseEvent *event) override { emit clicked(); }
   virtual void updateFrameMat();
   void updateCalibration(const mat3 &calib);
@@ -68,7 +68,8 @@ protected:
   int stream_width = 0;
   int stream_height = 0;
   int stream_stride = 0;
-  std::atomic<VisionStreamType> stream_type;
+  std::atomic<VisionStreamType> active_stream_type;
+  std::atomic<VisionStreamType> requested_stream_type;
   QThread *vipc_thread = nullptr;
 
   // Calibration
@@ -78,9 +79,10 @@ protected:
   mat3 calibration = DEFAULT_CALIBRATION;
   mat3 intrinsic_matrix = fcam_intrinsic_matrix;
 
-  std::mutex frame_lock;
+  std::recursive_mutex frame_lock;
   std::deque<std::pair<uint32_t, VisionBuf*>> frames;
   uint32_t draw_frame_id = 0;
+  uint32_t prev_frame_id = 0;
 
 protected slots:
   void vipcConnected(VisionIpcClient *vipc_client);
diff --git a/selfdrive/ui/ui.cc b/selfdrive/ui/ui.cc
index d1d72b4dad..821064f81f 100644
--- a/selfdrive/ui/ui.cc
+++ b/selfdrive/ui/ui.cc
@@ -35,7 +35,7 @@ static bool calib_frame_to_full_frame(const UIState *s, float in_x, float in_y,
   return false;
 }
 
-static int get_path_length_idx(const cereal::ModelDataV2::XYZTData::Reader &line, const float path_height) {
+int get_path_length_idx(const cereal::ModelDataV2::XYZTData::Reader &line, const float path_height) {
   const auto line_x = line.getX();
   int max_idx = 0;
   for (int i = 1; i < TRAJECTORY_SIZE && line_x[i] <= path_height; ++i) {
@@ -44,7 +44,7 @@ static int get_path_length_idx(const cereal::ModelDataV2::XYZTData::Reader &line
   return max_idx;
 }
 
-static void update_leads(UIState *s, const cereal::RadarState::Reader &radar_state, const cereal::ModelDataV2::XYZTData::Reader &line) {
+void update_leads(UIState *s, const cereal::RadarState::Reader &radar_state, const cereal::ModelDataV2::XYZTData::Reader &line) {
   for (int i = 0; i < 2; ++i) {
     auto lead_data = (i == 0) ? radar_state.getLeadOne() : radar_state.getLeadTwo();
     if (lead_data.getStatus()) {
@@ -54,7 +54,7 @@
   }
 }
 
-static void update_line_data(const UIState *s, const cereal::ModelDataV2::XYZTData::Reader &line,
+void update_line_data(const UIState *s, const cereal::ModelDataV2::XYZTData::Reader &line,
                              float y_off, float z_off, QPolygonF *pvd, int max_idx, bool allow_invert=true) {
   const auto line_x = line.getX(), line_y = line.getY(), line_z = line.getZ();
@@ -78,7 +78,7 @@
   *pvd = left_points + right_points;
 }
 
-static void update_model(UIState *s, const cereal::ModelDataV2::Reader &model) {
+void update_model(UIState *s, const cereal::ModelDataV2::Reader &model) {
   UIScene &scene = s->scene;
   auto model_position = model.getPosition();
   float max_distance = std::clamp(model_position.getX()[TRAJECTORY_SIZE - 1],
@@ -141,14 +141,7 @@ static void update_state(UIState *s) {
       }
     }
     scene.calibration_valid = sm["liveCalibration"].getLiveCalibration().getCalStatus() == 1;
-  }
-  if (s->worldObjectsVisible()) {
-    if (sm.updated("modelV2")) {
-      update_model(s, sm["modelV2"].getModelV2());
-    }
-    if (sm.updated("radarState") && sm.rcv_frame("modelV2") > s->scene.started_frame) {
-      update_leads(s, sm["radarState"].getRadarState(), sm["modelV2"].getModelV2().getPosition());
-    }
+    scene.calibration_wide_valid = wfde_list.size() == 3;
   }
   if (sm.updated("pandaStates")) {
     auto pandaStates = sm["pandaStates"].getPandaStates();
@@ -173,17 +166,6 @@ static void update_state(UIState *s) {
     scene.light_sensor = std::max(100.0f - scale * sm["wideRoadCameraState"].getWideRoadCameraState().getExposureValPercent(), 0.0f);
   }
   scene.started = sm["deviceState"].getDeviceState().getStarted() && scene.ignition;
-
-  if (sm.updated("carState")) {
-    float v_ego = sm["carState"].getCarState().getVEgo();
-    // TODO: support replays without ecam by using fcam
-    // Wide or narrow cam dependent on speed
-    if ((v_ego < 10) || s->wide_cam_only) {
-      scene.wide_cam = true;
-    } else if (v_ego > 15) {
-      scene.wide_cam = false;
-    }
-  }
 }
 
 void ui_update_params(UIState *s) {
diff --git a/selfdrive/ui/ui.h b/selfdrive/ui/ui.h
index 5a51dda8f8..e550afd5f2 100644
--- a/selfdrive/ui/ui.h
+++ b/selfdrive/ui/ui.h
@@ -87,6 +87,7 @@ const QColor bg_colors [] = {
 
 typedef struct UIScene {
   bool calibration_valid = false;
+  bool calibration_wide_valid = false;
   bool wide_cam = true;
   mat3 view_from_calib = DEFAULT_CALIBRATION;
   mat3 view_from_wide_calib = DEFAULT_CALIBRATION;
@@ -181,3 +182,8 @@ public slots:
 };
 
 void ui_update_params(UIState *s);
+int get_path_length_idx(const cereal::ModelDataV2::XYZTData::Reader &line, const float path_height);
+void update_model(UIState *s, const cereal::ModelDataV2::Reader &model);
+void update_leads(UIState *s, const cereal::RadarState::Reader &radar_state, const cereal::ModelDataV2::XYZTData::Reader &line);
+void update_line_data(const UIState *s, const cereal::ModelDataV2::XYZTData::Reader &line,
+                      float y_off, float z_off, QPolygonF *pvd, int max_idx, bool allow_invert);
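For context, below is a minimal standalone sketch (not part of the patch) of the speed-based hysteresis that `AnnotatedCameraWidget::paintGL()` now uses to pick between the wide and narrow road cameras. The thresholds (10 m/s and 15 m/s), the sticky `wide_cam_requested` flag, and the wide-calibration guard mirror the diff above; `StreamSelector` and `pick()` are hypothetical names used only for illustration.

```cpp
#include <cassert>

// Stream identifiers standing in for the VisionStreamType values used in the diff.
enum class Stream { ROAD, WIDE_ROAD };

// Hypothetical helper that keeps the sticky wide_cam_requested flag between
// frames, like the new member on AnnotatedCameraWidget in onroad.h.
struct StreamSelector {
  bool wide_cam_requested = false;

  // v_ego in m/s: below 10 request the wide cam, above 15 fall back to the
  // narrow cam, and keep the previous choice in the 10-15 m/s band (hysteresis).
  Stream pick(float v_ego, bool wide_cam_only, bool calibration_wide_valid) {
    if (v_ego < 10 || wide_cam_only) {
      wide_cam_requested = true;
    } else if (v_ego > 15) {
      wide_cam_requested = false;
    }
    // never switch to the wide cam without a valid wide calibration
    // (e.g. replaying an old route that has no ecam stream)
    return (wide_cam_requested && calibration_wide_valid) ? Stream::WIDE_ROAD : Stream::ROAD;
  }
};

int main() {
  StreamSelector sel;
  assert(sel.pick(5.0f, false, true) == Stream::WIDE_ROAD);   // slow: wide cam
  assert(sel.pick(12.0f, false, true) == Stream::WIDE_ROAD);  // inside the band: keep previous choice
  assert(sel.pick(20.0f, false, true) == Stream::ROAD);       // fast: narrow cam
  assert(sel.pick(12.0f, false, true) == Stream::ROAD);       // inside the band again: stay narrow
  assert(sel.pick(5.0f, false, false) == Stream::ROAD);       // no wide calibration: stay narrow
  return 0;
}
```

The two separate thresholds keep the stream from toggling back and forth when speed hovers around a single cut-over point, and the `skip_frame_count` logic in the diff then hides the few frames it takes the new stream to come up after a switch.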