ui: refactor CameraView to serve as a generic vision stream display class (#33457)

refactor transform
pull/33515/head
Dean Lee 8 months ago committed by GitHub
parent 650ee44e89
commit 47409ab4d4
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
  1. 14
      selfdrive/ui/qt/offroad/driverview.cc
  2. 1
      selfdrive/ui/qt/offroad/driverview.h
  3. 63
      selfdrive/ui/qt/onroad/annotated_camera.cc
  4. 2
      selfdrive/ui/qt/onroad/annotated_camera.h
  5. 2
      selfdrive/ui/qt/onroad/onroad_home.cc
  6. 106
      selfdrive/ui/qt/widgets/cameraview.cc
  7. 16
      selfdrive/ui/qt/widgets/cameraview.h
  8. 52
      selfdrive/ui/ui.cc
  9. 43
      selfdrive/ui/ui.h
  10. 6
      selfdrive/ui/watch3.cc
  11. 6
      tools/cabana/videowidget.cc
  12. 2
      tools/cabana/videowidget.h

@ -7,7 +7,7 @@
const int FACE_IMG_SIZE = 130;
DriverViewWindow::DriverViewWindow(QWidget* parent) : CameraWidget("camerad", VISION_STREAM_DRIVER, true, parent) {
DriverViewWindow::DriverViewWindow(QWidget* parent) : CameraWidget("camerad", VISION_STREAM_DRIVER, parent) {
face_img = loadPixmap("../assets/img_driver_face_static.png", {FACE_IMG_SIZE, FACE_IMG_SIZE});
QObject::connect(this, &CameraWidget::clicked, this, &DriverViewWindow::done);
QObject::connect(device(), &Device::interactiveTimeout, this, [this]() {
@ -75,3 +75,15 @@ void DriverViewWindow::paintGL() {
p.setOpacity(face_detected ? 1.0 : 0.2);
p.drawPixmap(img_x, img_y, face_img);
}
mat4 DriverViewWindow::calcFrameMatrix() {
const float driver_view_ratio = 2.0;
const float yscale = stream_height * driver_view_ratio / stream_width;
const float xscale = yscale * glHeight() / glWidth() * stream_width / stream_height;
return mat4{{
xscale, 0.0, 0.0, 0.0,
0.0, yscale, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 1.0,
}};
}

@ -12,6 +12,7 @@ signals:
void done();
protected:
mat4 calcFrameMatrix() override;
void showEvent(QShowEvent *event) override;
void hideEvent(QHideEvent *event) override;
void paintGL() override;

@ -10,7 +10,8 @@
#include "selfdrive/ui/qt/util.h"
// Window that shows camera view and variety of info drawn on top
AnnotatedCameraWidget::AnnotatedCameraWidget(VisionStreamType type, QWidget* parent) : fps_filter(UI_FREQ, 3, 1. / UI_FREQ), CameraWidget("camerad", type, true, parent) {
AnnotatedCameraWidget::AnnotatedCameraWidget(VisionStreamType type, QWidget *parent)
: fps_filter(UI_FREQ, 3, 1. / UI_FREQ), CameraWidget("camerad", type, parent) {
pm = std::make_unique<PubMaster>(std::vector<const char*>{"uiDebug"});
main_layout = new QVBoxLayout(this);
@ -125,22 +126,54 @@ void AnnotatedCameraWidget::initializeGL() {
setBackgroundColor(bg_colors[STATUS_DISENGAGED]);
}
void AnnotatedCameraWidget::updateFrameMat() {
CameraWidget::updateFrameMat();
UIState *s = uiState();
mat4 AnnotatedCameraWidget::calcFrameMatrix() {
// Project point at "infinity" to compute x and y offsets
// to ensure this ends up in the middle of the screen
// for narrow cam and a little lower for wide cam.
// TODO: use proper perspective transform?
// Select intrinsic matrix and calibration based on camera type
auto *s = uiState();
bool wide_cam = active_stream_type == VISION_STREAM_WIDE_ROAD;
const auto &intrinsic_matrix = wide_cam ? ECAM_INTRINSIC_MATRIX : FCAM_INTRINSIC_MATRIX;
const auto &calibration = wide_cam ? s->scene.view_from_wide_calib : s->scene.view_from_calib;
// Compute the calibration transformation matrix
const auto calib_transform = intrinsic_matrix * calibration;
float zoom = wide_cam ? 2.0 : 1.1;
Eigen::Vector3f inf(1000., 0., 0.);
auto Kep = calib_transform * inf;
int w = width(), h = height();
float center_x = intrinsic_matrix(0, 2);
float center_y = intrinsic_matrix(1, 2);
s->fb_w = w;
s->fb_h = h;
float max_x_offset = center_x * zoom - w / 2 - 5;
float max_y_offset = center_y * zoom - h / 2 - 5;
float x_offset = std::clamp<float>((Kep.x() / Kep.z() - center_x) * zoom, -max_x_offset, max_x_offset);
float y_offset = std::clamp<float>((Kep.y() / Kep.z() - center_y) * zoom, -max_y_offset, max_y_offset);
// Apply transformation such that video pixel coordinates match video
// 1) Put (0, 0) in the middle of the video
// 2) Apply same scaling as video
// 3) Put (0, 0) in top left corner of video
s->car_space_transform.reset();
s->car_space_transform.translate(w / 2 - x_offset, h / 2 - y_offset)
.scale(zoom, zoom)
.translate(-intrinsic_matrix.v[2], -intrinsic_matrix.v[5]);
Eigen::Matrix3f video_transform =(Eigen::Matrix3f() <<
zoom, 0.0f, (w / 2 - x_offset) - (center_x * zoom),
0.0f, zoom, (h / 2 - y_offset) - (center_y * zoom),
0.0f, 0.0f, 1.0f).finished();
s->car_space_transform = video_transform * calib_transform;
s->clip_region = rect().adjusted(-500, -500, 500, 500);
float zx = zoom * 2 * center_x / w;
float zy = zoom * 2 * center_y / h;
return mat4{{
zx, 0.0, 0.0, -x_offset / w * 2,
0.0, zy, 0.0, y_offset / h * 2,
0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 1.0,
}};
}
void AnnotatedCameraWidget::drawLaneLines(QPainter &painter, const UIState *s) {
@ -271,18 +304,8 @@ void AnnotatedCameraWidget::paintGL() {
wide_cam_requested = false;
}
wide_cam_requested = wide_cam_requested && sm["selfdriveState"].getSelfdriveState().getExperimentalMode();
// for replay of old routes, never go to widecam
wide_cam_requested = wide_cam_requested && s->scene.calibration_wide_valid;
}
CameraWidget::setStreamType(wide_cam_requested ? VISION_STREAM_WIDE_ROAD : VISION_STREAM_ROAD);
s->scene.wide_cam = CameraWidget::getStreamType() == VISION_STREAM_WIDE_ROAD;
if (s->scene.calibration_valid) {
auto calib = s->scene.wide_cam ? s->scene.view_from_wide_calib : s->scene.view_from_calib;
CameraWidget::updateCalibration(calib);
} else {
CameraWidget::updateCalibration(DEFAULT_CALIBRATION);
}
CameraWidget::setFrameId(model.getFrameId());
CameraWidget::paintGL();
}

@ -36,7 +36,7 @@ protected:
void paintGL() override;
void initializeGL() override;
void showEvent(QShowEvent *event) override;
void updateFrameMat() override;
mat4 calcFrameMatrix() override;
void drawLaneLines(QPainter &painter, const UIState *s);
void drawLead(QPainter &painter, const cereal::RadarState::LeadData::Reader &lead_data, const QPointF &vd);
void drawHud(QPainter &p);

@ -21,7 +21,7 @@ OnroadWindow::OnroadWindow(QWidget *parent) : QWidget(parent) {
split->addWidget(nvg);
if (getenv("DUAL_CAMERA_VIEW")) {
CameraWidget *arCam = new CameraWidget("camerad", VISION_STREAM_ROAD, true, this);
CameraWidget *arCam = new CameraWidget("camerad", VISION_STREAM_ROAD, this);
split->insertWidget(0, arCam);
}

@ -7,13 +7,7 @@
#endif
#include <cmath>
#include <set>
#include <string>
#include <utility>
#include <QApplication>
#include <QOpenGLBuffer>
#include <QOffscreenSurface>
namespace {
@ -66,40 +60,10 @@ const char frame_fragment_shader[] =
"}\n";
#endif
mat4 get_driver_view_transform(int screen_width, int screen_height, int stream_width, int stream_height) {
const float driver_view_ratio = 2.0;
const float yscale = stream_height * driver_view_ratio / stream_width;
const float xscale = yscale*screen_height/screen_width*stream_width/stream_height;
mat4 transform = (mat4){{
xscale, 0.0, 0.0, 0.0,
0.0, yscale, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 1.0,
}};
return transform;
}
mat4 get_fit_view_transform(float widget_aspect_ratio, float frame_aspect_ratio) {
float zx = 1, zy = 1;
if (frame_aspect_ratio > widget_aspect_ratio) {
zy = widget_aspect_ratio / frame_aspect_ratio;
} else {
zx = frame_aspect_ratio / widget_aspect_ratio;
}
const mat4 frame_transform = {{
zx, 0.0, 0.0, 0.0,
0.0, zy, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 1.0,
}};
return frame_transform;
}
} // namespace
CameraWidget::CameraWidget(std::string stream_name, VisionStreamType type, bool zoom, QWidget* parent) :
stream_name(stream_name), active_stream_type(type), requested_stream_type(type), zoomed_view(zoom), QOpenGLWidget(parent) {
CameraWidget::CameraWidget(std::string stream_name, VisionStreamType type, QWidget* parent) :
stream_name(stream_name), active_stream_type(type), requested_stream_type(type), QOpenGLWidget(parent) {
setAttribute(Qt::WA_OpaquePaintEvent);
qRegisterMetaType<std::set<VisionStreamType>>("availableStreams");
QObject::connect(this, &CameraWidget::vipcThreadConnected, this, &CameraWidget::vipcConnected, Qt::BlockingQueuedConnection);
@ -214,59 +178,19 @@ void CameraWidget::availableStreamsUpdated(std::set<VisionStreamType> streams) {
available_streams = streams;
}
void CameraWidget::updateFrameMat() {
int w = glWidth(), h = glHeight();
mat4 CameraWidget::calcFrameMatrix() {
// Scale the frame to fit the widget while maintaining the aspect ratio.
float widget_aspect_ratio = (float)width() / height();
float frame_aspect_ratio = (float)stream_width / stream_height;
float zx = std::min(frame_aspect_ratio / widget_aspect_ratio, 1.0f);
float zy = std::min(widget_aspect_ratio / frame_aspect_ratio, 1.0f);
if (zoomed_view) {
if (active_stream_type == VISION_STREAM_DRIVER) {
if (stream_width > 0 && stream_height > 0) {
frame_mat = get_driver_view_transform(w, h, stream_width, stream_height);
}
} else {
// Project point at "infinity" to compute x and y offsets
// to ensure this ends up in the middle of the screen
// for narrow cam and a little lower for wide cam.
// TODO: use proper perspective transform?
if (active_stream_type == VISION_STREAM_WIDE_ROAD) {
intrinsic_matrix = ECAM_INTRINSIC_MATRIX;
zoom = 2.0;
} else {
intrinsic_matrix = FCAM_INTRINSIC_MATRIX;
zoom = 1.1;
}
const vec3 inf = {{1000., 0., 0.}};
const vec3 Ep = matvecmul3(calibration, inf);
const vec3 Kep = matvecmul3(intrinsic_matrix, Ep);
float x_offset_ = (Kep.v[0] / Kep.v[2] - intrinsic_matrix.v[2]) * zoom;
float y_offset_ = (Kep.v[1] / Kep.v[2] - intrinsic_matrix.v[5]) * zoom;
float max_x_offset = intrinsic_matrix.v[2] * zoom - w / 2 - 5;
float max_y_offset = intrinsic_matrix.v[5] * zoom - h / 2 - 5;
x_offset = std::clamp(x_offset_, -max_x_offset, max_x_offset);
y_offset = std::clamp(y_offset_, -max_y_offset, max_y_offset);
float zx = zoom * 2 * intrinsic_matrix.v[2] / w;
float zy = zoom * 2 * intrinsic_matrix.v[5] / h;
const mat4 frame_transform = {{
zx, 0.0, 0.0, -x_offset / w * 2,
0.0, zy, 0.0, y_offset / h * 2,
0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 1.0,
}};
frame_mat = frame_transform;
}
} else if (stream_width > 0 && stream_height > 0) {
// fit frame to widget size
float widget_aspect_ratio = (float)w / h;
float frame_aspect_ratio = (float)stream_width / stream_height;
frame_mat = get_fit_view_transform(widget_aspect_ratio, frame_aspect_ratio);
}
}
void CameraWidget::updateCalibration(const mat3 &calib) {
calibration = calib;
return mat4{{
zx, 0.0, 0.0, 0.0,
0.0, zy, 0.0, 0.0,
0.0, 0.0, 1.0, 0.0,
0.0, 0.0, 0.0, 1.0,
}};
}
void CameraWidget::paintGL() {
@ -293,7 +217,7 @@ void CameraWidget::paintGL() {
VisionBuf *frame = frames[frame_idx].second;
assert(frame != nullptr);
updateFrameMat();
auto frame_mat = calcFrameMatrix();
glViewport(0, 0, glWidth(), glHeight());
glBindVertexArray(frame_vao);

@ -34,7 +34,7 @@ class CameraWidget : public QOpenGLWidget, protected QOpenGLFunctions {
public:
using QOpenGLWidget::QOpenGLWidget;
explicit CameraWidget(std::string stream_name, VisionStreamType stream_type, bool zoom, QWidget* parent = nullptr);
explicit CameraWidget(std::string stream_name, VisionStreamType stream_type, QWidget* parent = nullptr);
~CameraWidget();
void setBackgroundColor(const QColor &color) { bg = color; }
void setFrameId(int frame_id) { draw_frame_id = frame_id; }
@ -51,21 +51,17 @@ signals:
protected:
void paintGL() override;
void initializeGL() override;
void resizeGL(int w, int h) override { updateFrameMat(); }
void showEvent(QShowEvent *event) override;
void mouseReleaseEvent(QMouseEvent *event) override { emit clicked(); }
virtual void updateFrameMat();
void updateCalibration(const mat3 &calib);
virtual mat4 calcFrameMatrix();
void vipcThread();
void clearFrames();
int glWidth();
int glHeight();
bool zoomed_view;
GLuint frame_vao, frame_vbo, frame_ibo;
GLuint textures[2];
mat4 frame_mat = {};
std::unique_ptr<QOpenGLShaderProgram> program;
QColor bg = QColor("#000000");
@ -81,14 +77,6 @@ protected:
std::atomic<VisionStreamType> requested_stream_type;
std::set<VisionStreamType> available_streams;
QThread *vipc_thread = nullptr;
// Calibration
float x_offset = 0;
float y_offset = 0;
float zoom = 1.0;
mat3 calibration = DEFAULT_CALIBRATION;
mat3 intrinsic_matrix = FCAM_INTRINSIC_MATRIX;
std::recursive_mutex frame_lock;
std::deque<std::pair<uint32_t, VisionBuf*>> frames;
uint32_t draw_frame_id = 0;

@ -1,13 +1,11 @@
#include "selfdrive/ui/ui.h"
#include <algorithm>
#include <cassert>
#include <cmath>
#include <QtConcurrent>
#include "common/transformations/orientation.hpp"
#include "common/params.h"
#include "common/swaglog.h"
#include "common/util.h"
#include "common/watchdog.h"
@ -19,20 +17,10 @@
// Projects a point in car to space to the corresponding point in full frame
// image space.
static bool calib_frame_to_full_frame(const UIState *s, float in_x, float in_y, float in_z, QPointF *out) {
const float margin = 500.0f;
const QRectF clip_region{-margin, -margin, s->fb_w + 2 * margin, s->fb_h + 2 * margin};
const vec3 pt = (vec3){{in_x, in_y, in_z}};
const vec3 Ep = matvecmul3(s->scene.wide_cam ? s->scene.view_from_wide_calib : s->scene.view_from_calib, pt);
const vec3 KEp = matvecmul3(s->scene.wide_cam ? ECAM_INTRINSIC_MATRIX : FCAM_INTRINSIC_MATRIX, Ep);
// Project.
QPointF point = s->car_space_transform.map(QPointF{KEp.v[0] / KEp.v[2], KEp.v[1] / KEp.v[2]});
if (clip_region.contains(point)) {
*out = point;
return true;
}
return false;
Eigen::Vector3f input(in_x, in_y, in_z);
auto transformed = s->car_space_transform * input;
*out = QPointF(transformed.x() / transformed.z(), transformed.y() / transformed.z());
return s->clip_region.contains(*out);
}
int get_path_length_idx(const cereal::XYZTData::Reader &line, const float path_height) {
@ -119,29 +107,19 @@ static void update_state(UIState *s) {
UIScene &scene = s->scene;
if (sm.updated("liveCalibration")) {
auto list2rot = [](const capnp::List<float>::Reader &rpy_list) ->Eigen::Matrix3f {
return euler2rot({rpy_list[0], rpy_list[1], rpy_list[2]}).cast<float>();
};
auto live_calib = sm["liveCalibration"].getLiveCalibration();
auto rpy_list = live_calib.getRpyCalib();
auto wfde_list = live_calib.getWideFromDeviceEuler();
Eigen::Vector3d rpy;
Eigen::Vector3d wfde;
if (rpy_list.size() == 3) rpy << rpy_list[0], rpy_list[1], rpy_list[2];
if (wfde_list.size() == 3) wfde << wfde_list[0], wfde_list[1], wfde_list[2];
Eigen::Matrix3d device_from_calib = euler2rot(rpy);
Eigen::Matrix3d wide_from_device = euler2rot(wfde);
Eigen::Matrix3d view_from_device;
view_from_device << 0, 1, 0,
0, 0, 1,
1, 0, 0;
Eigen::Matrix3d view_from_calib = view_from_device * device_from_calib;
Eigen::Matrix3d view_from_wide_calib = view_from_device * wide_from_device * device_from_calib;
for (int i = 0; i < 3; i++) {
for (int j = 0; j < 3; j++) {
scene.view_from_calib.v[i*3 + j] = view_from_calib(i, j);
scene.view_from_wide_calib.v[i*3 + j] = view_from_wide_calib(i, j);
}
if (live_calib.getCalStatus() == cereal::LiveCalibrationData::Status::CALIBRATED) {
auto device_from_calib = list2rot(live_calib.getRpyCalib());
auto wide_from_device = list2rot(live_calib.getWideFromDeviceEuler());
s->scene.view_from_calib = VIEW_FROM_DEVICE * device_from_calib;
s->scene.view_from_wide_calib = VIEW_FROM_DEVICE * wide_from_device * device_from_calib;
} else {
s->scene.view_from_calib = s->scene.view_from_wide_calib = VIEW_FROM_DEVICE;
}
scene.calibration_valid = live_calib.getCalStatus() == cereal::LiveCalibrationData::Status::CALIBRATED;
scene.calibration_wide_valid = wfde_list.size() == 3;
}
if (sm.updated("pandaStates")) {
auto pandaStates = sm["pandaStates"].getPandaStates();

@ -1,19 +1,18 @@
#pragma once
#include <eigen3/Eigen/Dense>
#include <memory>
#include <string>
#include <QObject>
#include <QTimer>
#include <QColor>
#include <QFuture>
#include <QPolygonF>
#include <QTransform>
#include "cereal/messaging/messaging.h"
#include "common/mat.h"
#include "common/params.h"
#include "common/timing.h"
#include "common/util.h"
#include "system/hardware/hw.h"
#include "selfdrive/ui/qt/prime_state.h"
@ -25,15 +24,22 @@ const int BACKLIGHT_OFFROAD = 50;
const float MIN_DRAW_DISTANCE = 10.0;
const float MAX_DRAW_DISTANCE = 100.0;
constexpr mat3 DEFAULT_CALIBRATION = {{ 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0 }};
constexpr mat3 FCAM_INTRINSIC_MATRIX = (mat3){{2648.0, 0.0, 1928.0 / 2,
0.0, 2648.0, 1208.0 / 2,
0.0, 0.0, 1.0}};
const Eigen::Matrix3f VIEW_FROM_DEVICE = (Eigen::Matrix3f() <<
0.0, 1.0, 0.0,
0.0, 0.0, 1.0,
1.0, 0.0, 0.0).finished();
const Eigen::Matrix3f FCAM_INTRINSIC_MATRIX = (Eigen::Matrix3f() <<
2648.0, 0.0, 1928.0 / 2,
0.0, 2648.0, 1208.0 / 2,
0.0, 0.0, 1.0).finished();
// tici ecam focal probably wrong? magnification is not consistent across frame
// Need to retrain model before this can be changed
constexpr mat3 ECAM_INTRINSIC_MATRIX = (mat3){{567.0, 0.0, 1928.0 / 2,
0.0, 567.0, 1208.0 / 2,
0.0, 0.0, 1.0}};
const Eigen::Matrix3f ECAM_INTRINSIC_MATRIX = (Eigen::Matrix3f() <<
567.0, 0.0, 1928.0 / 2,
0.0, 567.0, 1208.0 / 2,
0.0, 0.0, 1.0).finished();
typedef enum UIStatus {
STATUS_DISENGAGED,
@ -47,13 +53,9 @@ const QColor bg_colors [] = {
[STATUS_ENGAGED] = QColor(0x17, 0x86, 0x44, 0xf1),
};
typedef struct UIScene {
bool calibration_valid = false;
bool calibration_wide_valid = false;
bool wide_cam = true;
mat3 view_from_calib = DEFAULT_CALIBRATION;
mat3 view_from_wide_calib = DEFAULT_CALIBRATION;
Eigen::Matrix3f view_from_calib = VIEW_FROM_DEVICE;
Eigen::Matrix3f view_from_wide_calib = VIEW_FROM_DEVICE;
cereal::PandaState::PandaType pandaType;
// modelV2
@ -84,18 +86,16 @@ public:
return scene.started && (*sm)["selfdriveState"].getSelfdriveState().getEnabled();
}
int fb_w = 0, fb_h = 0;
std::unique_ptr<SubMaster> sm;
UIStatus status;
UIScene scene = {};
QString language;
QTransform car_space_transform;
PrimeState *prime_state;
Eigen::Matrix3f car_space_transform = Eigen::Matrix3f::Zero();
QRectF clip_region;
signals:
void uiUpdate(const UIState &s);
void offroadTransition(bool offroad);
@ -150,7 +150,6 @@ void ui_update_params(UIState *s);
int get_path_length_idx(const cereal::XYZTData::Reader &line, const float path_height);
void update_model(UIState *s,
const cereal::ModelDataV2::Reader &model);
void update_dmonitoring(UIState *s, const cereal::DriverStateV2::Reader &driverstate, float dm_fade_state, bool is_rhd);
void update_leads(UIState *s, const cereal::RadarState::Reader &radar_state, const cereal::XYZTData::Reader &line);
void update_line_data(const UIState *s, const cereal::XYZTData::Reader &line,
float y_off, float z_off, QPolygonF *pvd, int max_idx, bool allow_invert);

@ -19,14 +19,14 @@ int main(int argc, char *argv[]) {
{
QHBoxLayout *hlayout = new QHBoxLayout();
layout->addLayout(hlayout);
hlayout->addWidget(new CameraWidget("camerad", VISION_STREAM_ROAD, false));
hlayout->addWidget(new CameraWidget("camerad", VISION_STREAM_ROAD));
}
{
QHBoxLayout *hlayout = new QHBoxLayout();
layout->addLayout(hlayout);
hlayout->addWidget(new CameraWidget("camerad", VISION_STREAM_DRIVER, false));
hlayout->addWidget(new CameraWidget("camerad", VISION_STREAM_WIDE_ROAD, false));
hlayout->addWidget(new CameraWidget("camerad", VISION_STREAM_DRIVER));
hlayout->addWidget(new CameraWidget("camerad", VISION_STREAM_WIDE_ROAD));
}
return a.exec();

@ -148,7 +148,7 @@ QWidget *VideoWidget::createCameraWidget() {
QStackedLayout *stacked = new QStackedLayout();
stacked->setStackingMode(QStackedLayout::StackAll);
stacked->addWidget(cam_widget = new StreamCameraView("camerad", VISION_STREAM_ROAD, false));
stacked->addWidget(cam_widget = new StreamCameraView("camerad", VISION_STREAM_ROAD));
cam_widget->setMinimumHeight(MIN_VIDEO_HEIGHT);
cam_widget->setSizePolicy(QSizePolicy::Preferred, QSizePolicy::MinimumExpanding);
stacked->addWidget(alert_label = new InfoLabel(this));
@ -420,8 +420,8 @@ void InfoLabel::paintEvent(QPaintEvent *event) {
}
}
StreamCameraView::StreamCameraView(std::string stream_name, VisionStreamType stream_type, bool zoom, QWidget *parent)
: CameraWidget(stream_name, stream_type, zoom, parent) {
StreamCameraView::StreamCameraView(std::string stream_name, VisionStreamType stream_type, QWidget *parent)
: CameraWidget(stream_name, stream_type, parent) {
fade_animation = new QPropertyAnimation(this, "overlayOpacity");
fade_animation->setDuration(500);
fade_animation->setStartValue(0.2f);

@ -64,7 +64,7 @@ class StreamCameraView : public CameraWidget {
Q_PROPERTY(float overlayOpacity READ overlayOpacity WRITE setOverlayOpacity)
public:
StreamCameraView(std::string stream_name, VisionStreamType stream_type, bool zoom, QWidget *parent = nullptr);
StreamCameraView(std::string stream_name, VisionStreamType stream_type, QWidget *parent = nullptr);
void paintGL() override;
void showPausedOverlay() { fade_animation->start(); }
float overlayOpacity() const { return overlay_opacity; }

Loading…
Cancel
Save