#include "selfdrive/ui/qt/onroad/annotated_camera.h"
#include <QPainter>
#include <algorithm>
#include <cmath>
#include "common/swaglog.h"
#include "selfdrive/ui/qt/util.h"
// Window that shows camera view and variety of info drawn on top
AnnotatedCameraWidget::AnnotatedCameraWidget(VisionStreamType type, QWidget *parent)
    : CameraWidget("camerad", type, parent), fps_filter(UI_FREQ, 3, 1. / UI_FREQ) {
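  // publisher used to report draw timing on the uiDebug topic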
  pm = std::make_unique<PubMaster>(std::vector<const char*>{"uiDebug"});

  main_layout = new QVBoxLayout(this);
  main_layout->setMargin(UI_BORDER_SIZE);
  main_layout->setSpacing(0);

  experimental_btn = new ExperimentalButton(this);
  main_layout->addWidget(experimental_btn, 0, Qt::AlignTop | Qt::AlignRight);
}

void AnnotatedCameraWidget::updateState(const UIState &s) {
  // update engageability/experimental mode button
  experimental_btn->updateState(s);
  dmon.updateState(s);
}

void AnnotatedCameraWidget::initializeGL() {
  CameraWidget::initializeGL();
  qInfo() << "OpenGL version:" << QString((const char*)glGetString(GL_VERSION));
  qInfo() << "OpenGL vendor:" << QString((const char*)glGetString(GL_VENDOR));
  qInfo() << "OpenGL renderer:" << QString((const char*)glGetString(GL_RENDERER));
  qInfo() << "OpenGL language version:" << QString((const char*)glGetString(GL_SHADING_LANGUAGE_VERSION));

  prev_draw_t = millis_since_boot();
  setBackgroundColor(bg_colors[STATUS_DISENGAGED]);
}

mat4 AnnotatedCameraWidget::calcFrameMatrix() {
  // Project a point at "infinity" to compute the x and y offsets
  // that keep it in the middle of the screen for the narrow cam
  // and a little lower for the wide cam.
  // TODO: use proper perspective transform?

  // Select intrinsic matrix and calibration based on camera type
  auto *s = uiState();
  bool wide_cam = active_stream_type == VISION_STREAM_WIDE_ROAD;
  const auto &intrinsic_matrix = wide_cam ? ECAM_INTRINSIC_MATRIX : FCAM_INTRINSIC_MATRIX;
  const auto &calibration = wide_cam ? s->scene.view_from_wide_calib : s->scene.view_from_calib;

  // Compute the calibration transformation matrix
  const auto calib_transform = intrinsic_matrix * calibration;

  float zoom = wide_cam ? 2.0 : 1.1;
  Eigen::Vector3f inf(1000., 0., 0.);
  auto Kep = calib_transform * inf;
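  // Kep is the homogeneous image coordinate of a point 1000 m straight ahead in the
  // calibrated frame, i.e. roughly the road's vanishing point; the offsets below pan
  // the view so this point lands near the middle of the screen.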

  int w = width(), h = height();
  float center_x = intrinsic_matrix(0, 2);
  float center_y = intrinsic_matrix(1, 2);

  float max_x_offset = center_x * zoom - w / 2 - 5;
  float max_y_offset = center_y * zoom - h / 2 - 5;
  float x_offset = std::clamp<float>((Kep.x() / Kep.z() - center_x) * zoom, -max_x_offset, max_x_offset);
  float y_offset = std::clamp<float>((Kep.y() / Kep.z() - center_y) * zoom, -max_y_offset, max_y_offset);
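  // The clamps keep the zoomed camera frame covering the whole widget (with a 5 px
  // margin), so panning toward the vanishing point never exposes the background.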

  // Apply a transformation so that camera pixel coordinates map to widget coordinates:
  // 1) Put (0, 0) in the middle of the video
  // 2) Apply same scaling as video
  // 3) Put (0, 0) in top left corner of video
  Eigen::Matrix3f video_transform = (Eigen::Matrix3f() <<
    zoom, 0.0f, (w / 2 - x_offset) - (center_x * zoom),
    0.0f, zoom, (h / 2 - y_offset) - (center_y * zoom),
    0.0f, 0.0f, 1.0f).finished();

  s->car_space_transform = video_transform * calib_transform;
  s->clip_region = rect().adjusted(-500, -500, 500, 500);

  float zx = zoom * 2 * center_x / w;
  float zy = zoom * 2 * center_y / h;
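  // zx/zy express the zoomed frame size relative to the widget; the matrix below
  // scales and shifts the rendered camera frame in normalized device coordinates.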
  return mat4{{
    zx, 0.0, 0.0, -x_offset / w * 2,
    0.0, zy, 0.0, y_offset / h * 2,
    0.0, 0.0, 1.0, 0.0,
    0.0, 0.0, 0.0, 1.0,
  }};
}

void AnnotatedCameraWidget::drawLaneLines(QPainter &painter, const UIState *s) {
  painter.save();

  const UIScene &scene = s->scene;
  SubMaster &sm = *(s->sm);

  // lanelines
  for (int i = 0; i < std::size(scene.lane_line_vertices); ++i) {
    painter.setBrush(QColor::fromRgbF(1.0, 1.0, 1.0, std::clamp<float>(scene.lane_line_probs[i], 0.0, 0.7)));
    painter.drawPolygon(scene.lane_line_vertices[i]);
  }

  // road edges
  for (int i = 0; i < std::size(scene.road_edge_vertices); ++i) {
    painter.setBrush(QColor::fromRgbF(1.0, 0, 0, std::clamp<float>(1.0 - scene.road_edge_stds[i], 0.0, 1.0)));
    painter.drawPolygon(scene.road_edge_vertices[i]);
  }

  // paint path
  QLinearGradient bg(0, height(), 0, 0);
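  // The gradient runs bottom-to-top in widget space. In experimental mode each stop is
  // colored from the model's predicted acceleration along the path; otherwise a fixed
  // green fade is used.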
if (sm["selfdriveState"].getSelfdriveState().getExperimentalMode()) {
// The first half of track_vertices are the points for the right side of the path
const auto &acceleration = sm["modelV2"].getModelV2().getAcceleration().getX();
const int max_len = std::min<int>(scene.track_vertices.length() / 2, acceleration.size());
for (int i = 0; i < max_len; ++i) {
// Some points are out of frame
int track_idx = max_len - i - 1; // flip idx to start from bottom right
if (scene.track_vertices[track_idx].y() < 0 || scene.track_vertices[track_idx].y() > height()) continue;
// Flip so 0 is bottom of frame
float lin_grad_point = (height() - scene.track_vertices[track_idx].y()) / height();
// speed up: 120, slow down: 0
float path_hue = fmax(fmin(60 + acceleration[i] * 35, 120), 0);
// FIXME: painter.drawPolygon can be slow if hue is not rounded
path_hue = int(path_hue * 100 + 0.5) / 100;
float saturation = fmin(fabs(acceleration[i] * 1.5), 1);
float lightness = util::map_val(saturation, 0.0f, 1.0f, 0.95f, 0.62f); // lighter when grey
float alpha = util::map_val(lin_grad_point, 0.75f / 2.f, 0.75f, 0.4f, 0.0f); // matches previous alpha fade
bg.setColorAt(lin_grad_point, QColor::fromHslF(path_hue / 360., saturation, lightness, alpha));
// Skip a point, unless next is last
i += (i + 2) < max_len ? 1 : 0;
}
} else {
bg.setColorAt(0.0, QColor::fromHslF(148 / 360., 0.94, 0.51, 0.4));
bg.setColorAt(0.5, QColor::fromHslF(112 / 360., 1.0, 0.68, 0.35));
bg.setColorAt(1.0, QColor::fromHslF(112 / 360., 1.0, 0.68, 0.0));
}
painter.setBrush(bg);
painter.drawPolygon(scene.track_vertices);
painter.restore();
}

void AnnotatedCameraWidget::drawLead(QPainter &painter, const cereal::RadarState::LeadData::Reader &lead_data, const QPointF &vd) {
  painter.save();

  const float speedBuff = 10.;
  const float leadBuff = 40.;
  const float d_rel = lead_data.getDRel();
  const float v_rel = lead_data.getVRel();

  float fillAlpha = 0;
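  // Fade the chevron in once the lead is within leadBuff meters, and intensify it
  // further when the lead is closing in (negative relative velocity).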
  if (d_rel < leadBuff) {
    fillAlpha = 255 * (1.0 - (d_rel / leadBuff));
    if (v_rel < 0) {
      fillAlpha += 255 * (-1 * (v_rel / speedBuff));
    }
    fillAlpha = (int)(fmin(fillAlpha, 255));
  }
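  // The marker shrinks with distance; x/y are clamped so it stays on screen.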
  float sz = std::clamp((25 * 30) / (d_rel / 3 + 30), 15.0f, 30.0f) * 2.35;
  float x = std::clamp((float)vd.x(), 0.f, width() - sz / 2);
  float y = std::fmin(height() - sz * .6, (float)vd.y());

  float g_xo = sz / 5;
  float g_yo = sz / 10;

  QPointF glow[] = {{x + (sz * 1.35) + g_xo, y + sz + g_yo}, {x, y - g_yo}, {x - (sz * 1.35) - g_xo, y + sz + g_yo}};
  painter.setBrush(QColor(218, 202, 37, 255));
  painter.drawPolygon(glow, std::size(glow));

  // chevron
  QPointF chevron[] = {{x + (sz * 1.25), y + sz}, {x, y}, {x - (sz * 1.25), y + sz}};
  painter.setBrush(redColor(fillAlpha));
  painter.drawPolygon(chevron, std::size(chevron));

  painter.restore();
}

void AnnotatedCameraWidget::paintGL() {
  UIState *s = uiState();
  SubMaster &sm = *(s->sm);
  const double start_draw_t = millis_since_boot();
  const cereal::ModelDataV2::Reader &model = sm["modelV2"].getModelV2();

  // draw camera frame
  {
    std::lock_guard lk(frame_lock);

    if (frames.empty()) {
      if (skip_frame_count > 0) {
        skip_frame_count--;
        qDebug() << "skipping frame, not ready";
        return;
      }
    } else {
      // skip drawing up to this many frames if we're
      // missing camera frames. this smooths out the
      // transitions from the narrow and wide cameras
      skip_frame_count = 5;
    }

    // Wide or narrow cam dependent on speed
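    // Request the wide camera below 10 m/s (or when it is the only available stream)
    // and drop it above 15 m/s; the gap provides hysteresis. The wide view is only
    // actually shown in experimental mode.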
    bool has_wide_cam = available_streams.count(VISION_STREAM_WIDE_ROAD);
    if (has_wide_cam) {
      float v_ego = sm["carState"].getCarState().getVEgo();
      if ((v_ego < 10) || available_streams.size() == 1) {
        wide_cam_requested = true;
      } else if (v_ego > 15) {
        wide_cam_requested = false;
      }
      wide_cam_requested = wide_cam_requested && sm["selfdriveState"].getSelfdriveState().getExperimentalMode();
    }
    CameraWidget::setStreamType(wide_cam_requested ? VISION_STREAM_WIDE_ROAD : VISION_STREAM_ROAD);

    CameraWidget::setFrameId(model.getFrameId());
    CameraWidget::paintGL();
  }

  QPainter painter(this);
  painter.setRenderHint(QPainter::Antialiasing);
  painter.setPen(Qt::NoPen);
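
  // Model-based overlays are only drawn once the UI state has received calibration
  // and model data (world_objects_visible is set in ui.cc).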
  if (s->scene.world_objects_visible) {
    update_model(s, model);
    drawLaneLines(painter, s);

    if (s->scene.longitudinal_control && sm.rcv_frame("radarState") > s->scene.started_frame) {
      auto radar_state = sm["radarState"].getRadarState();
      update_leads(s, radar_state, model.getPosition());
      auto lead_one = radar_state.getLeadOne();
      auto lead_two = radar_state.getLeadTwo();
      if (lead_one.getStatus()) {
        drawLead(painter, lead_one, s->scene.lead_vertices[0]);
      }
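      // Only draw the second lead if it sits at a meaningfully different distance
      // (more than 3 m) from the first, so the chevrons don't stack on each other.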
      if (lead_two.getStatus() && (std::abs(lead_one.getDRel() - lead_two.getDRel()) > 3.0)) {
        drawLead(painter, lead_two, s->scene.lead_vertices[1]);
      }
    }
  }

  dmon.draw(painter, rect());
  hud.updateState(*s);
  hud.draw(painter, rect());
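
  // Smooth the achieved frame rate with a first-order filter and warn when it drops below 15 fps.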
  double cur_draw_t = millis_since_boot();
  double dt = cur_draw_t - prev_draw_t;
  double fps = fps_filter.update(1. / dt * 1000);
  if (fps < 15) {
    LOGW("slow frame rate: %.2f fps", fps);
  }
  prev_draw_t = cur_draw_t;

  // publish debug msg
  MessageBuilder msg;
  auto m = msg.initEvent().initUiDebug();
  m.setDrawTimeMillis(cur_draw_t - start_draw_t);
  pm->send("uiDebug", msg);
}

void AnnotatedCameraWidget::showEvent(QShowEvent *event) {
  CameraWidget::showEvent(event);

  ui_update_params(uiState());
  prev_draw_t = millis_since_boot();
}