#include "selfdrive/ui/ui.h"
|
|
|
|
#include <algorithm>
|
|
#include <cmath>
|
|
|
|
#include <QtConcurrent>
|
|
|
|
#include "common/transformations/orientation.hpp"
|
|
#include "common/swaglog.h"
|
|
#include "common/util.h"
|
|
#include "common/watchdog.h"
|
|
#include "system/hardware/hw.h"
|
|
|
|
#define BACKLIGHT_DT 0.05
|
|
#define BACKLIGHT_TS 10.00
|
|
|
|
// Projects a point in car space to the corresponding point in full frame
// image space.
static bool calib_frame_to_full_frame(const UIState *s, float in_x, float in_y, float in_z, QPointF *out) {
  Eigen::Vector3f input(in_x, in_y, in_z);
  auto transformed = s->car_space_transform * input;
  *out = QPointF(transformed.x() / transformed.z(), transformed.y() / transformed.z());
  return s->clip_region.contains(*out);
}

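// Returns the index of the last model point whose forward (x) distance is within path_height meters.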
int get_path_length_idx(const cereal::XYZTData::Reader &line, const float path_height) {
  const auto line_x = line.getX();
  int max_idx = 0;
  for (int i = 1; i < line_x.size() && line_x[i] <= path_height; ++i) {
    max_idx = i;
  }
  return max_idx;
}

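// Projects the one or two tracked leads from radarState into full-frame image space for drawing.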
void update_leads(UIState *s, const cereal::RadarState::Reader &radar_state, const cereal::XYZTData::Reader &line) {
  for (int i = 0; i < 2; ++i) {
    auto lead_data = (i == 0) ? radar_state.getLeadOne() : radar_state.getLeadTwo();
    if (lead_data.getStatus()) {
      float z = line.getZ()[get_path_length_idx(line, lead_data.getDRel())];
      calib_frame_to_full_frame(s, lead_data.getDRel(), -lead_data.getYRel(), z + 1.22, &s->scene.lead_vertices[i]);
    }
  }
}

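// Builds a closed screen-space polygon of half-width y_off around a model line (lane line, road edge,
// or driving path) by projecting left/right offsets of each point up to max_idx.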
void update_line_data(const UIState *s, const cereal::XYZTData::Reader &line,
                      float y_off, float z_off, QPolygonF *pvd, int max_idx, bool allow_invert=true) {
  const auto line_x = line.getX(), line_y = line.getY(), line_z = line.getZ();
  QPointF left, right;
  pvd->clear();
  for (int i = 0; i <= max_idx; i++) {
    // highly negative x positions are drawn above the frame and cause flickering, clip to zy plane of camera
    if (line_x[i] < 0) continue;

    bool l = calib_frame_to_full_frame(s, line_x[i], line_y[i] - y_off, line_z[i] + z_off, &left);
    bool r = calib_frame_to_full_frame(s, line_x[i], line_y[i] + y_off, line_z[i] + z_off, &right);
    if (l && r) {
      // For wider lines the drawn polygon will "invert" when going over a hill and cause artifacts
      if (!allow_invert && pvd->size() && left.y() > pvd->back().y()) {
        continue;
      }
      pvd->push_back(left);
      pvd->push_front(right);
    }
  }
}

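// Updates the lane line, road edge, and driving path polygons from a new modelV2 message.
// The drawn path is shortened when a lead is present so it does not extend past the lead vehicle.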
void update_model(UIState *s,
                  const cereal::ModelDataV2::Reader &model) {
  UIScene &scene = s->scene;
  auto model_position = model.getPosition();
  float max_distance = std::clamp(*(model_position.getX().end() - 1),
                                  MIN_DRAW_DISTANCE, MAX_DRAW_DISTANCE);

  // update lane lines
  const auto lane_lines = model.getLaneLines();
  const auto lane_line_probs = model.getLaneLineProbs();
  int max_idx = get_path_length_idx(lane_lines[0], max_distance);
  for (int i = 0; i < std::size(scene.lane_line_vertices); i++) {
    scene.lane_line_probs[i] = lane_line_probs[i];
    update_line_data(s, lane_lines[i], 0.025 * scene.lane_line_probs[i], 0, &scene.lane_line_vertices[i], max_idx);
  }

  // update road edges
  const auto road_edges = model.getRoadEdges();
  const auto road_edge_stds = model.getRoadEdgeStds();
  for (int i = 0; i < std::size(scene.road_edge_vertices); i++) {
    scene.road_edge_stds[i] = road_edge_stds[i];
    update_line_data(s, road_edges[i], 0.025, 0, &scene.road_edge_vertices[i], max_idx);
  }

  // update path
  auto lead_one = (*s->sm)["radarState"].getRadarState().getLeadOne();
  if (lead_one.getStatus()) {
    const float lead_d = lead_one.getDRel() * 2.;
    max_distance = std::clamp((float)(lead_d - fmin(lead_d * 0.35, 10.)), 0.0f, max_distance);
  }
  max_idx = get_path_length_idx(model_position, max_distance);
  update_line_data(s, model_position, 0.9, 1.22, &scene.track_vertices, max_idx, false);
}

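// Non-blocking poll of all subscribed sockets.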
static void update_sockets(UIState *s) {
  s->sm->update(0);
}

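// Folds freshly received messages (calibration, panda states, car params, camera state) into the UI scene.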
static void update_state(UIState *s) {
  SubMaster &sm = *(s->sm);
  UIScene &scene = s->scene;

  if (sm.updated("liveCalibration")) {
    auto list2rot = [](const capnp::List<float>::Reader &rpy_list) -> Eigen::Matrix3f {
      return euler2rot({rpy_list[0], rpy_list[1], rpy_list[2]}).cast<float>();
    };

    auto live_calib = sm["liveCalibration"].getLiveCalibration();
    if (live_calib.getCalStatus() == cereal::LiveCalibrationData::Status::CALIBRATED) {
      auto device_from_calib = list2rot(live_calib.getRpyCalib());
      auto wide_from_device = list2rot(live_calib.getWideFromDeviceEuler());
      s->scene.view_from_calib = VIEW_FROM_DEVICE * device_from_calib;
      s->scene.view_from_wide_calib = VIEW_FROM_DEVICE * wide_from_device * device_from_calib;
    } else {
      s->scene.view_from_calib = s->scene.view_from_wide_calib = VIEW_FROM_DEVICE;
    }
  }
  if (sm.updated("pandaStates")) {
    auto pandaStates = sm["pandaStates"].getPandaStates();
    if (pandaStates.size() > 0) {
      scene.pandaType = pandaStates[0].getPandaType();

      if (scene.pandaType != cereal::PandaState::PandaType::UNKNOWN) {
        scene.ignition = false;
        for (const auto& pandaState : pandaStates) {
          scene.ignition |= pandaState.getIgnitionLine() || pandaState.getIgnitionCan();
        }
      }
    }
  } else if ((s->sm->frame - s->sm->rcv_frame("pandaStates")) > 5*UI_FREQ) {
    scene.pandaType = cereal::PandaState::PandaType::UNKNOWN;
  }
  if (sm.updated("carParams")) {
    scene.longitudinal_control = sm["carParams"].getCarParams().getOpenpilotLongitudinalControl();
  }
  if (sm.updated("wideRoadCameraState")) {
    auto cam_state = sm["wideRoadCameraState"].getWideRoadCameraState();
    float scale = (cam_state.getSensor() == cereal::FrameData::ImageSensor::AR0231) ? 6.0f : 1.0f;
    scene.light_sensor = std::max(100.0f - scale * cam_state.getExposureValPercent(), 0.0f);
  } else if (!sm.allAliveAndValid({"wideRoadCameraState"})) {
    scene.light_sensor = -1;
  }
  scene.started = sm["deviceState"].getDeviceState().getStarted() && scene.ignition;

  scene.world_objects_visible = scene.world_objects_visible ||
                                (scene.started &&
                                 sm.rcv_frame("liveCalibration") > scene.started_frame &&
                                 sm.rcv_frame("modelV2") > scene.started_frame);
}

void ui_update_params(UIState *s) {
  auto params = Params();
  s->scene.is_metric = params.getBool("IsMetric");
}

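// Derives the UI status (engaged / override / disengaged) from selfdriveState and handles the
// onroad/offroad transition.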
void UIState::updateStatus() {
  if (scene.started && sm->updated("selfdriveState")) {
    auto ss = (*sm)["selfdriveState"].getSelfdriveState();
    auto state = ss.getState();
    if (state == cereal::SelfdriveState::OpenpilotState::PRE_ENABLED || state == cereal::SelfdriveState::OpenpilotState::OVERRIDING) {
      status = STATUS_OVERRIDE;
    } else {
      status = ss.getEnabled() ? STATUS_ENGAGED : STATUS_DISENGAGED;
    }
  }

  // Handle onroad/offroad transition
  if (scene.started != started_prev || sm->frame == 1) {
    if (scene.started) {
      status = STATUS_DISENGAGED;
      scene.started_frame = sm->frame;
    }
    started_prev = scene.started;
    scene.world_objects_visible = false;
    emit offroadTransition(!scene.started);
  }
}

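// Subscribes to the services the UI needs and drives the update loop at UI_FREQ with a QTimer.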
UIState::UIState(QObject *parent) : QObject(parent) {
  sm = std::make_unique<SubMaster>(std::vector<const char*>{
    "modelV2", "controlsState", "liveCalibration", "radarState", "deviceState",
    "pandaStates", "carParams", "driverMonitoringState", "carState", "driverStateV2",
    "wideRoadCameraState", "managerState", "selfdriveState",
  });
  prime_state = new PrimeState(this);
  language = QString::fromStdString(Params().get("LanguageSetting"));

  // update timer
  timer = new QTimer(this);
  QObject::connect(timer, &QTimer::timeout, this, &UIState::update);
  timer->start(1000 / UI_FREQ);
}

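// One UI tick: poll sockets, refresh scene and status, kick the watchdog once per second, notify listeners.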
void UIState::update() {
  update_sockets(this);
  update_state(this);
  updateStatus();

  if (sm->frame % UI_FREQ == 0) {
    watchdog_kick(nanos_since_boot());
  }
  emit uiUpdate(*this);
}

Device::Device(QObject *parent) : QObject(parent), brightness_filter(BACKLIGHT_OFFROAD, BACKLIGHT_TS, BACKLIGHT_DT) {
  setAwake(true);
  resetInteractiveTimeout();

  QObject::connect(uiState(), &UIState::uiUpdate, this, &Device::update);
}

void Device::update(const UIState &s) {
  updateBrightness(s);
  updateWakefulness(s);
}

void Device::setAwake(bool on) {
  if (on != awake) {
    awake = on;
    Hardware::set_display_power(awake);
    LOGD("setting display power %d", awake);
    emit displayPowerChanged(awake);
  }
}

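// Restarts the interaction timeout; with the default argument it is 10 s while the ignition is on, 30 s otherwise.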
void Device::resetInteractiveTimeout(int timeout) {
  if (timeout == -1) {
    timeout = (ignition_on ? 10 : 30);
  }
  interactive_timeout = timeout * UI_FREQ;
}

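// Maps the scene's light sensor reading through the CIE 1931 lightness curve, low-pass filters it,
// and applies the resulting backlight level asynchronously.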
void Device::updateBrightness(const UIState &s) {
  float clipped_brightness = offroad_brightness;
  if (s.scene.started && s.scene.light_sensor > 0) {
    clipped_brightness = s.scene.light_sensor;

    // CIE 1931 - https://www.photonstophotos.net/GeneralTopics/Exposure/Psychometric_Lightness_and_Gamma.htm
    if (clipped_brightness <= 8) {
      clipped_brightness = (clipped_brightness / 903.3);
    } else {
      clipped_brightness = std::pow((clipped_brightness + 16.0) / 116.0, 3.0);
    }

    // Scale back to 10% to 100%
    clipped_brightness = std::clamp(100.0f * clipped_brightness, 10.0f, 100.0f);
  }

  int brightness = brightness_filter.update(clipped_brightness);
  if (!awake) {
    brightness = 0;
  }

  if (brightness != last_brightness) {
    if (!brightness_future.isRunning()) {
      brightness_future = QtConcurrent::run(Hardware::set_brightness, brightness);
      last_brightness = brightness;
    }
  }
}

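// Keeps the display awake while the ignition is on or the interactive timeout has not expired;
// the timeout is rearmed when the ignition turns off.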
void Device::updateWakefulness(const UIState &s) {
  bool ignition_just_turned_off = !s.scene.ignition && ignition_on;
  ignition_on = s.scene.ignition;

  if (ignition_just_turned_off) {
    resetInteractiveTimeout();
  } else if (interactive_timeout > 0 && --interactive_timeout == 0) {
    emit interactiveTimeout();
  }

  setAwake(s.scene.ignition || interactive_timeout > 0);
}

UIState *uiState() {
  static UIState ui_state;
  return &ui_state;
}

Device *device() {
  static Device _device;
  return &_device;
}