cabana: Improve seeking and zooming (#32334)

* Improve seeking and zooming

* Avoid repeatedly recalculating freq

* set min zoom seconds to 10ms
pull/32340/head
Dean Lee 12 months ago committed by GitHub
parent d7d3111212
commit d72f000d98
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
  1. 3
      tools/cabana/chart/chart.cc
  2. 3
      tools/cabana/chart/chartswidget.cc
  3. 5
      tools/cabana/streams/abstractstream.cc
  4. 3
      tools/cabana/streams/abstractstream.h
  5. 10
      tools/cabana/streams/replaystream.cc
  6. 2
      tools/cabana/streams/replaystream.h
  7. 46
      tools/replay/camera.cc
  8. 6
      tools/replay/camera.h

@ -22,6 +22,7 @@
// ChartAxisElement's padding is 4 (https://codebrowser.dev/qt5/qtcharts/src/charts/axis/chartaxiselement_p.h.html)
const int AXIS_X_TOP_MARGIN = 4;
const double MIN_ZOOM_SECONDS = 0.01; // 10ms
// Define a small value of epsilon to compare double values
const float EPSILON = 0.000001;
static inline bool xLessThan(const QPointF &p, float x) { return p.x() < (x - EPSILON); }
@ -511,7 +512,7 @@ void ChartView::mouseReleaseEvent(QMouseEvent *event) {
if (rubber->width() <= 0) {
// no rubber dragged, seek to mouse position
can->seekTo(min);
} else if (rubber->width() > 10 && (max - min) > 0.01) { // Minimum range is 10 milliseconds.
} else if (rubber->width() > 10 && (max - min) > MIN_ZOOM_SECONDS) {
charts_widget->zoom_undo_stack->push(new ZoomCommand(charts_widget, {min, max}));
} else {
viewport()->update();

@ -197,7 +197,8 @@ void ChartsWidget::updateState() {
display_range.second = display_range.first + max_chart_range;
} else if (cur_sec < (zoomed_range.first - 0.1) || cur_sec >= zoomed_range.second) {
// loop in zoomed range
can->seekTo(zoomed_range.first);
QTimer::singleShot(0, [ts = zoomed_range.first]() { can->seekTo(ts);});
return;
}
const auto &range = is_zoomed ? zoomed_range : display_range;

@ -138,13 +138,16 @@ void AbstractStream::updateLastMsgsTo(double sec) {
auto prev = std::prev(it);
double ts = (*prev)->mono_time / 1e9 - routeStartTime();
auto &m = msgs[id];
double freq = 0;
// Keep suppressed bits.
if (auto old_m = messages_.find(id); old_m != messages_.end()) {
freq = old_m->second.freq;
m.last_changes.reserve(old_m->second.last_changes.size());
std::transform(old_m->second.last_changes.cbegin(), old_m->second.last_changes.cend(),
std::back_inserter(m.last_changes),
[](const auto &change) { return CanData::ByteLastChange{.suppressed = change.suppressed}; });
}
m.compute(id, (*prev)->dat, (*prev)->size, ts, getSpeed(), {});
m.compute(id, (*prev)->dat, (*prev)->size, ts, getSpeed(), {}, freq);
m.count = std::distance(ev.begin(), prev) + 1;
}
}

@ -90,6 +90,7 @@ public:
signals:
void paused();
void resume();
void seekingTo(double sec);
void seekedTo(double sec);
void streamStarted();
void eventsMerged(const MessageEventsMap &events_map);
@ -107,6 +108,7 @@ protected:
uint64_t lastEventMonoTime() const { return lastest_event_ts; }
std::vector<const CanEvent *> all_events_;
double current_sec_ = 0;
uint64_t lastest_event_ts = 0;
private:
@ -114,7 +116,6 @@ private:
void updateLastMsgsTo(double sec);
void updateMasks();
double current_sec_ = 0;
MessageEventsMap events_;
std::unordered_map<MessageId, CanData> last_msgs;
std::unique_ptr<MonotonicBuffer> event_buffer_;

@ -84,6 +84,16 @@ bool ReplayStream::eventFilter(const Event *event) {
return true;
}
// Seek the stream to an absolute timestamp `ts` (seconds).
void ReplayStream::seekTo(double ts) {
  // Update timestamp and notify receivers of the time change *before* the
  // underlying replay seek is issued, so consumers observe the new position
  // immediately rather than after the seek completes.
  // NOTE(review): `msgsReceived(&new_msgs, false)` is called with an empty
  // set — presumably this emits a Qt signal announcing "time changed, no
  // new messages"; confirm against the signal declaration in abstractstream.h.
  current_sec_ = ts;
  std::set<MessageId> new_msgs;
  msgsReceived(&new_msgs, false);
  // Seek to the specified timestamp, clamped so it is never negative.
  replay->seekTo(std::max(double(0), ts), false);
}
void ReplayStream::pause(bool pause) {
replay->pause(pause);
emit(pause ? paused() : resume());

@ -18,7 +18,7 @@ public:
void start() override;
bool loadRoute(const QString &route, const QString &data_dir, uint32_t replay_flags = REPLAY_FLAG_NONE);
bool eventFilter(const Event *event);
void seekTo(double ts) override { replay->seekTo(std::max(double(0), ts), false); }
void seekTo(double ts) override;
bool liveStreaming() const override { return false; }
inline QString routeName() const override { return replay->route()->name(); }
inline QString carFingerprint() const override { return replay->carFingerprint().c_str(); }

@ -1,12 +1,13 @@
#include "tools/replay/camera.h"
#include <capnp/dynamic.h>
#include <cassert>
#include "third_party/linux/include/msm_media_info.h"
#include "tools/replay/util.h"
const int BUFFER_COUNT = 40;
std::tuple<size_t, size_t, size_t> get_nv12_info(int width, int height) {
int nv12_width = VENUS_Y_STRIDE(COLOR_FMT_NV12, width);
int nv12_height = VENUS_Y_SCANLINES(COLOR_FMT_NV12, height);
@ -36,10 +37,12 @@ CameraServer::~CameraServer() {
void CameraServer::startVipcServer() {
vipc_server_.reset(new VisionIpcServer("camerad"));
for (auto &cam : cameras_) {
cam.cached_buf.clear();
if (cam.width > 0 && cam.height > 0) {
rInfo("camera[%d] frame size %dx%d", cam.type, cam.width, cam.height);
auto [nv12_width, nv12_height, nv12_buffer_size] = get_nv12_info(cam.width, cam.height);
vipc_server_->create_buffers_with_sizes(cam.stream_type, YUV_BUFFER_COUNT, false, cam.width, cam.height,
vipc_server_->create_buffers_with_sizes(cam.stream_type, BUFFER_COUNT, false, cam.width, cam.height,
nv12_buffer_size, nv12_width, nv12_width * nv12_height);
if (!cam.thread.joinable()) {
cam.thread = std::thread(&CameraServer::cameraThread, this, std::ref(cam));
@ -50,13 +53,6 @@ void CameraServer::startVipcServer() {
}
void CameraServer::cameraThread(Camera &cam) {
auto read_frame = [&](FrameReader *fr, int frame_id) {
VisionBuf *yuv_buf = vipc_server_->get_buffer(cam.stream_type);
assert(yuv_buf);
bool ret = fr->get(frame_id, yuv_buf);
return ret ? yuv_buf : nullptr;
};
while (true) {
const auto [fr, event] = cam.queue.pop();
if (!fr) break;
@ -66,29 +62,41 @@ void CameraServer::cameraThread(Camera &cam) {
auto eidx = capnp::AnyStruct::Reader(evt).getPointerSection()[0].getAs<cereal::EncodeIndex>();
if (eidx.getType() != cereal::EncodeIndex::Type::FULL_H_E_V_C) continue;
const int id = eidx.getSegmentId();
bool prefetched = (id == cam.cached_id && eidx.getSegmentNum() == cam.cached_seg);
auto yuv = prefetched ? cam.cached_buf : read_frame(fr, id);
if (yuv) {
int segment_id = eidx.getSegmentId();
uint32_t frame_id = eidx.getFrameId();
if (auto yuv = getFrame(cam, fr, segment_id, frame_id)) {
VisionIpcBufExtra extra = {
.frame_id = eidx.getFrameId(),
.frame_id = frame_id,
.timestamp_sof = eidx.getTimestampSof(),
.timestamp_eof = eidx.getTimestampEof(),
};
yuv->set_frame_id(eidx.getFrameId());
vipc_server_->send(yuv, &extra);
} else {
rError("camera[%d] failed to get frame: %lu", cam.type, eidx.getSegmentId());
rError("camera[%d] failed to get frame: %lu", cam.type, segment_id);
}
cam.cached_id = id + 1;
cam.cached_seg = eidx.getSegmentNum();
cam.cached_buf = read_frame(fr, cam.cached_id);
// Prefetch the next frame
getFrame(cam, fr, segment_id + 1, frame_id + 1);
--publishing_;
}
}
// Return a VisionBuf containing the frame identified by (segment_id, frame_id)
// for camera `cam`, reading it with `fr` on a cache miss.
// On a miss the frame is decoded into the next buffer from the VisionIpc pool,
// tagged with `frame_id`, and remembered in cam.cached_buf so a subsequent
// request (e.g. after a prefetch) is served without re-decoding.
// Returns nullptr if the frame cannot be read.
VisionBuf *CameraServer::getFrame(Camera &cam, FrameReader *fr, int32_t segment_id, uint32_t frame_id) {
  // Fast path: a previously decoded buffer may already carry this frame id.
  for (VisionBuf *cached : cam.cached_buf) {
    if (cached->get_frame_id() == frame_id) {
      return cached;
    }
  }

  // Slow path: decode into a fresh buffer from the IPC pool.
  VisionBuf *buf = vipc_server_->get_buffer(cam.stream_type);
  if (!fr->get(segment_id, buf)) {
    return nullptr;
  }
  buf->set_frame_id(frame_id);
  cam.cached_buf.insert(buf);  // no-op if this pool buffer pointer is already cached
  return buf;
}
void CameraServer::pushFrame(CameraType type, FrameReader *fr, const Event *event) {
auto &cam = cameras_[type];
if (cam.width != fr->width || cam.height != fr->height) {

@ -1,6 +1,7 @@
#pragma once
#include <memory>
#include <set>
#include <tuple>
#include <utility>
@ -26,12 +27,11 @@ protected:
int height;
std::thread thread;
SafeQueue<std::pair<FrameReader*, const Event *>> queue;
int cached_id = -1;
int cached_seg = -1;
VisionBuf * cached_buf;
std::set<VisionBuf *> cached_buf;
};
void startVipcServer();
void cameraThread(Camera &cam);
VisionBuf *getFrame(Camera &cam, FrameReader *fr, int32_t segment_id, uint32_t frame_id);
Camera cameras_[MAX_CAMERAS] = {
{.type = RoadCam, .stream_type = VISION_STREAM_ROAD},

Loading…
Cancel
Save