separate tool, super hack does not work yet

pull/34892/head
Trey Moen 1 month ago
parent ed54e9fbd9
commit 93873316c1
Changed files (lines changed):
  1. SConstruct (3)
  2. selfdrive/ui/qt/onroad/annotated_camera.cc (3)
  3. selfdrive/ui/qt/onroad/onroad_home.cc (2)
  4. selfdrive/ui/ui.cc (3)
  5. tools/clip/SConscript (21)
  6. tools/clip/application.cc (56)
  7. tools/clip/application.h (18)
  8. tools/clip/clip (binary)
  9. tools/clip/main.cc (42)
  10. tools/clip/recorder/ffmpeg.cc (7)
  11. tools/clip/recorder/ffmpeg.h (0)
  12. tools/clip/recorder/moc_widget.cc (125)
  13. tools/clip/recorder/widget.cc (7)
  14. tools/clip/recorder/widget.h (0)
  15. tools/replay/SConscript (2)
  16. tools/replay/clip/application.cc (51)
  17. tools/replay/clip/application.h (9)
  18. tools/replay/main.cc (8)
  19. tools/replay/replay.cc (44)
  20. tools/replay/replay.h (4)

@ -168,7 +168,7 @@ env = Environment(
CCFLAGS=[
"-g",
"-fPIC",
"-O2",
"-O0",
"-Wunused",
"-Werror",
"-Wshadow",
@ -373,6 +373,7 @@ SConscript(['selfdrive/SConscript'])
if Dir('#tools/cabana/').exists() and GetOption('extras'):
SConscript(['tools/replay/SConscript'])
SConscript(['tools/clip/SConscript'])
if arch != "larch64":
SConscript(['tools/cabana/SConscript'])

@ -91,6 +91,7 @@ mat4 AnnotatedCameraWidget::calcFrameMatrix() {
void AnnotatedCameraWidget::paintGL() {
UIState *s = uiState();
SubMaster &sm = *(s->sm);
sm.update(0);
const double start_draw_t = millis_since_boot();
// draw camera frame
@ -131,7 +132,7 @@ void AnnotatedCameraWidget::paintGL() {
painter.setRenderHint(QPainter::Antialiasing);
painter.setPen(Qt::NoPen);
// model.draw(painter, rect());
model.draw(painter, rect());
dmon.draw(painter, rect());
hud.updateState(*s);
hud.draw(painter, rect());

@ -44,6 +44,8 @@ void OnroadWindow::updateState(const UIState &s) {
return;
}
fprintf(stderr, "updating\n");
emit drewOnroadFrame(new QImage(grab().toImage()));
alerts->updateState(s);
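
The QImage emitted here is heap-allocated and crosses a queued connection into the recorder thread (wired up in tools/clip/application.cc below), so ownership passes to the receiving slot. A minimal sketch of that ownership rule only; the class and member names are illustrative, not taken from this commit:

#include <QImage>
#include <QObject>

// Illustrative receiver: once the queued call is delivered on the worker
// thread, this slot is the sole owner of the pointer and must delete it.
class FrameSink : public QObject {
  Q_OBJECT
 public slots:
  void consume(QImage *frame) {
    // ... hand *frame to an encoder here ...
    delete frame;  // the sender must not touch the image after emitting it
  }
};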

@ -59,8 +59,10 @@ static void update_state(UIState *s) {
} else if (!sm.allAliveAndValid({"wideRoadCameraState"})) {
scene.light_sensor = -1;
}
if (sm.updated("deviceState")) {
scene.started = sm["deviceState"].getDeviceState().getStarted() && scene.ignition;
}
}
void ui_update_params(UIState *s) {
auto params = Params();
@ -105,6 +107,7 @@ UIState::UIState(QObject *parent) : QObject(parent) {
}
void UIState::update() {
fprintf(stderr, "updatng ui state\n");
update_sockets(this);
update_state(this);
updateStatus();

@ -0,0 +1,21 @@
Import('qt_env', 'ui_libs', 'qt_libs', 'qt_ui', 'replay_lib', 'asset_obj', 'arch', 'common', 'messaging', 'visionipc', 'cereal')
clip_env = qt_env.Clone()
clip_env['CCFLAGS'] += ['-Wno-deprecated-declarations']
base_frameworks = clip_env["FRAMEWORKS"]
base_libs = clip_env["LIBS"]
if arch == "Darwin":
base_frameworks.append('OpenCL')
else:
base_libs.append('OpenCL')
if arch == 'larch64':
base_libs.append('EGL')
clip_lib_src = ["application.cc", "recorder/ffmpeg.cc", "recorder/widget.cc"]
clip_lib = clip_env.Library("clip", clip_lib_src, LIBS=base_libs, FRAMEWORKS=base_frameworks)
Export('clip_lib')
clip_libs = [clip_lib, replay_lib, cereal, 'bz2', 'zstd', 'curl', 'yuv', 'ncurses'] + base_libs + qt_libs + ui_libs + qt_ui
clip_env.Program("clip", ["main.cc", asset_obj], LIBS=clip_libs, FRAMEWORKS=base_frameworks)

@ -0,0 +1,56 @@
#include "tools/clip/application.h"
#include <QApplication>
#include <QTranslator>
#include <selfdrive/ui/qt/util.h>
#include <selfdrive/ui/qt/window.h>
#include "recorder/widget.h"
Application::Application(int argc, char *argv[]) {
initApp(argc, argv);
app = new QApplication(argc, argv);
QString outputFile = "/Users/trey/Desktop/out.mp4";
QTranslator translator;
QString translation_file = QString::fromStdString(Params().get("LanguageSetting"));
if (!translator.load(QString(":/%1").arg(translation_file)) && translation_file.length()) {
qCritical() << "Failed to load translation file:" << translation_file;
}
app->installTranslator(&translator);
window = new OnroadWindow();
recorderThread = new QThread;
recorder = new Recorder;
recorder->moveToThread(recorderThread);
QObject::connect(recorderThread, &QThread::finished, recorder, &QObject::deleteLater);
recorderThread->start();
QObject::connect(window, &OnroadWindow::drewOnroadFrame, recorder, &Recorder::saveFrame, Qt::QueuedConnection);
window->setAttribute(Qt::WA_DontShowOnScreen);
window->setAttribute(Qt::WA_Mapped);
window->setAttribute(Qt::WA_NoSystemBackground);
}
void Application::close() const {
recorderThread->quit();
app->quit();
}
Application::~Application() {
delete recorder;
delete recorderThread;
delete window;
delete app;
}
int Application::exec() const {
setMainWindow(window);
return app->exec();
}
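
The constructor above uses the standard Qt worker-object pattern: the Recorder lives on its own QThread and receives frames through a queued connection, so encoding never blocks the GUI thread. A condensed, self-contained sketch of that pattern; Worker and doWork are placeholder names, not identifiers from this commit:

#include <QCoreApplication>
#include <QDebug>
#include <QThread>
#include <QTimer>

// Plain QObject-derived worker; the functor overloads used below avoid the
// need for a moc pass in this standalone sketch.
class Worker : public QObject {
 public:
  void doWork(int value) {
    qDebug() << "working on" << value << "in" << QThread::currentThread();
  }
};

int main(int argc, char *argv[]) {
  QCoreApplication app(argc, argv);

  QThread thread;
  Worker worker;
  worker.moveToThread(&thread);  // queued calls targeting `worker` now run on `thread`
  thread.start();

  // Queued invocation: posted to the worker thread's event loop, so the
  // calling (main) thread never blocks on the work itself.
  QMetaObject::invokeMethod(&worker, [&worker] { worker.doWork(42); },
                            Qt::QueuedConnection);

  QTimer::singleShot(200, &app, &QCoreApplication::quit);
  int ret = app.exec();

  thread.quit();  // stop the worker's event loop...
  thread.wait();  // ...and join it before the objects are destroyed
  return ret;
}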

@ -0,0 +1,18 @@
#pragma once
#include <selfdrive/ui/qt/onroad/onroad_home.h>
#include "recorder/widget.h"
class Application {
public:
Application(int argc, char* argv[]);
~Application();
int exec() const;
void close() const;
private:
QApplication *app;
QThread *recorderThread = nullptr;
Recorder *recorder = nullptr;
OnroadWindow *window;
};

Binary file not shown.

@ -0,0 +1,42 @@
#include <iostream>
#include <selfdrive/ui/ui.h>
#include "application.h"
#include "tools/replay/replay.h"
void startReplayThread() {
std::vector<std::string> allow = (std::vector<std::string>{
"modelV2", "controlsState", "liveCalibration", "radarState", "deviceState",
"pandaStates", "carParams", "driverMonitoringState", "carState", "driverStateV2",
"wideRoadCameraState", "managerState", "selfdriveState", "longitudinalPlan",
});
std::vector<std::string> block;
Replay replay("a2a0ccea32023010|2023-07-27--13-01-19", allow, block);
if (!replay.load()) {
return;
}
std::cout << "Replay started." << std::endl;
replay.setEndSeconds(66);
replay.start(60);
replay.waitUntilEnd();
std::cout << "Replay ended." << std::endl;
raise(SIGINT);
}
int main(int argc, char *argv[]) {
Application a(argc, argv);
std::thread thread(startReplayThread);
std::this_thread::sleep_for(std::chrono::milliseconds(2000));
if (a.exec()) {
std::cerr << "Failed to start app." << std::endl;
}
thread.join();
a.close();
return 0;
}
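
startReplayThread ends the run by raising SIGINT from the replay thread once playback finishes. For reference, the usual way to stop a Qt event loop directly from a worker thread is a queued call to quit(); this is a sketch of that general mechanism only, not something this commit does:

#include <QCoreApplication>

// Posts quit() to the thread that owns qApp, so the event loop is stopped
// on the GUI thread rather than on the caller's thread.
void requestShutdownFromWorker() {
  QMetaObject::invokeMethod(qApp, "quit", Qt::QueuedConnection);
}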

@ -1,4 +1,4 @@
#include "tools/replay/clip/recorder/ffmpeg.h"
#include "tools/clip/recorder/ffmpeg.h"
#include <QDebug>
FFmpegEncoder::FFmpegEncoder(const QString& outputFile, int width, int height, int fps) {
@ -101,7 +101,7 @@ FFmpegEncoder::FFmpegEncoder(const QString& outputFile, int width, int height, i
FFmpegEncoder::~FFmpegEncoder() {
if (initialized) {
encodeFrame(nullptr); // Flush encoder
// encodeFrame(nullptr); // Flush encoder
av_write_trailer(format_ctx);
if (!(format_ctx->oformat->flags & AVFMT_NOFILE) && format_ctx->pb) {
avio_closep(&format_ctx->pb);
@ -139,7 +139,8 @@ bool FFmpegEncoder::encodeFrame(AVFrame* input_frame) {
ret = avcodec_receive_packet(codec_ctx, packet);
if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) {
break;
} else if (ret < 0) {
}
if (ret < 0) {
fprintf(stderr, "Error receiving packet: %d\n", ret);
return false;
}
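
For context, encodeFrame follows libavcodec's send/receive API: one avcodec_send_frame() call may yield zero or more packets, and AVERROR(EAGAIN) or AVERROR_EOF from avcodec_receive_packet() simply means there is no packet to collect right now. A minimal sketch of that loop, assuming codec_ctx, format_ctx, and the output stream are set up the way FFmpegEncoder's constructor does it:

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
}

static bool encode(AVCodecContext *codec_ctx, AVFormatContext *format_ctx,
                   AVStream *stream, AVFrame *frame /* nullptr flushes */) {
  if (avcodec_send_frame(codec_ctx, frame) < 0) return false;

  AVPacket *packet = av_packet_alloc();
  while (true) {
    int ret = avcodec_receive_packet(codec_ctx, packet);
    if (ret == AVERROR(EAGAIN) || ret == AVERROR_EOF) break;  // need more input / fully drained
    if (ret < 0) { av_packet_free(&packet); return false; }   // real error

    // Rescale timestamps from the codec time base to the stream time base
    // before muxing; the muxer takes ownership of the packet contents.
    av_packet_rescale_ts(packet, codec_ctx->time_base, stream->time_base);
    packet->stream_index = stream->index;
    if (av_interleaved_write_frame(format_ctx, packet) < 0) {
      av_packet_free(&packet);
      return false;
    }
  }
  av_packet_free(&packet);
  return true;
}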

@ -0,0 +1,125 @@
/****************************************************************************
** Meta object code from reading C++ file 'widget.h'
**
** Created by: The Qt Meta Object Compiler version 67 (Qt 5.15.16)
**
** WARNING! All changes made in this file will be lost!
*****************************************************************************/
#include <memory>
#include "widget.h"
#include <QtCore/qbytearray.h>
#include <QtCore/qmetatype.h>
#if !defined(Q_MOC_OUTPUT_REVISION)
#error "The header file 'widget.h' doesn't include <QObject>."
#elif Q_MOC_OUTPUT_REVISION != 67
#error "This file was generated using the moc from 5.15.16. It"
#error "cannot be used with the include files from this version of Qt."
#error "(The moc has changed too much.)"
#endif
QT_BEGIN_MOC_NAMESPACE
QT_WARNING_PUSH
QT_WARNING_DISABLE_DEPRECATED
struct qt_meta_stringdata_Recorder_t {
QByteArrayData data[6];
char stringdata0[39];
};
#define QT_MOC_LITERAL(idx, ofs, len) \
Q_STATIC_BYTE_ARRAY_DATA_HEADER_INITIALIZER_WITH_OFFSET(len, \
qptrdiff(offsetof(qt_meta_stringdata_Recorder_t, stringdata0) + ofs \
- idx * sizeof(QByteArrayData)) \
)
static const qt_meta_stringdata_Recorder_t qt_meta_stringdata_Recorder = {
{
QT_MOC_LITERAL(0, 0, 8), // "Recorder"
QT_MOC_LITERAL(1, 9, 9), // "saveFrame"
QT_MOC_LITERAL(2, 19, 0), // ""
QT_MOC_LITERAL(3, 20, 7), // "QImage*"
QT_MOC_LITERAL(4, 28, 5), // "frame"
QT_MOC_LITERAL(5, 34, 4) // "stop"
},
"Recorder\0saveFrame\0\0QImage*\0frame\0"
"stop"
};
#undef QT_MOC_LITERAL
static const uint qt_meta_data_Recorder[] = {
// content:
8, // revision
0, // classname
0, 0, // classinfo
2, 14, // methods
0, 0, // properties
0, 0, // enums/sets
0, 0, // constructors
0, // flags
0, // signalCount
// slots: name, argc, parameters, tag, flags
1, 1, 24, 2, 0x0a /* Public */,
5, 0, 27, 2, 0x0a /* Public */,
// slots: parameters
QMetaType::Void, 0x80000000 | 3, 4,
QMetaType::Void,
0 // eod
};
void Recorder::qt_static_metacall(QObject *_o, QMetaObject::Call _c, int _id, void **_a)
{
if (_c == QMetaObject::InvokeMetaMethod) {
auto *_t = static_cast<Recorder *>(_o);
(void)_t;
switch (_id) {
case 0: _t->saveFrame((*reinterpret_cast< QImage*(*)>(_a[1]))); break;
case 1: _t->stop(); break;
default: ;
}
}
}
QT_INIT_METAOBJECT const QMetaObject Recorder::staticMetaObject = { {
QMetaObject::SuperData::link<QObject::staticMetaObject>(),
qt_meta_stringdata_Recorder.data,
qt_meta_data_Recorder,
qt_static_metacall,
nullptr,
nullptr
} };
const QMetaObject *Recorder::metaObject() const
{
return QObject::d_ptr->metaObject ? QObject::d_ptr->dynamicMetaObject() : &staticMetaObject;
}
void *Recorder::qt_metacast(const char *_clname)
{
if (!_clname) return nullptr;
if (!strcmp(_clname, qt_meta_stringdata_Recorder.stringdata0))
return static_cast<void*>(this);
return QObject::qt_metacast(_clname);
}
int Recorder::qt_metacall(QMetaObject::Call _c, int _id, void **_a)
{
_id = QObject::qt_metacall(_c, _id, _a);
if (_id < 0)
return _id;
if (_c == QMetaObject::InvokeMetaMethod) {
if (_id < 2)
qt_static_metacall(this, _c, _id, _a);
_id -= 2;
} else if (_c == QMetaObject::RegisterMethodArgumentMetaType) {
if (_id < 2)
*reinterpret_cast<int*>(_a[0]) = -1;
_id -= 2;
}
return _id;
}
QT_WARNING_POP
QT_END_MOC_NAMESPACE

@ -1,13 +1,12 @@
#include "tools/replay/clip/recorder/widget.h"
#include "tools/clip/recorder/widget.h"
#include "tools/replay/clip/recorder/ffmpeg.h"
#include "tools/clip/recorder/ffmpeg.h"
Recorder::Recorder(QObject *parent) : QObject(parent) {
encoder = new FFmpegEncoder("/Users/trey/Desktop/out.mp4", DEVICE_SCREEN_SIZE.width(), DEVICE_SCREEN_SIZE.height(), UI_FREQ);
}
Recorder::~Recorder() {
fprintf(stderr, "closing\n");
delete encoder;
QObject::~QObject();
}
@ -33,7 +32,7 @@ void Recorder::processQueue() {
frame = frameQueue.dequeue();
}
if (!encoder->writeFrame(frame->convertToFormat(QImage::Format_ARGB32))) {
if (!encoder->writeFrame(frame->convertToFormat(QImage::Format_ARGB32_Premultiplied))) {
fprintf(stderr, "did not write\n");
}

@ -15,7 +15,7 @@ if arch == 'larch64':
base_libs.append('EGL')
replay_lib_src = ["replay.cc", "consoleui.cc", "camera.cc", "filereader.cc", "logreader.cc", "framereader.cc",
"route.cc", "util.cc", "seg_mgr.cc", "timeline.cc", "api.cc", "clip/recorder/widget.cc", "clip/recorder/ffmpeg.cc", "clip/application.cc"]
"route.cc", "util.cc", "seg_mgr.cc", "timeline.cc", "api.cc"]
replay_lib = replay_env.Library("replay", replay_lib_src, LIBS=base_libs, FRAMEWORKS=base_frameworks)
Export('replay_lib')
replay_libs = [replay_lib, cereal, 'bz2', 'zstd', 'curl', 'yuv', 'ncurses'] + base_libs + qt_libs + ui_libs + qt_ui

@ -1,51 +0,0 @@
#include "tools/replay/clip/application.h"
#include <QApplication>
#include <QTranslator>
#include <selfdrive/ui/qt/util.h>
#include <selfdrive/ui/qt/window.h>
#include "recorder/widget.h"
Application::Application() {
}
Application::~Application() {
}
int Application::exec(int argc, char *argv[]) {
initApp(argc, argv);
QApplication a(argc, argv);
QString outputFile = "/Users/trey/Desktop/out.mp4";
QTranslator translator;
QString translation_file = QString::fromStdString(Params().get("LanguageSetting"));
if (!translator.load(QString(":/%1").arg(translation_file)) && translation_file.length()) {
qCritical() << "Failed to load translation file:" << translation_file;
}
a.installTranslator(&translator);
OnroadWindow w;
QThread recorderThread;
Recorder recorder;
recorder.moveToThread(&recorderThread);
QObject::connect(&recorderThread, &QThread::finished, &recorder, &QObject::deleteLater);
QObject::connect(&w, &OnroadWindow::drewOnroadFrame, &recorder, &Recorder::saveFrame, Qt::QueuedConnection);
recorderThread.start();
w.setAttribute(Qt::WA_DontShowOnScreen);
w.setAttribute(Qt::WA_Mapped);
w.setAttribute(Qt::WA_NoSystemBackground);
w.resize(DEVICE_SCREEN_SIZE);
setMainWindow(&w);
return a.exec();
}

@ -1,9 +0,0 @@
#pragma once
class Application {
public:
Application();
~Application();
int exec(int argc, char* argv[]);
};

@ -5,7 +5,6 @@
#include <string>
#include <vector>
#include "clip/application.h"
#include "common/prefix.h"
#include "tools/replay/consoleui.h"
#include "tools/replay/replay.h"
@ -154,13 +153,6 @@ int main(int argc, char *argv[]) {
}
replay.start(config.start_seconds);
if (replay.hasFlag(REPLAY_FLAG_CLIP)) {
Application a;
// kick off
return a.exec(argc, argv);
} else {
ConsoleUI console_ui(&replay);
return console_ui.exec();
}
}

@ -78,6 +78,7 @@ bool Replay::load() {
min_seconds_ = seg_mgr_->route_.segments().begin()->first * 60;
max_seconds_ = (seg_mgr_->route_.segments().rbegin()->first + 1) * 60;
end_seconds_ = max_seconds_;
return true;
}
@ -181,6 +182,7 @@ void Replay::startStream(const std::shared_ptr<Segment> segment) {
// write CarParams
it = std::find_if(events.begin(), events.end(), [](const Event &e) { return e.which == cereal::Event::Which::CAR_PARAMS; });
if (it != events.end()) {
publishMessage(&*it);
capnp::FlatArrayMessageReader reader(it->data);
auto event = reader.getRoot<cereal::Event>();
car_fingerprint_ = event.getCarParams().getCarFingerprint();
@ -238,8 +240,9 @@ void Replay::publishFrame(const Event *e) {
default: return; // Invalid event type
}
if ((cam == DriverCam && !hasFlag(REPLAY_FLAG_DCAM)) || (cam == WideRoadCam && !hasFlag(REPLAY_FLAG_ECAM)))
if ((cam == DriverCam && !hasFlag(REPLAY_FLAG_DCAM)) || (cam == WideRoadCam && !hasFlag(REPLAY_FLAG_ECAM))) {
return; // Camera is disabled
}
auto seg_it = event_data_->segments.find(e->eidx_segnum);
if (seg_it != event_data_->segments.end()) {
@ -334,3 +337,42 @@ std::vector<Event>::const_iterator Replay::publishEvents(std::vector<Event>::con
return first;
}
void Replay::waitUntilEnd() {
std::mutex mutex;
std::condition_variable cv;
std::atomic<bool> reached_end{false};
// Add a temporary callback to check when we've reached the end time
auto original_filter = event_filter_;
event_filter_ = [this, &mutex, &cv, &reached_end, original_filter](const Event* e) {
bool should_filter = original_filter ? original_filter(e) : false;
// Check if we've reached the end seconds
if (currentSeconds() >= end_seconds_) {
std::unique_lock<std::mutex> lock(mutex);
reached_end = true;
cv.notify_one();
}
return should_filter;
};
// If we're already at or past the end, return immediately
if (currentSeconds() >= end_seconds_) {
event_filter_ = original_filter;
return;
}
// Wait until we reach the end or exit
{
std::unique_lock<std::mutex> lock(mutex);
cv.wait(lock, [&reached_end, this]() {
return reached_end || exit_;
});
}
// Restore the original filter
event_filter_ = original_filter;
}
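
Together with setEndSeconds() in the header below, this gives callers a blocking way to play out a fixed window of a route. A minimal usage sketch; the route name, allow list, and times are placeholders rather than values from this commit:

#include <string>
#include <vector>

#include "tools/replay/replay.h"

// Play seconds 60..90 of a route and block until playback reaches the end.
void playWindow() {
  std::vector<std::string> allow = {"modelV2", "carState"};
  std::vector<std::string> block;
  Replay replay("0000000000000000|2023-01-01--00-00-00", allow, block);
  if (!replay.load()) return;

  replay.setEndSeconds(90);  // clamped to [0, maxSeconds()]
  replay.start(60);          // begin streaming at t = 60 s
  replay.waitUntilEnd();     // returns once currentSeconds() >= endSeconds()
}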

@ -51,6 +51,8 @@ public:
inline double toSeconds(uint64_t mono_time) const { return (mono_time - route_start_ts_) / 1e9; }
inline double minSeconds() const { return min_seconds_; }
inline double maxSeconds() const { return max_seconds_; }
inline double endSeconds() const { return end_seconds_; }
inline void setEndSeconds(double seconds) { end_seconds_ = std::max(0.0, std::min(seconds, max_seconds_)); }
inline void setSpeed(float speed) { speed_ = speed; }
inline float getSpeed() const { return speed_; }
inline const std::string &carFingerprint() const { return car_fingerprint_; }
@ -58,6 +60,7 @@ public:
inline const std::optional<Timeline::Entry> findAlertAtTime(double sec) const { return timeline_.findAlertAtTime(sec); }
const std::shared_ptr<SegmentManager::EventData> getEventData() const { return seg_mgr_->getEventData(); }
void installEventFilter(std::function<bool(const Event *)> filter) { event_filter_ = filter; }
void waitUntilEnd();
// Event callback functions
std::function<void()> onSegmentsMerged = nullptr;
@ -97,6 +100,7 @@ private:
cereal::Event::Which cur_which_ = cereal::Event::Which::INIT_DATA;
double min_seconds_ = 0;
double max_seconds_ = 0;
double end_seconds_ = 0;
SubMaster *sm_ = nullptr;
std::unique_ptr<PubMaster> pm_;
std::vector<const char*> sockets_;
