@@ -2,31 +2,18 @@
 
 #include <unistd.h>
 #include <cassert>
 #include <string.h>
 
-#include <libyuv.h>
 #include <capnp/dynamic.h>
 
 #include "messaging.hpp"
 #include "common/util.h"
 #include "common/timing.h"
-#include "common/swaglog.h"
-
-extern "C" {
-#include <libavcodec/avcodec.h>
-}
-
-extern ExitHandler do_exit;
 
 #define FRAME_WIDTH 1164
 #define FRAME_HEIGHT 874
 
-namespace {
+extern ExitHandler do_exit;
 
-void camera_open(CameraState *s, bool rear) {
-}
-
-void camera_close(CameraState *s) {
-  // empty
-}
+namespace {
 
@@ -37,38 +24,36 @@ void camera_init(VisionIpcServer * v, CameraState *s, int camera_id, unsigned in
 
   s->buf.init(device_id, ctx, s, v, FRAME_BUF_COUNT, rgb_type, yuv_type);
 }
 
-void run_frame_stream(MultiCameraState *s) {
-  s->sm = new SubMaster({"frame"});
-
-  CameraState *const rear_camera = &s->rear;
+void run_frame_stream(CameraState &camera, const char* frame_pkt) {
+  SubMaster sm({frame_pkt});
 
   size_t buf_idx = 0;
   while (!do_exit) {
-    if (s->sm->update(1000) == 0) continue;
+    if (sm.update(1000) == 0) continue;
 
-    auto frame = (*(s->sm))["frame"].getFrame();
-    rear_camera->buf.camera_bufs_metadata[buf_idx] = {
-      .frame_id = frame.getFrameId(),
-      .timestamp_eof = frame.getTimestampEof(),
-      .frame_length = static_cast<unsigned>(frame.getFrameLength()),
-      .integ_lines = static_cast<unsigned>(frame.getIntegLines()),
-      .global_gain = static_cast<unsigned>(frame.getGlobalGain()),
+    auto msg = static_cast<capnp::DynamicStruct::Reader>(sm[frame_pkt]);
+    auto frame = msg.get(frame_pkt).as<capnp::DynamicStruct>();
+    camera.buf.camera_bufs_metadata[buf_idx] = {
+      .frame_id = frame.get("frameId").as<uint32_t>(),
+      .timestamp_eof = frame.get("timestampEof").as<uint64_t>(),
+      .frame_length = frame.get("frameLength").as<unsigned>(),
+      .integ_lines = frame.get("integLines").as<unsigned>(),
+      .global_gain = frame.get("globalGain").as<unsigned>(),
     };
 
-    cl_command_queue q = rear_camera->buf.camera_bufs[buf_idx].copy_q;
-    cl_mem yuv_cl = rear_camera->buf.camera_bufs[buf_idx].buf_cl;
-    clEnqueueWriteBuffer(q, yuv_cl, CL_TRUE, 0, frame.getImage().size(), frame.getImage().begin(), 0, NULL, NULL);
-    rear_camera->buf.queue(buf_idx);
+    cl_command_queue q = camera.buf.camera_bufs[buf_idx].copy_q;
+    cl_mem yuv_cl = camera.buf.camera_bufs[buf_idx].buf_cl;
+    auto image = frame.get("image").as<capnp::Data>();
+    clEnqueueWriteBuffer(q, yuv_cl, CL_TRUE, 0, image.size(), image.begin(), 0, NULL, NULL);
+    camera.buf.queue(buf_idx);
     buf_idx = (buf_idx + 1) % FRAME_BUF_COUNT;
   }
-
-  delete s->sm;
 }
 
 } // namespace
 
 // TODO: make this more generic
 CameraInfo cameras_supported[CAMERA_ID_MAX] = {
   [CAMERA_ID_IMX298] = {
     .frame_width = FRAME_WIDTH,
@@ -94,29 +79,14 @@ void cameras_init(VisionIpcServer *v, MultiCameraState *s, cl_device_id device_i
               VISION_STREAM_RGB_FRONT, VISION_STREAM_YUV_FRONT);
 }
 
-void cameras_open(MultiCameraState *s) {
-  // LOG("*** open front ***");
-  camera_open(&s->front, false);
-
-  // LOG("*** open rear ***");
-  camera_open(&s->rear, true);
-}
-
-void cameras_close(MultiCameraState *s) {
-  camera_close(&s->rear);
-}
-
+void cameras_open(MultiCameraState *s) {}
+void cameras_close(MultiCameraState *s) {}
 void camera_autoexposure(CameraState *s, float grey_frac) {}
-
-// called by processing_thread
-void camera_process_rear(MultiCameraState *s, CameraState *c, int cnt) {
-  // empty
-}
+void camera_process_rear(MultiCameraState *s, CameraState *c, int cnt) {}
 
 void cameras_run(MultiCameraState *s) {
   std::thread t = start_process_thread(s, "processing", &s->rear, camera_process_rear);
   set_thread_name("frame_streaming");
-  run_frame_stream(s);
+  run_frame_stream(s->rear, "frame");
   t.join();
-  cameras_close(s);
 }
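
Reviewer note (illustration only, not part of this change): run_frame_stream is now parameterized over the camera and the packet name, so the same helper could in principle stream a second camera from a different packet. The sketch below is hypothetical; the "frontFrame" packet name and the extra call site are assumptions rather than something this diff adds, and they would only work if that packet carries the same frameId/timestampEof/frameLength/integLines/globalGain/image fields read above.

    // Hypothetical sketch (assumed, not in this diff): drive the front camera
    // with the same generic helper by pointing it at a different packet name.
    void front_frame_stream_sketch(MultiCameraState *s) {
      set_thread_name("front_frame_streaming");
      run_frame_stream(s->front, "frontFrame");  // "frontFrame" is an assumed packet name
    }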