From 4d924f1b395440441af36b2a6931374a7525431c Mon Sep 17 00:00:00 2001
From: Willem Melching
Date: Thu, 14 Jul 2022 19:21:40 +0200
Subject: [PATCH] camerad: cleanup unused RGB code (#25172)

* camerad: cleanup unused RGB code

* hdr is unused

* more cleanup

* remove envs

* remove from sconsfile

* fix docs

old-commit-hash: f38204ad260f2af6a01b71191850178d0af03e8e
---
 cereal                                        |   2 +-
 common/modeldata.h                            |   5 +-
 docs/c_docs.rst                               |   2 -
 release/files_common                          |   5 -
 system/camerad/SConscript                     |   2 -
 system/camerad/cameras/camera_common.cc       |  54 +-----
 system/camerad/cameras/camera_common.h        |  16 +-
 system/camerad/cameras/camera_qcom2.cc        |  26 +--
 system/camerad/cameras/camera_qcom2.h         |   2 +-
 system/camerad/transforms/rgb_to_yuv.cc       |  36 ----
 system/camerad/transforms/rgb_to_yuv.h        |  14 --
 system/camerad/transforms/rgb_to_yuv_test.cc  | 191 -------------------
 12 files changed, 20 insertions(+), 335 deletions(-)
 delete mode 100644 system/camerad/transforms/rgb_to_yuv.cc
 delete mode 100644 system/camerad/transforms/rgb_to_yuv.h
 delete mode 100644 system/camerad/transforms/rgb_to_yuv_test.cc

diff --git a/cereal b/cereal
index cda60ec965..3ed1b8c51a 160000
--- a/cereal
+++ b/cereal
@@ -1 +1 @@
-Subproject commit cda60ec9652c05de4ccfcad1fae7936e708434a3
+Subproject commit 3ed1b8c51afb616880565e5868806cef82bbc835
diff --git a/common/modeldata.h b/common/modeldata.h
index b193841416..e13840d53e 100644
--- a/common/modeldata.h
+++ b/common/modeldata.h
@@ -34,12 +34,13 @@ const mat3 ecam_intrinsic_matrix = (mat3){{567.0, 0.0, 1928.0 / 2,
                                            0.0, 567.0, 1208.0 / 2,
                                            0.0, 0.0, 1.0}};
 
-static inline mat3 get_model_yuv_transform(bool bayer = true) {
+static inline mat3 get_model_yuv_transform() {
   float db_s = 1.0;
   const mat3 transform = (mat3){{
     1.0, 0.0, 0.0,
     0.0, 1.0, 0.0,
     0.0, 0.0, 1.0
   }};
-  return bayer ? transform_scale_buffer(transform, db_s) : transform;
+  // Can this be removed since scale is 1?
+  return transform_scale_buffer(transform, db_s);
 }
diff --git a/docs/c_docs.rst b/docs/c_docs.rst
index 5638b40bf0..94d0adb560 100644
--- a/docs/c_docs.rst
+++ b/docs/c_docs.rst
@@ -29,8 +29,6 @@ camerad
 ^^^^^^^
 .. autodoxygenindex::
    :project: system_camerad_cameras
-.. autodoxygenindex::
-   :project: system_camerad_transforms
 .. autodoxygenindex::
    :project: system_camerad_imgproc
 
diff --git a/release/files_common b/release/files_common
index 954726d967..38f86d247a 100644
--- a/release/files_common
+++ b/release/files_common
@@ -324,11 +324,6 @@ system/camerad/cameras/camera_common.h
 system/camerad/cameras/camera_common.cc
 system/camerad/cameras/sensor2_i2c.h
 
-system/camerad/transforms/rgb_to_yuv.cc
-system/camerad/transforms/rgb_to_yuv.h
-system/camerad/transforms/rgb_to_yuv.cl
-system/camerad/transforms/rgb_to_yuv_test.cc
-
 system/camerad/imgproc/conv.cl
 system/camerad/imgproc/pool.cl
 system/camerad/imgproc/utils.cc
diff --git a/system/camerad/SConscript b/system/camerad/SConscript
index b181fbab75..25e366210f 100644
--- a/system/camerad/SConscript
+++ b/system/camerad/SConscript
@@ -10,7 +10,6 @@ if arch == "larch64":
 env.Program('camerad', [
   'main.cc',
   'cameras/camera_common.cc',
-  'transforms/rgb_to_yuv.cc',
   'imgproc/utils.cc',
   cameras,
 ], LIBS=libs)
@@ -19,5 +18,4 @@ if GetOption("test"):
   env.Program('test/ae_gray_test', [
     'test/ae_gray_test.cc',
     'cameras/camera_common.cc',
-    'transforms/rgb_to_yuv.cc',
   ], LIBS=libs)
diff --git a/system/camerad/cameras/camera_common.cc b/system/camerad/cameras/camera_common.cc
index 04f3136485..1d4ecd526a 100644
--- a/system/camerad/cameras/camera_common.cc
+++ b/system/camerad/cameras/camera_common.cc
@@ -32,15 +32,14 @@ public:
   Debayer(cl_device_id device_id, cl_context context, const CameraBuf *b, const CameraState *s, int buf_width, int uv_offset) {
     char args[4096];
     const CameraInfo *ci = &s->ci;
-    hdr_ = ci->hdr;
     snprintf(args, sizeof(args),
              "-cl-fast-relaxed-math -cl-denorms-are-zero "
              "-DFRAME_WIDTH=%d -DFRAME_HEIGHT=%d -DFRAME_STRIDE=%d -DFRAME_OFFSET=%d "
              "-DRGB_WIDTH=%d -DRGB_HEIGHT=%d -DRGB_STRIDE=%d -DYUV_STRIDE=%d -DUV_OFFSET=%d "
-             "-DBAYER_FLIP=%d -DHDR=%d -DCAM_NUM=%d%s",
+             "-DCAM_NUM=%d%s",
              ci->frame_width, ci->frame_height, ci->frame_stride, ci->frame_offset,
              b->rgb_width, b->rgb_height, b->rgb_stride, buf_width, uv_offset,
-             ci->bayer_flip, ci->hdr, s->camera_num, s->camera_num==1 ? " -DVIGNETTING" : "");
+             s->camera_num, s->camera_num==1 ? " -DVIGNETTING" : "");
     const char *cl_file = "cameras/real_debayer.cl";
     cl_program prg_debayer = cl_program_from_file(context, device_id, cl_file, args);
     krnl_ = CL_CHECK_ERR(clCreateKernel(prg_debayer, "debayer10", &err));
@@ -63,12 +62,10 @@ public:
 
 private:
   cl_kernel krnl_;
-  bool hdr_;
 };
 
-void CameraBuf::init(cl_device_id device_id, cl_context context, CameraState *s, VisionIpcServer * v, int frame_cnt, VisionStreamType init_rgb_type, VisionStreamType init_yuv_type) {
+void CameraBuf::init(cl_device_id device_id, cl_context context, CameraState *s, VisionIpcServer * v, int frame_cnt, VisionStreamType init_yuv_type) {
   vipc_server = v;
-  this->rgb_type = init_rgb_type;
   this->yuv_type = init_yuv_type;
 
   const CameraInfo *ci = &s->ci;
@@ -89,11 +86,7 @@ void CameraBuf::init(cl_device_id device_id, cl_context context, CameraState *s,
   rgb_width = ci->frame_width;
   rgb_height = ci->frame_height;
 
-  yuv_transform = get_model_yuv_transform(ci->bayer);
-
-  vipc_server->create_buffers(rgb_type, UI_BUF_COUNT, true, rgb_width, rgb_height);
-  rgb_stride = vipc_server->get_buffer(rgb_type)->stride;
-  LOGD("created %d UI vipc buffers with size %dx%d", UI_BUF_COUNT, rgb_width, rgb_height);
+  yuv_transform = get_model_yuv_transform();
 
   int nv12_width = VENUS_Y_STRIDE(COLOR_FMT_NV12, rgb_width);
   int nv12_height = VENUS_Y_SCANLINES(COLOR_FMT_NV12, rgb_height);
@@ -104,10 +97,7 @@ void CameraBuf::init(cl_device_id device_id, cl_context context, CameraState *s,
   vipc_server->create_buffers_with_sizes(yuv_type, YUV_BUFFER_COUNT, false, rgb_width, rgb_height, nv12_size, nv12_width, nv12_uv_offset);
   LOGD("created %d YUV vipc buffers with size %dx%d", YUV_BUFFER_COUNT, nv12_width, nv12_height);
 
-  if (ci->bayer) {
-    debayer = new Debayer(device_id, context, this, s, nv12_width, nv12_uv_offset);
-  }
-  rgb2yuv = std::make_unique<Rgb2Yuv>(context, device_id, rgb_width, rgb_height, rgb_stride);
+  debayer = new Debayer(device_id, context, this, s, nv12_width, nv12_uv_offset);
 
 #ifdef __APPLE__
   q = CL_CHECK_ERR(clCreateCommandQueue(context, device_id, 0, &err));
@@ -135,7 +125,6 @@ bool CameraBuf::acquire() {
   }
 
   cur_frame_data = camera_bufs_metadata[cur_buf_idx];
-  cur_rgb_buf = vipc_server->get_buffer(rgb_type);
   cur_yuv_buf = vipc_server->get_buffer(yuv_type);
   cl_mem camrabuf_cl = camera_bufs[cur_buf_idx].buf_cl;
   cl_event event;
@@ -144,12 +133,7 @@ bool CameraBuf::acquire() {
 
   cur_camera_buf = &camera_bufs[cur_buf_idx];
 
-  if (debayer) {
-    debayer->queue(q, camrabuf_cl, cur_yuv_buf->buf_cl, rgb_width, rgb_height, &event);
-  } else {
-    assert(rgb_stride == camera_state->ci.frame_stride);
-    rgb2yuv->queue(q, camrabuf_cl, cur_rgb_buf->buf_cl);
-  }
+  debayer->queue(q, camrabuf_cl, cur_yuv_buf->buf_cl, rgb_width, rgb_height, &event);
 
   clWaitForEvents(1, &event);
   CL_CHECK(clReleaseEvent(event));
@@ -193,32 +177,6 @@ void fill_frame_data(cereal::FrameData::Builder &framed, const FrameMetadata &fr
   framed.setProcessingTime(frame_data.processing_time);
 }
 
-kj::Array<uint8_t> get_frame_image(const CameraBuf *b) {
-  static const int x_min = util::getenv("XMIN", 0);
-  static const int y_min = util::getenv("YMIN", 0);
-  static const int env_xmax = util::getenv("XMAX", -1);
-  static const int env_ymax = util::getenv("YMAX", -1);
-  static const int scale = util::getenv("SCALE", 1);
-
-  assert(b->cur_rgb_buf);
-
-  const int x_max = env_xmax != -1 ? env_xmax : b->rgb_width - 1;
-  const int y_max = env_ymax != -1 ? env_ymax : b->rgb_height - 1;
-  const int new_width = (x_max - x_min + 1) / scale;
-  const int new_height = (y_max - y_min + 1) / scale;
-  const uint8_t *dat = (const uint8_t *)b->cur_rgb_buf->addr;
-
-  kj::Array<uint8_t> frame_image = kj::heapArray<uint8_t>(new_width*new_height*3);
-  uint8_t *resized_dat = frame_image.begin();
-  int goff = x_min*3 + y_min*b->rgb_stride;
-  for (int r=0;r<new_height;r++) {
-    for (int c=0;c<new_width;c++) {
-      memcpy(&resized_dat[(r*new_width+c)*3], &dat[goff+r*b->rgb_stride*scale+c*3*scale], 3*sizeof(uint8_t));
-    }
-  }
-  return kj::mv(frame_image);
-}
-
 kj::Array<uint8_t> get_raw_frame_image(const CameraBuf *b) {
   const uint8_t *dat = (const uint8_t *)b->cur_camera_buf->addr;
 
diff --git a/system/camerad/cameras/camera_common.h b/system/camerad/cameras/camera_common.h
index 4695d4e2c9..2a56052559 100644
--- a/system/camerad/cameras/camera_common.h
+++ b/system/camerad/cameras/camera_common.h
@@ -9,7 +9,6 @@
 #include "cereal/visionipc/visionbuf.h"
 #include "cereal/visionipc/visionipc.h"
 #include "cereal/visionipc/visionipc_server.h"
-#include "system/camerad/transforms/rgb_to_yuv.h"
 #include "common/mat.h"
 #include "common/queue.h"
 #include "common/swaglog.h"
@@ -27,7 +26,6 @@
 #define CAMERA_ID_IMX390 9
 #define CAMERA_ID_MAX 10
 
-const int UI_BUF_COUNT = 4;
 const int YUV_BUFFER_COUNT = 40;
 
 enum CameraType {
@@ -36,11 +34,6 @@ enum CameraType {
   WideRoadCam
 };
 
-// TODO: remove these once all the internal tools are moved to vipc
-const bool env_send_driver = getenv("SEND_DRIVER") != NULL;
-const bool env_send_road = getenv("SEND_ROAD") != NULL;
-const bool env_send_wide_road = getenv("SEND_WIDE_ROAD") != NULL;
-
 // for debugging
 const bool env_disable_road = getenv("DISABLE_ROAD") != NULL;
 const bool env_disable_wide_road = getenv("DISABLE_WIDE_ROAD") != NULL;
@@ -51,9 +44,6 @@ const bool env_log_raw_frames = getenv("LOG_RAW_FRAMES") != NULL;
 typedef struct CameraInfo {
   uint32_t frame_width, frame_height;
   uint32_t frame_stride;
-  bool bayer;
-  int bayer_flip;
-  bool hdr;
   uint32_t frame_offset = 0;
   uint32_t extra_height = 0;
   int registers_offset = -1;
@@ -92,9 +82,8 @@ private:
   VisionIpcServer *vipc_server;
   CameraState *camera_state;
   Debayer *debayer = nullptr;
-  std::unique_ptr<Rgb2Yuv> rgb2yuv;
 
-  VisionStreamType rgb_type, yuv_type;
+  VisionStreamType yuv_type;
 
   int cur_buf_idx;
 
@@ -116,7 +105,7 @@
 public:
   CameraBuf() = default;
   ~CameraBuf();
-  void init(cl_device_id device_id, cl_context context, CameraState *s, VisionIpcServer * v, int frame_cnt, VisionStreamType rgb_type, VisionStreamType yuv_type);
+  void init(cl_device_id device_id, cl_context context, CameraState *s, VisionIpcServer * v, int frame_cnt, VisionStreamType yuv_type);
   bool acquire();
   void release();
   void queue(size_t buf_idx);
@@ -125,7 +114,6 @@
 typedef void (*process_thread_cb)(MultiCameraState *s, CameraState *c, int cnt);
 
 void fill_frame_data(cereal::FrameData::Builder &framed, const FrameMetadata &frame_data);
-kj::Array<uint8_t> get_frame_image(const CameraBuf *b);
 kj::Array<uint8_t> get_raw_frame_image(const CameraBuf *b);
 float set_exposure_target(const CameraBuf *b, int x_start, int x_end, int x_skip, int y_start, int y_end, int y_skip);
 std::thread start_process_thread(MultiCameraState *cameras, CameraState *cs, process_thread_cb callback);
diff --git a/system/camerad/cameras/camera_qcom2.cc b/system/camerad/cameras/camera_qcom2.cc
index f001009b9a..31af284d52 100644
--- a/system/camerad/cameras/camera_qcom2.cc
+++ b/system/camerad/cameras/camera_qcom2.cc
@@ -46,19 +46,11 @@ CameraInfo cameras_supported[CAMERA_ID_MAX] = {
     .registers_offset = 0,
     .frame_offset = AR0231_REGISTERS_HEIGHT,
     .stats_offset = AR0231_REGISTERS_HEIGHT + FRAME_HEIGHT,
-
-    .bayer = true,
-    .bayer_flip = 1,
-    .hdr = false,
   },
   [CAMERA_ID_IMX390] = {
     .frame_width = FRAME_WIDTH,
     .frame_height = FRAME_HEIGHT,
     .frame_stride = FRAME_STRIDE,
-
-    .bayer = true,
-    .bayer_flip = 1,
-    .hdr = false,
   },
 };
 
@@ -614,7 +606,7 @@ void CameraState::enqueue_req_multi(int start, int n, bool dp) {
 
 // ******************* camera *******************
 
-void CameraState::camera_init(MultiCameraState *multi_cam_state_, VisionIpcServer * v, int camera_id_, int camera_num_, unsigned int fps, cl_device_id device_id, cl_context ctx, VisionStreamType rgb_type, VisionStreamType yuv_type, bool enabled_) {
+void CameraState::camera_init(MultiCameraState *multi_cam_state_, VisionIpcServer * v, int camera_id_, int camera_num_, unsigned int fps, cl_device_id device_id, cl_context ctx, VisionStreamType yuv_type, bool enabled_) {
   multi_cam_state = multi_cam_state_;
   camera_id = camera_id_;
   camera_num = camera_num_;
@@ -638,7 +630,7 @@ void CameraState::camera_init(MultiCameraState *multi_cam_state_, VisionIpcServe
   exposure_time = 5;
   cur_ev[0] = cur_ev[1] = cur_ev[2] = (dc_gain_enabled ? DC_GAIN : 1) * sensor_analog_gains[gain_idx] * exposure_time;
 
-  buf.init(device_id, ctx, this, v, FRAME_BUF_COUNT, rgb_type, yuv_type);
+  buf.init(device_id, ctx, this, v, FRAME_BUF_COUNT, yuv_type);
 }
 
 void CameraState::camera_open() {
@@ -833,9 +825,9 @@ void CameraState::camera_open() {
 }
 
 void cameras_init(VisionIpcServer *v, MultiCameraState *s, cl_device_id device_id, cl_context ctx) {
-  s->driver_cam.camera_init(s, v, CAMERA_ID_AR0231, 2, 20, device_id, ctx, VISION_STREAM_RGB_DRIVER, VISION_STREAM_DRIVER, !env_disable_driver);
-  s->road_cam.camera_init(s, v, CAMERA_ID_AR0231, 1, 20, device_id, ctx, VISION_STREAM_RGB_ROAD, VISION_STREAM_ROAD, !env_disable_road);
-  s->wide_road_cam.camera_init(s, v, CAMERA_ID_AR0231, 0, 20, device_id, ctx, VISION_STREAM_RGB_WIDE_ROAD, VISION_STREAM_WIDE_ROAD, !env_disable_wide_road);
+  s->driver_cam.camera_init(s, v, CAMERA_ID_AR0231, 2, 20, device_id, ctx, VISION_STREAM_DRIVER, !env_disable_driver);
+  s->road_cam.camera_init(s, v, CAMERA_ID_AR0231, 1, 20, device_id, ctx, VISION_STREAM_ROAD, !env_disable_road);
+  s->wide_road_cam.camera_init(s, v, CAMERA_ID_AR0231, 0, 20, device_id, ctx, VISION_STREAM_WIDE_ROAD, !env_disable_wide_road);
 
   s->pm = new PubMaster({"roadCameraState", "driverCameraState", "wideRoadCameraState", "thumbnail"});
 }
@@ -1233,9 +1225,7 @@ static void process_driver_camera(MultiCameraState *s, CameraState *c, int cnt)
   auto framed = msg.initEvent().initDriverCameraState();
   framed.setFrameType(cereal::FrameData::FrameType::FRONT);
   fill_frame_data(framed, c->buf.cur_frame_data);
-  if (env_send_driver) {
-    framed.setImage(get_frame_image(&c->buf));
-  }
+
   if (c->camera_id == CAMERA_ID_AR0231) {
     ar0231_process_registers(s, c, framed);
   }
@@ -1248,9 +1238,7 @@ void process_road_camera(MultiCameraState *s, CameraState *c, int cnt) {
   MessageBuilder msg;
   auto framed = c == &s->road_cam ? msg.initEvent().initRoadCameraState() : msg.initEvent().initWideRoadCameraState();
   fill_frame_data(framed, b->cur_frame_data);
-  if ((c == &s->road_cam && env_send_road) || (c == &s->wide_road_cam && env_send_wide_road)) {
-    framed.setImage(get_frame_image(b));
-  } else if (env_log_raw_frames && c == &s->road_cam && cnt % 100 == 5) { // no overlap with qlog decimation
+  if (env_log_raw_frames && c == &s->road_cam && cnt % 100 == 5) { // no overlap with qlog decimation
     framed.setImage(get_raw_frame_image(b));
   }
   LOGT(c->buf.cur_frame_data.frame_id, "%s: Image set", c == &s->road_cam ? "RoadCamera" : "WideRoadCamera");
diff --git a/system/camerad/cameras/camera_qcom2.h b/system/camerad/cameras/camera_qcom2.h
index 57fef8d49a..03d3d1a823 100644
--- a/system/camerad/cameras/camera_qcom2.h
+++ b/system/camerad/cameras/camera_qcom2.h
@@ -55,7 +55,7 @@ public:
   void sensors_start();
 
   void camera_open();
-  void camera_init(MultiCameraState *multi_cam_state, VisionIpcServer * v, int camera_id, int camera_num, unsigned int fps, cl_device_id device_id, cl_context ctx, VisionStreamType rgb_type, VisionStreamType yuv_type, bool enabled);
+  void camera_init(MultiCameraState *multi_cam_state, VisionIpcServer * v, int camera_id, int camera_num, unsigned int fps, cl_device_id device_id, cl_context ctx, VisionStreamType yuv_type, bool enabled);
   void camera_close();
 
   std::map<uint16_t, uint16_t> ar0231_parse_registers(uint8_t *data, std::initializer_list<uint16_t> addrs);
diff --git a/system/camerad/transforms/rgb_to_yuv.cc b/system/camerad/transforms/rgb_to_yuv.cc
deleted file mode 100644
index 5e51579cf9..0000000000
--- a/system/camerad/transforms/rgb_to_yuv.cc
+++ /dev/null
@@ -1,36 +0,0 @@
-#include "system/camerad/transforms/rgb_to_yuv.h"
-
-#include
-#include
-
-Rgb2Yuv::Rgb2Yuv(cl_context ctx, cl_device_id device_id, int width, int height, int rgb_stride) {
-  assert(width % 2 == 0 && height % 2 == 0);
-  char args[1024];
-  snprintf(args, sizeof(args),
-           "-cl-fast-relaxed-math -cl-denorms-are-zero "
-#ifdef CL_DEBUG
-           "-DCL_DEBUG "
-#endif
-           "-DWIDTH=%d -DHEIGHT=%d -DUV_WIDTH=%d -DUV_HEIGHT=%d -DRGB_STRIDE=%d -DRGB_SIZE=%d",
-           width, height, width / 2, height / 2, rgb_stride, width * height);
-
-  cl_program prg = cl_program_from_file(ctx, device_id, "transforms/rgb_to_yuv.cl", args);
-  krnl = CL_CHECK_ERR(clCreateKernel(prg, "rgb_to_yuv", &err));
-  CL_CHECK(clReleaseProgram(prg));
-
-  work_size[0] = (width + (width % 4 == 0 ? 0 : (4 - width % 4))) / 4;
-  work_size[1] = (height + (height % 4 == 0 ? 0 : (4 - height % 4))) / 4;
-}
-
-Rgb2Yuv::~Rgb2Yuv() {
-  CL_CHECK(clReleaseKernel(krnl));
-}
-
-void Rgb2Yuv::queue(cl_command_queue q, cl_mem rgb_cl, cl_mem yuv_cl) {
-  CL_CHECK(clSetKernelArg(krnl, 0, sizeof(cl_mem), &rgb_cl));
-  CL_CHECK(clSetKernelArg(krnl, 1, sizeof(cl_mem), &yuv_cl));
-  cl_event event;
-  CL_CHECK(clEnqueueNDRangeKernel(q, krnl, 2, NULL, &work_size[0], NULL, 0, 0, &event));
-  CL_CHECK(clWaitForEvents(1, &event));
-  CL_CHECK(clReleaseEvent(event));
-}
diff --git a/system/camerad/transforms/rgb_to_yuv.h b/system/camerad/transforms/rgb_to_yuv.h
deleted file mode 100644
index e1de180d40..0000000000
--- a/system/camerad/transforms/rgb_to_yuv.h
+++ /dev/null
@@ -1,14 +0,0 @@
-#pragma once
-
-#include "common/clutil.h"
-
-class Rgb2Yuv {
-public:
-  Rgb2Yuv(cl_context ctx, cl_device_id device_id, int width, int height, int rgb_stride);
-  ~Rgb2Yuv();
-  void queue(cl_command_queue q, cl_mem rgb_cl, cl_mem yuv_cl);
-private:
-  size_t work_size[2];
-  cl_kernel krnl;
-};
-
diff --git a/system/camerad/transforms/rgb_to_yuv_test.cc b/system/camerad/transforms/rgb_to_yuv_test.cc
deleted file mode 100644
index 2f909e3b73..0000000000
--- a/system/camerad/transforms/rgb_to_yuv_test.cc
+++ /dev/null
@@ -1,191 +0,0 @@
-#include
-#include
-#include
-#include
-
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-#include
-
-#ifdef ANDROID
-
-#define MAXE 0
-#include
-
-#else
-// The libyuv implementation on ARM is slightly different than on x86
-// Our implementation matches the ARM version, so accept errors of 1
-#define MAXE 1
-
-#endif
-
-#include
-
-#include "libyuv.h"
-#include "system/camerad/transforms/rgb_to_yuv.h"
-#include "common/clutil.h"
-
-static inline double millis_since_boot() {
-  struct timespec t;
-  clock_gettime(CLOCK_BOOTTIME, &t);
-  return t.tv_sec * 1000.0 + t.tv_nsec * 1e-6;
-}
-
-void cl_init(cl_device_id &device_id, cl_context &context) {
-  device_id = cl_get_device_id(CL_DEVICE_TYPE_DEFAULT);
-  context = CL_CHECK_ERR(clCreateContext(NULL, 1, &device_id, NULL, NULL, &err));
-}
-
-
-bool compare_results(uint8_t *a, uint8_t *b, int len, int stride, int width, int height, uint8_t *rgb) {
-  int min_diff = 0., max_diff = 0., max_e = 0.;
-  int e1 = 0, e0 = 0;
-  int e0y = 0, e0u = 0, e0v = 0, e1y = 0, e1u = 0, e1v = 0;
-  int max_e_i = 0;
-  for (int i = 0;i < len;i++) {
-    int e = ((int)a[i]) - ((int)b[i]);
-    if(e < min_diff) {
-      min_diff = e;
-    }
-    if(e > max_diff) {
-      max_diff = e;
-    }
-    int e_abs = std::abs(e);
-    if(e_abs > max_e) {
-      max_e = e_abs;
-      max_e_i = i;
-    }
-    if(e_abs < 1) {
-      e0++;
-      if(i < stride * height)
-        e0y++;
-      else if(i < stride * height + stride * height / 4)
-        e0u++;
-      else
-        e0v++;
-    } else {
-      e1++;
-      if(i < stride * height)
-        e1y++;
-      else if(i < stride * height + stride * height / 4)
-        e1u++;
-      else
-        e1v++;
-    }
-  }
-  //printf("max diff : %d, min diff : %d, e < 1: %d, e >= 1: %d\n", max_diff, min_diff, e0, e1);
-  //printf("Y: e < 1: %d, e >= 1: %d, U: e < 1: %d, e >= 1: %d, V: e < 1: %d, e >= 1: %d\n", e0y, e1y, e0u, e1u, e0v, e1v);
-  if(max_e <= MAXE) {
-    return true;
-  }
-  int row = max_e_i / stride;
-  if(row < height) {
-    printf("max error is Y: %d = (libyuv: %u - cl: %u), row: %d, col: %d\n", max_e, a[max_e_i], b[max_e_i], row, max_e_i % stride);
-  } else if(row >= height && row < (height + height / 4)) {
-    printf("max error is U: %d = %u - %u, row: %d, col: %d\n", max_e, a[max_e_i], b[max_e_i], (row - height) / 2, max_e_i % stride / 2);
-  } else {
-    printf("max error is V: %d = %u - %u, row: %d, col: %d\n", max_e, a[max_e_i], b[max_e_i], (row - height - height / 4) / 2, max_e_i % stride / 2);
-  }
-  return false;
-}
-
-int main(int argc, char** argv) {
-  srand(1337);
-
-  cl_device_id device_id;
-  cl_context context;
-  cl_init(device_id, context) ;
-
-  int err;
-  const cl_queue_properties props[] = {0}; //CL_QUEUE_PRIORITY_KHR, CL_QUEUE_PRIORITY_HIGH_KHR, 0};
-  cl_command_queue q = clCreateCommandQueueWithProperties(context, device_id, props, &err);
-  if(err != 0) {
-    std::cout << "clCreateCommandQueueWithProperties error: " << err << std::endl;
-  }
-
-  int width = 1164;
-  int height = 874;
-
-  int opt = 0;
-  while ((opt = getopt(argc, argv, "f")) != -1)
-  {
-    switch (opt)
-    {
-    case 'f':
-      std::cout << "Using front camera dimensions" << std::endl;
-      int width = 1152;
-      int height = 846;
-    }
-  }
-
-  std::cout << "Width: " << width << " Height: " << height << std::endl;
-  uint8_t *rgb_frame = new uint8_t[width * height * 3];
-
-
-  RGBToYUVState rgb_to_yuv_state;
-  rgb_to_yuv_init(&rgb_to_yuv_state, context, device_id, width, height, width * 3);
-
-  int frame_yuv_buf_size = width * height * 3 / 2;
-  cl_mem yuv_cl = CL_CHECK_ERR(clCreateBuffer(context, CL_MEM_READ_WRITE, frame_yuv_buf_size, (void*)NULL, &err));
-  uint8_t *frame_yuv_buf = new uint8_t[frame_yuv_buf_size];
-  uint8_t *frame_yuv_ptr_y = frame_yuv_buf;
-  uint8_t *frame_yuv_ptr_u = frame_yuv_buf + (width * height);
-  uint8_t *frame_yuv_ptr_v = frame_yuv_ptr_u + ((width/2) * (height/2));
-
-  cl_mem rgb_cl = CL_CHECK_ERR(clCreateBuffer(context, CL_MEM_READ_WRITE, width * height * 3, (void*)NULL, &err));
-  int mismatched = 0;
-  int counter = 0;
-  srand (time(NULL));
-
-  for (int i = 0; i < 100; i++) {
-    for (int i = 0; i < width * height * 3; i++) {
-      rgb_frame[i] = (uint8_t)rand();
-    }
-
-    double t1 = millis_since_boot();
-    libyuv::RGB24ToI420((uint8_t*)rgb_frame, width * 3,
-                        frame_yuv_ptr_y, width,
-                        frame_yuv_ptr_u, width/2,
-                        frame_yuv_ptr_v, width/2,
-                        width, height);
-    double t2 = millis_since_boot();
-    //printf("Libyuv: rgb to yuv: %.2fms\n", t2-t1);
-
-    clEnqueueWriteBuffer(q, rgb_cl, CL_TRUE, 0, width * height * 3, (void *)rgb_frame, 0, NULL, NULL);
-    t1 = millis_since_boot();
-    rgb_to_yuv_queue(&rgb_to_yuv_state, q, rgb_cl, yuv_cl);
-    t2 = millis_since_boot();
-
-    //printf("OpenCL: rgb to yuv: %.2fms\n", t2-t1);
-    uint8_t *yyy = (uint8_t *)clEnqueueMapBuffer(q, yuv_cl, CL_TRUE,
-                                                 CL_MAP_READ, 0, frame_yuv_buf_size,
-                                                 0, NULL, NULL, &err);
-    if(!compare_results(frame_yuv_ptr_y, yyy, frame_yuv_buf_size, width, width, height, (uint8_t*)rgb_frame))
-      mismatched++;
-    clEnqueueUnmapMemObject(q, yuv_cl, yyy, 0, NULL, NULL);
-
-    // std::this_thread::sleep_for(std::chrono::milliseconds(20));
-    if(counter++ % 100 == 0)
-      printf("Matched: %d, Mismatched: %d\n", counter - mismatched, mismatched);
-
-  }
-  printf("Matched: %d, Mismatched: %d\n", counter - mismatched, mismatched);
-
-  delete[] frame_yuv_buf;
-  rgb_to_yuv_destroy(&rgb_to_yuv_state);
-  clReleaseContext(context);
-  delete[] rgb_frame;
-
-  if (mismatched == 0)
-    return 0;
-  else
-    return -1;
-}