mirror of https://github.com/1okko/openpilot.git
camerad: merge thread functions into one (#33025)
merge multiple thread functions into one
This commit is contained in:
parent 1989fbd4c4
commit ccd369ee96
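The change below collapses the per-camera thread functions (processing_thread, process_driver_camera, process_road_camera) into a single CameraState::run() loop driven by each camera's CameraConfig. As a rough, standalone illustration of that pattern only (not the openpilot code itself: FakeEvent, FakeBuilder, acquire(), and frames_left are hypothetical stand-ins for the cereal builders and the real buffer acquisition), a minimal C++17 sketch might look like this:

// Minimal, self-contained sketch of "one run loop, parameterized by config".
#include <atomic>
#include <cstdio>
#include <initializer_list>
#include <thread>
#include <vector>

struct FakeBuilder { const char *which; };

struct FakeEvent {
  FakeBuilder initRoadCameraState() { return {"roadCameraState"}; }
  FakeBuilder initDriverCameraState() { return {"driverCameraState"}; }
};

// Per-camera configuration, mirroring the new CameraConfig fields in the diff:
// a publish name plus a pointer-to-member-function that picks the builder.
struct CameraConfig {
  const char *publish_name;
  FakeBuilder (FakeEvent::*init_camera_state)();
  bool enabled;
};

std::atomic<bool> do_exit{false};

struct CameraState {
  CameraConfig cfg;
  int frames_left = 3;  // stand-in: pretend three frames arrive, then stop

  bool acquire() { return frames_left-- > 0; }

  // One run loop shared by every camera; the config supplies the differences.
  void run() {
    for (uint32_t cnt = 0; !do_exit && acquire(); ++cnt) {
      FakeEvent msg;
      FakeBuilder framed = (msg.*cfg.init_camera_state)();  // dispatch via member pointer
      std::printf("%s: frame %u via %s\n", cfg.publish_name, cnt, framed.which);
    }
  }
};

int main() {
  CameraState road{{"roadCameraState", &FakeEvent::initRoadCameraState, true}};
  CameraState driver{{"driverCameraState", &FakeEvent::initDriverCameraState, true}};

  // Same spawn pattern as the new cameras_run(): one std::thread per enabled
  // camera, all running the same member function.
  std::vector<std::thread> threads;
  for (CameraState *cam : {&road, &driver}) {
    if (cam->cfg.enabled) threads.emplace_back(&CameraState::run, cam);
  }
  for (auto &t : threads) t.join();
  return 0;
}

The two pieces that make the merge work are the pointer-to-member-function field (init_camera_state), which lets one loop pick the right builder per camera, and threads.emplace_back(&CameraState::run, &cam), which starts the same member function on each enabled camera.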
@@ -244,7 +244,7 @@ static kj::Array<capnp::byte> yuv420_to_jpeg(const CameraBuf *b, int thumbnail_w
   return dat;
 }
 
-static void publish_thumbnail(PubMaster *pm, const CameraBuf *b) {
+void publish_thumbnail(PubMaster *pm, const CameraBuf *b) {
   auto thumbnail = yuv420_to_jpeg(b, b->rgb_width / 4, b->rgb_height / 4);
   if (thumbnail.size() == 0) return;
 
@@ -284,36 +284,6 @@ float set_exposure_target(const CameraBuf *b, Rect ae_xywh, int x_skip, int y_sk
   return lum_med / 256.0;
 }
 
-void *processing_thread(MultiCameraState *cameras, CameraState *cs, process_thread_cb callback) {
-  const char *thread_name = nullptr;
-  if (cs == &cameras->road_cam) {
-    thread_name = "RoadCamera";
-  } else if (cs == &cameras->driver_cam) {
-    thread_name = "DriverCamera";
-  } else {
-    thread_name = "WideRoadCamera";
-  }
-  util::set_thread_name(thread_name);
-
-  uint32_t cnt = 0;
-  while (!do_exit) {
-    if (!cs->buf.acquire()) continue;
-
-    callback(cameras, cs, cnt);
-
-    if (cs == &(cameras->road_cam) && cameras->pm && cnt % 100 == 3) {
-      // this takes 10ms???
-      publish_thumbnail(cameras->pm, &(cs->buf));
-    }
-    ++cnt;
-  }
-  return NULL;
-}
-
-std::thread start_process_thread(MultiCameraState *cameras, CameraState *cs, process_thread_cb callback) {
-  return std::thread(processing_thread, cameras, cs, callback);
-}
-
 void camerad_thread() {
   cl_device_id device_id = cl_get_device_id(CL_DEVICE_TYPE_DEFAULT);
 #ifdef QCOM2
@@ -2,7 +2,6 @@
 
 #include <fcntl.h>
 #include <memory>
-#include <thread>
 
 #include "cereal/messaging/messaging.h"
 #include "msgq/visionipc/visionipc_server.h"
@@ -69,13 +68,10 @@ public:
   void queue(size_t buf_idx);
 };
 
-typedef void (*process_thread_cb)(MultiCameraState *s, CameraState *c, int cnt);
-
 void fill_frame_data(cereal::FrameData::Builder &framed, const FrameMetadata &frame_data, CameraState *c);
 kj::Array<uint8_t> get_raw_frame_image(const CameraBuf *b);
 float set_exposure_target(const CameraBuf *b, Rect ae_xywh, int x_skip, int y_skip);
-std::thread start_process_thread(MultiCameraState *cameras, CameraState *cs, process_thread_cb callback);
-
+void publish_thumbnail(PubMaster *pm, const CameraBuf *b);
 void cameras_init(VisionIpcServer *v, MultiCameraState *s, cl_device_id device_id, cl_context ctx);
 void cameras_open(MultiCameraState *s);
 void cameras_run(MultiCameraState *s);
@@ -31,6 +31,8 @@ CameraState::CameraState(MultiCameraState *multi_camera_state, const CameraConfi
     camera_num(config.camera_num),
     stream_type(config.stream_type),
     focal_len(config.focal_len),
+    publish_name(config.publish_name),
+    init_camera_state(config.init_camera_state),
     enabled(config.enabled) {
 }
 
@@ -941,33 +943,36 @@ void CameraState::set_camera_exposure(float grey_frac) {
   sensors_i2c(exp_reg_array.data(), exp_reg_array.size(), CAM_SENSOR_PACKET_OPCODE_SENSOR_CONFIG, ci->data_word);
 }
 
-static void process_driver_camera(MultiCameraState *s, CameraState *c, int cnt) {
-  c->set_camera_exposure(set_exposure_target(&c->buf, c->ae_xywh, 2, 4));
+void CameraState::run() {
+  util::set_thread_name(publish_name);
 
-  MessageBuilder msg;
-  auto framed = msg.initEvent().initDriverCameraState();
-  fill_frame_data(framed, c->buf.cur_frame_data, c);
+  for (uint32_t cnt = 0; !do_exit; ++cnt) {
+    // Acquire the buffer; continue if acquisition fails
+    if (!buf.acquire()) continue;
 
-  c->ci->processRegisters(c, framed);
-  s->pm->send("driverCameraState", msg);
-}
+    MessageBuilder msg;
+    auto framed = (msg.initEvent().*init_camera_state)();
+    fill_frame_data(framed, buf.cur_frame_data, this);
 
-void process_road_camera(MultiCameraState *s, CameraState *c, int cnt) {
-  const CameraBuf *b = &c->buf;
+    // Log raw frames for road camera
+    if (env_log_raw_frames && stream_type == VISION_STREAM_ROAD && cnt % 100 == 5) {  // no overlap with qlog decimation
+      framed.setImage(get_raw_frame_image(&buf));
+    }
+    // Log frame id for road and wide road cameras
+    if (stream_type != VISION_STREAM_DRIVER) {
+      LOGT(buf.cur_frame_data.frame_id, "%s: Image set", publish_name);
+    }
 
-  MessageBuilder msg;
-  auto framed = c == &s->road_cam ? msg.initEvent().initRoadCameraState() : msg.initEvent().initWideRoadCameraState();
-  fill_frame_data(framed, b->cur_frame_data, c);
-  if (env_log_raw_frames && c == &s->road_cam && cnt % 100 == 5) {  // no overlap with qlog decimation
-    framed.setImage(get_raw_frame_image(b));
+    // Process camera registers and set camera exposure
+    ci->processRegisters(this, framed);
+    set_camera_exposure(set_exposure_target(&buf, ae_xywh, 2, stream_type != VISION_STREAM_DRIVER ? 2 : 4));
+
+    // Send the message
+    multi_cam_state->pm->send(publish_name, msg);
+    if (stream_type == VISION_STREAM_ROAD && cnt % 100 == 3) {
+      publish_thumbnail(multi_cam_state->pm, &buf);  // this takes 10ms???
+    }
   }
-  LOGT(c->buf.cur_frame_data.frame_id, "%s: Image set", c == &s->road_cam ? "RoadCamera" : "WideRoadCamera");
-
-  c->ci->processRegisters(c, framed);
-  s->pm->send(c == &s->road_cam ? "roadCameraState" : "wideRoadCameraState", msg);
-
-  const int skip = 2;
-  c->set_camera_exposure(set_exposure_target(b, c->ae_xywh, skip, skip));
 }
 
 MultiCameraState::MultiCameraState()
@@ -979,9 +984,9 @@ MultiCameraState::MultiCameraState()
 void cameras_run(MultiCameraState *s) {
   LOG("-- Starting threads");
   std::vector<std::thread> threads;
-  if (s->driver_cam.enabled) threads.push_back(start_process_thread(s, &s->driver_cam, process_driver_camera));
-  if (s->road_cam.enabled) threads.push_back(start_process_thread(s, &s->road_cam, process_road_camera));
-  if (s->wide_road_cam.enabled) threads.push_back(start_process_thread(s, &s->wide_road_cam, process_road_camera));
+  if (s->driver_cam.enabled) threads.emplace_back(&CameraState::run, &s->driver_cam);
+  if (s->road_cam.enabled) threads.emplace_back(&CameraState::run, &s->road_cam);
+  if (s->wide_road_cam.enabled) threads.emplace_back(&CameraState::run, &s->wide_road_cam);
 
   // start devices
   LOG("-- Starting devices");
@@ -15,6 +15,8 @@ struct CameraConfig {
   int camera_num;
   VisionStreamType stream_type;
   float focal_len;  // millimeters
+  const char *publish_name;
+  cereal::FrameData::Builder (cereal::Event::Builder::*init_camera_state)();
   bool enabled;
 };
 
@@ -22,6 +24,8 @@ const CameraConfig WIDE_ROAD_CAMERA_CONFIG = {
   .camera_num = 0,
   .stream_type = VISION_STREAM_WIDE_ROAD,
   .focal_len = 1.71,
+  .publish_name = "wideRoadCameraState",
+  .init_camera_state = &cereal::Event::Builder::initWideRoadCameraState,
   .enabled = !getenv("DISABLE_WIDE_ROAD"),
 };
 
@@ -29,6 +33,8 @@ const CameraConfig ROAD_CAMERA_CONFIG = {
   .camera_num = 1,
   .stream_type = VISION_STREAM_ROAD,
   .focal_len = 8.0,
+  .publish_name = "roadCameraState",
+  .init_camera_state = &cereal::Event::Builder::initRoadCameraState,
   .enabled = !getenv("DISABLE_ROAD"),
 };
 
@@ -36,6 +42,8 @@ const CameraConfig DRIVER_CAMERA_CONFIG = {
   .camera_num = 2,
   .stream_type = VISION_STREAM_DRIVER,
   .focal_len = 1.71,
+  .publish_name = "driverCameraState",
+  .init_camera_state = &cereal::Event::Builder::initDriverCameraState,
   .enabled = !getenv("DISABLE_DRIVER"),
 };
 
@@ -45,6 +53,8 @@ public:
   std::unique_ptr<const SensorInfo> ci;
   bool enabled = true;
   VisionStreamType stream_type;
+  const char *publish_name = nullptr;
+  cereal::FrameData::Builder (cereal::Event::Builder::*init_camera_state)() = nullptr;
   float focal_len = 0;
 
   std::mutex exp_lock;
@@ -83,6 +93,7 @@ public:
   void camera_map_bufs();
   void camera_init(VisionIpcServer *v, cl_device_id device_id, cl_context ctx);
   void camera_close();
+  void run();
 
   int32_t session_handle = -1;
   int32_t sensor_dev_handle = -1;