replay: C3/C3X hardware decoder (#35821)
* bump msgq
* add third_party/linux/include/media/msm_vidc.h
* add sde_rotator hw interface
* add msm_vidc hw decoder interface
* update SConscript to build qcom decoder and rotator
* use qcom decoder in replay framereader
* decode directly to NV12 with the correct stride without using the hw rotator
* bump msgq back to master
* don't compile rotator
* cleanup
* works now but much to simplify
* rm signals
* rm header

---------

Co-authored-by: Test User <test@example.com>
Co-authored-by: Adeeb Shihadeh <adeebshihadeh@gmail.com>
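The "decode directly to NV12 with the correct stride" item refers to the standard NV12 layout the decoder writes into the replay VisionBuf. Below is a minimal sketch of that plane addressing (an illustrative helper, not code from this commit; in the real path the stride comes back from the V4L2 driver via VIDIOC_G_FMT):

#include <cstddef>
#include <cstdint>

struct NV12Planes {
  uint8_t *y;   // stride * height bytes
  uint8_t *uv;  // interleaved U/V, stride * height / 2 bytes
};

// For linear NV12 with a driver-reported stride (bytesperline >= width),
// the UV plane starts right after the Y plane, at offset stride * height.
static NV12Planes nv12_planes(uint8_t *base, size_t stride, size_t height) {
  return {base, base + stride * height};
}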
@@ -12,7 +12,7 @@ else:
   base_libs.append('OpenCL')
 
 replay_lib_src = ["replay.cc", "consoleui.cc", "camera.cc", "filereader.cc", "logreader.cc", "framereader.cc",
-                  "route.cc", "util.cc", "seg_mgr.cc", "timeline.cc", "api.cc"]
+                  "route.cc", "util.cc", "seg_mgr.cc", "timeline.cc", "api.cc", "qcom_decoder.cc"]
 replay_lib = replay_env.Library("replay", replay_lib_src, LIBS=base_libs, FRAMEWORKS=base_frameworks)
 Export('replay_lib')
 replay_libs = [replay_lib, 'avutil', 'avcodec', 'avformat', 'bz2', 'zstd', 'curl', 'yuv', 'ncurses'] + base_libs
@@ -8,6 +8,7 @@
 #include "common/util.h"
 #include "third_party/libyuv/include/libyuv.h"
 #include "tools/replay/util.h"
+#include "system/hardware/hw.h"
 
 #ifdef __APPLE__
 #define HW_DEVICE_TYPE AV_HWDEVICE_TYPE_VIDEOTOOLBOX
@@ -37,7 +38,13 @@ struct DecoderManager {
       return it->second.get();
     }
 
-    auto decoder = std::make_unique<VideoDecoder>();
+    std::unique_ptr<VideoDecoder> decoder;
+    if (Hardware::TICI() && hw_decoder) {
+      decoder = std::make_unique<QcomVideoDecoder>();
+    } else {
+      decoder = std::make_unique<FFmpegVideoDecoder>();
+    }
+
     if (!decoder->open(codecpar, hw_decoder)) {
       decoder.reset(nullptr);
     }
@@ -114,19 +121,19 @@ bool FrameReader::get(int idx, VisionBuf *buf) {
 
 // class VideoDecoder
 
-VideoDecoder::VideoDecoder() {
+FFmpegVideoDecoder::FFmpegVideoDecoder() {
   av_frame_ = av_frame_alloc();
   hw_frame_ = av_frame_alloc();
 }
 
-VideoDecoder::~VideoDecoder() {
+FFmpegVideoDecoder::~FFmpegVideoDecoder() {
   if (hw_device_ctx) av_buffer_unref(&hw_device_ctx);
   if (decoder_ctx) avcodec_free_context(&decoder_ctx);
   av_frame_free(&av_frame_);
   av_frame_free(&hw_frame_);
 }
 
-bool VideoDecoder::open(AVCodecParameters *codecpar, bool hw_decoder) {
+bool FFmpegVideoDecoder::open(AVCodecParameters *codecpar, bool hw_decoder) {
   const AVCodec *decoder = avcodec_find_decoder(codecpar->codec_id);
   if (!decoder) return false;
@@ -149,7 +156,7 @@ bool VideoDecoder::open(AVCodecParameters *codecpar, bool hw_decoder) {
   return true;
 }
 
-bool VideoDecoder::initHardwareDecoder(AVHWDeviceType hw_device_type) {
+bool FFmpegVideoDecoder::initHardwareDecoder(AVHWDeviceType hw_device_type) {
   const AVCodecHWConfig *config = nullptr;
   for (int i = 0; (config = avcodec_get_hw_config(decoder_ctx->codec, i)) != nullptr; i++) {
     if (config->methods & AV_CODEC_HW_CONFIG_METHOD_HW_DEVICE_CTX && config->device_type == hw_device_type) {
@@ -175,7 +182,7 @@ bool VideoDecoder::initHardwareDecoder(AVHWDeviceType hw_device_type) {
   return true;
 }
 
-bool VideoDecoder::decode(FrameReader *reader, int idx, VisionBuf *buf) {
+bool FFmpegVideoDecoder::decode(FrameReader *reader, int idx, VisionBuf *buf) {
   int current_idx = idx;
   if (idx != reader->prev_idx + 1) {
     // seeking to the nearest key frame
@@ -219,7 +226,7 @@ bool VideoDecoder::decode(FrameReader *reader, int idx, VisionBuf *buf) {
   return false;
 }
 
-AVFrame *VideoDecoder::decodeFrame(AVPacket *pkt) {
+AVFrame *FFmpegVideoDecoder::decodeFrame(AVPacket *pkt) {
   int ret = avcodec_send_packet(decoder_ctx, pkt);
   if (ret < 0) {
     rError("Error sending a packet for decoding: %d", ret);
@@ -239,7 +246,7 @@ AVFrame *VideoDecoder::decodeFrame(AVPacket *pkt) {
   return (av_frame_->format == hw_pix_fmt) ? hw_frame_ : av_frame_;
 }
 
-bool VideoDecoder::copyBuffer(AVFrame *f, VisionBuf *buf) {
+bool FFmpegVideoDecoder::copyBuffer(AVFrame *f, VisionBuf *buf) {
   if (hw_pix_fmt == HW_PIX_FMT) {
     for (int i = 0; i < height/2; i++) {
       memcpy(buf->y + (i*2 + 0)*buf->stride, f->data[0] + (i*2 + 0)*f->linesize[0], width);
@@ -256,3 +263,45 @@ bool VideoDecoder::copyBuffer(AVFrame *f, VisionBuf *buf) {
   }
   return true;
 }
+
+bool QcomVideoDecoder::open(AVCodecParameters *codecpar, bool hw_decoder) {
+  if (codecpar->codec_id != AV_CODEC_ID_HEVC) {
+    rError("Hardware decoder only supports HEVC codec");
+    return false;
+  }
+  width = codecpar->width;
+  height = codecpar->height;
+  msm_vidc.init(VIDEO_DEVICE, width, height, V4L2_PIX_FMT_HEVC);
+  return true;
+}
+
+bool QcomVideoDecoder::decode(FrameReader *reader, int idx, VisionBuf *buf) {
+  int from_idx = idx;
+  if (idx != reader->prev_idx + 1) {
+    // seeking to the nearest key frame
+    for (int i = idx; i >= 0; --i) {
+      if (reader->packets_info[i].flags & AV_PKT_FLAG_KEY) {
+        from_idx = i;
+        break;
+      }
+    }
+
+    auto pos = reader->packets_info[from_idx].pos;
+    int ret = avformat_seek_file(reader->input_ctx, 0, pos, pos, pos, AVSEEK_FLAG_BYTE);
+    if (ret < 0) {
+      rError("Failed to seek to byte position %lld: %d", pos, AVERROR(ret));
+      return false;
+    }
+  }
+  reader->prev_idx = idx;
+  bool result = false;
+  AVPacket pkt;
+  msm_vidc.avctx = reader->input_ctx;
+  for (int i = from_idx; i <= idx; ++i) {
+    if (av_read_frame(reader->input_ctx, &pkt) == 0) {
+      result = msm_vidc.decodeFrame(&pkt, buf) && (i == idx);
+      av_packet_unref(&pkt);
+    }
+  }
+  return result;
+}
@@ -6,6 +6,7 @@
 #include "msgq/visionipc/visionbuf.h"
 #include "tools/replay/filereader.h"
 #include "tools/replay/util.h"
+#include "tools/replay/qcom_decoder.h"
 
 extern "C" {
 #include <libavcodec/avcodec.h>
@@ -40,11 +41,18 @@ public:
 
 class VideoDecoder {
 public:
-  VideoDecoder();
-  ~VideoDecoder();
-  bool open(AVCodecParameters *codecpar, bool hw_decoder);
-  bool decode(FrameReader *reader, int idx, VisionBuf *buf);
+  virtual ~VideoDecoder() = default;
+  virtual bool open(AVCodecParameters *codecpar, bool hw_decoder) = 0;
+  virtual bool decode(FrameReader *reader, int idx, VisionBuf *buf) = 0;
   int width = 0, height = 0;
+};
+
+class FFmpegVideoDecoder : public VideoDecoder {
+public:
+  FFmpegVideoDecoder();
+  ~FFmpegVideoDecoder() override;
+  bool open(AVCodecParameters *codecpar, bool hw_decoder) override;
+  bool decode(FrameReader *reader, int idx, VisionBuf *buf) override;
 
 private:
   bool initHardwareDecoder(AVHWDeviceType hw_device_type);
@@ -56,3 +64,14 @@ private:
   AVPixelFormat hw_pix_fmt = AV_PIX_FMT_NONE;
   AVBufferRef *hw_device_ctx = nullptr;
 };
+
+class QcomVideoDecoder : public VideoDecoder {
+public:
+  QcomVideoDecoder() {};
+  ~QcomVideoDecoder() override {};
+  bool open(AVCodecParameters *codecpar, bool hw_decoder) override;
+  bool decode(FrameReader *reader, int idx, VisionBuf *buf) override;
+
+private:
+  MsmVidc msm_vidc = MsmVidc();
+};
tools/replay/qcom_decoder.cc (new file, 346 lines)
@@ -0,0 +1,346 @@
#include "qcom_decoder.h"
|
||||
|
||||
#include <assert.h>
|
||||
#include "third_party/linux/include/v4l2-controls.h"
|
||||
#include <linux/videodev2.h>
|
||||
|
||||
|
||||
#include "common/swaglog.h"
|
||||
#include "common/util.h"
|
||||
|
||||
// echo "0xFFFF" > /sys/kernel/debug/msm_vidc/debug_level
|
||||
|
||||
static void copyBuffer(VisionBuf *src_buf, VisionBuf *dst_buf) {
|
||||
// Copy Y plane
|
||||
memcpy(dst_buf->y, src_buf->y, src_buf->height * src_buf->stride);
|
||||
// Copy UV plane
|
||||
memcpy(dst_buf->uv, src_buf->uv, src_buf->height / 2 * src_buf->stride);
|
||||
}
|
||||
|
||||
static void request_buffers(int fd, v4l2_buf_type buf_type, unsigned int count) {
|
||||
struct v4l2_requestbuffers reqbuf = {
|
||||
.count = count,
|
||||
.type = buf_type,
|
||||
.memory = V4L2_MEMORY_USERPTR
|
||||
};
|
||||
util::safe_ioctl(fd, VIDIOC_REQBUFS, &reqbuf, "VIDIOC_REQBUFS failed");
|
||||
}
|
||||
|
||||
MsmVidc::~MsmVidc() {
|
||||
if (fd > 0) {
|
||||
close(fd);
|
||||
}
|
||||
}
|
||||
|
||||
bool MsmVidc::init(const char* dev, size_t width, size_t height, uint64_t codec) {
|
||||
LOG("Initializing msm_vidc device %s", dev);
|
||||
this->w = width;
|
||||
this->h = height;
|
||||
this->fd = open(dev, O_RDWR, 0);
|
||||
if (fd < 0) {
|
||||
LOGE("failed to open video device %s", dev);
|
||||
return false;
|
||||
}
|
||||
subscribeEvents();
|
||||
v4l2_buf_type out_type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
|
||||
setPlaneFormat(out_type, V4L2_PIX_FMT_HEVC); // Also allocates the output buffer
|
||||
setFPS(FPS);
|
||||
request_buffers(fd, out_type, OUTPUT_BUFFER_COUNT);
|
||||
util::safe_ioctl(fd, VIDIOC_STREAMON, &out_type, "VIDIOC_STREAMON OUTPUT failed");
|
||||
restartCapture();
|
||||
setupPolling();
|
||||
|
||||
this->initialized = true;
|
||||
return true;
|
||||
}
|
||||
|
||||
VisionBuf* MsmVidc::decodeFrame(AVPacket *pkt, VisionBuf *buf) {
|
||||
assert(initialized && (pkt != nullptr) && (buf != nullptr));
|
||||
|
||||
this->frame_ready = false;
|
||||
this->current_output_buf = buf;
|
||||
bool sent_packet = false;
|
||||
|
||||
while (!this->frame_ready) {
|
||||
if (!sent_packet) {
|
||||
int buf_index = getBufferUnlocked();
|
||||
if (buf_index >= 0) {
|
||||
assert(buf_index < out_buf_cnt);
|
||||
sendPacket(buf_index, pkt);
|
||||
sent_packet = true;
|
||||
}
|
||||
}
|
||||
|
||||
if (poll(pfd, nfds, -1) < 0) {
|
||||
LOGE("poll() error: %d", errno);
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
if (VisionBuf* result = processEvents()) {
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
return buf;
|
||||
}
|
||||
|
||||
VisionBuf* MsmVidc::processEvents() {
|
||||
for (int idx = 0; idx < nfds; idx++) {
|
||||
short revents = pfd[idx].revents;
|
||||
if (!revents) continue;
|
||||
|
||||
if (idx == ev[EV_VIDEO]) {
|
||||
if (revents & (POLLIN | POLLRDNORM)) {
|
||||
VisionBuf *result = handleCapture();
|
||||
if (result == this->current_output_buf) {
|
||||
this->frame_ready = true;
|
||||
}
|
||||
}
|
||||
if (revents & (POLLOUT | POLLWRNORM)) {
|
||||
handleOutput();
|
||||
}
|
||||
if (revents & POLLPRI) {
|
||||
handleEvent();
|
||||
}
|
||||
} else {
|
||||
LOGE("Unexpected event on fd %d", pfd[idx].fd);
|
||||
}
|
||||
}
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
VisionBuf* MsmVidc::handleCapture() {
|
||||
struct v4l2_buffer buf = {0};
|
||||
struct v4l2_plane planes[1] = {0};
|
||||
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
|
||||
buf.memory = V4L2_MEMORY_USERPTR;
|
||||
buf.m.planes = planes;
|
||||
buf.length = 1;
|
||||
util::safe_ioctl(this->fd, VIDIOC_DQBUF, &buf, "VIDIOC_DQBUF CAPTURE failed");
|
||||
|
||||
if (this->reconfigure_pending || buf.m.planes[0].bytesused == 0) {
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
copyBuffer(&cap_bufs[buf.index], this->current_output_buf);
|
||||
queueCaptureBuffer(buf.index);
|
||||
return this->current_output_buf;
|
||||
}
|
||||
|
||||
bool MsmVidc::subscribeEvents() {
|
||||
for (uint32_t event : subscriptions) {
|
||||
struct v4l2_event_subscription sub = { .type = event};
|
||||
util::safe_ioctl(fd, VIDIOC_SUBSCRIBE_EVENT, &sub, "VIDIOC_SUBSCRIBE_EVENT failed");
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
bool MsmVidc::setPlaneFormat(enum v4l2_buf_type type, uint32_t fourcc) {
|
||||
struct v4l2_format fmt = {.type = type};
|
||||
struct v4l2_pix_format_mplane *pix = &fmt.fmt.pix_mp;
|
||||
*pix = {
|
||||
.width = (__u32)this->w,
|
||||
.height = (__u32)this->h,
|
||||
.pixelformat = fourcc
|
||||
};
|
||||
util::safe_ioctl(fd, VIDIOC_S_FMT, &fmt, "VIDIOC_S_FMT failed");
|
||||
if (type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE) {
|
||||
this->out_buf_size = pix->plane_fmt[0].sizeimage;
|
||||
int ion_size = this->out_buf_size * OUTPUT_BUFFER_COUNT; // Output (input) buffers are ION buffer.
|
||||
this->out_buf.allocate(ion_size); // mmap rw
|
||||
for (int i = 0; i < OUTPUT_BUFFER_COUNT; i++) {
|
||||
this->out_buf_off[i] = i * this->out_buf_size;
|
||||
this->out_buf_addr[i] = (char *)this->out_buf.addr + this->out_buf_off[i];
|
||||
this->out_buf_flag[i] = false;
|
||||
}
|
||||
LOGD("Set output buffer size to %d, count %d, addr %p", this->out_buf_size, OUTPUT_BUFFER_COUNT, this->out_buf.addr);
|
||||
} else if (type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
|
||||
request_buffers(this->fd, type, CAPTURE_BUFFER_COUNT);
|
||||
util::safe_ioctl(fd, VIDIOC_G_FMT, &fmt, "VIDIOC_G_FMT failed");
|
||||
const __u32 y_size = pix->plane_fmt[0].sizeimage;
|
||||
const __u32 y_stride = pix->plane_fmt[0].bytesperline;
|
||||
for (int i = 0; i < CAPTURE_BUFFER_COUNT; i++) {
|
||||
size_t uv_offset = (size_t)y_stride * pix->height;
|
||||
size_t required = uv_offset + (y_stride * pix->height / 2); // enough for Y + UV. For linear NV12, UV plane starts at y_stride * height.
|
||||
size_t alloc_size = std::max<size_t>(y_size, required);
|
||||
this->cap_bufs[i].allocate(alloc_size);
|
||||
this->cap_bufs[i].init_yuv(pix->width, pix->height, y_stride, uv_offset);
|
||||
}
|
||||
LOGD("Set capture buffer size to %d, count %d, addr %p, extradata size %d",
|
||||
pix->plane_fmt[0].sizeimage, CAPTURE_BUFFER_COUNT, this->cap_bufs[0].addr, pix->plane_fmt[1].sizeimage);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
bool MsmVidc::setFPS(uint32_t fps) {
|
||||
struct v4l2_streamparm streamparam = {
|
||||
.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE,
|
||||
.parm.output.timeperframe = {1, fps}
|
||||
};
|
||||
util::safe_ioctl(fd, VIDIOC_S_PARM, &streamparam, "VIDIOC_S_PARM failed");
|
||||
return true;
|
||||
}
|
||||
|
||||
bool MsmVidc::restartCapture() {
|
||||
// stop if already initialized
|
||||
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
|
||||
if (this->initialized) {
|
||||
LOGD("Restarting capture, flushing buffers...");
|
||||
util::safe_ioctl(this->fd, VIDIOC_STREAMOFF, &type, "VIDIOC_STREAMOFF CAPTURE failed");
|
||||
struct v4l2_requestbuffers reqbuf = {.type = type, .memory = V4L2_MEMORY_USERPTR};
|
||||
util::safe_ioctl(this->fd, VIDIOC_REQBUFS, &reqbuf, "VIDIOC_REQBUFS failed");
|
||||
for (size_t i = 0; i < CAPTURE_BUFFER_COUNT; ++i) {
|
||||
this->cap_bufs[i].free();
|
||||
this->cap_buf_flag[i] = false; // mark as not queued
|
||||
cap_bufs[i].~VisionBuf();
|
||||
new (&cap_bufs[i]) VisionBuf();
|
||||
}
|
||||
}
|
||||
// setup, start and queue capture buffers
|
||||
setDBP();
|
||||
setPlaneFormat(type, V4L2_PIX_FMT_NV12);
|
||||
util::safe_ioctl(this->fd, VIDIOC_STREAMON, &type, "VIDIOC_STREAMON CAPTURE failed");
|
||||
for (size_t i = 0; i < CAPTURE_BUFFER_COUNT; ++i) {
|
||||
queueCaptureBuffer(i);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
bool MsmVidc::queueCaptureBuffer(int i) {
|
||||
struct v4l2_buffer buf = {0};
|
||||
struct v4l2_plane planes[1] = {0};
|
||||
|
||||
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
|
||||
buf.memory = V4L2_MEMORY_USERPTR;
|
||||
buf.index = i;
|
||||
buf.m.planes = planes;
|
||||
buf.length = 1;
|
||||
// decoded frame plane
|
||||
planes[0].m.userptr = (unsigned long)this->cap_bufs[i].addr; // no security
|
||||
planes[0].length = this->cap_bufs[i].len;
|
||||
planes[0].reserved[0] = this->cap_bufs[i].fd; // ION fd
|
||||
planes[0].reserved[1] = 0;
|
||||
planes[0].bytesused = this->cap_bufs[i].len;
|
||||
planes[0].data_offset = 0;
|
||||
util::safe_ioctl(this->fd, VIDIOC_QBUF, &buf, "VIDIOC_QBUF failed");
|
||||
this->cap_buf_flag[i] = true; // mark as queued
|
||||
return true;
|
||||
}
|
||||
|
||||
bool MsmVidc::queueOutputBuffer(int i, size_t size) {
|
||||
struct v4l2_buffer buf = {0};
|
||||
struct v4l2_plane planes[1] = {0};
|
||||
|
||||
buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
|
||||
buf.memory = V4L2_MEMORY_USERPTR;
|
||||
buf.index = i;
|
||||
buf.m.planes = planes;
|
||||
buf.length = 1;
|
||||
// decoded frame plane
|
||||
planes[0].m.userptr = (unsigned long)this->out_buf_off[i]; // check this
|
||||
planes[0].length = this->out_buf_size;
|
||||
planes[0].reserved[0] = this->out_buf.fd; // ION fd
|
||||
planes[0].reserved[1] = 0;
|
||||
planes[0].bytesused = size;
|
||||
planes[0].data_offset = 0;
|
||||
assert((this->out_buf_off[i] & 0xfff) == 0); // must be 4 KiB aligned
|
||||
assert(this->out_buf_size % 4096 == 0); // ditto for size
|
||||
|
||||
util::safe_ioctl(this->fd, VIDIOC_QBUF, &buf, "VIDIOC_QBUF failed");
|
||||
this->out_buf_flag[i] = true; // mark as queued
|
||||
return true;
|
||||
}
|
||||
|
||||
bool MsmVidc::setDBP() {
|
||||
struct v4l2_ext_control control[2] = {0};
|
||||
struct v4l2_ext_controls controls = {0};
|
||||
control[0].id = V4L2_CID_MPEG_VIDC_VIDEO_STREAM_OUTPUT_MODE;
|
||||
control[0].value = 1; // V4L2_CID_MPEG_VIDC_VIDEO_STREAM_OUTPUT_SECONDARY
|
||||
control[1].id = V4L2_CID_MPEG_VIDC_VIDEO_DPB_COLOR_FORMAT;
|
||||
control[1].value = 0; // V4L2_MPEG_VIDC_VIDEO_DPB_COLOR_FMT_NONE
|
||||
controls.count = 2;
|
||||
controls.ctrl_class = V4L2_CTRL_CLASS_MPEG;
|
||||
controls.controls = control;
|
||||
util::safe_ioctl(fd, VIDIOC_S_EXT_CTRLS, &controls, "VIDIOC_S_EXT_CTRLS failed");
|
||||
return true;
|
||||
}
|
||||
|
||||
bool MsmVidc::setupPolling() {
|
||||
// Initialize poll array
|
||||
pfd[EV_VIDEO] = {fd, POLLIN | POLLOUT | POLLWRNORM | POLLRDNORM | POLLPRI, 0};
|
||||
ev[EV_VIDEO] = EV_VIDEO;
|
||||
nfds = 1;
|
||||
return true;
|
||||
}
|
||||
|
||||
bool MsmVidc::sendPacket(int buf_index, AVPacket *pkt) {
|
||||
assert(buf_index >= 0 && buf_index < out_buf_cnt);
|
||||
assert(pkt != nullptr && pkt->data != nullptr && pkt->size > 0);
|
||||
// Prepare output buffer
|
||||
memset(this->out_buf_addr[buf_index], 0, this->out_buf_size);
|
||||
uint8_t * data = (uint8_t *)this->out_buf_addr[buf_index];
|
||||
memcpy(data, pkt->data, pkt->size);
|
||||
queueOutputBuffer(buf_index, pkt->size);
|
||||
return true;
|
||||
}
|
||||
|
||||
int MsmVidc::getBufferUnlocked() {
|
||||
for (int i = 0; i < this->out_buf_cnt; i++) {
|
||||
if (!out_buf_flag[i]) {
|
||||
return i;
|
||||
}
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
|
||||
bool MsmVidc::handleOutput() {
|
||||
struct v4l2_buffer buf = {0};
|
||||
struct v4l2_plane planes[1];
|
||||
buf.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
|
||||
buf.memory = V4L2_MEMORY_USERPTR;
|
||||
buf.m.planes = planes;
|
||||
buf.length = 1;
|
||||
util::safe_ioctl(this->fd, VIDIOC_DQBUF, &buf, "VIDIOC_DQBUF OUTPUT failed");
|
||||
this->out_buf_flag[buf.index] = false; // mark as not queued
|
||||
return true;
|
||||
}
|
||||
|
||||
bool MsmVidc::handleEvent() {
|
||||
// dequeue event
|
||||
struct v4l2_event event = {0};
|
||||
util::safe_ioctl(this->fd, VIDIOC_DQEVENT, &event, "VIDIOC_DQEVENT failed");
|
||||
switch (event.type) {
|
||||
case V4L2_EVENT_MSM_VIDC_PORT_SETTINGS_CHANGED_INSUFFICIENT: {
|
||||
unsigned int *ptr = (unsigned int *)event.u.data;
|
||||
unsigned int height = ptr[0];
|
||||
unsigned int width = ptr[1];
|
||||
this->w = width;
|
||||
this->h = height;
|
||||
LOGD("Port Reconfig received insufficient, new size %ux%u, flushing capture bufs...", width, height); // This is normal
|
||||
struct v4l2_decoder_cmd dec;
|
||||
dec.flags = V4L2_QCOM_CMD_FLUSH_CAPTURE;
|
||||
dec.cmd = V4L2_QCOM_CMD_FLUSH;
|
||||
util::safe_ioctl(this->fd, VIDIOC_DECODER_CMD, &dec, "VIDIOC_DECODER_CMD FLUSH_CAPTURE failed");
|
||||
this->reconfigure_pending = true;
|
||||
LOGD("Waiting for flush done event to reconfigure capture queue");
|
||||
break;
|
||||
}
|
||||
|
||||
case V4L2_EVENT_MSM_VIDC_FLUSH_DONE: {
|
||||
unsigned int *ptr = (unsigned int *)event.u.data;
|
||||
unsigned int flags = ptr[0];
|
||||
if (flags & V4L2_QCOM_CMD_FLUSH_CAPTURE) {
|
||||
if (this->reconfigure_pending) {
|
||||
this->restartCapture();
|
||||
this->reconfigure_pending = false;
|
||||
}
|
||||
}
|
||||
break;
|
||||
}
|
||||
default:
|
||||
break;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
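A note on the V4L2 naming used above, since it is easy to misread: for a memory-to-memory decoder the OUTPUT queue carries the application's compressed bitstream into the driver, and the CAPTURE queue carries decoded frames back out, which is why the code comments above call the output buffers "input" and the capture buffers "output". A small sketch of that mapping (illustrative constants, not part of this commit):

#include <linux/videodev2.h>

// V4L2 mem2mem decoder queues: names are from the driver's point of view.
static constexpr v4l2_buf_type kCompressedInQueue = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;   // HEVC packets in
static constexpr v4l2_buf_type kDecodedOutQueue   = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;  // NV12 frames out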
tools/replay/qcom_decoder.h (new file, 88 lines)
@@ -0,0 +1,88 @@
#pragma once

#include <linux/videodev2.h>
#include <poll.h>

#include "msgq/visionipc/visionbuf.h"

extern "C" {
#include <libavcodec/avcodec.h>
#include <libavformat/avformat.h>
}

#define V4L2_EVENT_MSM_VIDC_START (V4L2_EVENT_PRIVATE_START + 0x00001000)
#define V4L2_EVENT_MSM_VIDC_FLUSH_DONE (V4L2_EVENT_MSM_VIDC_START + 1)
#define V4L2_EVENT_MSM_VIDC_PORT_SETTINGS_CHANGED_INSUFFICIENT (V4L2_EVENT_MSM_VIDC_START + 3)
#define V4L2_CID_MPEG_MSM_VIDC_BASE 0x00992000
#define V4L2_CID_MPEG_VIDC_VIDEO_DPB_COLOR_FORMAT (V4L2_CID_MPEG_MSM_VIDC_BASE + 44)
#define V4L2_CID_MPEG_VIDC_VIDEO_STREAM_OUTPUT_MODE (V4L2_CID_MPEG_MSM_VIDC_BASE + 22)
#define V4L2_QCOM_CMD_FLUSH_CAPTURE (1 << 1)
#define V4L2_QCOM_CMD_FLUSH (4)

#define VIDEO_DEVICE "/dev/video32"
#define OUTPUT_BUFFER_COUNT 8
#define CAPTURE_BUFFER_COUNT 8
#define FPS 20


class MsmVidc {
public:
  MsmVidc() = default;
  ~MsmVidc();

  bool init(const char* dev, size_t width, size_t height, uint64_t codec);
  VisionBuf* decodeFrame(AVPacket* pkt, VisionBuf* buf);

  AVFormatContext* avctx = nullptr;
  int fd = 0;

private:
  bool initialized = false;
  bool reconfigure_pending = false;
  bool frame_ready = false;

  VisionBuf* current_output_buf = nullptr;
  VisionBuf out_buf; // Single input buffer
  VisionBuf cap_bufs[CAPTURE_BUFFER_COUNT]; // Capture (output) buffers

  size_t w = 1928, h = 1208;
  size_t cap_height = 0, cap_width = 0;

  int cap_buf_size = 0;
  int out_buf_size = 0;

  size_t cap_plane_off[CAPTURE_BUFFER_COUNT] = {0};
  size_t cap_plane_stride[CAPTURE_BUFFER_COUNT] = {0};
  bool cap_buf_flag[CAPTURE_BUFFER_COUNT] = {false};

  size_t out_buf_off[OUTPUT_BUFFER_COUNT] = {0};
  void* out_buf_addr[OUTPUT_BUFFER_COUNT] = {0};
  bool out_buf_flag[OUTPUT_BUFFER_COUNT] = {false};
  const int out_buf_cnt = OUTPUT_BUFFER_COUNT;

  const int subscriptions[2] = {
    V4L2_EVENT_MSM_VIDC_FLUSH_DONE,
    V4L2_EVENT_MSM_VIDC_PORT_SETTINGS_CHANGED_INSUFFICIENT
  };

  enum { EV_VIDEO, EV_COUNT };
  struct pollfd pfd[EV_COUNT] = {0};
  int ev[EV_COUNT] = {-1};
  int nfds = 0;

  VisionBuf* processEvents();
  bool setupOutput();
  bool subscribeEvents();
  bool setPlaneFormat(v4l2_buf_type type, uint32_t fourcc);
  bool setFPS(uint32_t fps);
  bool restartCapture();
  bool queueCaptureBuffer(int i);
  bool queueOutputBuffer(int i, size_t size);
  bool setDBP();
  bool setupPolling();
  bool sendPacket(int buf_index, AVPacket* pkt);
  int getBufferUnlocked();
  VisionBuf* handleCapture();
  bool handleOutput();
  bool handleEvent();
};
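For context, here is a hypothetical standalone driver of the MsmVidc interface declared above (not part of this commit; in the PR it is only reached through QcomVideoDecoder in the replay FrameReader). The stream index, stride, and buffer sizing are illustrative assumptions:

#include "tools/replay/qcom_decoder.h"

extern "C" {
#include <libavformat/avformat.h>
}

int main(int argc, char *argv[]) {
  if (argc < 2) return 1;

  AVFormatContext *ctx = nullptr;
  if (avformat_open_input(&ctx, argv[1], nullptr, nullptr) != 0) return 1;
  avformat_find_stream_info(ctx, nullptr);
  AVCodecParameters *par = ctx->streams[0]->codecpar;  // assumes stream 0 is the HEVC video stream

  MsmVidc vidc;
  if (!vidc.init(VIDEO_DEVICE, par->width, par->height, V4L2_PIX_FMT_HEVC)) return 1;
  vidc.avctx = ctx;

  // Destination NV12 buffer; the replay FrameReader normally supplies a VisionBuf
  // with the camera stream's real stride, these values are illustrative.
  VisionBuf out;
  size_t stride = par->width;
  size_t uv_offset = stride * par->height;
  out.allocate(uv_offset + uv_offset / 2);
  out.init_yuv(par->width, par->height, stride, uv_offset);

  AVPacket pkt;
  while (av_read_frame(ctx, &pkt) == 0) {
    vidc.decodeFrame(&pkt, &out);  // blocks until a decoded frame has been copied into `out`
    av_packet_unref(&pkt);
  }

  avformat_close_input(&ctx);
  return 0;
}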