changed some log messages

This commit is contained in:
Maxim Devaev
2021-11-16 02:24:41 +03:00
parent 78a12f7ed2
commit ea313d3517
7 changed files with 75 additions and 72 deletions

View File

@@ -72,7 +72,7 @@ Drop frames smaller than this limit. Useful if the device produces small\-sized
Don't re\-initialize device on timeout. Default: disabled. Don't re\-initialize device on timeout. Default: disabled.
.TP .TP
.BR \-t ", " \-\-dv\-timings .BR \-t ", " \-\-dv\-timings
Enable DV timings querying and events processing to automatic resolution change. Default: disabled. Enable DV-timings querying and events processing to automatic resolution change. Default: disabled.
.TP .TP
.BR \-b\ \fIN ", " \-\-buffers\ \fIN .BR \-b\ \fIN ", " \-\-buffers\ \fIN
The number of buffers to receive data from the device. Each buffer may be processed using an independent thread. The number of buffers to receive data from the device. Each buffer may be processed using an independent thread.

View File

@@ -80,7 +80,7 @@ unsigned frame_get_padding(const frame_s *frame) {
// case V4L2_PIX_FMT_H264: // case V4L2_PIX_FMT_H264:
case V4L2_PIX_FMT_MJPEG: case V4L2_PIX_FMT_MJPEG:
case V4L2_PIX_FMT_JPEG: bytes_per_pixel = 0; break; case V4L2_PIX_FMT_JPEG: bytes_per_pixel = 0; break;
default: assert(0 && "Unknown pixelformat"); default: assert(0 && "Unknown format");
} }
if (bytes_per_pixel > 0 && frame->stride > frame->width) { if (bytes_per_pixel > 0 && frame->stride > frame->width) {
return (frame->stride - frame->width * bytes_per_pixel); return (frame->stride - frame->width * bytes_per_pixel);

View File

@@ -186,7 +186,7 @@ void device_close(device_s *dev) {
if (dev->io_method == V4L2_MEMORY_MMAP) { if (dev->io_method == V4L2_MEMORY_MMAP) {
if (HW(raw.allocated) > 0 && HW(raw.data) != MAP_FAILED) { if (HW(raw.allocated) > 0 && HW(raw.data) != MAP_FAILED) {
if (munmap(HW(raw.data), HW(raw.allocated)) < 0) { if (munmap(HW(raw.data), HW(raw.allocated)) < 0) {
LOG_PERROR("Can't unmap device buffer %u", index); LOG_PERROR("Can't unmap device buffer index=%u", index);
} }
} }
} else { // V4L2_MEMORY_USERPTR } else { // V4L2_MEMORY_USERPTR
@@ -222,9 +222,9 @@ int device_export_to_dma(device_s *dev) {
exp.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; exp.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
exp.index = index; exp.index = index;
LOG_DEBUG("Calling ioctl(VIDIOC_EXPBUF) for buffer index=%u ...", index); LOG_DEBUG("Exporting device buffer index=%u to DMA ...", index);
if (xioctl(RUN(fd), VIDIOC_EXPBUF, &exp) < 0) { if (xioctl(RUN(fd), VIDIOC_EXPBUF, &exp) < 0) {
LOG_PERROR("Unable to export device buffer index=%u", index); LOG_PERROR("Unable to export device buffer index=%u to DMA", index);
goto error; goto error;
} }
DMA_FD = exp.fd; DMA_FD = exp.fd;
@@ -248,7 +248,7 @@ int device_switch_capturing(device_s *dev, bool enable) {
if (enable != RUN(capturing)) { if (enable != RUN(capturing)) {
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE; enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
LOG_DEBUG("Calling ioctl(%s) ...", (enable ? "VIDIOC_STREAMON" : "VIDIOC_STREAMOFF")); LOG_DEBUG("%s device capturing ...", (enable ? "Starting" : "Stopping"));
if (xioctl(RUN(fd), (enable ? VIDIOC_STREAMON : VIDIOC_STREAMOFF), &type) < 0) { if (xioctl(RUN(fd), (enable ? VIDIOC_STREAMON : VIDIOC_STREAMOFF), &type) < 0) {
LOG_PERROR("Unable to %s capturing", (enable ? "start" : "stop")); LOG_PERROR("Unable to %s capturing", (enable ? "start" : "stop"));
if (enable) { if (enable) {
@@ -389,7 +389,7 @@ int device_release_buffer(device_s *dev, hw_buffer_s *hw) {
int device_consume_event(device_s *dev) { int device_consume_event(device_s *dev) {
struct v4l2_event event; struct v4l2_event event;
LOG_DEBUG("Calling ioctl(VIDIOC_DQEVENT) ..."); LOG_DEBUG("Consuming V4L2 event ...");
if (xioctl(RUN(fd), VIDIOC_DQEVENT, &event) == 0) { if (xioctl(RUN(fd), VIDIOC_DQEVENT, &event) == 0) {
switch (event.type) { switch (event.type) {
case V4L2_EVENT_SOURCE_CHANGE: case V4L2_EVENT_SOURCE_CHANGE:
@@ -408,14 +408,14 @@ int device_consume_event(device_s *dev) {
static int _device_open_check_cap(device_s *dev) { static int _device_open_check_cap(device_s *dev) {
struct v4l2_capability cap = {0}; struct v4l2_capability cap = {0};
LOG_DEBUG("Calling ioctl(VIDIOC_QUERYCAP) ..."); LOG_DEBUG("Querying device capabilities ...");
if (xioctl(RUN(fd), VIDIOC_QUERYCAP, &cap) < 0) { if (xioctl(RUN(fd), VIDIOC_QUERYCAP, &cap) < 0) {
LOG_PERROR("Can't query device (VIDIOC_QUERYCAP)"); LOG_PERROR("Can't query device capabilities");
return -1; return -1;
} }
if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) { if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
LOG_ERROR("Video capture not supported by the device"); LOG_ERROR("Video capture is not supported by device");
return -1; return -1;
} }
@@ -446,7 +446,7 @@ static int _device_open_check_cap(device_s *dev) {
static int _device_open_dv_timings(device_s *dev) { static int _device_open_dv_timings(device_s *dev) {
_device_apply_resolution(dev, dev->width, dev->height); _device_apply_resolution(dev, dev->width, dev->height);
if (dev->dv_timings) { if (dev->dv_timings) {
LOG_DEBUG("Using DV timings"); LOG_DEBUG("Using DV-timings");
if (_device_apply_dv_timings(dev) < 0) { if (_device_apply_dv_timings(dev) < 0) {
return -1; return -1;
@@ -455,9 +455,9 @@ static int _device_open_dv_timings(device_s *dev) {
struct v4l2_event_subscription sub = {0}; struct v4l2_event_subscription sub = {0};
sub.type = V4L2_EVENT_SOURCE_CHANGE; sub.type = V4L2_EVENT_SOURCE_CHANGE;
LOG_DEBUG("Calling ioctl(VIDIOC_SUBSCRIBE_EVENT) ..."); LOG_DEBUG("Subscribing to DV-timings events ...");
if (xioctl(RUN(fd), VIDIOC_SUBSCRIBE_EVENT, &sub) < 0) { if (xioctl(RUN(fd), VIDIOC_SUBSCRIBE_EVENT, &sub) < 0) {
LOG_PERROR("Can't subscribe to V4L2_EVENT_SOURCE_CHANGE"); LOG_PERROR("Can't subscribe to DV-timings events");
return -1; return -1;
} }
} }
@@ -488,7 +488,7 @@ static int _device_apply_dv_timings(device_s *dev) {
LOG_DEBUG("Calling ioctl(VIDIOC_S_DV_TIMINGS) ..."); LOG_DEBUG("Calling ioctl(VIDIOC_S_DV_TIMINGS) ...");
if (xioctl(RUN(fd), VIDIOC_S_DV_TIMINGS, &dv) < 0) { if (xioctl(RUN(fd), VIDIOC_S_DV_TIMINGS, &dv) < 0) {
LOG_PERROR("Failed to set DV timings"); LOG_PERROR("Failed to set DV-timings");
return -1; return -1;
} }
@@ -521,10 +521,10 @@ static int _device_open_format(device_s *dev, bool first) {
fmt.fmt.pix.bytesperline = stride; fmt.fmt.pix.bytesperline = stride;
// Set format // Set format
LOG_DEBUG("Calling ioctl(VIDIOC_S_FMT) ..."); LOG_DEBUG("Probing device format=%s, stride=%u, resolution=%ux%u ...",
_format_to_string_supported(dev->format), stride, RUN(width), RUN(height));
if (xioctl(RUN(fd), VIDIOC_S_FMT, &fmt) < 0) { if (xioctl(RUN(fd), VIDIOC_S_FMT, &fmt) < 0) {
LOG_PERROR("Unable to set pixelformat=%s, stride=%u, resolution=%ux%u", LOG_PERROR("Unable to set device format");
_format_to_string_supported(dev->format), stride, RUN(width), RUN(height));
return -1; return -1;
} }
@@ -544,23 +544,23 @@ static int _device_open_format(device_s *dev, bool first) {
// Check format // Check format
if (fmt.fmt.pix.pixelformat != dev->format) { if (fmt.fmt.pix.pixelformat != dev->format) {
LOG_ERROR("Could not obtain the requested pixelformat=%s; driver gave us %s", LOG_ERROR("Could not obtain the requested format=%s; driver gave us %s",
_format_to_string_supported(dev->format), _format_to_string_supported(dev->format),
_format_to_string_supported(fmt.fmt.pix.pixelformat)); _format_to_string_supported(fmt.fmt.pix.pixelformat));
char *format_str; char *format_str;
if ((format_str = (char *)_format_to_string_nullable(fmt.fmt.pix.pixelformat)) != NULL) { if ((format_str = (char *)_format_to_string_nullable(fmt.fmt.pix.pixelformat)) != NULL) {
LOG_INFO("Falling back to pixelformat=%s", format_str); LOG_INFO("Falling back to format=%s", format_str);
} else { } else {
char fourcc_str[8]; char fourcc_str[8];
LOG_ERROR("Unsupported pixelformat=%s (fourcc)", LOG_ERROR("Unsupported format=%s (fourcc)",
fourcc_to_string(fmt.fmt.pix.pixelformat, fourcc_str, 8)); fourcc_to_string(fmt.fmt.pix.pixelformat, fourcc_str, 8));
return -1; return -1;
} }
} }
RUN(format) = fmt.fmt.pix.pixelformat; RUN(format) = fmt.fmt.pix.pixelformat;
LOG_INFO("Using pixelformat: %s", _format_to_string_supported(RUN(format))); LOG_INFO("Using format: %s", _format_to_string_supported(RUN(format)));
RUN(stride) = fmt.fmt.pix.bytesperline; RUN(stride) = fmt.fmt.pix.bytesperline;
RUN(raw_size) = fmt.fmt.pix.sizeimage; // Only for userptr RUN(raw_size) = fmt.fmt.pix.sizeimage; // Only for userptr
@@ -573,9 +573,9 @@ static void _device_open_hw_fps(device_s *dev) {
struct v4l2_streamparm setfps = {0}; struct v4l2_streamparm setfps = {0};
setfps.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; setfps.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
LOG_DEBUG("Calling ioctl(VIDIOC_G_PARM) ..."); LOG_DEBUG("Querying HW FPS ...");
if (xioctl(RUN(fd), VIDIOC_G_PARM, &setfps) < 0) { if (xioctl(RUN(fd), VIDIOC_G_PARM, &setfps) < 0) {
if (errno == ENOTTY) { // Quiet message for Auvidea B101 if (errno == ENOTTY) { // Quiet message for TC358743
LOG_INFO("Querying HW FPS changing is not supported"); LOG_INFO("Querying HW FPS changing is not supported");
} else { } else {
LOG_PERROR("Unable to query HW FPS changing"); LOG_PERROR("Unable to query HW FPS changing");
@@ -657,9 +657,9 @@ static int _device_open_io_method_mmap(device_s *dev) {
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory = V4L2_MEMORY_MMAP; req.memory = V4L2_MEMORY_MMAP;
LOG_DEBUG("Calling ioctl(VIDIOC_REQBUFS) for V4L2_MEMORY_MMAP ..."); LOG_DEBUG("Requesting %u device buffers for MMAP ...", req.count);
if (xioctl(RUN(fd), VIDIOC_REQBUFS, &req) < 0) { if (xioctl(RUN(fd), VIDIOC_REQBUFS, &req) < 0) {
LOG_PERROR("Device '%s' doesn't support V4L2_MEMORY_MMAP", dev->path); LOG_PERROR("Device '%s' doesn't support MMAP method", dev->path);
return -1; return -1;
} }
@@ -679,7 +679,7 @@ static int _device_open_io_method_mmap(device_s *dev) {
buf.memory = V4L2_MEMORY_MMAP; buf.memory = V4L2_MEMORY_MMAP;
buf.index = RUN(n_bufs); buf.index = RUN(n_bufs);
LOG_DEBUG("Calling ioctl(VIDIOC_QUERYBUF) for device buffer %u ...", RUN(n_bufs)); LOG_DEBUG("Calling ioctl(VIDIOC_QUERYBUF) for device buffer index=%u ...", RUN(n_bufs));
if (xioctl(RUN(fd), VIDIOC_QUERYBUF, &buf) < 0) { if (xioctl(RUN(fd), VIDIOC_QUERYBUF, &buf) < 0) {
LOG_PERROR("Can't VIDIOC_QUERYBUF"); LOG_PERROR("Can't VIDIOC_QUERYBUF");
return -1; return -1;
@@ -691,7 +691,7 @@ static int _device_open_io_method_mmap(device_s *dev) {
A_MUTEX_INIT(&HW(grabbed_mutex)); A_MUTEX_INIT(&HW(grabbed_mutex));
LOG_DEBUG("Mapping device buffer %u ...", RUN(n_bufs)); LOG_DEBUG("Mapping device buffer index=%u ...", RUN(n_bufs));
if ((HW(raw.data) = mmap( if ((HW(raw.data) = mmap(
NULL, NULL,
buf.length, buf.length,
@@ -700,7 +700,7 @@ static int _device_open_io_method_mmap(device_s *dev) {
RUN(fd), RUN(fd),
buf.m.offset buf.m.offset
)) == MAP_FAILED) { )) == MAP_FAILED) {
LOG_PERROR("Can't map device buffer %u", RUN(n_bufs)); LOG_PERROR("Can't map device buffer index=%u", RUN(n_bufs));
return -1; return -1;
} }
HW(raw.allocated) = buf.length; HW(raw.allocated) = buf.length;
@@ -716,9 +716,9 @@ static int _device_open_io_method_userptr(device_s *dev) {
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE; req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.memory = V4L2_MEMORY_USERPTR; req.memory = V4L2_MEMORY_USERPTR;
LOG_DEBUG("Calling ioctl(VIDIOC_REQBUFS) for V4L2_MEMORY_USERPTR ..."); LOG_DEBUG("Requesting %u device buffers for USERPTR ...", req.count);
if (xioctl(RUN(fd), VIDIOC_REQBUFS, &req) < 0) { if (xioctl(RUN(fd), VIDIOC_REQBUFS, &req) < 0) {
LOG_PERROR("Device '%s' doesn't support V4L2_MEMORY_USERPTR", dev->path); LOG_PERROR("Device '%s' doesn't support USERPTR method", dev->path);
return -1; return -1;
} }
@@ -757,7 +757,7 @@ static int _device_open_queue_buffers(device_s *dev) {
buf.length = RUN(hw_bufs)[index].raw.allocated; buf.length = RUN(hw_bufs)[index].raw.allocated;
} }
LOG_DEBUG("Calling ioctl(VIDIOC_QBUF) for buffer %u ...", index); LOG_DEBUG("Calling ioctl(VIDIOC_QBUF) for buffer index=%u ...", index);
if (xioctl(RUN(fd), VIDIOC_QBUF, &buf) < 0) { if (xioctl(RUN(fd), VIDIOC_QBUF, &buf) < 0) {
LOG_PERROR("Can't VIDIOC_QBUF"); LOG_PERROR("Can't VIDIOC_QBUF");
return -1; return -1;
@@ -768,7 +768,7 @@ static int _device_open_queue_buffers(device_s *dev) {
static int _device_apply_resolution(device_s *dev, unsigned width, unsigned height) { static int _device_apply_resolution(device_s *dev, unsigned width, unsigned height) {
// Тут VIDEO_MIN_* не используются из-за странностей минимального разрешения при отсутствии сигнала // Тут VIDEO_MIN_* не используются из-за странностей минимального разрешения при отсутствии сигнала
// у некоторых устройств, например Auvidea B101 // у некоторых устройств, например TC358743
if ( if (
width == 0 || width > VIDEO_MAX_WIDTH width == 0 || width > VIDEO_MAX_WIDTH
|| height == 0 || height > VIDEO_MAX_HEIGHT || height == 0 || height > VIDEO_MAX_HEIGHT

View File

@@ -234,7 +234,7 @@ static bool _worker_run_job(worker_s *wr) {
# define ER(_next) job->enc->run->_next # define ER(_next) job->enc->run->_next
LOG_DEBUG("Worker %s compressing JPEG from buffer %u ...", wr->name, job->hw->buf.index); LOG_DEBUG("Worker %s compressing JPEG from buffer index=%u ...", wr->name, job->hw->buf.index);
assert(ER(type) != ENCODER_TYPE_UNKNOWN); assert(ER(type) != ENCODER_TYPE_UNKNOWN);
assert(src->used > 0); assert(src->used > 0);

View File

@@ -279,7 +279,7 @@ static int _omx_setup_input(omx_encoder_s *omx, const frame_s *frame) {
// FIXME: RGB24 не работает нормально, нижняя половина экрана зеленая. // FIXME: RGB24 не работает нормально, нижняя половина экрана зеленая.
// FIXME: Китайский EasyCap тоже не работает, мусор на экране. // FIXME: Китайский EasyCap тоже не работает, мусор на экране.
// Вероятно обе проблемы вызваны некорректной реализацией OMX на пае. // Вероятно обе проблемы вызваны некорректной реализацией OMX на пае.
default: assert(0 && "Unsupported pixelformat"); default: assert(0 && "Unsupported format");
} }
# undef IFMT # undef IFMT

View File

@@ -70,7 +70,7 @@ bool h264_encoder_is_prepared_for(h264_encoder_s *enc, const frame_s *frame, boo
} }
int h264_encoder_prepare(h264_encoder_s *enc, const frame_s *frame, bool dma) { int h264_encoder_prepare(h264_encoder_s *enc, const frame_s *frame, bool dma) {
LOG_INFO("H264: Configuring encoder: dma=%d ...", dma); LOG_INFO("H264: Configuring encoder: DMA=%d ...", dma);
_h264_encoder_cleanup(enc); _h264_encoder_cleanup(enc);
@@ -112,8 +112,8 @@ int h264_encoder_prepare(h264_encoder_s *enc, const frame_s *frame, bool dma) {
fmt.fmt.pix_mp.field = V4L2_FIELD_ANY; fmt.fmt.pix_mp.field = V4L2_FIELD_ANY;
fmt.fmt.pix_mp.colorspace = V4L2_COLORSPACE_JPEG; // libcamera currently has no means to request the right colour space fmt.fmt.pix_mp.colorspace = V4L2_COLORSPACE_JPEG; // libcamera currently has no means to request the right colour space
fmt.fmt.pix_mp.num_planes = 1; fmt.fmt.pix_mp.num_planes = 1;
LOG_DEBUG("H264: Configuring input format ..."); LOG_DEBUG("H264: Configuring INPUT format ...");
ENCODER_XIOCTL(VIDIOC_S_FMT, &fmt, "H264: Can't set input format"); ENCODER_XIOCTL(VIDIOC_S_FMT, &fmt, "H264: Can't set INPUT format");
} }
{ {
@@ -127,8 +127,8 @@ int h264_encoder_prepare(h264_encoder_s *enc, const frame_s *frame, bool dma) {
fmt.fmt.pix_mp.num_planes = 1; fmt.fmt.pix_mp.num_planes = 1;
fmt.fmt.pix_mp.plane_fmt[0].bytesperline = 0; fmt.fmt.pix_mp.plane_fmt[0].bytesperline = 0;
fmt.fmt.pix_mp.plane_fmt[0].sizeimage = 512 << 10; fmt.fmt.pix_mp.plane_fmt[0].sizeimage = 512 << 10;
LOG_DEBUG("H264: Configuring output format ..."); LOG_DEBUG("H264: Configuring OUTPUT format ...");
ENCODER_XIOCTL(VIDIOC_S_FMT, &fmt, "H264: Can't set output format"); ENCODER_XIOCTL(VIDIOC_S_FMT, &fmt, "H264: Can't set OUTPUT format");
} }
{ {
@@ -136,30 +136,31 @@ int h264_encoder_prepare(h264_encoder_s *enc, const frame_s *frame, bool dma) {
setfps.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; setfps.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
setfps.parm.output.timeperframe.numerator = 1; setfps.parm.output.timeperframe.numerator = 1;
setfps.parm.output.timeperframe.denominator = enc->fps; setfps.parm.output.timeperframe.denominator = enc->fps;
LOG_DEBUG("H264: Configuring input FPS ..."); LOG_DEBUG("H264: Configuring INPUT FPS ...");
ENCODER_XIOCTL(VIDIOC_S_PARM, &setfps, "H264: Can't set input FPS"); ENCODER_XIOCTL(VIDIOC_S_PARM, &setfps, "H264: Can't set INPUT FPS");
} }
if (_h264_encoder_init_buffers(enc, (dma ? "input-dma" : "input"), V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE, if (_h264_encoder_init_buffers(enc, (dma ? "INPUT-DMA" : "INPUT"), V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE,
&enc->input_bufs, &enc->n_input_bufs, dma) < 0) { &enc->input_bufs, &enc->n_input_bufs, dma) < 0) {
goto error; goto error;
} }
if (_h264_encoder_init_buffers(enc, "output", V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE, if (_h264_encoder_init_buffers(enc, "OUTPUT", V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE,
&enc->output_bufs, &enc->n_output_bufs, false) < 0) { &enc->output_bufs, &enc->n_output_bufs, false) < 0) {
goto error; goto error;
} }
{ {
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
LOG_DEBUG("H264: Starting input ..."); LOG_DEBUG("H264: Starting INPUT ...");
ENCODER_XIOCTL(VIDIOC_STREAMON, &type, "H264: Can't start input"); ENCODER_XIOCTL(VIDIOC_STREAMON, &type, "H264: Can't start INPUT");
type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
LOG_DEBUG("H264: Starting output ..."); LOG_DEBUG("H264: Starting OUTPUT ...");
ENCODER_XIOCTL(VIDIOC_STREAMON, &type, "H264: Can't start output"); ENCODER_XIOCTL(VIDIOC_STREAMON, &type, "H264: Can't start OUTPUT");
} }
enc->ready = true; enc->ready = true;
LOG_DEBUG("H264: Encoder state: *** READY ***");
return 0; return 0;
error: error:
@@ -200,10 +201,10 @@ static int _h264_encoder_init_buffers(
buf.length = 1; buf.length = 1;
buf.m.planes = &plane; buf.m.planes = &plane;
LOG_DEBUG("H264: Querying %s buffer %u ...", name, *n_bufs_ptr); LOG_DEBUG("H264: Querying %s buffer index=%u ...", name, *n_bufs_ptr);
ENCODER_XIOCTL(VIDIOC_QUERYBUF, &buf, "H264: Can't query %s buffer %u", name, *n_bufs_ptr); ENCODER_XIOCTL(VIDIOC_QUERYBUF, &buf, "H264: Can't query %s buffer index=%u", name, *n_bufs_ptr);
LOG_DEBUG("H264: Mapping %s buffer %u ...", name, *n_bufs_ptr); LOG_DEBUG("H264: Mapping %s buffer index=%u ...", name, *n_bufs_ptr);
if (((*bufs_ptr)[*n_bufs_ptr].data = mmap( if (((*bufs_ptr)[*n_bufs_ptr].data = mmap(
NULL, NULL,
plane.length, plane.length,
@@ -212,13 +213,13 @@ static int _h264_encoder_init_buffers(
enc->fd, enc->fd,
plane.m.mem_offset plane.m.mem_offset
)) == MAP_FAILED) { )) == MAP_FAILED) {
LOG_PERROR("H264: Can't map %s buffer %u", name, *n_bufs_ptr); LOG_PERROR("H264: Can't map %s buffer index=%u", name, *n_bufs_ptr);
goto error; goto error;
} }
(*bufs_ptr)[*n_bufs_ptr].allocated = plane.length; (*bufs_ptr)[*n_bufs_ptr].allocated = plane.length;
LOG_DEBUG("H264: Queuing %s buffer %u ...", name, *n_bufs_ptr); LOG_DEBUG("H264: Queuing %s buffer index=%u ...", name, *n_bufs_ptr);
ENCODER_XIOCTL(VIDIOC_QBUF, &buf, "H264: Can't queue %s buffer %u", name, *n_bufs_ptr); ENCODER_XIOCTL(VIDIOC_QBUF, &buf, "H264: Can't queue %s buffer index=%u", name, *n_bufs_ptr);
} }
} }
@@ -237,18 +238,18 @@ static void _h264_encoder_cleanup(h264_encoder_s *enc) {
} \ } \
} }
STOP_STREAM("output", V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE); STOP_STREAM("OUTPUT", V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE);
STOP_STREAM("input", V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE); STOP_STREAM("INPUT", V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE);
# undef STOP_STREAM # undef STOP_STREAM
} }
# define DESTROY_BUFFERS(_target) { \ # define DESTROY_BUFFERS(_name, _target) { \
if (enc->_target##_bufs) { \ if (enc->_target##_bufs) { \
for (unsigned index = 0; index < enc->n_##_target##_bufs; ++index) { \ for (unsigned index = 0; index < enc->n_##_target##_bufs; ++index) { \
if (enc->_target##_bufs[index].allocated > 0 && enc->_target##_bufs[index].data != MAP_FAILED) { \ if (enc->_target##_bufs[index].allocated > 0 && enc->_target##_bufs[index].data != MAP_FAILED) { \
if (munmap(enc->_target##_bufs[index].data, enc->_target##_bufs[index].allocated) < 0) { \ if (munmap(enc->_target##_bufs[index].data, enc->_target##_bufs[index].allocated) < 0) { \
LOG_PERROR("H264: Can't unmap %s buffer %u", #_target, index); \ LOG_PERROR("H264: Can't unmap %s buffer index=%u", #_name, index); \
} \ } \
} \ } \
} \ } \
@@ -258,8 +259,8 @@ static void _h264_encoder_cleanup(h264_encoder_s *enc) {
enc->n_##_target##_bufs = 0; \ enc->n_##_target##_bufs = 0; \
} }
DESTROY_BUFFERS(output); DESTROY_BUFFERS("OUTPUT", output);
DESTROY_BUFFERS(input); DESTROY_BUFFERS("INPUT", input);
# undef DESTROY_BUFFERS # undef DESTROY_BUFFERS
@@ -272,6 +273,8 @@ static void _h264_encoder_cleanup(h264_encoder_s *enc) {
enc->last_online = -1; enc->last_online = -1;
enc->ready = false; enc->ready = false;
LOG_DEBUG("H264: Encoder state: ~~~ NOT READY ~~~");
} }
int h264_encoder_compress(h264_encoder_s *enc, const frame_s *src, int src_dma_fd, frame_s *dest, bool force_key) { int h264_encoder_compress(h264_encoder_s *enc, const frame_s *src, int src_dma_fd, frame_s *dest, bool force_key) {
@@ -334,18 +337,18 @@ static int _h264_encoder_compress_raw(
input_buf.memory = V4L2_MEMORY_DMABUF; input_buf.memory = V4L2_MEMORY_DMABUF;
input_buf.field = V4L2_FIELD_NONE; input_buf.field = V4L2_FIELD_NONE;
input_plane.m.fd = src_dma_fd; input_plane.m.fd = src_dma_fd;
LOG_DEBUG("H264: Using input-dma buffer %u", input_buf.index); LOG_DEBUG("H264: Using INPUT-DMA buffer index=%u", input_buf.index);
} else { } else {
assert(src_dma_fd < 0); assert(src_dma_fd < 0);
input_buf.memory = V4L2_MEMORY_MMAP; input_buf.memory = V4L2_MEMORY_MMAP;
LOG_DEBUG("H264: Grabbing input buffer ..."); LOG_DEBUG("H264: Grabbing INPUT buffer ...");
ENCODER_XIOCTL(VIDIOC_DQBUF, &input_buf, "H264: Can't grab input buffer"); ENCODER_XIOCTL(VIDIOC_DQBUF, &input_buf, "H264: Can't grab INPUT buffer");
if (input_buf.index >= enc->n_input_bufs) { if (input_buf.index >= enc->n_input_bufs) {
LOG_ERROR("H264: V4L2 error: grabbed invalid input buffer: index=%u, n_bufs=%u", LOG_ERROR("H264: V4L2 error: grabbed invalid INPUT buffer: index=%u, n_bufs=%u",
input_buf.index, enc->n_input_bufs); input_buf.index, enc->n_input_bufs);
goto error; goto error;
} }
LOG_DEBUG("H264: Grabbed input buffer %u", input_buf.index); LOG_DEBUG("H264: Grabbed INPUT buffer index=%u", input_buf.index);
} }
uint64_t now = get_now_monotonic_u64(); uint64_t now = get_now_monotonic_u64();
@@ -357,7 +360,7 @@ static int _h264_encoder_compress_raw(
memcpy(enc->input_bufs[input_buf.index].data, src->data, src->used); memcpy(enc->input_bufs[input_buf.index].data, src->data, src->used);
} }
const char *input_name = (enc->dma ? "input-dma" : "input"); const char *input_name = (enc->dma ? "INPUT-DMA" : "INPUT");
LOG_DEBUG("H264: Sending %s buffer ...", input_name); LOG_DEBUG("H264: Sending %s buffer ...", input_name);
ENCODER_XIOCTL(VIDIOC_QBUF, &input_buf, "H264: Can't send %s buffer", input_name); ENCODER_XIOCTL(VIDIOC_QBUF, &input_buf, "H264: Can't send %s buffer", input_name);
@@ -373,8 +376,8 @@ static int _h264_encoder_compress_raw(
if (enc_poll.revents & POLLIN) { if (enc_poll.revents & POLLIN) {
if (!input_released) { if (!input_released) {
LOG_DEBUG("H264: Releasing %s buffer %u ...", input_name, input_buf.index); LOG_DEBUG("H264: Releasing %s buffer index=%u ...", input_name, input_buf.index);
ENCODER_XIOCTL(VIDIOC_DQBUF, &input_buf, "H264: Can't release %s buffer %u", ENCODER_XIOCTL(VIDIOC_DQBUF, &input_buf, "H264: Can't release %s buffer index=%u",
input_name, input_buf.index); input_name, input_buf.index);
input_released = true; input_released = true;
} }
@@ -385,14 +388,14 @@ static int _h264_encoder_compress_raw(
output_buf.memory = V4L2_MEMORY_MMAP; output_buf.memory = V4L2_MEMORY_MMAP;
output_buf.length = 1; output_buf.length = 1;
output_buf.m.planes = &output_plane; output_buf.m.planes = &output_plane;
LOG_DEBUG("H264: Fetching output buffer ..."); LOG_DEBUG("H264: Fetching OUTPUT buffer ...");
ENCODER_XIOCTL(VIDIOC_DQBUF, &output_buf, "H264: Can't fetch output buffer"); ENCODER_XIOCTL(VIDIOC_DQBUF, &output_buf, "H264: Can't fetch OUTPUT buffer");
frame_set_data(dest, enc->output_bufs[output_buf.index].data, output_plane.bytesused); frame_set_data(dest, enc->output_bufs[output_buf.index].data, output_plane.bytesused);
dest->key = output_buf.flags & V4L2_BUF_FLAG_KEYFRAME; dest->key = output_buf.flags & V4L2_BUF_FLAG_KEYFRAME;
LOG_DEBUG("H264: Releasing output buffer %u ...", output_buf.index); LOG_DEBUG("H264: Releasing OUTPUT buffer index=%u ...", output_buf.index);
ENCODER_XIOCTL(VIDIOC_QBUF, &output_buf, "H264: Can't release output buffer %u", output_buf.index); ENCODER_XIOCTL(VIDIOC_QBUF, &output_buf, "H264: Can't release OUTPUT buffer index=%u", output_buf.index);
break; break;
} }
} }

View File

@@ -601,7 +601,7 @@ static void _help(FILE *fp, device_s *dev, encoder_s *enc, stream_s *stream, ser
SAY("    -z|--min-frame-size <N>  ───────────── Drop frames smaller than this limit. Useful if the device"); SAY("    -z|--min-frame-size <N>  ───────────── Drop frames smaller than this limit. Useful if the device");
SAY(" produces small-sized garbage frames. Default: %zu bytes.\n", dev->min_frame_size); SAY(" produces small-sized garbage frames. Default: %zu bytes.\n", dev->min_frame_size);
SAY(" -n|--persistent ───────────────────── Don't re-initialize device on timeout. Default: disabled.\n"); SAY(" -n|--persistent ───────────────────── Don't re-initialize device on timeout. Default: disabled.\n");
SAY(" -t|--dv-timings ───────────────────── Enable DV timings querying and events processing"); SAY(" -t|--dv-timings ───────────────────── Enable DV-timings querying and events processing");
SAY(" to automatic resolution change. Default: disabled.\n"); SAY(" to automatic resolution change. Default: disabled.\n");
SAY(" -b|--buffers <N> ──────────────────── The number of buffers to receive data from the device."); SAY(" -b|--buffers <N> ──────────────────── The number of buffers to receive data from the device.");
SAY("                                          Each buffer may be processed using an independent thread."); SAY("                                          Each buffer may be processed using an independent thread.");