mirror of https://github.com/pikvm/ustreamer.git
synced 2026-02-19 08:16:31 +00:00

Compare commits

9 Commits

| Author | SHA1 | Date |
|---|---|---|
|  | 33fdf9bf43 |  |
|  | 6bd4ef59c0 |  |
|  | 79987da1bf |  |
|  | 05e5db09e4 |  |
|  | 55e432a529 |  |
|  | 4732c85ec4 |  |
|  | 0ce7f28754 |  |
|  | a2641dfcb6 |  |
|  | ec33425c05 |  |
@@ -1,7 +1,7 @@
 [bumpversion]
 commit = True
 tag = True
-current_version = 5.43
+current_version = 5.45
 parse = (?P<major>\d+)\.(?P<minor>\d+)
 serialize =
 	{major}.{minor}

@@ -1,6 +1,6 @@
 .\" Manpage for ustreamer-dump.
 .\" Open an issue or pull request to https://github.com/pikvm/ustreamer to correct errors or typos
-.TH USTREAMER-DUMP 1 "version 5.43" "January 2021"
+.TH USTREAMER-DUMP 1 "version 5.45" "January 2021"

 .SH NAME
 ustreamer-dump \- Dump uStreamer's memory sink to file

@@ -1,6 +1,6 @@
 .\" Manpage for ustreamer.
 .\" Open an issue or pull request to https://github.com/pikvm/ustreamer to correct errors or typos
-.TH USTREAMER 1 "version 5.43" "November 2020"
+.TH USTREAMER 1 "version 5.45" "November 2020"

 .SH NAME
 ustreamer \- stream MJPEG video from any V4L2 device to the network

@@ -248,7 +248,7 @@ Timeout for lock. Default: 1.
 H264 bitrate in Kbps. Default: 5000.
 .TP
 .BR \-\-h264\-gop\ \fIN
-Intarval between keyframes. Default: 30.
+Interval between keyframes. Default: 30.
 .TP
 .BR \-\-h264\-m2m\-device\ \fI/dev/path
 Path to V4L2 mem-to-mem encoder device. Default: auto-select.

@@ -3,7 +3,7 @@


 pkgname=ustreamer
-pkgver=5.43
+pkgver=5.45
 pkgrel=1
 pkgdesc="Lightweight and fast MJPEG-HTTP streamer"
 url="https://github.com/pikvm/ustreamer"

@@ -6,7 +6,7 @@
 include $(TOPDIR)/rules.mk

 PKG_NAME:=ustreamer
-PKG_VERSION:=5.43
+PKG_VERSION:=5.45
 PKG_RELEASE:=1
 PKG_MAINTAINER:=Maxim Devaev <mdevaev@gmail.com>

@@ -17,7 +17,7 @@ def _find_sources(suffix: str) -> list[str]:
 if __name__ == "__main__":
     setup(
         name="ustreamer",
-        version="5.43",
+        version="5.45",
         description="uStreamer tools",
         author="Maxim Devaev",
         author_email="mdevaev@gmail.com",

@@ -23,7 +23,7 @@
 #pragma once

 #define US_VERSION_MAJOR 5
-#define US_VERSION_MINOR 43
+#define US_VERSION_MINOR 45

 #define US_MAKE_VERSION2(_major, _minor) #_major "." #_minor
 #define US_MAKE_VERSION1(_major, _minor) US_MAKE_VERSION2(_major, _minor)

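For reference, a minimal sketch (not part of the diff) of how the two-level stringification macros above produce the version string; the combining macro name `US_VERSION` is an assumption, only `US_MAKE_VERSION2`/`US_MAKE_VERSION1` appear in the hunk:

```c
#include <stdio.h>

#define US_VERSION_MAJOR 5
#define US_VERSION_MINOR 45

// Two-level expansion: US_MAKE_VERSION1 expands its arguments first,
// then US_MAKE_VERSION2 stringizes them and glues "5" "." "45" -> "5.45".
#define US_MAKE_VERSION2(_major, _minor) #_major "." #_minor
#define US_MAKE_VERSION1(_major, _minor) US_MAKE_VERSION2(_major, _minor)
#define US_VERSION US_MAKE_VERSION1(US_VERSION_MAJOR, US_VERSION_MINOR) // assumed combiner

int main(void) {
	printf("%s\n", US_VERSION); // prints: 5.45
	return 0;
}
```
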
@@ -75,6 +75,7 @@ unsigned us_frame_get_padding(const us_frame_s *frame) {
 		case V4L2_PIX_FMT_YUYV:
 		case V4L2_PIX_FMT_UYVY:
 		case V4L2_PIX_FMT_RGB565: bytes_per_pixel = 2; break;
+		case V4L2_PIX_FMT_BGR24:
 		case V4L2_PIX_FMT_RGB24: bytes_per_pixel = 3; break;
 		// case V4L2_PIX_FMT_H264:
 		case V4L2_PIX_FMT_MJPEG:

@@ -41,6 +41,7 @@ static const struct {
 	{"UYVY", V4L2_PIX_FMT_UYVY},
 	{"RGB565", V4L2_PIX_FMT_RGB565},
 	{"RGB24", V4L2_PIX_FMT_RGB24},
+	{"BGR24", V4L2_PIX_FMT_BGR24},
 	{"MJPEG", V4L2_PIX_FMT_MJPEG},
 	{"JPEG", V4L2_PIX_FMT_JPEG},
 };

@@ -54,6 +55,7 @@ static const struct {
 };


+static void _v4l2_buffer_copy(const struct v4l2_buffer *src, struct v4l2_buffer *dest);
 static bool _device_is_buffer_valid(us_device_s *dev, const struct v4l2_buffer *buf, const uint8_t *data);
 static int _device_open_check_cap(us_device_s *dev);
 static int _device_open_dv_timings(us_device_s *dev);

@@ -83,6 +85,7 @@ static const char *_io_method_to_string_supported(enum v4l2_memory io_method);

 #define _RUN(x_next) dev->run->x_next
 #define _D_XIOCTL(...) us_xioctl(_RUN(fd), __VA_ARGS__)
+#define _D_IS_MPLANE (_RUN(capture_type) == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)


 us_device_s *us_device_init(void) {

@@ -195,6 +198,10 @@ void us_device_close(us_device_s *dev) {
 			US_DELETE(HW(raw.data), free);
 		}
+
+		if (_D_IS_MPLANE) {
+			free(HW(buf.m.planes));
+		}

 #		undef HW
 	}
 	_RUN(n_bufs) = 0;

@@ -218,7 +225,7 @@ int us_device_export_to_dma(us_device_s *dev) {

 	for (unsigned index = 0; index < _RUN(n_bufs); ++index) {
 		struct v4l2_exportbuffer exp = {0};
-		exp.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+		exp.type = _RUN(capture_type);
 		exp.index = index;

 		US_LOG_DEBUG("Exporting device buffer=%u to DMA ...", index);

@@ -245,7 +252,7 @@ int us_device_export_to_dma(us_device_s *dev) {

 int us_device_switch_capturing(us_device_s *dev, bool enable) {
 	if (enable != _RUN(capturing)) {
-		enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+		enum v4l2_buf_type type = _RUN(capture_type);

 		US_LOG_DEBUG("%s device capturing ...", (enable ? "Starting" : "Stopping"));
 		if (_D_XIOCTL((enable ? VIDIOC_STREAMON : VIDIOC_STREAMOFF), &type) < 0) {

@@ -311,6 +318,12 @@ int us_device_grab_buffer(us_device_s *dev, us_hw_buffer_s **hw) {
 	*hw = NULL;

 	struct v4l2_buffer buf = {0};
+	struct v4l2_plane buf_planes[VIDEO_MAX_PLANES] = {0};
+	if (_D_IS_MPLANE) {
+		// Just for _v4l2_buffer_copy(), buf.length is not needed here
+		buf.m.planes = buf_planes;
+	}
+
 	bool buf_got = false;
 	unsigned skipped = 0;
 	bool broken = false;

@@ -319,8 +332,14 @@ int us_device_grab_buffer(us_device_s *dev, us_hw_buffer_s **hw) {

 	do {
 		struct v4l2_buffer new = {0};
-		new.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+		struct v4l2_plane new_planes[VIDEO_MAX_PLANES] = {0};
+		new.type = _RUN(capture_type);
 		new.memory = dev->io_method;
+		if (_D_IS_MPLANE) {
+			new.length = VIDEO_MAX_PLANES;
+			new.m.planes = new_planes;
+		}
+
 		const bool new_got = (_D_XIOCTL(VIDIOC_DQBUF, &new) >= 0);

 		if (new_got) {

@@ -338,6 +357,10 @@ int us_device_grab_buffer(us_device_s *dev, us_hw_buffer_s **hw) {
 			}
 			GRABBED(new) = true;

+			if (_D_IS_MPLANE) {
+				new.bytesused = new.m.planes[0].bytesused;
+			}
+
 			broken = !_device_is_buffer_valid(dev, &new, FRAME_DATA(new));
 			if (broken) {
 				US_LOG_DEBUG("Releasing device buffer=%u (broken frame) ...", new.index);

@@ -362,7 +385,7 @@ int us_device_grab_buffer(us_device_s *dev, us_hw_buffer_s **hw) {
 #			undef GRABBED
 #			undef FRAME_DATA

-			memcpy(&buf, &new, sizeof(struct v4l2_buffer));
+			_v4l2_buffer_copy(&new, &buf);
 			buf_got = true;

 		} else {

@@ -386,8 +409,8 @@ int us_device_grab_buffer(us_device_s *dev, us_hw_buffer_s **hw) {
 	HW(raw.format) = _RUN(format);
 	HW(raw.stride) = _RUN(stride);
 	HW(raw.online) = true;
-	memcpy(&HW(buf), &buf, sizeof(struct v4l2_buffer));
-	HW(raw.grab_ts)= (long double)((buf.timestamp.tv_sec * (uint64_t)1000) + (buf.timestamp.tv_usec / 1000)) / 1000;
+	_v4l2_buffer_copy(&buf, &HW(buf));
+	HW(raw.grab_ts) = (long double)((buf.timestamp.tv_sec * (uint64_t)1000) + (buf.timestamp.tv_usec / 1000)) / 1000;
 	US_LOG_DEBUG("Grabbed new frame: buffer=%u, bytesused=%u, grab_ts=%.3Lf, latency=%.3Lf, skipped=%u",
 		buf.index, buf.bytesused, HW(raw.grab_ts), us_get_now_monotonic() - HW(raw.grab_ts), skipped);
 #	undef HW

@@ -427,6 +450,16 @@ int us_device_consume_event(us_device_s *dev) {
 	return 0;
 }

+static void _v4l2_buffer_copy(const struct v4l2_buffer *src, struct v4l2_buffer *dest) {
+	struct v4l2_plane *dest_planes = dest->m.planes;
+	memcpy(dest, src, sizeof(struct v4l2_buffer));
+	if (src->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
+		assert(dest_planes);
+		dest->m.planes = dest_planes;
+		memcpy(dest->m.planes, src->m.planes, sizeof(struct v4l2_plane) * VIDEO_MAX_PLANES);
+	}
+}
+
 bool _device_is_buffer_valid(us_device_s *dev, const struct v4l2_buffer *buf, const uint8_t *data) {
 	// Workaround for broken, corrupted frames:
 	// Under low light conditions corrupted frames may get captured.

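An illustrative sketch (not repository code) of the aliasing problem this helper guards against: for multi-planar buffers, `buf.m.planes` points at caller-owned storage, so a plain `memcpy()` of `struct v4l2_buffer` would make the destination alias the source's plane array instead of filling its own.

```c
#include <assert.h>
#include <string.h>
#include <linux/videodev2.h>

// Hypothetical demo, not part of the diff.
void demo_plane_aliasing(const struct v4l2_buffer *src) {
	struct v4l2_plane dest_planes[VIDEO_MAX_PLANES] = {0};
	struct v4l2_buffer dest = {0};
	dest.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
	dest.m.planes = dest_planes; // the destination owns this array

	memcpy(&dest, src, sizeof(struct v4l2_buffer)); // naive struct copy
	assert(dest.m.planes == src->m.planes);          // now aliases src; dest_planes stays empty

	// _v4l2_buffer_copy() above avoids this: it saves dest.m.planes, copies the
	// struct, restores the pointer, then copies the plane descriptors into the
	// destination's own array.
}
```
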
@@ -475,7 +508,13 @@ static int _device_open_check_cap(us_device_s *dev) {
 		return -1;
 	}

-	if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
+	if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) {
+		_RUN(capture_type) = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+		US_LOG_INFO("Using capture type: single-planar");
+	} else if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE) {
+		_RUN(capture_type) = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+		US_LOG_INFO("Using capture type: multi-planar");
+	} else {
 		US_LOG_ERROR("Video capture is not supported by device");
 		return -1;
 	}

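For context, a standalone sketch (not repository code) of the capability probe this hunk introduces; the device path is illustrative and error handling is reduced to a minimum.

```c
#include <stdio.h>
#include <fcntl.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

int main(void) {
	const int fd = open("/dev/video0", O_RDWR); // illustrative path
	if (fd < 0) {
		perror("open");
		return 1;
	}

	struct v4l2_capability cap = {0};
	if (ioctl(fd, VIDIOC_QUERYCAP, &cap) < 0) {
		perror("VIDIOC_QUERYCAP");
		return 1;
	}

	// Same decision as in the hunk: prefer the classic single-planar API,
	// fall back to the multi-planar one that some SoC capture and
	// mem-to-mem drivers expose instead.
	if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) {
		printf("capture type: single-planar\n");
	} else if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE) {
		printf("capture type: multi-planar\n");
	} else {
		fprintf(stderr, "video capture is not supported by device\n");
		return 1;
	}
	return 0;
}
```
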
@@ -485,11 +524,13 @@ static int _device_open_check_cap(us_device_s *dev) {
 		return -1;
 	}

-	int input = dev->input; // Needs a pointer to int for ioctl()
-	US_LOG_INFO("Using input channel: %d", input);
-	if (_D_XIOCTL(VIDIOC_S_INPUT, &input) < 0) {
-		US_LOG_ERROR("Can't set input channel");
-		return -1;
+	if (!_D_IS_MPLANE) {
+		int input = dev->input; // Needs a pointer to int for ioctl()
+		US_LOG_INFO("Using input channel: %d", input);
+		if (_D_XIOCTL(VIDIOC_S_INPUT, &input) < 0) {
+			US_LOG_ERROR("Can't set input channel");
+			return -1;
+		}
 	}

 	if (dev->standard != V4L2_STD_UNKNOWN) {

@@ -567,16 +608,25 @@ static int _device_apply_dv_timings(us_device_s *dev) {
 	return 0;
 }

-static int _device_open_format(us_device_s *dev, bool first) {
+static int _device_open_format(us_device_s *dev, bool first) { // FIXME
 	const unsigned stride = us_align_size(_RUN(width), 32) << 1;

 	struct v4l2_format fmt = {0};
-	fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
-	fmt.fmt.pix.width = _RUN(width);
-	fmt.fmt.pix.height = _RUN(height);
-	fmt.fmt.pix.pixelformat = dev->format;
-	fmt.fmt.pix.field = V4L2_FIELD_ANY;
-	fmt.fmt.pix.bytesperline = stride;
+	fmt.type = _RUN(capture_type);
+	if (_D_IS_MPLANE) {
+		fmt.fmt.pix_mp.width = _RUN(width);
+		fmt.fmt.pix_mp.height = _RUN(height);
+		fmt.fmt.pix_mp.pixelformat = dev->format;
+		fmt.fmt.pix_mp.field = V4L2_FIELD_ANY;
+		fmt.fmt.pix_mp.flags = 0;
+		fmt.fmt.pix_mp.num_planes = 1;
+	} else {
+		fmt.fmt.pix.width = _RUN(width);
+		fmt.fmt.pix.height = _RUN(height);
+		fmt.fmt.pix.pixelformat = dev->format;
+		fmt.fmt.pix.field = V4L2_FIELD_ANY;
+		fmt.fmt.pix.bytesperline = stride;
+	}

 	// Set format
 	US_LOG_DEBUG("Probing device format=%s, stride=%u, resolution=%ux%u ...",

@@ -586,13 +636,21 @@ static int _device_open_format(us_device_s *dev, bool first) {
 		return -1;
 	}

+	if (fmt.type != _RUN(capture_type)) {
+		US_LOG_ERROR("Capture format mismatch, please report to the developer");
+		return -1;
+	}
+
+#	define FMT(x_next) (_D_IS_MPLANE ? fmt.fmt.pix_mp.x_next : fmt.fmt.pix.x_next)
+#	define FMTS(x_next) (_D_IS_MPLANE ? fmt.fmt.pix_mp.plane_fmt[0].x_next : fmt.fmt.pix.x_next)
+
 	// Check resolution
 	bool retry = false;
-	if (fmt.fmt.pix.width != _RUN(width) || fmt.fmt.pix.height != _RUN(height)) {
+	if (FMT(width) != _RUN(width) || FMT(height) != _RUN(height)) {
 		US_LOG_ERROR("Requested resolution=%ux%u is unavailable", _RUN(width), _RUN(height));
 		retry = true;
 	}
-	if (_device_apply_resolution(dev, fmt.fmt.pix.width, fmt.fmt.pix.height) < 0) {
+	if (_device_apply_resolution(dev, FMT(width), FMT(height)) < 0) {
 		return -1;
 	}
 	if (first && retry) {

@@ -601,27 +659,32 @@ static int _device_open_format(us_device_s *dev, bool first) {
 	US_LOG_INFO("Using resolution: %ux%u", _RUN(width), _RUN(height));

 	// Check format
-	if (fmt.fmt.pix.pixelformat != dev->format) {
+	if (FMT(pixelformat) != dev->format) {
 		US_LOG_ERROR("Could not obtain the requested format=%s; driver gave us %s",
 			_format_to_string_supported(dev->format),
-			_format_to_string_supported(fmt.fmt.pix.pixelformat));
+			_format_to_string_supported(FMT(pixelformat)));

 		char *format_str;
-		if ((format_str = (char *)_format_to_string_nullable(fmt.fmt.pix.pixelformat)) != NULL) {
+		if ((format_str = (char *)_format_to_string_nullable(FMT(pixelformat))) != NULL) {
 			US_LOG_INFO("Falling back to format=%s", format_str);
 		} else {
 			char fourcc_str[8];
 			US_LOG_ERROR("Unsupported format=%s (fourcc)",
-				us_fourcc_to_string(fmt.fmt.pix.pixelformat, fourcc_str, 8));
+				us_fourcc_to_string(FMT(pixelformat), fourcc_str, 8));
 			return -1;
 		}
 	}

-	_RUN(format) = fmt.fmt.pix.pixelformat;
+	_RUN(format) = FMT(pixelformat);
 	US_LOG_INFO("Using format: %s", _format_to_string_supported(_RUN(format)));

-	_RUN(stride) = fmt.fmt.pix.bytesperline;
-	_RUN(raw_size) = fmt.fmt.pix.sizeimage; // Only for userptr
+	_RUN(stride) = FMTS(bytesperline);
+	_RUN(raw_size) = FMTS(sizeimage); // Only for userptr
+
+#	undef FMTS
+#	undef FMT

 	return 0;
 }

@@ -629,7 +692,7 @@ static void _device_open_hw_fps(us_device_s *dev) {
 	_RUN(hw_fps) = 0;

 	struct v4l2_streamparm setfps = {0};
-	setfps.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+	setfps.type = _RUN(capture_type);

 	US_LOG_DEBUG("Querying HW FPS ...");
 	if (_D_XIOCTL(VIDIOC_G_PARM, &setfps) < 0) {

@@ -649,7 +712,7 @@ static void _device_open_hw_fps(us_device_s *dev) {
 #	define SETFPS_TPF(x_next) setfps.parm.capture.timeperframe.x_next

 	US_MEMSET_ZERO(setfps);
-	setfps.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+	setfps.type = _RUN(capture_type);
 	SETFPS_TPF(numerator) = 1;
 	SETFPS_TPF(denominator) = (dev->desired_fps == 0 ? 255 : dev->desired_fps);

@@ -712,7 +775,7 @@ static int _device_open_io_method(us_device_s *dev) {
 static int _device_open_io_method_mmap(us_device_s *dev) {
 	struct v4l2_requestbuffers req = {0};
 	req.count = dev->n_bufs;
-	req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+	req.type = _RUN(capture_type);
 	req.memory = V4L2_MEMORY_MMAP;

 	US_LOG_DEBUG("Requesting %u device buffers for MMAP ...", req.count);

@@ -733,9 +796,14 @@ static int _device_open_io_method_mmap(us_device_s *dev) {
 	US_CALLOC(_RUN(hw_bufs), req.count);
 	for (_RUN(n_bufs) = 0; _RUN(n_bufs) < req.count; ++_RUN(n_bufs)) {
 		struct v4l2_buffer buf = {0};
-		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+		struct v4l2_plane planes[VIDEO_MAX_PLANES] = {0};
+		buf.type = _RUN(capture_type);
 		buf.memory = V4L2_MEMORY_MMAP;
 		buf.index = _RUN(n_bufs);
+		if (_D_IS_MPLANE) {
+			buf.m.planes = planes;
+			buf.length = VIDEO_MAX_PLANES;
+		}

 		US_LOG_DEBUG("Calling us_xioctl(VIDIOC_QUERYBUF) for device buffer=%u ...", _RUN(n_bufs));
 		if (_D_XIOCTL(VIDIOC_QUERYBUF, &buf) < 0) {

@@ -747,20 +815,28 @@ static int _device_open_io_method_mmap(us_device_s *dev) {

 		HW(dma_fd) = -1;

+		const size_t buf_size = (_D_IS_MPLANE ? buf.m.planes[0].length : buf.length);
+		const off_t buf_offset = (_D_IS_MPLANE ? buf.m.planes[0].m.mem_offset : buf.m.offset);
+
 		US_LOG_DEBUG("Mapping device buffer=%u ...", _RUN(n_bufs));
 		if ((HW(raw.data) = mmap(
 			NULL,
-			buf.length,
+			buf_size,
 			PROT_READ | PROT_WRITE,
 			MAP_SHARED,
 			_RUN(fd),
-			buf.m.offset
+			buf_offset
 		)) == MAP_FAILED) {
 			US_LOG_PERROR("Can't map device buffer=%u", _RUN(n_bufs));
 			return -1;
 		}
 		assert(HW(raw.data) != NULL);
-		HW(raw.allocated) = buf.length;
+		HW(raw.allocated) = buf_size;
+
+		if (_D_IS_MPLANE) {
+			US_CALLOC(HW(buf.m.planes), VIDEO_MAX_PLANES);
+		}

 #		undef HW
 	}

@@ -770,7 +846,7 @@ static int _device_open_io_method_mmap(us_device_s *dev) {
 static int _device_open_io_method_userptr(us_device_s *dev) {
 	struct v4l2_requestbuffers req = {0};
 	req.count = dev->n_bufs;
-	req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+	req.type = _RUN(capture_type);
 	req.memory = V4L2_MEMORY_USERPTR;

 	US_LOG_DEBUG("Requesting %u device buffers for USERPTR ...", req.count);

@@ -798,6 +874,9 @@ static int _device_open_io_method_userptr(us_device_s *dev) {
 		assert((HW(raw.data) = aligned_alloc(page_size, buf_size)) != NULL);
 		memset(HW(raw.data), 0, buf_size);
 		HW(raw.allocated) = buf_size;
+		if (_D_IS_MPLANE) {
+			US_CALLOC(HW(buf.m.planes), VIDEO_MAX_PLANES);
+		}
 #		undef HW
 	}
 	return 0;

@@ -806,10 +885,18 @@ static int _device_open_io_method_userptr(us_device_s *dev) {
 static int _device_open_queue_buffers(us_device_s *dev) {
 	for (unsigned index = 0; index < _RUN(n_bufs); ++index) {
 		struct v4l2_buffer buf = {0};
-		buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
+		struct v4l2_plane planes[VIDEO_MAX_PLANES] = {0};
+		buf.type = _RUN(capture_type);
 		buf.memory = dev->io_method;
 		buf.index = index;
+		if (_D_IS_MPLANE) {
+			buf.m.planes = planes;
+			buf.length = 1;
+		}

 		if (dev->io_method == V4L2_MEMORY_USERPTR) {
+			// I am not sure, may be this is incorrect for mplane device,
+			// but i don't have one which supports V4L2_MEMORY_USERPTR
 			buf.m.userptr = (unsigned long)_RUN(hw_bufs)[index].raw.data;
 			buf.length = _RUN(hw_bufs)[index].raw.allocated;
 		}

@@ -61,7 +61,7 @@
 #define US_STANDARDS_STR "PAL, NTSC, SECAM"

 #define US_FORMAT_UNKNOWN -1
-#define US_FORMATS_STR "YUYV, UYVY, RGB565, RGB24, MJPEG, JPEG"
+#define US_FORMATS_STR "YUYV, UYVY, RGB565, RGB24, BGR24, MJPEG, JPEG"

 #define US_IO_METHOD_UNKNOWN -1
 #define US_IO_METHODS_STR "MMAP, USERPTR"

@@ -75,18 +75,19 @@ typedef struct {
 } us_hw_buffer_s;

 typedef struct {
-	int fd;
-	unsigned width;
-	unsigned height;
-	unsigned format;
-	unsigned stride;
-	unsigned hw_fps;
-	unsigned jpeg_quality;
-	size_t raw_size;
-	unsigned n_bufs;
-	us_hw_buffer_s *hw_bufs;
-	bool capturing;
-	bool persistent_timeout_reported;
+	int fd;
+	unsigned width;
+	unsigned height;
+	unsigned format;
+	unsigned stride;
+	unsigned hw_fps;
+	unsigned jpeg_quality;
+	size_t raw_size;
+	unsigned n_bufs;
+	us_hw_buffer_s *hw_bufs;
+	enum v4l2_buf_type capture_type;
+	bool capturing;
+	bool persistent_timeout_reported;
 } us_device_runtime_s;

@@ -132,9 +133,7 @@ typedef struct {
 	size_t min_frame_size;
 	bool persistent;
 	unsigned timeout;
-
 	us_controls_s ctl;
-
 	us_device_runtime_s *run;
 } us_device_s;

@@ -41,6 +41,7 @@ static void _jpeg_write_scanlines_yuyv(struct jpeg_compress_struct *jpeg, const
 static void _jpeg_write_scanlines_uyvy(struct jpeg_compress_struct *jpeg, const us_frame_s *frame);
 static void _jpeg_write_scanlines_rgb565(struct jpeg_compress_struct *jpeg, const us_frame_s *frame);
 static void _jpeg_write_scanlines_rgb24(struct jpeg_compress_struct *jpeg, const us_frame_s *frame);
+static void _jpeg_write_scanlines_bgr24(struct jpeg_compress_struct *jpeg, const us_frame_s *frame);

 static void _jpeg_init_destination(j_compress_ptr jpeg);
 static boolean _jpeg_empty_output_buffer(j_compress_ptr jpeg);

@@ -63,7 +64,7 @@ void us_cpu_encoder_compress(const us_frame_s *src, us_frame_s *dest, unsigned q
 	jpeg.image_width = src->width;
 	jpeg.image_height = src->height;
 	jpeg.input_components = 3;
-	jpeg.in_color_space = JCS_RGB;
+	jpeg.in_color_space = ((src->format == V4L2_PIX_FMT_YUYV || src->format == V4L2_PIX_FMT_UYVY) ? JCS_YCbCr : JCS_RGB);

 	jpeg_set_defaults(&jpeg);
 	jpeg_set_quality(&jpeg, quality, TRUE);

@@ -79,6 +80,7 @@ void us_cpu_encoder_compress(const us_frame_s *src, us_frame_s *dest, unsigned q
 		WRITE_SCANLINES(V4L2_PIX_FMT_UYVY, _jpeg_write_scanlines_uyvy);
 		WRITE_SCANLINES(V4L2_PIX_FMT_RGB565, _jpeg_write_scanlines_rgb565);
 		WRITE_SCANLINES(V4L2_PIX_FMT_RGB24, _jpeg_write_scanlines_rgb24);
+		WRITE_SCANLINES(V4L2_PIX_FMT_BGR24, _jpeg_write_scanlines_bgr24);
 		default: assert(0 && "Unsupported input format for CPU encoder");
 	}

@@ -106,39 +108,29 @@ static void _jpeg_set_dest_frame(j_compress_ptr jpeg, us_frame_s *frame) {
 	frame->used = 0;
 }

-#define YUV_R(_y, _, _v) (((_y) + (359 * (_v))) >> 8)
-#define YUV_G(_y, _u, _v) (((_y) - (88 * (_u)) - (183 * (_v))) >> 8)
-#define YUV_B(_y, _u, _) (((_y) + (454 * (_u))) >> 8)
-#define NORM_COMPONENT(_x) (((_x) > 255) ? 255 : (((_x) < 0) ? 0 : (_x)))
-
 static void _jpeg_write_scanlines_yuyv(struct jpeg_compress_struct *jpeg, const us_frame_s *frame) {
 	uint8_t *line_buf;
 	US_CALLOC(line_buf, frame->width * 3);

 	const unsigned padding = us_frame_get_padding(frame);
 	const uint8_t *data = frame->data;
-	unsigned z = 0;

 	while (jpeg->next_scanline < frame->height) {
 		uint8_t *ptr = line_buf;

 		for (unsigned x = 0; x < frame->width; ++x) {
-			const int y = (!z ? data[0] << 8 : data[2] << 8);
-			const int u = data[1] - 128;
-			const int v = data[3] - 128;
+			// See also: https://www.kernel.org/doc/html/v4.8/media/uapi/v4l/pixfmt-yuyv.html
+			const bool is_odd_pixel = x & 1;
+			const uint8_t y = data[is_odd_pixel ? 2 : 0];
+			const uint8_t u = data[1];
+			const uint8_t v = data[3];

-			const int r = YUV_R(y, u, v);
-			const int g = YUV_G(y, u, v);
-			const int b = YUV_B(y, u, v);
+			ptr[0] = y;
+			ptr[1] = u;
+			ptr[2] = v;
+			ptr += 3;

-			*(ptr++) = NORM_COMPONENT(r);
-			*(ptr++) = NORM_COMPONENT(g);
-			*(ptr++) = NORM_COMPONENT(b);
-
-			if (z++) {
-				z = 0;
-				data += 4;
-			}
+			data += (is_odd_pixel ? 4: 0);
 		}
 		data += padding;

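For orientation, a tiny sketch (not repository code) of the packed YUYV 4:2:2 layout the new indexing relies on: every 4-byte group Y0 U Y1 V describes two neighbouring pixels that share one chroma pair, which is why even pixels read data[0], odd pixels read data[2], and data only advances after the odd pixel. Because the encoder now sets in_color_space to JCS_YCbCr for these formats (see the earlier hunk), the scanline writer can emit Y/Cb/Cr bytes directly and leave the color conversion to libjpeg.

```c
#include <stdint.h>
#include <stdio.h>

int main(void) {
	const uint8_t yuyv[4] = {0x10, 0x80, 0x20, 0x80}; // one macropixel: Y0 U Y1 V
	for (unsigned x = 0; x < 2; ++x) {
		const int odd = x & 1;
		const uint8_t y = yuyv[odd ? 2 : 0]; // even pixel uses Y0, odd pixel uses Y1
		const uint8_t u = yuyv[1];           // shared Cb
		const uint8_t v = yuyv[3];           // shared Cr
		printf("pixel %u: Y=%u Cb=%u Cr=%u\n", x, (unsigned)y, (unsigned)u, (unsigned)v);
	}
	return 0;
}
```
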
@@ -155,28 +147,23 @@ static void _jpeg_write_scanlines_uyvy(struct jpeg_compress_struct *jpeg, const

 	const unsigned padding = us_frame_get_padding(frame);
 	const uint8_t *data = frame->data;
-	unsigned z = 0;

 	while (jpeg->next_scanline < frame->height) {
 		uint8_t *ptr = line_buf;

 		for (unsigned x = 0; x < frame->width; ++x) {
-			const int y = (!z ? data[1] << 8 : data[3] << 8);
-			const int u = data[0] - 128;
-			const int v = data[2] - 128;
+			// See also: https://www.kernel.org/doc/html/v4.8/media/uapi/v4l/pixfmt-uyvy.html
+			const bool is_odd_pixel = x & 1;
+			const uint8_t y = data[is_odd_pixel ? 3 : 1];
+			const uint8_t u = data[0];
+			const uint8_t v = data[2];

-			const int r = YUV_R(y, u, v);
-			const int g = YUV_G(y, u, v);
-			const int b = YUV_B(y, u, v);
+			ptr[0] = y;
+			ptr[1] = u;
+			ptr[2] = v;
+			ptr += 3;

-			*(ptr++) = NORM_COMPONENT(r);
-			*(ptr++) = NORM_COMPONENT(g);
-			*(ptr++) = NORM_COMPONENT(b);
-
-			if (z++) {
-				z = 0;
-				data += 4;
-			}
+			data += (is_odd_pixel ? 4 : 0);
 		}
 		data += padding;

@@ -187,11 +174,6 @@ static void _jpeg_write_scanlines_uyvy(struct jpeg_compress_struct *jpeg, const
 	free(line_buf);
 }

-#undef NORM_COMPONENT
-#undef YUV_B
-#undef YUV_G
-#undef YUV_R
-
 static void _jpeg_write_scanlines_rgb565(struct jpeg_compress_struct *jpeg, const us_frame_s *frame) {
 	uint8_t *line_buf;
 	US_CALLOC(line_buf, frame->width * 3);

@@ -205,9 +187,10 @@ static void _jpeg_write_scanlines_rgb565(struct jpeg_compress_struct *jpeg, cons
 		for (unsigned x = 0; x < frame->width; ++x) {
 			const unsigned int two_byte = (data[1] << 8) + data[0];

-			*(ptr++) = data[1] & 248; // Red
-			*(ptr++) = (uint8_t)((two_byte & 2016) >> 3); // Green
-			*(ptr++) = (data[0] & 31) * 8; // Blue
+			ptr[0] = data[1] & 248; // Red
+			ptr[1] = (uint8_t)((two_byte & 2016) >> 3); // Green
+			ptr[2] = (data[0] & 31) * 8; // Blue
+			ptr += 3;

 			data += 2;
 		}

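A worked example (not repository code) of the RGB565 unpacking used above: the pixel is stored little-endian, so data[1] holds RRRRRGGG and data[0] holds GGGBBBBB; masking with 248 keeps the 5 red bits already shifted into the top of the byte, the mask 2016 (binary 00000111 11100000) isolates the 6 green bits which ">> 3" leaves scaled to 8 bits, and "(data[0] & 31) * 8" expands the 5 blue bits.

```c
#include <stdint.h>
#include <stdio.h>

int main(void) {
	const uint8_t data[2] = {0xDF, 0x07}; // pixel 0x07DF: R=00000, G=111110, B=11111
	const unsigned two_byte = (data[1] << 8) + data[0];

	const uint8_t r = data[1] & 248;                      // top 5 bits -> R << 3
	const uint8_t g = (uint8_t)((two_byte & 2016) >> 3);  // middle 6 bits -> G << 2
	const uint8_t b = (data[0] & 31) * 8;                 // low 5 bits -> B << 3

	printf("R=%u G=%u B=%u\n", (unsigned)r, (unsigned)g, (unsigned)b); // R=0 G=248 B=248
	return 0;
}
```
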
@@ -232,6 +215,33 @@ static void _jpeg_write_scanlines_rgb24(struct jpeg_compress_struct *jpeg, const
 	}
 }

+static void _jpeg_write_scanlines_bgr24(struct jpeg_compress_struct *jpeg, const us_frame_s *frame) {
+	uint8_t *line_buf;
+	US_CALLOC(line_buf, frame->width * 3);
+
+	const unsigned padding = us_frame_get_padding(frame);
+	uint8_t *data = frame->data;
+
+	while (jpeg->next_scanline < frame->height) {
+		uint8_t *ptr = line_buf;
+
+		// swap B and R values
+		for (unsigned x = 0; x < frame->width * 3; x += 3) {
+			ptr[0] = data[x + 2];
+			ptr[1] = data[x + 1];
+			ptr[2] = data[x];
+			ptr += 3;
+		}
+
+		JSAMPROW scanlines[1] = {line_buf};
+		jpeg_write_scanlines(jpeg, scanlines, 1);
+
+		data += (frame->width * 3) + padding;
+	}
+
+	free(line_buf);
+}
+
 #define JPEG_OUTPUT_BUFFER_SIZE ((size_t)4096)

 static void _jpeg_init_destination(j_compress_ptr jpeg) {

@@ -696,7 +696,7 @@ static void _help(FILE *fp, const us_device_s *dev, const us_encoder_s *enc, con
 	ADD_SINK("RAW", "raw-")
 	ADD_SINK("H264", "h264-")
 	SAY(" --h264-bitrate <kbps> ───────── H264 bitrate in Kbps. Default: %u.\n", stream->h264_bitrate);
-	SAY(" --h264-gop <N> ──────────────── Intarval between keyframes. Default: %u.\n", stream->h264_gop);
+	SAY(" --h264-gop <N> ──────────────── Interval between keyframes. Default: %u.\n", stream->h264_gop);
 	SAY(" --h264-m2m-device </dev/path> ─ Path to V4L2 M2M encoder device. Default: auto select.\n");
 #	undef ADD_SINK
 #	ifdef WITH_GPIO

@@ -127,7 +127,6 @@ void us_stream_loop(us_stream_s *stream) {
 			unsigned slc = 0;
 			for (; slc < 10 && !atomic_load(&_RUN(stop)) && !us_stream_has_clients(stream); ++slc) {
 				usleep(100000);
-				++slc;
 			}
 			h264_force_key = (slc == 10);
 		}

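For context, a small standalone sketch (illustrative only, not repository code) of why the removed `++slc;` mattered: with the counter bumped both in the `for` header and in the body, the no-client wait loop slept only 5 times (about 0.5 s at 100 ms per sleep) instead of the intended 10 before forcing a key frame.

```c
#include <stdio.h>

int main(void) {
	unsigned sleeps = 0, slc = 0;
	for (; slc < 10; ++slc) {
		++sleeps;
		++slc; // the removed line: counter advances twice per iteration
	}
	printf("with the bug: %u sleeps\n", sleeps); // 5

	sleeps = 0;
	for (slc = 0; slc < 10; ++slc) {
		++sleeps;
	}
	printf("fixed:        %u sleeps\n", sleeps); // 10
	return 0;
}
```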