Compare commits

..

9 Commits
v6.7 ... v6.8

Author SHA1 Message Date
Maxim Devaev
d43014346d Bump version: 6.7 → 6.8 2024-03-26 20:23:16 +02:00
Maxim Devaev
bcd447963c build fix 2024-03-26 20:22:10 +02:00
Maxim Devaev
eec6cfd0d4 lint fix 2024-03-26 20:10:06 +02:00
Maxim Devaev
f177300e69 ustreamer/drm: fixed assertion 2024-03-26 18:59:33 +02:00
Maxim Devaev
7015a26a63 Userspace workaround for the wrong TC358743 RGB bytes ordering
- https://github.com/raspberrypi/linux/issues/6068
2024-03-26 18:35:13 +02:00
Maxim Devaev
290282b6b6 drm: fixed big endian case for rgb/bgr 2024-03-26 18:05:51 +02:00
Maxim Devaev
a339ff5d06 v4p mode in ustreamer 2024-03-26 17:45:53 +02:00
Maxim Devaev
8d4e9a6ca0 renamed us_hw_buffer_s to us_capture_hwbuf_s 2024-03-26 01:54:01 +02:00
Maxim Devaev
f0f5fcd67f renamed us_device* to us_capture* 2024-03-26 01:25:04 +02:00
21 changed files with 570 additions and 385 deletions

View File

@@ -1,7 +1,7 @@
[bumpversion]
commit = True
tag = True
current_version = 6.7
current_version = 6.8
parse = (?P<major>\d+)\.(?P<minor>\d+)
serialize =
{major}.{minor}

View File

@@ -1,6 +1,6 @@
.\" Manpage for ustreamer-dump.
.\" Open an issue or pull request to https://github.com/pikvm/ustreamer to correct errors or typos
.TH USTREAMER-DUMP 1 "version 6.7" "January 2021"
.TH USTREAMER-DUMP 1 "version 6.8" "January 2021"
.SH NAME
ustreamer-dump \- Dump uStreamer's memory sink to file

View File

@@ -1,6 +1,6 @@
.\" Manpage for ustreamer.
.\" Open an issue or pull request to https://github.com/pikvm/ustreamer to correct errors or typos
.TH USTREAMER 1 "version 6.7" "November 2020"
.TH USTREAMER 1 "version 6.8" "November 2020"
.SH NAME
ustreamer \- stream MJPEG video from any V4L2 device to the network

View File

@@ -3,7 +3,7 @@
pkgname=ustreamer
pkgver=6.7
pkgver=6.8
pkgrel=1
pkgdesc="Lightweight and fast MJPEG-HTTP streamer"
url="https://github.com/pikvm/ustreamer"

View File

@@ -6,7 +6,7 @@
include $(TOPDIR)/rules.mk
PKG_NAME:=ustreamer
PKG_VERSION:=6.7
PKG_VERSION:=6.8
PKG_RELEASE:=1
PKG_MAINTAINER:=Maxim Devaev <mdevaev@gmail.com>

View File

@@ -17,7 +17,7 @@ def _find_sources(suffix: str) -> list[str]:
if __name__ == "__main__":
setup(
name="ustreamer",
version="6.7",
version="6.8",
description="uStreamer tools",
author="Maxim Devaev",
author_email="mdevaev@gmail.com",

View File

@@ -12,11 +12,11 @@ _DUMP = ustreamer-dump.bin
_V4P = ustreamer-v4p.bin
_CFLAGS = -MD -c -std=c17 -Wall -Wextra -D_GNU_SOURCE $(CFLAGS)
_LDFLAGS = $(LDFLAGS)
_COMMON_LIBS = -lm -ljpeg -pthread -lrt -latomic
_USTR_LDFLAGS = $(LDFLAGS) -lm -ljpeg -pthread -lrt -latomic -levent -levent_pthreads
_DUMP_LDFLAGS = $(LDFLAGS) -lm -ljpeg -pthread -lrt -latomic
_V4P_LDFLAGS = $(LDFLAGS) -lm -ljpeg -pthread -lrt -latomic
_USTR_LIBS = $(_COMMON_LIBS) -levent -levent_pthreads
_USTR_SRCS = $(shell ls \
libs/*.c \
ustreamer/*.c \
@@ -27,15 +27,14 @@ _USTR_SRCS = $(shell ls \
ustreamer/*.c \
)
_DUMP_LIBS = $(_COMMON_LIBS)
_DUMP_SRCS = $(shell ls \
libs/*.c \
dump/*.c \
)
_V4P_LIBS = $(_COMMON_LIBS)
_V4P_SRCS = $(shell ls \
libs/*.c \
libs/drm/*.c \
v4p/*.c \
)
@@ -52,16 +51,16 @@ endef
ifneq ($(call optbool,$(WITH_GPIO)),)
_USTR_LIBS += -lgpiod
override _CFLAGS += -DWITH_GPIO $(shell pkg-config --atleast-version=2 libgpiod 2> /dev/null && echo -DHAVE_GPIOD2)
_USTR_SRCS += $(shell ls ustreamer/gpio/*.c)
override _USTR_LDFLAGS += -lgpiod
override _USTR_SRCS += $(shell ls ustreamer/gpio/*.c)
endif
ifneq ($(call optbool,$(WITH_SYSTEMD)),)
_USTR_LIBS += -lsystemd
override _CFLAGS += -DWITH_SYSTEMD
_USTR_SRCS += $(shell ls ustreamer/http/systemd/*.c)
override _USTR_LDFLAGS += -lsystemd
override _USTR_SRCS += $(shell ls ustreamer/http/systemd/*.c)
endif
@@ -73,10 +72,10 @@ endif
WITH_SETPROCTITLE ?= 1
ifneq ($(call optbool,$(WITH_SETPROCTITLE)),)
ifeq ($(shell uname -s | tr A-Z a-z),linux)
_USTR_LIBS += -lbsd
endif
override _CFLAGS += -DWITH_SETPROCTITLE
ifeq ($(shell uname -s | tr A-Z a-z),linux)
override _USTR_LDFLAGS += -lbsd
endif
endif
@@ -84,8 +83,10 @@ WITH_V4P ?= 0
ifneq ($(call optbool,$(WITH_V4P)),)
override _TARGETS += $(_V4P)
override _OBJS += $(_V4P_SRCS:%.c=$(_BUILD)/%.o)
override _CFLAGS += $(shell pkg-config --cflags libdrm)
_V4P_LDFLAGS = $(shell pkg-config --libs libdrm)
override _CFLAGS += -DWITH_V4P $(shell pkg-config --cflags libdrm)
override _V4P_LDFLAGS += $(shell pkg-config --libs libdrm)
override _USTR_SRCS += $(shell ls libs/drm/*.c)
override _USTR_LDFLAGS += $(shell pkg-config --libs libdrm)
endif
@@ -108,17 +109,17 @@ install-strip: install
$(_USTR): $(_USTR_SRCS:%.c=$(_BUILD)/%.o)
$(info == LD $@)
$(ECHO) $(CC) $^ -o $@ $(_LDFLAGS) $(_USTR_LIBS)
$(ECHO) $(CC) $^ -o $@ $(_USTR_LDFLAGS)
$(_DUMP): $(_DUMP_SRCS:%.c=$(_BUILD)/%.o)
$(info == LD $@)
$(ECHO) $(CC) $^ -o $@ $(_LDFLAGS) $(_DUMP_LIBS)
$(ECHO) $(CC) $^ -o $@ $(_DUMP_LDFLAGS)
$(_V4P): $(_V4P_SRCS:%.c=$(_BUILD)/%.o)
$(info == LD $@)
$(ECHO) $(CC) $^ -o $@ $(_LDFLAGS) $(_V4P_LDFLAGS) $(_V4P_LIBS)
$(ECHO) $(CC) $^ -o $@ $(_V4P_LDFLAGS)
$(_BUILD)/%.o: %.c

View File

@@ -20,7 +20,7 @@
*****************************************************************************/
#include "device.h"
#include "capture.h"
#include <stdlib.h>
#include <stdatomic.h>
@@ -81,28 +81,28 @@ static const struct {
{"USERPTR", V4L2_MEMORY_USERPTR},
};
static int _device_wait_buffer(us_device_s *dev);
static int _device_consume_event(us_device_s *dev);
static int _capture_wait_buffer(us_capture_s *cap);
static int _capture_consume_event(us_capture_s *cap);
static void _v4l2_buffer_copy(const struct v4l2_buffer *src, struct v4l2_buffer *dest);
static bool _device_is_buffer_valid(us_device_s *dev, const struct v4l2_buffer *buf, const u8 *data);
static int _device_open_check_cap(us_device_s *dev);
static int _device_open_dv_timings(us_device_s *dev, bool apply);
static int _device_open_format(us_device_s *dev, bool first);
static void _device_open_hw_fps(us_device_s *dev);
static void _device_open_jpeg_quality(us_device_s *dev);
static int _device_open_io_method(us_device_s *dev);
static int _device_open_io_method_mmap(us_device_s *dev);
static int _device_open_io_method_userptr(us_device_s *dev);
static int _device_open_queue_buffers(us_device_s *dev);
static int _device_open_export_to_dma(us_device_s *dev);
static int _device_apply_resolution(us_device_s *dev, uint width, uint height, float hz);
static bool _capture_is_buffer_valid(us_capture_s *cap, const struct v4l2_buffer *buf, const u8 *data);
static int _capture_open_check_cap(us_capture_s *cap);
static int _capture_open_dv_timings(us_capture_s *cap, bool apply);
static int _capture_open_format(us_capture_s *cap, bool first);
static void _capture_open_hw_fps(us_capture_s *cap);
static void _capture_open_jpeg_quality(us_capture_s *cap);
static int _capture_open_io_method(us_capture_s *cap);
static int _capture_open_io_method_mmap(us_capture_s *cap);
static int _capture_open_io_method_userptr(us_capture_s *cap);
static int _capture_open_queue_buffers(us_capture_s *cap);
static int _capture_open_export_to_dma(us_capture_s *cap);
static int _capture_apply_resolution(us_capture_s *cap, uint width, uint height, float hz);
static void _device_apply_controls(us_device_s *dev);
static int _device_query_control(
us_device_s *dev, struct v4l2_queryctrl *query,
static void _capture_apply_controls(us_capture_s *cap);
static int _capture_query_control(
us_capture_s *cap, struct v4l2_queryctrl *query,
const char *name, uint cid, bool quiet);
static void _device_set_control(
us_device_s *dev, const struct v4l2_queryctrl *query,
static void _capture_set_control(
us_capture_s *cap, const struct v4l2_queryctrl *query,
const char *name, uint cid, int value, bool quiet);
static const char *_format_to_string_nullable(uint format);
@@ -118,33 +118,33 @@ static const char *_io_method_to_string_supported(enum v4l2_memory io_method);
#define _D_LOG_DEBUG(x_msg, ...) US_LOG_DEBUG("CAP: " x_msg, ##__VA_ARGS__)
us_device_s *us_device_init(void) {
us_device_runtime_s *run;
us_capture_s *us_capture_init(void) {
us_capture_runtime_s *run;
US_CALLOC(run, 1);
run->fd = -1;
us_device_s *dev;
US_CALLOC(dev, 1);
dev->path = "/dev/video0";
dev->width = 640;
dev->height = 480;
dev->format = V4L2_PIX_FMT_YUYV;
dev->jpeg_quality = 80;
dev->standard = V4L2_STD_UNKNOWN;
dev->io_method = V4L2_MEMORY_MMAP;
dev->n_bufs = us_get_cores_available() + 1;
dev->min_frame_size = 128;
dev->timeout = 1;
dev->run = run;
return dev;
us_capture_s *cap;
US_CALLOC(cap, 1);
cap->path = "/dev/video0";
cap->width = 640;
cap->height = 480;
cap->format = V4L2_PIX_FMT_YUYV;
cap->jpeg_quality = 80;
cap->standard = V4L2_STD_UNKNOWN;
cap->io_method = V4L2_MEMORY_MMAP;
cap->n_bufs = us_get_cores_available() + 1;
cap->min_frame_size = 128;
cap->timeout = 1;
cap->run = run;
return cap;
}
void us_device_destroy(us_device_s *dev) {
free(dev->run);
free(dev);
void us_capture_destroy(us_capture_s *cap) {
free(cap->run);
free(cap);
}
int us_device_parse_format(const char *str) {
int us_capture_parse_format(const char *str) {
US_ARRAY_ITERATE(_FORMATS, 0, item, {
if (!strcasecmp(item->name, str)) {
return item->format;
@@ -153,7 +153,7 @@ int us_device_parse_format(const char *str) {
return -1;
}
int us_device_parse_standard(const char *str) {
int us_capture_parse_standard(const char *str) {
US_ARRAY_ITERATE(_STANDARDS, 0, item, {
if (!strcasecmp(item->name, str)) {
return item->standard;
@@ -162,7 +162,7 @@ int us_device_parse_standard(const char *str) {
return -1;
}
int us_device_parse_io_method(const char *str) {
int us_capture_parse_io_method(const char *str) {
US_ARRAY_ITERATE(_IO_METHODS, 0, item, {
if (!strcasecmp(item->name, str)) {
return item->io_method;
@@ -171,10 +171,10 @@ int us_device_parse_io_method(const char *str) {
return -1;
}
int us_device_open(us_device_s *dev) {
us_device_runtime_s *const run = dev->run;
int us_capture_open(us_capture_s *cap) {
us_capture_runtime_s *const run = cap->run;
if (access(dev->path, R_OK | W_OK) < 0) {
if (access(cap->path, R_OK | W_OK) < 0) {
if (run->open_error_reported != -errno) {
run->open_error_reported = -errno; // Don't confuse it with __LINE__
US_LOG_PERROR("No access to capture device");
@@ -183,15 +183,15 @@ int us_device_open(us_device_s *dev) {
}
_D_LOG_DEBUG("Opening capture device ...");
if ((run->fd = open(dev->path, O_RDWR | O_NONBLOCK)) < 0) {
if ((run->fd = open(cap->path, O_RDWR | O_NONBLOCK)) < 0) {
_D_LOG_PERROR("Can't capture open device");
goto error;
}
_D_LOG_DEBUG("Capture device fd=%d opened", run->fd);
if (dev->dv_timings && dev->persistent) {
if (cap->dv_timings && cap->persistent) {
_D_LOG_DEBUG("Probing DV-timings or QuerySTD ...");
if (_device_open_dv_timings(dev, false) < 0) {
if (_capture_open_dv_timings(cap, false) < 0) {
const int line = __LINE__;
if (run->open_error_reported != line) {
run->open_error_reported = line;
@@ -201,34 +201,34 @@ int us_device_open(us_device_s *dev) {
}
}
if (_device_open_check_cap(dev) < 0) {
if (_capture_open_check_cap(cap) < 0) {
goto error;
}
if (_device_apply_resolution(dev, dev->width, dev->height, dev->run->hz)) {
if (_capture_apply_resolution(cap, cap->width, cap->height, cap->run->hz)) {
goto error;
}
if (dev->dv_timings && _device_open_dv_timings(dev, true) < 0) {
if (cap->dv_timings && _capture_open_dv_timings(cap, true) < 0) {
goto error;
}
if (_device_open_format(dev, true) < 0) {
if (_capture_open_format(cap, true) < 0) {
goto error;
}
_device_open_hw_fps(dev);
_device_open_jpeg_quality(dev);
if (_device_open_io_method(dev) < 0) {
_capture_open_hw_fps(cap);
_capture_open_jpeg_quality(cap);
if (_capture_open_io_method(cap) < 0) {
goto error;
}
if (_device_open_queue_buffers(dev) < 0) {
if (_capture_open_queue_buffers(cap) < 0) {
goto error;
}
if (dev->dma_export && !us_is_jpeg(run->format)) {
if (cap->dma_export && !us_is_jpeg(run->format)) {
// uStreamer doesn't have any component that could handle JPEG capture via DMA
run->dma = !_device_open_export_to_dma(dev);
if (!run->dma && dev->dma_required) {
run->dma = !_capture_open_export_to_dma(cap);
if (!run->dma && cap->dma_required) {
goto error;
}
}
_device_apply_controls(dev);
_capture_apply_controls(cap);
enum v4l2_buf_type type = run->capture_type;
if (us_xioctl(run->fd, VIDIOC_STREAMON, &type) < 0) {
@@ -242,17 +242,17 @@ int us_device_open(us_device_s *dev) {
return 0;
tmp_error:
us_device_close(dev);
us_capture_close(cap);
return -2;
error:
run->open_error_reported = 0;
us_device_close(dev);
us_capture_close(cap);
return -1;
}
void us_device_close(us_device_s *dev) {
us_device_runtime_s *const run = dev->run;
void us_capture_close(us_capture_s *cap) {
us_capture_runtime_s *const run = cap->run;
bool say = false;
@@ -266,15 +266,15 @@ void us_device_close(us_device_s *dev) {
run->streamon = false;
}
if (run->hw_bufs != NULL) {
if (run->bufs != NULL) {
say = true;
_D_LOG_DEBUG("Releasing HW buffers ...");
for (uint index = 0; index < run->n_bufs; ++index) {
us_hw_buffer_s *hw = &run->hw_bufs[index];
us_capture_hwbuf_s *hw = &run->bufs[index];
US_CLOSE_FD(hw->dma_fd);
if (dev->io_method == V4L2_MEMORY_MMAP) {
if (cap->io_method == V4L2_MEMORY_MMAP) {
if (hw->raw.allocated > 0 && hw->raw.data != NULL) {
if (munmap(hw->raw.data, hw->raw.allocated) < 0) {
_D_LOG_PERROR("Can't unmap HW buffer=%u", index);
@@ -288,7 +288,7 @@ void us_device_close(us_device_s *dev) {
free(hw->buf.m.planes);
}
}
US_DELETE(run->hw_bufs, free);
US_DELETE(run->bufs, free);
run->n_bufs = 0;
}
@@ -299,20 +299,20 @@ void us_device_close(us_device_s *dev) {
}
}
int us_device_grab_buffer(us_device_s *dev, us_hw_buffer_s **hw) {
int us_capture_grab_buffer(us_capture_s *cap, us_capture_hwbuf_s **hw) {
// Это сложная функция, которая делает сразу много всего, чтобы получить новый фрейм.
// - Вызывается _device_wait_buffer() с select() внутри, чтобы подождать новый фрейм
// - Вызывается _capture_wait_buffer() с select() внутри, чтобы подождать новый фрейм
// или эвент V4L2. Обработка эвентов более приоритетна, чем кадров.
// - Если есть новые фреймы, то пропустить их все, пока не закончатся и вернуть
// самый-самый свежий, содержащий при этом валидные данные.
// - Если таковых не нашлось, вернуть -2.
// - Ошибка -1 возвращается при любых сбоях.
if (_device_wait_buffer(dev) < 0) {
if (_capture_wait_buffer(cap) < 0) {
return -1;
}
us_device_runtime_s *const run = dev->run;
us_capture_runtime_s *const run = cap->run;
*hw = NULL;
@@ -333,7 +333,7 @@ int us_device_grab_buffer(us_device_s *dev, us_hw_buffer_s **hw) {
struct v4l2_buffer new = {0};
struct v4l2_plane new_planes[VIDEO_MAX_PLANES] = {0};
new.type = run->capture_type;
new.memory = dev->io_method;
new.memory = cap->io_method;
if (run->capture_mplane) {
new.length = VIDEO_MAX_PLANES;
new.m.planes = new_planes;
@@ -347,8 +347,8 @@ int us_device_grab_buffer(us_device_s *dev, us_hw_buffer_s **hw) {
return -1;
}
# define GRABBED(x_buf) run->hw_bufs[x_buf.index].grabbed
# define FRAME_DATA(x_buf) run->hw_bufs[x_buf.index].raw.data
# define GRABBED(x_buf) run->bufs[x_buf.index].grabbed
# define FRAME_DATA(x_buf) run->bufs[x_buf.index].raw.data
if (GRABBED(new)) {
_D_LOG_ERROR("V4L2 error: grabbed HW buffer=%u is already used", new.index);
@@ -360,7 +360,7 @@ int us_device_grab_buffer(us_device_s *dev, us_hw_buffer_s **hw) {
new.bytesused = new.m.planes[0].bytesused;
}
broken = !_device_is_buffer_valid(dev, &new, FRAME_DATA(new));
broken = !_capture_is_buffer_valid(cap, &new, FRAME_DATA(new));
if (broken) {
_D_LOG_DEBUG("Releasing HW buffer=%u (broken frame) ...", new.index);
if (us_xioctl(run->fd, VIDIOC_QBUF, &new) < 0) {
@@ -400,7 +400,7 @@ int us_device_grab_buffer(us_device_s *dev, us_hw_buffer_s **hw) {
}
} while (true);
*hw = &run->hw_bufs[buf.index];
*hw = &run->bufs[buf.index];
atomic_store(&(*hw)->refs, 0);
(*hw)->raw.dma_fd = (*hw)->dma_fd;
(*hw)->raw.used = buf.bytesused;
@@ -417,11 +417,11 @@ int us_device_grab_buffer(us_device_s *dev, us_hw_buffer_s **hw) {
return buf.index;
}
int us_device_release_buffer(us_device_s *dev, us_hw_buffer_s *hw) {
int us_capture_release_buffer(us_capture_s *cap, us_capture_hwbuf_s *hw) {
assert(atomic_load(&hw->refs) == 0);
const uint index = hw->buf.index;
_D_LOG_DEBUG("Releasing HW buffer=%u ...", index);
if (us_xioctl(dev->run->fd, VIDIOC_QBUF, &hw->buf) < 0) {
if (us_xioctl(cap->run->fd, VIDIOC_QBUF, &hw->buf) < 0) {
_D_LOG_PERROR("Can't release HW buffer=%u", index);
return -1;
}
@@ -430,16 +430,16 @@ int us_device_release_buffer(us_device_s *dev, us_hw_buffer_s *hw) {
return 0;
}
void us_device_buffer_incref(us_hw_buffer_s *hw) {
void us_capture_buffer_incref(us_capture_hwbuf_s *hw) {
atomic_fetch_add(&hw->refs, 1);
}
void us_device_buffer_decref(us_hw_buffer_s *hw) {
void us_capture_buffer_decref(us_capture_hwbuf_s *hw) {
atomic_fetch_sub(&hw->refs, 1);
}
int _device_wait_buffer(us_device_s *dev) {
us_device_runtime_s *const run = dev->run;
int _capture_wait_buffer(us_capture_s *cap) {
us_capture_runtime_s *const run = cap->run;
# define INIT_FD_SET(x_set) \
fd_set x_set; FD_ZERO(&x_set); FD_SET(run->fd, &x_set);
@@ -452,7 +452,7 @@ int _device_wait_buffer(us_device_s *dev) {
// has_write не делает никому плохо.
struct timeval timeout;
timeout.tv_sec = dev->timeout;
timeout.tv_sec = cap->timeout;
timeout.tv_usec = 0;
_D_LOG_DEBUG("Calling select() on video device ...");
@@ -475,16 +475,16 @@ int _device_wait_buffer(us_device_s *dev) {
_D_LOG_ERROR("Device select() timeout");
return -1;
} else {
if (has_error && _device_consume_event(dev) < 0) {
if (has_error && _capture_consume_event(cap) < 0) {
return -1; // Restart required
}
}
return 0;
}
static int _device_consume_event(us_device_s *dev) {
static int _capture_consume_event(us_capture_s *cap) {
struct v4l2_event event;
if (us_xioctl(dev->run->fd, VIDIOC_DQEVENT, &event) < 0) {
if (us_xioctl(cap->run->fd, VIDIOC_DQEVENT, &event) < 0) {
_D_LOG_PERROR("Can't consume V4L2 event");
return -1;
}
@@ -509,13 +509,13 @@ static void _v4l2_buffer_copy(const struct v4l2_buffer *src, struct v4l2_buffer
}
}
bool _device_is_buffer_valid(us_device_s *dev, const struct v4l2_buffer *buf, const u8 *data) {
bool _capture_is_buffer_valid(us_capture_s *cap, const struct v4l2_buffer *buf, const u8 *data) {
// Workaround for broken, corrupted frames:
// Under low light conditions corrupted frames may get captured.
// The good thing is such frames are quite small compared to the regular frames.
// For example a VGA (640x480) webcam frame is normally >= 8kByte large,
// corrupted frames are smaller.
if (buf->bytesused < dev->min_frame_size) {
if (buf->bytesused < cap->min_frame_size) {
_D_LOG_DEBUG("Dropped too small frame, assuming it was broken: buffer=%u, bytesused=%u",
buf->index, buf->bytesused);
return false;
@@ -529,7 +529,7 @@ bool _device_is_buffer_valid(us_device_s *dev, const struct v4l2_buffer *buf, co
// A more sophisticated method would scan for the end of image marker, but
// that takes precious CPU cycles and this should be good enough for most
// cases.
if (us_is_jpeg(dev->run->format)) {
if (us_is_jpeg(cap->run->format)) {
if (buf->bytesused < 125) {
// https://stackoverflow.com/questions/2253404/what-is-the-smallest-valid-jpeg-file-size-in-bytes
_D_LOG_DEBUG("Discarding invalid frame, too small to be a valid JPEG: bytesused=%u", buf->bytesused);
@@ -548,21 +548,21 @@ bool _device_is_buffer_valid(us_device_s *dev, const struct v4l2_buffer *buf, co
return true;
}
static int _device_open_check_cap(us_device_s *dev) {
us_device_runtime_s *const run = dev->run;
static int _capture_open_check_cap(us_capture_s *cap) {
us_capture_runtime_s *const run = cap->run;
struct v4l2_capability cap = {0};
struct v4l2_capability cpb = {0};
_D_LOG_DEBUG("Querying device capabilities ...");
if (us_xioctl(run->fd, VIDIOC_QUERYCAP, &cap) < 0) {
if (us_xioctl(run->fd, VIDIOC_QUERYCAP, &cpb) < 0) {
_D_LOG_PERROR("Can't query device capabilities");
return -1;
}
if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) {
if (cpb.capabilities & V4L2_CAP_VIDEO_CAPTURE) {
run->capture_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
run->capture_mplane = false;
_D_LOG_INFO("Using capture type: single-planar");
} else if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE) {
} else if (cpb.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE) {
run->capture_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
run->capture_mplane = true;
_D_LOG_INFO("Using capture type: multi-planar");
@@ -571,13 +571,13 @@ static int _device_open_check_cap(us_device_s *dev) {
return -1;
}
if (!(cap.capabilities & V4L2_CAP_STREAMING)) {
if (!(cpb.capabilities & V4L2_CAP_STREAMING)) {
_D_LOG_ERROR("Device doesn't support streaming IO");
return -1;
}
if (!run->capture_mplane) {
int input = dev->input; // Needs a pointer to int for ioctl()
int input = cap->input; // Needs a pointer to int for ioctl()
_D_LOG_INFO("Using input channel: %d", input);
if (us_xioctl(run->fd, VIDIOC_S_INPUT, &input) < 0) {
_D_LOG_ERROR("Can't set input channel");
@@ -585,9 +585,9 @@ static int _device_open_check_cap(us_device_s *dev) {
}
}
if (dev->standard != V4L2_STD_UNKNOWN) {
_D_LOG_INFO("Using TV standard: %s", _standard_to_string(dev->standard));
if (us_xioctl(run->fd, VIDIOC_S_STD, &dev->standard) < 0) {
if (cap->standard != V4L2_STD_UNKNOWN) {
_D_LOG_INFO("Using TV standard: %s", _standard_to_string(cap->standard));
if (us_xioctl(run->fd, VIDIOC_S_STD, &cap->standard) < 0) {
_D_LOG_ERROR("Can't set video standard");
return -1;
}
@@ -597,10 +597,10 @@ static int _device_open_check_cap(us_device_s *dev) {
return 0;
}
static int _device_open_dv_timings(us_device_s *dev, bool apply) {
static int _capture_open_dv_timings(us_capture_s *cap, bool apply) {
// Just probe only if @apply is false
const us_device_runtime_s *const run = dev->run;
const us_capture_runtime_s *const run = cap->run;
int dv_errno = 0;
@@ -637,14 +637,14 @@ static int _device_open_dv_timings(us_device_s *dev, bool apply) {
_D_LOG_PERROR("Failed to apply DV-timings");
return -1;
}
if (_device_apply_resolution(dev, dv.bt.width, dv.bt.height, hz) < 0) {
if (_capture_apply_resolution(cap, dv.bt.width, dv.bt.height, hz) < 0) {
return -1;
}
goto subscribe;
querystd:
_D_LOG_DEBUG("Failed to query DV-timings, trying QuerySTD ...");
if (us_xioctl(run->fd, VIDIOC_QUERYSTD, &dev->standard) < 0) {
if (us_xioctl(run->fd, VIDIOC_QUERYSTD, &cap->standard) < 0) {
if (apply) {
char *std_error = us_errno_to_string(errno); // Read the errno first
char *dv_error = us_errno_to_string(dv_errno);
@@ -656,17 +656,17 @@ querystd:
} else if (!apply) {
goto probe_only;
}
if (us_xioctl(run->fd, VIDIOC_S_STD, &dev->standard) < 0) {
_D_LOG_PERROR("Can't set apply standard: %s", _standard_to_string(dev->standard));
if (us_xioctl(run->fd, VIDIOC_S_STD, &cap->standard) < 0) {
_D_LOG_PERROR("Can't set apply standard: %s", _standard_to_string(cap->standard));
return -1;
}
_D_LOG_DEBUG("Applied new video standard: %s", _standard_to_string(dev->standard));
_D_LOG_DEBUG("Applied new video standard: %s", _standard_to_string(cap->standard));
subscribe:
; // Empty statement for the goto label above
struct v4l2_event_subscription sub = {.type = V4L2_EVENT_SOURCE_CHANGE};
_D_LOG_DEBUG("Subscribing to V4L2_EVENT_SOURCE_CHANGE ...")
if (us_xioctl(dev->run->fd, VIDIOC_SUBSCRIBE_EVENT, &sub) < 0) {
if (us_xioctl(cap->run->fd, VIDIOC_SUBSCRIBE_EVENT, &sub) < 0) {
_D_LOG_PERROR("Can't subscribe to V4L2_EVENT_SOURCE_CHANGE");
return -1;
}
@@ -675,8 +675,8 @@ probe_only:
return 0;
}
static int _device_open_format(us_device_s *dev, bool first) {
us_device_runtime_s *const run = dev->run;
static int _capture_open_format(us_capture_s *cap, bool first) {
us_capture_runtime_s *const run = cap->run;
const uint stride = us_align_size(run->width, 32) << 1;
@@ -685,21 +685,21 @@ static int _device_open_format(us_device_s *dev, bool first) {
if (run->capture_mplane) {
fmt.fmt.pix_mp.width = run->width;
fmt.fmt.pix_mp.height = run->height;
fmt.fmt.pix_mp.pixelformat = dev->format;
fmt.fmt.pix_mp.pixelformat = cap->format;
fmt.fmt.pix_mp.field = V4L2_FIELD_ANY;
fmt.fmt.pix_mp.flags = 0;
fmt.fmt.pix_mp.num_planes = 1;
} else {
fmt.fmt.pix.width = run->width;
fmt.fmt.pix.height = run->height;
fmt.fmt.pix.pixelformat = dev->format;
fmt.fmt.pix.pixelformat = cap->format;
fmt.fmt.pix.field = V4L2_FIELD_ANY;
fmt.fmt.pix.bytesperline = stride;
}
// Set format
_D_LOG_DEBUG("Probing device format=%s, stride=%u, resolution=%ux%u ...",
_format_to_string_supported(dev->format), stride, run->width, run->height);
_format_to_string_supported(cap->format), stride, run->width, run->height);
if (us_xioctl(run->fd, VIDIOC_S_FMT, &fmt) < 0) {
_D_LOG_PERROR("Can't set device format");
return -1;
@@ -719,18 +719,18 @@ static int _device_open_format(us_device_s *dev, bool first) {
_D_LOG_ERROR("Requested resolution=%ux%u is unavailable", run->width, run->height);
retry = true;
}
if (_device_apply_resolution(dev, FMT(width), FMT(height), run->hz) < 0) {
if (_capture_apply_resolution(cap, FMT(width), FMT(height), run->hz) < 0) {
return -1;
}
if (first && retry) {
return _device_open_format(dev, false);
return _capture_open_format(cap, false);
}
_D_LOG_INFO("Using resolution: %ux%u", run->width, run->height);
// Check format
if (FMT(pixelformat) != dev->format) {
if (FMT(pixelformat) != cap->format) {
_D_LOG_ERROR("Could not obtain the requested format=%s; driver gave us %s",
_format_to_string_supported(dev->format),
_format_to_string_supported(cap->format),
_format_to_string_supported(FMT(pixelformat)));
char *format_str;
@@ -747,6 +747,21 @@ static int _device_open_format(us_device_s *dev, bool first) {
run->format = FMT(pixelformat);
_D_LOG_INFO("Using format: %s", _format_to_string_supported(run->format));
if (cap->format_swap_rgb) {
// Userspace workaround for TC358743 RGB/BGR bug:
// - https://github.com/raspberrypi/linux/issues/6068
uint swapped = 0;
switch (run->format) {
case V4L2_PIX_FMT_RGB24: swapped = V4L2_PIX_FMT_BGR24; break;
case V4L2_PIX_FMT_BGR24: swapped = V4L2_PIX_FMT_RGB24; break;
}
if (swapped > 0) {
_D_LOG_INFO("Using format swap: %s -> %s",
_format_to_string_supported(run->format),
_format_to_string_supported(swapped));
run->format = swapped;
}
}
run->stride = FMTS(bytesperline);
run->raw_size = FMTS(sizeimage); // Only for userptr
@@ -757,8 +772,8 @@ static int _device_open_format(us_device_s *dev, bool first) {
return 0;
}
static void _device_open_hw_fps(us_device_s *dev) {
us_device_runtime_s *const run = dev->run;
static void _capture_open_hw_fps(us_capture_s *cap) {
us_capture_runtime_s *const run = cap->run;
run->hw_fps = 0;
@@ -783,7 +798,7 @@ static void _device_open_hw_fps(us_device_s *dev) {
US_MEMSET_ZERO(setfps);
setfps.type = run->capture_type;
SETFPS_TPF(numerator) = 1;
SETFPS_TPF(denominator) = (dev->desired_fps == 0 ? 255 : dev->desired_fps);
SETFPS_TPF(denominator) = (cap->desired_fps == 0 ? 255 : cap->desired_fps);
if (us_xioctl(run->fd, VIDIOC_S_PARM, &setfps) < 0) {
_D_LOG_PERROR("Can't set HW FPS");
@@ -801,8 +816,8 @@ static void _device_open_hw_fps(us_device_s *dev) {
}
run->hw_fps = SETFPS_TPF(denominator);
if (dev->desired_fps != run->hw_fps) {
_D_LOG_INFO("Using HW FPS: %u -> %u (coerced)", dev->desired_fps, run->hw_fps);
if (cap->desired_fps != run->hw_fps) {
_D_LOG_INFO("Using HW FPS: %u -> %u (coerced)", cap->desired_fps, run->hw_fps);
} else {
_D_LOG_INFO("Using HW FPS: %u", run->hw_fps);
}
@@ -810,46 +825,46 @@ static void _device_open_hw_fps(us_device_s *dev) {
# undef SETFPS_TPF
}
static void _device_open_jpeg_quality(us_device_s *dev) {
us_device_runtime_s *const run = dev->run;
static void _capture_open_jpeg_quality(us_capture_s *cap) {
us_capture_runtime_s *const run = cap->run;
uint quality = 0;
if (us_is_jpeg(run->format)) {
struct v4l2_jpegcompression comp = {0};
if (us_xioctl(run->fd, VIDIOC_G_JPEGCOMP, &comp) < 0) {
_D_LOG_ERROR("Device doesn't support setting of HW encoding quality parameters");
} else {
comp.quality = dev->jpeg_quality;
comp.quality = cap->jpeg_quality;
if (us_xioctl(run->fd, VIDIOC_S_JPEGCOMP, &comp) < 0) {
_D_LOG_ERROR("Can't change MJPEG quality for JPEG source with HW pass-through encoder");
} else {
quality = dev->jpeg_quality;
quality = cap->jpeg_quality;
}
}
}
run->jpeg_quality = quality;
}
static int _device_open_io_method(us_device_s *dev) {
_D_LOG_INFO("Using IO method: %s", _io_method_to_string_supported(dev->io_method));
switch (dev->io_method) {
case V4L2_MEMORY_MMAP: return _device_open_io_method_mmap(dev);
case V4L2_MEMORY_USERPTR: return _device_open_io_method_userptr(dev);
static int _capture_open_io_method(us_capture_s *cap) {
_D_LOG_INFO("Using IO method: %s", _io_method_to_string_supported(cap->io_method));
switch (cap->io_method) {
case V4L2_MEMORY_MMAP: return _capture_open_io_method_mmap(cap);
case V4L2_MEMORY_USERPTR: return _capture_open_io_method_userptr(cap);
default: assert(0 && "Unsupported IO method");
}
return -1;
}
static int _device_open_io_method_mmap(us_device_s *dev) {
us_device_runtime_s *const run = dev->run;
static int _capture_open_io_method_mmap(us_capture_s *cap) {
us_capture_runtime_s *const run = cap->run;
struct v4l2_requestbuffers req = {
.count = dev->n_bufs,
.count = cap->n_bufs,
.type = run->capture_type,
.memory = V4L2_MEMORY_MMAP,
};
_D_LOG_DEBUG("Requesting %u device buffers for MMAP ...", req.count);
if (us_xioctl(run->fd, VIDIOC_REQBUFS, &req) < 0) {
_D_LOG_PERROR("Device '%s' doesn't support MMAP method", dev->path);
_D_LOG_PERROR("Device '%s' doesn't support MMAP method", cap->path);
return -1;
}
@@ -857,12 +872,12 @@ static int _device_open_io_method_mmap(us_device_s *dev) {
_D_LOG_ERROR("Insufficient buffer memory: %u", req.count);
return -1;
} else {
_D_LOG_INFO("Requested %u device buffers, got %u", dev->n_bufs, req.count);
_D_LOG_INFO("Requested %u device buffers, got %u", cap->n_bufs, req.count);
}
_D_LOG_DEBUG("Allocating device buffers ...");
US_CALLOC(run->hw_bufs, req.count);
US_CALLOC(run->bufs, req.count);
for (run->n_bufs = 0; run->n_bufs < req.count; ++run->n_bufs) {
struct v4l2_buffer buf = {0};
@@ -881,7 +896,7 @@ static int _device_open_io_method_mmap(us_device_s *dev) {
return -1;
}
us_hw_buffer_s *hw = &run->hw_bufs[run->n_bufs];
us_capture_hwbuf_s *hw = &run->bufs[run->n_bufs];
atomic_init(&hw->refs, 0);
const uz buf_size = (run->capture_mplane ? buf.m.planes[0].length : buf.length);
const off_t buf_offset = (run->capture_mplane ? buf.m.planes[0].m.mem_offset : buf.m.offset);
@@ -907,17 +922,17 @@ static int _device_open_io_method_mmap(us_device_s *dev) {
return 0;
}
static int _device_open_io_method_userptr(us_device_s *dev) {
us_device_runtime_s *const run = dev->run;
static int _capture_open_io_method_userptr(us_capture_s *cap) {
us_capture_runtime_s *const run = cap->run;
struct v4l2_requestbuffers req = {
.count = dev->n_bufs,
.count = cap->n_bufs,
.type = run->capture_type,
.memory = V4L2_MEMORY_USERPTR,
};
_D_LOG_DEBUG("Requesting %u device buffers for USERPTR ...", req.count);
if (us_xioctl(run->fd, VIDIOC_REQBUFS, &req) < 0) {
_D_LOG_PERROR("Device '%s' doesn't support USERPTR method", dev->path);
_D_LOG_PERROR("Device '%s' doesn't support USERPTR method", cap->path);
return -1;
}
@@ -925,18 +940,18 @@ static int _device_open_io_method_userptr(us_device_s *dev) {
_D_LOG_ERROR("Insufficient buffer memory: %u", req.count);
return -1;
} else {
_D_LOG_INFO("Requested %u device buffers, got %u", dev->n_bufs, req.count);
_D_LOG_INFO("Requested %u device buffers, got %u", cap->n_bufs, req.count);
}
_D_LOG_DEBUG("Allocating device buffers ...");
US_CALLOC(run->hw_bufs, req.count);
US_CALLOC(run->bufs, req.count);
const uint page_size = getpagesize();
const uint buf_size = us_align_size(run->raw_size, page_size);
for (run->n_bufs = 0; run->n_bufs < req.count; ++run->n_bufs) {
us_hw_buffer_s *hw = &run->hw_bufs[run->n_bufs];
us_capture_hwbuf_s *hw = &run->bufs[run->n_bufs];
assert((hw->raw.data = aligned_alloc(page_size, buf_size)) != NULL);
memset(hw->raw.data, 0, buf_size);
hw->raw.allocated = buf_size;
@@ -947,25 +962,25 @@ static int _device_open_io_method_userptr(us_device_s *dev) {
return 0;
}
static int _device_open_queue_buffers(us_device_s *dev) {
us_device_runtime_s *const run = dev->run;
static int _capture_open_queue_buffers(us_capture_s *cap) {
us_capture_runtime_s *const run = cap->run;
for (uint index = 0; index < run->n_bufs; ++index) {
struct v4l2_buffer buf = {0};
struct v4l2_plane planes[VIDEO_MAX_PLANES] = {0};
buf.type = run->capture_type;
buf.memory = dev->io_method;
buf.memory = cap->io_method;
buf.index = index;
if (run->capture_mplane) {
buf.m.planes = planes;
buf.length = 1;
}
if (dev->io_method == V4L2_MEMORY_USERPTR) {
if (cap->io_method == V4L2_MEMORY_USERPTR) {
// I am not sure, may be this is incorrect for mplane device,
// but i don't have one which supports V4L2_MEMORY_USERPTR
buf.m.userptr = (unsigned long)run->hw_bufs[index].raw.data;
buf.length = run->hw_bufs[index].raw.allocated;
buf.m.userptr = (unsigned long)run->bufs[index].raw.data;
buf.length = run->bufs[index].raw.allocated;
}
_D_LOG_DEBUG("Calling us_xioctl(VIDIOC_QBUF) for buffer=%u ...", index);
@@ -977,8 +992,8 @@ static int _device_open_queue_buffers(us_device_s *dev) {
return 0;
}
static int _device_open_export_to_dma(us_device_s *dev) {
us_device_runtime_s *const run = dev->run;
static int _capture_open_export_to_dma(us_capture_s *cap) {
us_capture_runtime_s *const run = cap->run;
for (uint index = 0; index < run->n_bufs; ++index) {
struct v4l2_exportbuffer exp = {
@@ -990,18 +1005,18 @@ static int _device_open_export_to_dma(us_device_s *dev) {
_D_LOG_PERROR("Can't export device buffer=%u to DMA", index);
goto error;
}
run->hw_bufs[index].dma_fd = exp.fd;
run->bufs[index].dma_fd = exp.fd;
}
return 0;
error:
for (uint index = 0; index < run->n_bufs; ++index) {
US_CLOSE_FD(run->hw_bufs[index].dma_fd);
US_CLOSE_FD(run->bufs[index].dma_fd);
}
return -1;
}
static int _device_apply_resolution(us_device_s *dev, uint width, uint height, float hz) {
static int _capture_apply_resolution(us_capture_s *cap, uint width, uint height, float hz) {
// Тут VIDEO_MIN_* не используются из-за странностей минимального разрешения при отсутствии сигнала
// у некоторых устройств, например TC358743
if (
@@ -1012,42 +1027,42 @@ static int _device_apply_resolution(us_device_s *dev, uint width, uint height, f
width, height, US_VIDEO_MAX_WIDTH, US_VIDEO_MAX_HEIGHT);
return -1;
}
dev->run->width = width;
dev->run->height = height;
dev->run->hz = hz;
cap->run->width = width;
cap->run->height = height;
cap->run->hz = hz;
return 0;
}
static void _device_apply_controls(us_device_s *dev) {
static void _capture_apply_controls(us_capture_s *cap) {
# define SET_CID_VALUE(x_cid, x_field, x_value, x_quiet) { \
struct v4l2_queryctrl m_query; \
if (_device_query_control(dev, &m_query, #x_field, x_cid, x_quiet) == 0) { \
_device_set_control(dev, &m_query, #x_field, x_cid, x_value, x_quiet); \
if (_capture_query_control(cap, &m_query, #x_field, x_cid, x_quiet) == 0) { \
_capture_set_control(cap, &m_query, #x_field, x_cid, x_value, x_quiet); \
} \
}
# define SET_CID_DEFAULT(x_cid, x_field, x_quiet) { \
struct v4l2_queryctrl m_query; \
if (_device_query_control(dev, &m_query, #x_field, x_cid, x_quiet) == 0) { \
_device_set_control(dev, &m_query, #x_field, x_cid, m_query.default_value, x_quiet); \
if (_capture_query_control(cap, &m_query, #x_field, x_cid, x_quiet) == 0) { \
_capture_set_control(cap, &m_query, #x_field, x_cid, m_query.default_value, x_quiet); \
} \
}
# define CONTROL_MANUAL_CID(x_cid, x_field) { \
if (dev->ctl.x_field.mode == CTL_MODE_VALUE) { \
SET_CID_VALUE(x_cid, x_field, dev->ctl.x_field.value, false); \
} else if (dev->ctl.x_field.mode == CTL_MODE_DEFAULT) { \
if (cap->ctl.x_field.mode == CTL_MODE_VALUE) { \
SET_CID_VALUE(x_cid, x_field, cap->ctl.x_field.value, false); \
} else if (cap->ctl.x_field.mode == CTL_MODE_DEFAULT) { \
SET_CID_DEFAULT(x_cid, x_field, false); \
} \
}
# define CONTROL_AUTO_CID(x_cid_auto, x_cid_manual, x_field) { \
if (dev->ctl.x_field.mode == CTL_MODE_VALUE) { \
if (cap->ctl.x_field.mode == CTL_MODE_VALUE) { \
SET_CID_VALUE(x_cid_auto, x_field##_auto, 0, true); \
SET_CID_VALUE(x_cid_manual, x_field, dev->ctl.x_field.value, false); \
} else if (dev->ctl.x_field.mode == CTL_MODE_AUTO) { \
SET_CID_VALUE(x_cid_manual, x_field, cap->ctl.x_field.value, false); \
} else if (cap->ctl.x_field.mode == CTL_MODE_AUTO) { \
SET_CID_VALUE(x_cid_auto, x_field##_auto, 1, false); \
} else if (dev->ctl.x_field.mode == CTL_MODE_DEFAULT) { \
} else if (cap->ctl.x_field.mode == CTL_MODE_DEFAULT) { \
SET_CID_VALUE(x_cid_auto, x_field##_auto, 0, true); /* Reset inactive flag */ \
SET_CID_DEFAULT(x_cid_manual, x_field, false); \
SET_CID_DEFAULT(x_cid_auto, x_field##_auto, false); \
@@ -1074,15 +1089,15 @@ static void _device_apply_controls(us_device_s *dev) {
# undef SET_CID_VALUE
}
static int _device_query_control(
us_device_s *dev, struct v4l2_queryctrl *query,
static int _capture_query_control(
us_capture_s *cap, struct v4l2_queryctrl *query,
const char *name, uint cid, bool quiet) {
// cppcheck-suppress redundantPointerOp
US_MEMSET_ZERO(*query);
query->id = cid;
if (us_xioctl(dev->run->fd, VIDIOC_QUERYCTRL, query) < 0 || query->flags & V4L2_CTRL_FLAG_DISABLED) {
if (us_xioctl(cap->run->fd, VIDIOC_QUERYCTRL, query) < 0 || query->flags & V4L2_CTRL_FLAG_DISABLED) {
if (!quiet) {
_D_LOG_ERROR("Changing control %s is unsupported", name);
}
@@ -1091,8 +1106,8 @@ static int _device_query_control(
return 0;
}
static void _device_set_control(
us_device_s *dev, const struct v4l2_queryctrl *query,
static void _capture_set_control(
us_capture_s *cap, const struct v4l2_queryctrl *query,
const char *name, uint cid, int value, bool quiet) {
if (value < query->minimum || value > query->maximum || value % query->step != 0) {
@@ -1107,7 +1122,7 @@ static void _device_set_control(
.id = cid,
.value = value,
};
if (us_xioctl(dev->run->fd, VIDIOC_S_CTRL, &ctl) < 0) {
if (us_xioctl(cap->run->fd, VIDIOC_S_CTRL, &ctl) < 0) {
if (!quiet) {
_D_LOG_PERROR("Can't set control %s", name);
}

View File

@@ -49,7 +49,7 @@ typedef struct {
int dma_fd;
bool grabbed;
atomic_int refs;
} us_hw_buffer_s;
} us_capture_hwbuf_s;
typedef struct {
int fd;
@@ -62,13 +62,13 @@ typedef struct {
uint jpeg_quality;
uz raw_size;
uint n_bufs;
us_hw_buffer_s *hw_bufs;
us_capture_hwbuf_s *bufs;
bool dma;
enum v4l2_buf_type capture_type;
bool capture_mplane;
bool streamon;
int open_error_reported;
} us_device_runtime_s;
} us_capture_runtime_s;
typedef enum {
CTL_MODE_NONE = 0,
@@ -104,6 +104,8 @@ typedef struct {
uint width;
uint height;
uint format;
bool format_swap_rgb;
uint jpeg_quality;
v4l2_std_id standard;
enum v4l2_memory io_method;
@@ -116,22 +118,22 @@ typedef struct {
bool persistent;
uint timeout;
us_controls_s ctl;
us_device_runtime_s *run;
} us_device_s;
us_capture_runtime_s *run;
} us_capture_s;
us_device_s *us_device_init(void);
void us_device_destroy(us_device_s *dev);
us_capture_s *us_capture_init(void);
void us_capture_destroy(us_capture_s *cap);
int us_device_parse_format(const char *str);
int us_device_parse_standard(const char *str);
int us_device_parse_io_method(const char *str);
int us_capture_parse_format(const char *str);
int us_capture_parse_standard(const char *str);
int us_capture_parse_io_method(const char *str);
int us_device_open(us_device_s *dev);
void us_device_close(us_device_s *dev);
int us_capture_open(us_capture_s *cap);
void us_capture_close(us_capture_s *cap);
int us_device_grab_buffer(us_device_s *dev, us_hw_buffer_s **hw);
int us_device_release_buffer(us_device_s *dev, us_hw_buffer_s *hw);
int us_capture_grab_buffer(us_capture_s *cap, us_capture_hwbuf_s **hw);
int us_capture_release_buffer(us_capture_s *cap, us_capture_hwbuf_s *hw);
void us_device_buffer_incref(us_hw_buffer_s *hw);
void us_device_buffer_decref(us_hw_buffer_s *hw);
void us_capture_buffer_incref(us_capture_hwbuf_s *hw);
void us_capture_buffer_decref(us_capture_hwbuf_s *hw);

View File

@@ -26,7 +26,7 @@
#define US_VERSION_MAJOR 6
#define US_VERSION_MINOR 7
#define US_VERSION_MINOR 8
#define US_MAKE_VERSION2(_major, _minor) #_major "." #_minor
#define US_MAKE_VERSION1(_major, _minor) US_MAKE_VERSION2(_major, _minor)

View File

@@ -37,17 +37,18 @@
#include <drm_fourcc.h>
#include <libdrm/drm.h>
#include "../libs/types.h"
#include "../libs/tools.h"
#include "../libs/logging.h"
#include "../libs/frame.h"
#include "../libs/frametext.h"
#include "../types.h"
#include "../tools.h"
#include "../logging.h"
#include "../frame.h"
#include "../frametext.h"
#include "../capture.h"
static void _drm_vsync_callback(int fd, uint n_frame, uint sec, uint usec, void *v_buf);
static int _drm_check_status(us_drm_s *drm);
static void _drm_ensure_dpms_power(us_drm_s *drm, bool on);
static int _drm_init_buffers(us_drm_s *drm, const us_device_s *dev);
static int _drm_init_buffers(us_drm_s *drm, const us_capture_s *cap);
static int _drm_find_sink(us_drm_s *drm, uint width, uint height, float hz);
static drmModeModeInfo *_find_best_mode(drmModeConnector *conn, uint width, uint height, float hz);
@@ -78,7 +79,7 @@ us_drm_s *us_drm_init(void) {
US_CALLOC(drm, 1);
// drm->path = "/dev/dri/card0";
drm->path = "/dev/dri/by-path/platform-gpu-card";
drm->port = "HDMI-A-1";
drm->port = "HDMI-A-2"; // OUT2 on PiKVM V4 Plus
drm->timeout = 5;
drm->run = run;
return drm;
@@ -90,7 +91,7 @@ void us_drm_destroy(us_drm_s *drm) {
US_DELETE(drm, free); // cppcheck-suppress uselessAssignmentPtrArg
}
int us_drm_open(us_drm_s *drm, const us_device_s *dev) {
int us_drm_open(us_drm_s *drm, const us_capture_s *cap) {
us_drm_runtime_s *const run = drm->run;
assert(run->fd < 0);
@@ -101,7 +102,7 @@ int us_drm_open(us_drm_s *drm, const us_device_s *dev) {
default: goto error;
}
_D_LOG_INFO("Configuring DRM device for %s ...", (dev == NULL ? "STUB" : "DMA"));
_D_LOG_INFO("Configuring DRM device for %s ...", (cap == NULL ? "STUB" : "DMA"));
if ((run->fd = open(drm->path, O_RDWR | O_CLOEXEC | O_NONBLOCK)) < 0) {
_D_LOG_PERROR("Can't open DRM device");
@@ -110,12 +111,12 @@ int us_drm_open(us_drm_s *drm, const us_device_s *dev) {
_D_LOG_DEBUG("DRM device fd=%d opened", run->fd);
int stub = 0; // Open the real device with DMA
if (dev == NULL) {
if (cap == NULL) {
stub = US_DRM_STUB_USER;
} else if (dev->run->format != V4L2_PIX_FMT_RGB24) {
} else if (cap->run->format != V4L2_PIX_FMT_RGB24 && cap->run->format != V4L2_PIX_FMT_BGR24) {
stub = US_DRM_STUB_BAD_FORMAT;
char fourcc_str[8];
us_fourcc_to_string(dev->run->format, fourcc_str, 8);
us_fourcc_to_string(cap->run->format, fourcc_str, 8);
_D_LOG_ERROR("Input format %s is not supported, forcing to STUB ...", fourcc_str);
}
@@ -137,9 +138,9 @@ int us_drm_open(us_drm_s *drm, const us_device_s *dev) {
}
# undef CHECK_CAP
const uint width = (stub > 0 ? 0 : dev->run->width);
const uint height = (stub > 0 ? 0 : dev->run->height);
const uint hz = (stub > 0 ? 0 : dev->run->hz);
const uint width = (stub > 0 ? 0 : cap->run->width);
const uint height = (stub > 0 ? 0 : cap->run->height);
const uint hz = (stub > 0 ? 0 : cap->run->hz);
switch (_drm_find_sink(drm, width, height, hz)) {
case 0: break;
case -2: goto unplugged;
@@ -151,7 +152,7 @@ int us_drm_open(us_drm_s *drm, const us_device_s *dev) {
_D_LOG_ERROR("There is no appropriate modes for the capture, forcing to STUB ...");
}
if (_drm_init_buffers(drm, (stub > 0 ? NULL : dev)) < 0) {
if (_drm_init_buffers(drm, (stub > 0 ? NULL : cap)) < 0) {
goto error;
}
@@ -308,7 +309,7 @@ static void _drm_vsync_callback(int fd, uint n_frame, uint sec, uint usec, void
_D_LOG_DEBUG("Got VSync signal");
}
int us_drm_expose_stub(us_drm_s *drm, us_drm_stub_e stub, const us_device_s *dev) {
int us_drm_expose_stub(us_drm_s *drm, us_drm_stub_e stub, const us_capture_s *cap) {
us_drm_runtime_s *const run = drm->run;
assert(run->fd >= 0);
@@ -324,14 +325,14 @@ int us_drm_expose_stub(us_drm_s *drm, us_drm_stub_e stub, const us_device_s *dev
# define DRAW_MSG(x_msg) us_frametext_draw(run->ft, (x_msg), run->mode.hdisplay, run->mode.vdisplay)
switch (stub) {
case US_DRM_STUB_BAD_RESOLUTION: {
assert(dev != NULL);
assert(cap != NULL);
char msg[1024];
US_SNPRINTF(msg, 1023,
"=== PiKVM ==="
"\n \n< UNSUPPORTED RESOLUTION >"
"\n \n< %ux%up%.02f >"
"\n \nby this display",
dev->run->width, dev->run->height, dev->run->hz);
cap->run->width, cap->run->height, cap->run->hz);
DRAW_MSG(msg);
break;
};
@@ -376,7 +377,7 @@ int us_drm_expose_stub(us_drm_s *drm, us_drm_stub_e stub, const us_device_s *dev
return retval;
}
int us_drm_expose_dma(us_drm_s *drm, const us_hw_buffer_s *hw) {
int us_drm_expose_dma(us_drm_s *drm, const us_capture_hwbuf_s *hw) {
us_drm_runtime_s *const run = drm->run;
us_drm_buffer_s *const buf = &run->bufs[hw->buf.index];
@@ -459,14 +460,16 @@ static void _drm_ensure_dpms_power(us_drm_s *drm, bool on) {
run->dpms_state = (int)on;
}
static int _drm_init_buffers(us_drm_s *drm, const us_device_s *dev) {
static int _drm_init_buffers(us_drm_s *drm, const us_capture_s *cap) {
us_drm_runtime_s *const run = drm->run;
const uint n_bufs = (dev == NULL ? 4 : dev->run->n_bufs);
const char *name = (dev == NULL ? "STUB" : "DMA");
const uint n_bufs = (cap == NULL ? 4 : cap->run->n_bufs);
const char *name = (cap == NULL ? "STUB" : "DMA");
_D_LOG_DEBUG("Initializing %u %s buffers ...", n_bufs, name);
uint format = DRM_FORMAT_RGB888;
US_CALLOC(run->bufs, n_bufs);
for (run->n_bufs = 0; run->n_bufs < n_bufs; ++run->n_bufs) {
const uint n_buf = run->n_bufs;
@@ -479,7 +482,7 @@ static int _drm_init_buffers(us_drm_s *drm, const us_device_s *dev) {
u32 strides[4] = {0};
u32 offsets[4] = {0};
if (dev == NULL) {
if (cap == NULL) {
struct drm_mode_create_dumb create = {
.width = run->mode.hdisplay,
.height = run->mode.vdisplay,
@@ -512,17 +515,22 @@ static int _drm_init_buffers(us_drm_s *drm, const us_device_s *dev) {
strides[0] = create.pitch;
} else {
if (drmPrimeFDToHandle(run->fd, dev->run->hw_bufs[n_buf].dma_fd, &buf->handle) < 0) {
if (drmPrimeFDToHandle(run->fd, cap->run->bufs[n_buf].dma_fd, &buf->handle) < 0) {
_D_LOG_PERROR("Can't import DMA buffer=%u from capture device", n_buf);
return -1;
}
handles[0] = buf->handle;
strides[0] = dev->run->stride;
strides[0] = cap->run->stride;
switch (cap->run->format) {
case V4L2_PIX_FMT_RGB24: format = (DRM_FORMAT_BIG_ENDIAN ? DRM_FORMAT_BGR888 : DRM_FORMAT_RGB888); break;
case V4L2_PIX_FMT_BGR24: format = (DRM_FORMAT_BIG_ENDIAN ? DRM_FORMAT_RGB888 : DRM_FORMAT_BGR888); break;
}
}
if (drmModeAddFB2(
run->fd,
run->mode.hdisplay, run->mode.vdisplay, DRM_FORMAT_RGB888,
run->mode.hdisplay, run->mode.vdisplay, format,
handles, strides, offsets, &buf->id, 0
)) {
_D_LOG_PERROR("Can't setup buffer=%u", n_buf);

View File

@@ -25,10 +25,10 @@
#include <xf86drmMode.h>
#include "../libs/types.h"
#include "../libs/frame.h"
#include "../libs/frametext.h"
#include "../libs/device.h"
#include "../types.h"
#include "../frame.h"
#include "../frametext.h"
#include "../capture.h"
typedef enum {
@@ -83,10 +83,10 @@ typedef struct {
us_drm_s *us_drm_init(void);
void us_drm_destroy(us_drm_s *drm);
int us_drm_open(us_drm_s *drm, const us_device_s *dev);
int us_drm_open(us_drm_s *drm, const us_capture_s *cap);
void us_drm_close(us_drm_s *drm);
int us_drm_dpms_power_off(us_drm_s *drm);
int us_drm_wait_for_vsync(us_drm_s *drm);
int us_drm_expose_stub(us_drm_s *drm, us_drm_stub_e stub, const us_device_s *dev);
int us_drm_expose_dma(us_drm_s *drm, const us_hw_buffer_s *hw);
int us_drm_expose_stub(us_drm_s *drm, us_drm_stub_e stub, const us_capture_s *cap);
int us_drm_expose_dma(us_drm_s *drm, const us_capture_hwbuf_s *hw);

View File

@@ -91,13 +91,13 @@ const char *us_encoder_type_to_string(us_encoder_type_e type) {
return _ENCODER_TYPES[0].name;
}
void us_encoder_open(us_encoder_s *enc, us_device_s *dev) {
void us_encoder_open(us_encoder_s *enc, us_capture_s *cap) {
assert(enc->run->pool == NULL);
# define DR(x_next) dev->run->x_next
# define DR(x_next) cap->run->x_next
us_encoder_type_e type = (_ER(cpu_forced) ? US_ENCODER_TYPE_CPU : enc->type);
unsigned quality = dev->jpeg_quality;
unsigned quality = cap->jpeg_quality;
unsigned n_workers = US_MIN(enc->n_workers, DR(n_bufs));
bool cpu_forced = false;
@@ -139,7 +139,7 @@ void us_encoder_open(us_encoder_s *enc, us_device_s *dev) {
use_cpu:
type = US_ENCODER_TYPE_CPU;
quality = dev->jpeg_quality;
quality = cap->jpeg_quality;
ok:
if (type == US_ENCODER_TYPE_NOOP) {
@@ -159,8 +159,8 @@ void us_encoder_open(us_encoder_s *enc, us_device_s *dev) {
US_MUTEX_UNLOCK(_ER(mutex));
const long double desired_interval = (
dev->desired_fps > 0 && (dev->desired_fps < dev->run->hw_fps || dev->run->hw_fps == 0)
? (long double)1 / dev->desired_fps
cap->desired_fps > 0 && (cap->desired_fps < cap->run->hw_fps || cap->run->hw_fps == 0)
? (long double)1 / cap->desired_fps
: 0
);

View File

@@ -35,7 +35,7 @@
#include "../libs/threading.h"
#include "../libs/logging.h"
#include "../libs/frame.h"
#include "../libs/device.h"
#include "../libs/capture.h"
#include "workers.h"
#include "m2m.h"
@@ -75,9 +75,9 @@ typedef struct {
} us_encoder_s;
typedef struct {
us_encoder_s *enc;
us_hw_buffer_s *hw;
us_frame_s *dest;
us_encoder_s *enc;
us_capture_hwbuf_s *hw;
us_frame_s *dest;
} us_encoder_job_s;
@@ -87,7 +87,7 @@ void us_encoder_destroy(us_encoder_s *enc);
int us_encoder_parse_type(const char *str);
const char *us_encoder_type_to_string(us_encoder_type_e type);
void us_encoder_open(us_encoder_s *enc, us_device_s *dev);
void us_encoder_open(us_encoder_s *enc, us_capture_s *cap);
void us_encoder_close(us_encoder_s *enc);
void us_encoder_get_runtime_params(us_encoder_s *enc, us_encoder_type_e *type, unsigned *quality);

View File

@@ -205,8 +205,8 @@ int us_server_listen(us_server_s *server) {
{
struct timeval interval = {0};
if (stream->dev->desired_fps > 0) {
interval.tv_usec = 1000000 / (stream->dev->desired_fps * 2);
if (stream->cap->desired_fps > 0) {
interval.tv_usec = 1000000 / (stream->cap->desired_fps * 2);
} else {
interval.tv_usec = 16000; // ~60fps
}
@@ -512,7 +512,7 @@ static void _http_callback_state(struct evhttp_request *request, void *v_server)
(server->fake_width ? server->fake_width : width),
(server->fake_height ? server->fake_height : height),
us_bool_to_string(online),
stream->dev->desired_fps,
stream->cap->desired_fps,
captured_fps,
ex->queued_fps,
run->stream_clients_count

View File

@@ -28,7 +28,7 @@
#include "../libs/tools.h"
#include "../libs/threading.h"
#include "../libs/logging.h"
#include "../libs/device.h"
#include "../libs/capture.h"
#include "../libs/signal.h"
#include "options.h"
@@ -84,12 +84,12 @@ int main(int argc, char *argv[]) {
US_THREAD_RENAME("main");
us_options_s *options = us_options_init(argc, argv);
us_device_s *dev = us_device_init();
us_capture_s *cap = us_capture_init();
us_encoder_s *enc = us_encoder_init();
_g_stream = us_stream_init(dev, enc);
_g_stream = us_stream_init(cap, enc);
_g_server = us_server_init(_g_stream);
if ((exit_code = options_parse(options, dev, enc, _g_stream, _g_server)) == 0) {
if ((exit_code = options_parse(options, cap, enc, _g_stream, _g_server)) == 0) {
# ifdef WITH_GPIO
us_gpio_init();
# endif
@@ -118,7 +118,7 @@ int main(int argc, char *argv[]) {
us_server_destroy(_g_server);
us_stream_destroy(_g_stream);
us_encoder_destroy(enc);
us_device_destroy(dev);
us_capture_destroy(cap);
us_options_destroy(options);
if (exit_code == 0) {

View File

@@ -61,6 +61,7 @@ enum _US_OPT_VALUES {
_O_DEVICE_TIMEOUT = 10000,
_O_DEVICE_ERROR_DELAY,
_O_FORMAT_SWAP_RGB,
_O_M2M_DEVICE,
_O_IMAGE_DEFAULT,
@@ -100,6 +101,10 @@ enum _US_OPT_VALUES {
_O_H264_M2M_DEVICE,
# undef ADD_SINK
# ifdef WITH_V4P
_O_V4P,
# endif
# ifdef WITH_GPIO
_O_GPIO_DEVICE,
_O_GPIO_CONSUMER_PREFIX,
@@ -132,6 +137,7 @@ static const struct option _LONG_OPTS[] = {
{"input", required_argument, NULL, _O_INPUT},
{"resolution", required_argument, NULL, _O_RESOLUTION},
{"format", required_argument, NULL, _O_FORMAT},
{"format-swap-rgb", required_argument, NULL, _O_FORMAT_SWAP_RGB},
{"tv-standard", required_argument, NULL, _O_TV_STANDARD},
{"io-method", required_argument, NULL, _O_IO_METHOD},
{"desired-fps", required_argument, NULL, _O_DESIRED_FPS},
@@ -204,6 +210,10 @@ static const struct option _LONG_OPTS[] = {
{"sink-client-ttl", required_argument, NULL, _O_JPEG_SINK_CLIENT_TTL},
{"sink-timeout", required_argument, NULL, _O_JPEG_SINK_TIMEOUT},
# ifdef WITH_V4P
{"v4p", no_argument, NULL, _O_V4P},
# endif
# ifdef WITH_GPIO
{"gpio-device", required_argument, NULL, _O_GPIO_DEVICE},
{"gpio-consumer-prefix", required_argument, NULL, _O_GPIO_CONSUMER_PREFIX},
@@ -240,7 +250,7 @@ static int _parse_resolution(const char *str, unsigned *width, unsigned *height,
static int _check_instance_id(const char *str);
static void _features(void);
static void _help(FILE *fp, const us_device_s *dev, const us_encoder_s *enc, const us_stream_s *stream, const us_server_s *server);
static void _help(FILE *fp, const us_capture_s *cap, const us_encoder_s *enc, const us_stream_s *stream, const us_server_s *server);
us_options_s *us_options_init(unsigned argc, char *argv[]) {
@@ -270,7 +280,7 @@ void us_options_destroy(us_options_s *options) {
}
int options_parse(us_options_s *options, us_device_s *dev, us_encoder_s *enc, us_stream_s *stream, us_server_s *server) {
int options_parse(us_options_s *options, us_capture_s *cap, us_encoder_s *enc, us_stream_s *stream, us_server_s *server) {
# define OPT_SET(x_dest, x_value) { \
x_dest = x_value; \
break; \
@@ -314,15 +324,15 @@ int options_parse(us_options_s *options, us_device_s *dev, us_encoder_s *enc, us
}
# define OPT_CTL_DEFAULT_NOBREAK(x_dest) { \
dev->ctl.x_dest.mode = CTL_MODE_DEFAULT; \
cap->ctl.x_dest.mode = CTL_MODE_DEFAULT; \
}
# define OPT_CTL_MANUAL(x_dest) { \
if (!strcasecmp(optarg, "default")) { \
OPT_CTL_DEFAULT_NOBREAK(x_dest); \
} else { \
dev->ctl.x_dest.mode = CTL_MODE_VALUE; \
OPT_NUMBER("--"#x_dest, dev->ctl.x_dest.value, INT_MIN, INT_MAX, 0); \
cap->ctl.x_dest.mode = CTL_MODE_VALUE; \
OPT_NUMBER("--"#x_dest, cap->ctl.x_dest.value, INT_MIN, INT_MAX, 0); \
} \
break; \
}
@@ -331,10 +341,10 @@ int options_parse(us_options_s *options, us_device_s *dev, us_encoder_s *enc, us
if (!strcasecmp(optarg, "default")) { \
OPT_CTL_DEFAULT_NOBREAK(x_dest); \
} else if (!strcasecmp(optarg, "auto")) { \
dev->ctl.x_dest.mode = CTL_MODE_AUTO; \
cap->ctl.x_dest.mode = CTL_MODE_AUTO; \
} else { \
dev->ctl.x_dest.mode = CTL_MODE_VALUE; \
OPT_NUMBER("--"#x_dest, dev->ctl.x_dest.value, INT_MIN, INT_MAX, 0); \
cap->ctl.x_dest.mode = CTL_MODE_VALUE; \
OPT_NUMBER("--"#x_dest, cap->ctl.x_dest.value, INT_MIN, INT_MAX, 0); \
} \
break; \
}
@@ -359,28 +369,29 @@ int options_parse(us_options_s *options, us_device_s *dev, us_encoder_s *enc, us
for (int ch; (ch = getopt_long(options->argc, options->argv_copy, short_opts, _LONG_OPTS, NULL)) >= 0;) {
switch (ch) {
case _O_DEVICE: OPT_SET(dev->path, optarg);
case _O_INPUT: OPT_NUMBER("--input", dev->input, 0, 128, 0);
case _O_RESOLUTION: OPT_RESOLUTION("--resolution", dev->width, dev->height, true);
case _O_DEVICE: OPT_SET(cap->path, optarg);
case _O_INPUT: OPT_NUMBER("--input", cap->input, 0, 128, 0);
case _O_RESOLUTION: OPT_RESOLUTION("--resolution", cap->width, cap->height, true);
# pragma GCC diagnostic ignored "-Wsign-compare"
# pragma GCC diagnostic push
case _O_FORMAT: OPT_PARSE_ENUM("pixel format", dev->format, us_device_parse_format, US_FORMATS_STR);
case _O_FORMAT: OPT_PARSE_ENUM("pixel format", cap->format, us_capture_parse_format, US_FORMATS_STR);
# pragma GCC diagnostic pop
case _O_TV_STANDARD: OPT_PARSE_ENUM("TV standard", dev->standard, us_device_parse_standard, US_STANDARDS_STR);
case _O_IO_METHOD: OPT_PARSE_ENUM("IO method", dev->io_method, us_device_parse_io_method, US_IO_METHODS_STR);
case _O_DESIRED_FPS: OPT_NUMBER("--desired-fps", dev->desired_fps, 0, US_VIDEO_MAX_FPS, 0);
case _O_MIN_FRAME_SIZE: OPT_NUMBER("--min-frame-size", dev->min_frame_size, 1, 8192, 0);
case _O_PERSISTENT: OPT_SET(dev->persistent, true);
case _O_DV_TIMINGS: OPT_SET(dev->dv_timings, true);
case _O_BUFFERS: OPT_NUMBER("--buffers", dev->n_bufs, 1, 32, 0);
case _O_FORMAT_SWAP_RGB: OPT_SET(cap->format_swap_rgb, true);
case _O_TV_STANDARD: OPT_PARSE_ENUM("TV standard", cap->standard, us_capture_parse_standard, US_STANDARDS_STR);
case _O_IO_METHOD: OPT_PARSE_ENUM("IO method", cap->io_method, us_capture_parse_io_method, US_IO_METHODS_STR);
case _O_DESIRED_FPS: OPT_NUMBER("--desired-fps", cap->desired_fps, 0, US_VIDEO_MAX_FPS, 0);
case _O_MIN_FRAME_SIZE: OPT_NUMBER("--min-frame-size", cap->min_frame_size, 1, 8192, 0);
case _O_PERSISTENT: OPT_SET(cap->persistent, true);
case _O_DV_TIMINGS: OPT_SET(cap->dv_timings, true);
case _O_BUFFERS: OPT_NUMBER("--buffers", cap->n_bufs, 1, 32, 0);
case _O_WORKERS: OPT_NUMBER("--workers", enc->n_workers, 1, 32, 0);
case _O_QUALITY: OPT_NUMBER("--quality", dev->jpeg_quality, 1, 100, 0);
case _O_QUALITY: OPT_NUMBER("--quality", cap->jpeg_quality, 1, 100, 0);
case _O_ENCODER: OPT_PARSE_ENUM("encoder type", enc->type, us_encoder_parse_type, ENCODER_TYPES_STR);
case _O_GLITCHED_RESOLUTIONS: break; // Deprecated
case _O_BLANK: break; // Deprecated
case _O_LAST_AS_BLANK: break; // Deprecated
case _O_SLOWDOWN: OPT_SET(stream->slowdown, true);
case _O_DEVICE_TIMEOUT: OPT_NUMBER("--device-timeout", dev->timeout, 1, 60, 0);
case _O_DEVICE_TIMEOUT: OPT_NUMBER("--device-timeout", cap->timeout, 1, 60, 0);
case _O_DEVICE_ERROR_DELAY: OPT_NUMBER("--device-error-delay", stream->error_delay, 1, 60, 0);
case _O_M2M_DEVICE: OPT_SET(enc->m2m_path, optarg);
@@ -451,6 +462,10 @@ int options_parse(us_options_s *options, us_device_s *dev, us_encoder_s *enc, us
case _O_H264_GOP: OPT_NUMBER("--h264-gop", stream->h264_gop, 0, 60, 0);
case _O_H264_M2M_DEVICE: OPT_SET(stream->h264_m2m_path, optarg);
# ifdef WITH_V4P
case _O_V4P: OPT_SET(stream->v4p, true);
# endif
# ifdef WITH_GPIO
case _O_GPIO_DEVICE: OPT_SET(us_g_gpio.path, optarg);
case _O_GPIO_CONSUMER_PREFIX: OPT_SET(us_g_gpio.consumer_prefix, optarg);
@@ -479,7 +494,7 @@ int options_parse(us_options_s *options, us_device_s *dev, us_encoder_s *enc, us
case _O_FORCE_LOG_COLORS: OPT_SET(us_g_log_colored, true);
case _O_NO_LOG_COLORS: OPT_SET(us_g_log_colored, false);
case _O_HELP: _help(stdout, dev, enc, stream, server); return 1;
case _O_HELP: _help(stdout, cap, enc, stream, server); return 1;
case _O_VERSION: puts(US_VERSION); return 1;
case _O_FEATURES: _features(); return 1;
@@ -588,7 +603,7 @@ static void _features(void) {
# endif
}
static void _help(FILE *fp, const us_device_s *dev, const us_encoder_s *enc, const us_stream_s *stream, const us_server_s *server) {
static void _help(FILE *fp, const us_capture_s *cap, const us_encoder_s *enc, const us_stream_s *stream, const us_server_s *server) {
# define SAY(x_msg, ...) fprintf(fp, x_msg "\n", ##__VA_ARGS__)
SAY("\nuStreamer - Lightweight and fast MJPEG-HTTP streamer");
SAY("═══════════════════════════════════════════════════");
@@ -596,11 +611,13 @@ static void _help(FILE *fp, const us_device_s *dev, const us_encoder_s *enc, con
SAY("Copyright (C) 2018-2024 Maxim Devaev <mdevaev@gmail.com>\n");
SAY("Capturing options:");
SAY("══════════════════");
SAY(" -d|--device </dev/path> ───────────── Path to V4L2 device. Default: %s.\n", dev->path);
SAY(" -i|--input <N> ────────────────────── Input channel. Default: %u.\n", dev->input);
SAY(" -r|--resolution <WxH> ─────────────── Initial image resolution. Default: %ux%u.\n", dev->width, dev->height);
SAY(" -d|--device </dev/path> ───────────── Path to V4L2 device. Default: %s.\n", cap->path);
SAY(" -i|--input <N> ────────────────────── Input channel. Default: %u.\n", cap->input);
SAY(" -r|--resolution <WxH> ─────────────── Initial image resolution. Default: %ux%u.\n", cap->width, cap->height);
SAY(" -m|--format <fmt> ─────────────────── Image format.");
SAY(" Available: %s; default: YUYV.\n", US_FORMATS_STR);
SAY(" --format-swap-rgb ──────────────── Enable R-G-B order swapping: RGB to BGR and vice versa.");
SAY(" Default: disabled.\n");
SAY(" -a|--tv-standard <std> ────────────── Force TV standard.");
SAY(" Available: %s; default: disabled.\n", US_STANDARDS_STR);
SAY(" -I|--io-method <method> ───────────── Set V4L2 IO method (see kernel documentation).");
@@ -608,16 +625,16 @@ static void _help(FILE *fp, const us_device_s *dev, const us_encoder_s *enc, con
SAY(" Available: %s; default: MMAP.\n", US_IO_METHODS_STR);
SAY(" -f|--desired-fps <N> ──────────────── Desired FPS. Default: maximum possible.\n");
SAY(" -z|--min-frame-size <N> ───────────── Drop frames smaller then this limit. Useful if the device");
SAY(" produces small-sized garbage frames. Default: %zu bytes.\n", dev->min_frame_size);
SAY(" produces small-sized garbage frames. Default: %zu bytes.\n", cap->min_frame_size);
SAY(" -n|--persistent ───────────────────── Don't re-initialize device on timeout. Default: disabled.\n");
SAY(" -t|--dv-timings ───────────────────── Enable DV-timings querying and events processing");
SAY(" to automatic resolution change. Default: disabled.\n");
SAY(" -b|--buffers <N> ──────────────────── The number of buffers to receive data from the device.");
SAY(" Each buffer may processed using an independent thread.");
SAY(" Default: %u (the number of CPU cores (but not more than 4) + 1).\n", dev->n_bufs);
SAY(" Default: %u (the number of CPU cores (but not more than 4) + 1).\n", cap->n_bufs);
SAY(" -w|--workers <N> ──────────────────── The number of worker threads but not more than buffers.");
SAY(" Default: %u (the number of CPU cores (but not more than 4)).\n", enc->n_workers);
SAY(" -q|--quality <N> ──────────────────── Set quality of JPEG encoding from 1 to 100 (best). Default: %u.", dev->jpeg_quality);
SAY(" -q|--quality <N> ──────────────────── Set quality of JPEG encoding from 1 to 100 (best). Default: %u.", cap->jpeg_quality);
SAY(" Note: If HW encoding is used (JPEG source format selected),");
SAY(" this parameter attempts to configure the camera");
SAY(" or capture device hardware's internal encoder.");
@@ -635,7 +652,7 @@ static void _help(FILE *fp, const us_device_s *dev, const us_encoder_s *enc, con
SAY(" -K|--last-as-blank <sec> ──────────── It doesn't do anything. Still here for compatibility.\n");
SAY(" -l|--slowdown ─────────────────────── Slowdown capturing to 1 FPS or less when no stream or sink clients");
SAY(" are connected. Useful to reduce CPU consumption. Default: disabled.\n");
SAY(" --device-timeout <sec> ────────────── Timeout for device querying. Default: %u.\n", dev->timeout);
SAY(" --device-timeout <sec> ────────────── Timeout for device querying. Default: %u.\n", cap->timeout);
SAY(" --device-error-delay <sec> ────────── Delay before trying to connect to the device again");
SAY(" after an error (timeout for example). Default: %u.\n", stream->error_delay);
SAY(" --m2m-device </dev/path> ──────────── Path to V4L2 M2M encoder device. Default: auto select.\n");
@@ -698,6 +715,12 @@ static void _help(FILE *fp, const us_device_s *dev, const us_encoder_s *enc, con
SAY(" --h264-bitrate <kbps> ───────── H264 bitrate in Kbps. Default: %u.\n", stream->h264_bitrate);
SAY(" --h264-gop <N> ──────────────── Interval between keyframes. Default: %u.\n", stream->h264_gop);
SAY(" --h264-m2m-device </dev/path> ─ Path to V4L2 M2M encoder device. Default: auto select.\n");
# ifdef WITH_V4P
SAY("Passthrough options for PiKVM V4:");
SAY("═════════════════════════════════");
SAY(" --v4p ─ Enable HDMI passthrough to OUT2 on the device: https://docs.pikvm.org/pass");
SAY(" Default: disabled.\n");
# endif
# ifdef WITH_GPIO
SAY("GPIO options:");
SAY("═════════════");

View File

@@ -39,7 +39,7 @@
#include "../libs/frame.h"
#include "../libs/memsink.h"
#include "../libs/options.h"
#include "../libs/device.h"
#include "../libs/capture.h"
#include "encoder.h"
#include "stream.h"
@@ -62,4 +62,4 @@ typedef struct {
us_options_s *us_options_init(unsigned argc, char *argv[]);
void us_options_destroy(us_options_s *options);
int options_parse(us_options_s *options, us_device_s *dev, us_encoder_s *enc, us_stream_s *stream, us_server_s *server);
int options_parse(us_options_s *options, us_capture_s *cap, us_encoder_s *enc, us_stream_s *stream, us_server_s *server);

View File

@@ -38,7 +38,10 @@
#include "../libs/ring.h"
#include "../libs/frame.h"
#include "../libs/memsink.h"
#include "../libs/device.h"
#include "../libs/capture.h"
#ifdef WITH_V4P
# include "../libs/drm/drm.h"
#endif
#include "blank.h"
#include "encoder.h"
@@ -51,7 +54,7 @@
typedef struct {
pthread_t tid;
us_device_s *dev;
us_capture_s *cap;
us_queue_s *queue;
pthread_mutex_t *mutex;
atomic_bool *stop;
@@ -71,18 +74,24 @@ static void *_releaser_thread(void *v_ctx);
static void *_jpeg_thread(void *v_ctx);
static void *_h264_thread(void *v_ctx);
static void *_raw_thread(void *v_ctx);
#ifdef WITH_V4P
static void *_drm_thread(void *v_ctx);
#endif
static us_hw_buffer_s *_get_latest_hw(us_queue_s *queue);
static us_capture_hwbuf_s *_get_latest_hw(us_queue_s *queue);
static bool _stream_has_jpeg_clients_cached(us_stream_s *stream);
static bool _stream_has_any_clients_cached(us_stream_s *stream);
static int _stream_init_loop(us_stream_s *stream);
#ifdef WITH_V4P
static void _stream_drm_ensure_no_signal(us_stream_s *stream);
#endif
static void _stream_expose_jpeg(us_stream_s *stream, const us_frame_s *frame);
static void _stream_expose_raw(us_stream_s *stream, const us_frame_s *frame);
static void _stream_check_suicide(us_stream_s *stream);
us_stream_s *us_stream_init(us_device_s *dev, us_encoder_s *enc) {
us_stream_s *us_stream_init(us_capture_s *cap, us_encoder_s *enc) {
us_stream_runtime_s *run;
US_CALLOC(run, 1);
US_RING_INIT_WITH_ITEMS(run->http_jpeg_ring, 4, us_frame_init);
@@ -95,15 +104,15 @@ us_stream_s *us_stream_init(us_device_s *dev, us_encoder_s *enc) {
us_stream_s *stream;
US_CALLOC(stream, 1);
stream->dev = dev;
stream->cap = cap;
stream->enc = enc;
stream->error_delay = 1;
stream->h264_bitrate = 5000; // Kbps
stream->h264_gop = 30;
stream->run = run;
us_blank_draw(run->blank, "< NO SIGNAL >", dev->width, dev->height);
_stream_set_capture_state(stream, dev->width, dev->height, false, 0);
us_blank_draw(run->blank, "< NO SIGNAL >", cap->width, cap->height);
_stream_set_capture_state(stream, cap->width, cap->height, false, 0);
return stream;
}
@@ -116,10 +125,10 @@ void us_stream_destroy(us_stream_s *stream) {
void us_stream_loop(us_stream_s *stream) {
us_stream_runtime_s *const run = stream->run;
us_device_s *const dev = stream->dev;
us_capture_s *const cap = stream->cap;
US_LOG_INFO("Using V4L2 device: %s", dev->path);
US_LOG_INFO("Using desired FPS: %u", dev->desired_fps);
US_LOG_INFO("Using V4L2 device: %s", cap->path);
US_LOG_INFO("Using desired FPS: %u", cap->desired_fps);
atomic_store(&run->http_last_request_ts, us_get_now_monotonic());
@@ -127,18 +136,26 @@ void us_stream_loop(us_stream_s *stream) {
run->h264 = us_h264_stream_init(stream->h264_sink, stream->h264_m2m_path, stream->h264_bitrate, stream->h264_gop);
}
# ifdef WITH_V4P
if (stream->v4p) {
run->drm = us_drm_init();
run->drm_opened = -1;
US_LOG_INFO("Using passthrough: %s[%s]", run->drm->path, run->drm->port);
}
# endif
while (!_stream_init_loop(stream)) {
atomic_bool threads_stop;
atomic_init(&threads_stop, false);
pthread_mutex_t release_mutex;
US_MUTEX_INIT(release_mutex);
const uint n_releasers = dev->run->n_bufs;
const uint n_releasers = cap->run->n_bufs;
_releaser_context_s *releasers;
US_CALLOC(releasers, n_releasers);
for (uint index = 0; index < n_releasers; ++index) {
_releaser_context_s *ctx = &releasers[index];
ctx->dev = dev;
ctx->cap = cap;
ctx->queue = us_queue_init(1);
ctx->mutex = &release_mutex;
ctx->stop = &threads_stop;
@@ -146,7 +163,7 @@ void us_stream_loop(us_stream_s *stream) {
}
_worker_context_s jpeg_ctx = {
.queue = us_queue_init(dev->run->n_bufs),
.queue = us_queue_init(cap->run->n_bufs),
.stream = stream,
.stop = &threads_stop,
};
@@ -154,7 +171,7 @@ void us_stream_loop(us_stream_s *stream) {
_worker_context_s h264_ctx;
if (run->h264 != NULL) {
h264_ctx.queue = us_queue_init(dev->run->n_bufs);
h264_ctx.queue = us_queue_init(cap->run->n_bufs);
h264_ctx.stream = stream;
h264_ctx.stop = &threads_stop;
US_THREAD_CREATE(h264_ctx.tid, _h264_thread, &h264_ctx);
@@ -168,6 +185,16 @@ void us_stream_loop(us_stream_s *stream) {
US_THREAD_CREATE(raw_ctx.tid, _raw_thread, &raw_ctx);
}
# ifdef WITH_V4P
_worker_context_s drm_ctx;
if (stream->v4p) {
drm_ctx.queue = us_queue_init(cap->run->n_bufs);
drm_ctx.stream = stream;
drm_ctx.stop = &threads_stop;
US_THREAD_CREATE(drm_ctx.tid, _drm_thread, &drm_ctx); // cppcheck-suppress assertWithSideEffect
}
# endif
uint captured_fps_accum = 0;
sll captured_fps_ts = 0;
uint captured_fps = 0;
@@ -176,8 +203,8 @@ void us_stream_loop(us_stream_s *stream) {
uint slowdown_count = 0;
while (!atomic_load(&run->stop) && !atomic_load(&threads_stop)) {
us_hw_buffer_s *hw;
switch (us_device_grab_buffer(dev, &hw)) {
us_capture_hwbuf_s *hw;
switch (us_capture_grab_buffer(cap, &hw)) {
case -2: continue; // Broken frame
case -1: goto close; // Error
default: break; // Grabbed on >= 0
@@ -192,21 +219,27 @@ void us_stream_loop(us_stream_s *stream) {
}
captured_fps_accum += 1;
_stream_set_capture_state(stream, dev->run->width, dev->run->height, true, captured_fps);
_stream_set_capture_state(stream, cap->run->width, cap->run->height, true, captured_fps);
# ifdef WITH_GPIO
us_gpio_set_stream_online(true);
# endif
us_device_buffer_incref(hw); // JPEG
us_capture_buffer_incref(hw); // JPEG
us_queue_put(jpeg_ctx.queue, hw, 0);
if (run->h264 != NULL) {
us_device_buffer_incref(hw); // H264
us_capture_buffer_incref(hw); // H264
us_queue_put(h264_ctx.queue, hw, 0);
}
if (stream->raw_sink != NULL) {
us_device_buffer_incref(hw); // RAW
us_capture_buffer_incref(hw); // RAW
us_queue_put(raw_ctx.queue, hw, 0);
}
# ifdef WITH_V4P
if (stream->v4p) {
us_capture_buffer_incref(hw); // DRM
us_queue_put(drm_ctx.queue, hw, 0);
}
# endif
us_queue_put(releasers[hw->buf.index].queue, hw, 0); // Plan to release
// Мы не обновляем здесь состояние синков, потому что это происходит внутри обслуживающих их потоков
@@ -223,6 +256,13 @@ void us_stream_loop(us_stream_s *stream) {
close:
atomic_store(&threads_stop, true);
# ifdef WITH_V4P
if (stream->v4p) {
US_THREAD_JOIN(drm_ctx.tid);
us_queue_destroy(drm_ctx.queue);
}
# endif
if (stream->raw_sink != NULL) {
US_THREAD_JOIN(raw_ctx.tid);
us_queue_destroy(raw_ctx.queue);
@@ -246,13 +286,16 @@ void us_stream_loop(us_stream_s *stream) {
atomic_store(&threads_stop, false);
us_encoder_close(stream->enc);
us_device_close(dev);
us_capture_close(cap);
if (!atomic_load(&run->stop)) {
US_SEP_INFO('=');
}
}
# ifdef WITH_V4P
US_DELETE(run->drm, us_drm_destroy);
# endif
US_DELETE(run->h264, us_h264_stream_destroy);
}
@@ -283,7 +326,7 @@ static void *_releaser_thread(void *v_ctx) {
_releaser_context_s *ctx = v_ctx;
while (!atomic_load(ctx->stop)) {
us_hw_buffer_s *hw;
us_capture_hwbuf_s *hw;
if (us_queue_get(ctx->queue, (void**)&hw, 0.1) < 0) {
continue;
}
@@ -296,7 +339,7 @@ static void *_releaser_thread(void *v_ctx) {
}
US_MUTEX_LOCK(*ctx->mutex);
const int released = us_device_release_buffer(ctx->dev, hw);
const int released = us_capture_release_buffer(ctx->cap, hw);
US_MUTEX_UNLOCK(*ctx->mutex);
if (released < 0) {
goto done;
@@ -321,7 +364,7 @@ static void *_jpeg_thread(void *v_ctx) {
us_encoder_job_s *const ready_job = ready_wr->job;
if (ready_job->hw != NULL) {
us_device_buffer_decref(ready_job->hw);
us_capture_buffer_decref(ready_job->hw);
ready_job->hw = NULL;
if (ready_wr->job_failed) {
// pass
@@ -337,7 +380,7 @@ static void *_jpeg_thread(void *v_ctx) {
}
}
us_hw_buffer_s *hw = _get_latest_hw(ctx->queue);
us_capture_hwbuf_s *hw = _get_latest_hw(ctx->queue);
if (hw == NULL) {
continue;
}
@@ -345,7 +388,7 @@ static void *_jpeg_thread(void *v_ctx) {
const bool update_required = (stream->jpeg_sink != NULL && us_memsink_server_check(stream->jpeg_sink, NULL));
if (!update_required && !_stream_has_jpeg_clients_cached(stream)) {
US_LOG_VERBOSE("JPEG: Passed encoding because nobody is watching");
us_device_buffer_decref(hw);
us_capture_buffer_decref(hw);
continue;
}
@@ -354,7 +397,7 @@ static void *_jpeg_thread(void *v_ctx) {
fluency_passed += 1;
US_LOG_VERBOSE("JPEG: Passed %u frames for fluency: now=%.03Lf, grab_after=%.03Lf",
fluency_passed, now_ts, grab_after_ts);
us_device_buffer_decref(hw);
us_capture_buffer_decref(hw);
continue;
}
fluency_passed = 0;
@@ -379,19 +422,19 @@ static void *_h264_thread(void *v_ctx) {
ldf last_encode_ts = us_get_now_monotonic();
while (!atomic_load(ctx->stop)) {
us_hw_buffer_s *hw = _get_latest_hw(ctx->queue);
us_capture_hwbuf_s *hw = _get_latest_hw(ctx->queue);
if (hw == NULL) {
continue;
}
if (!us_memsink_server_check(h264->sink, NULL)) {
us_device_buffer_decref(hw);
us_capture_buffer_decref(hw);
US_LOG_VERBOSE("H264: Passed encoding because nobody is watching");
continue;
}
if (hw->raw.grab_ts < grab_after_ts) {
us_device_buffer_decref(hw);
us_capture_buffer_decref(hw);
US_LOG_VERBOSE("H264: Passed encoding for FPS limit: %u", h264->enc->run->fps_limit);
continue;
}
@@ -409,7 +452,7 @@ static void *_h264_thread(void *v_ctx) {
const ldf frame_interval = (ldf)1 / h264->enc->run->fps_limit;
grab_after_ts = hw->raw.grab_ts + frame_interval - 0.01;
us_device_buffer_decref(hw);
us_capture_buffer_decref(hw);
}
return NULL;
}
@@ -419,30 +462,89 @@ static void *_raw_thread(void *v_ctx) {
_worker_context_s *ctx = v_ctx;
while (!atomic_load(ctx->stop)) {
us_hw_buffer_s *hw = _get_latest_hw(ctx->queue);
us_capture_hwbuf_s *hw = _get_latest_hw(ctx->queue);
if (hw == NULL) {
continue;
}
if (!us_memsink_server_check(ctx->stream->raw_sink, NULL)) {
us_device_buffer_decref(hw);
us_capture_buffer_decref(hw);
US_LOG_VERBOSE("RAW: Passed publishing because nobody is watching");
continue;
}
us_memsink_server_put(ctx->stream->raw_sink, &hw->raw, false);
us_device_buffer_decref(hw);
us_capture_buffer_decref(hw);
}
return NULL;
}
static us_hw_buffer_s *_get_latest_hw(us_queue_s *queue) {
us_hw_buffer_s *hw;
#ifdef WITH_V4P
// Passthrough worker thread: scans captured frames out to the DRM/KMS display
// (PiKVM V4 HDMI OUT2). Hardware buffers arrive on ctx->queue with a reference
// already taken by the producer; this thread owns dropping that reference via
// us_capture_buffer_decref().
static void *_drm_thread(void *v_ctx) {
	US_THREAD_SETTLE("str_drm");

	_worker_context_s *ctx = v_ctx;
	us_stream_runtime_s *run = ctx->stream->run;

	// Close previously opened DRM for a stub
	us_drm_close(run->drm);
	run->drm_opened = -1;

	// The buffer currently being scanned out; it must stay referenced until
	// the next vsync, after which it is released at the top of the loop.
	us_capture_hwbuf_s *prev_hw = NULL;
	while (!atomic_load(ctx->stop)) {
		// On any negative result, tear down the DRM state and retry.
#		define CHECK(x_arg) if ((x_arg) < 0) { goto close; }

		// Drain and drop incoming buffers for ~1 second so the producer
		// doesn't stall while we are not displaying anything.
#		define SLOWDOWN { \
				ldf m_next_ts = us_get_now_monotonic() + 1; \
				while (!atomic_load(ctx->stop) && us_get_now_monotonic() < m_next_ts) { \
					us_capture_hwbuf_s *m_pass_hw = _get_latest_hw(ctx->queue); \
					if (m_pass_hw != NULL) { \
						us_capture_buffer_decref(m_pass_hw); \
					} \
				} \
			}

		// us_drm_open() result: 0 — full DMA passthrough is possible;
		// > 0 — a stub code to be drawn instead; < 0 — error (handled by CHECK).
		CHECK(run->drm_opened = us_drm_open(run->drm, ctx->stream->cap));

		while (!atomic_load(ctx->stop)) {
			CHECK(us_drm_wait_for_vsync(run->drm));
			// The previous frame has been displayed for a full refresh cycle now.
			US_DELETE(prev_hw, us_capture_buffer_decref);
			us_capture_hwbuf_s *hw = _get_latest_hw(ctx->queue);
			if (hw == NULL) {
				continue;
			}
			if (run->drm_opened == 0) {
				// Zero-copy path: hand the DMA buffer to KMS and keep it
				// referenced until the next vsync.
				CHECK(us_drm_expose_dma(run->drm, hw));
				prev_hw = hw;
				continue;
			}
			// Stub path: draw the placeholder matching the open result code.
			CHECK(us_drm_expose_stub(run->drm, run->drm_opened, ctx->stream->cap));
			us_capture_buffer_decref(hw);
			SLOWDOWN;
		}

	close:
		us_drm_close(run->drm);
		run->drm_opened = -1;
		US_DELETE(prev_hw, us_capture_buffer_decref);
		SLOWDOWN; // Don't hammer us_drm_open() in a tight loop after a failure

#		undef SLOWDOWN
#		undef CHECK
	}
	return NULL;
}
#endif
static us_capture_hwbuf_s *_get_latest_hw(us_queue_s *queue) {
us_capture_hwbuf_s *hw;
if (us_queue_get(queue, (void**)&hw, 0.1) < 0) {
return NULL;
}
while (!us_queue_is_empty(queue)) { // Берем только самый свежий кадр
us_device_buffer_decref(hw);
us_capture_buffer_decref(hw);
assert(!us_queue_get(queue, (void**)&hw, 0));
}
return hw;
@@ -460,6 +562,9 @@ static bool _stream_has_jpeg_clients_cached(us_stream_s *stream) {
static bool _stream_has_any_clients_cached(us_stream_s *stream) {
const us_stream_runtime_s *const run = stream->run;
return (
# ifdef WITH_V4P
stream->v4p ||
# endif
_stream_has_jpeg_clients_cached(stream)
|| (run->h264 != NULL && atomic_load(&run->h264->sink->has_clients))
|| (stream->raw_sink != NULL && atomic_load(&stream->raw_sink->has_clients))
@@ -489,24 +594,30 @@ static int _stream_init_loop(us_stream_s *stream) {
_stream_check_suicide(stream);
stream->dev->dma_export = (
stream->cap->dma_export = (
stream->enc->type == US_ENCODER_TYPE_M2M_VIDEO
|| stream->enc->type == US_ENCODER_TYPE_M2M_IMAGE
|| run->h264 != NULL
);
switch (us_device_open(stream->dev)) {
switch (us_capture_open(stream->cap)) {
case -2:
if (!waiting_reported) {
waiting_reported = true;
US_LOG_INFO("Waiting for the capture device ...");
}
# ifdef WITH_V4P
_stream_drm_ensure_no_signal(stream);
# endif
goto offline_and_retry;
case -1:
waiting_reported = false;
# ifdef WITH_V4P
_stream_drm_ensure_no_signal(stream);
# endif
goto offline_and_retry;
default: break;
}
us_encoder_open(stream->enc, stream->dev);
us_encoder_open(stream->enc, stream->cap);
return 0;
offline_and_retry:
@@ -516,11 +627,11 @@ static int _stream_init_loop(us_stream_s *stream) {
}
if (count % 10 == 0) {
// Каждую секунду повторяем blank
uint width = stream->dev->run->width;
uint height = stream->dev->run->height;
uint width = stream->cap->run->width;
uint height = stream->cap->run->height;
if (width == 0 || height == 0) {
width = stream->dev->width;
height = stream->dev->height;
width = stream->cap->width;
height = stream->cap->height;
}
us_blank_draw(run->blank, "< NO SIGNAL >", width, height);
@@ -538,6 +649,24 @@ static int _stream_init_loop(us_stream_s *stream) {
return -1;
}
#ifdef WITH_V4P
// While the capture device is offline, keep the passthrough display alive by
// showing the NO_SIGNAL stub. No-op unless --v4p mode is enabled.
// NOTE(review): run->drm_opened <= 0 presumably means "not opened or opened
// for DMA"; reopening with cap=NULL switches DRM into stub mode — confirm
// against us_drm_open() semantics.
static void _stream_drm_ensure_no_signal(us_stream_s *stream) {
	us_stream_runtime_s *const run = stream->run;
	if (!stream->v4p) {
		return;
	}
	if (run->drm_opened <= 0) {
		// Reopen without a capture device to get a stub-capable DRM state.
		us_drm_close(run->drm);
		run->drm_opened = us_drm_open(run->drm, NULL);
	}
	if (run->drm_opened > 0) {
		// Only draw after a successful vsync wait; errors are silently
		// retried on the next call.
		if (us_drm_wait_for_vsync(run->drm) == 0) {
			us_drm_expose_stub(run->drm, US_DRM_STUB_NO_SIGNAL, NULL);
		}
	}
}
#endif
static void _stream_expose_jpeg(us_stream_s *stream, const us_frame_s *frame) {
us_stream_runtime_s *const run = stream->run;
int ri;

View File

@@ -30,7 +30,10 @@
#include "../libs/queue.h"
#include "../libs/ring.h"
#include "../libs/memsink.h"
#include "../libs/device.h"
#include "../libs/capture.h"
#ifdef WITH_V4P
# include "../libs/drm/drm.h"
#endif
#include "blank.h"
#include "encoder.h"
@@ -38,7 +41,12 @@
typedef struct {
us_h264_stream_s *h264;
us_h264_stream_s *h264;
# ifdef WITH_V4P
us_drm_s *drm;
int drm_opened;
# endif
us_ring_s *http_jpeg_ring;
atomic_bool http_has_clients;
@@ -52,7 +60,7 @@ typedef struct {
} us_stream_runtime_s;
typedef struct {
us_device_s *dev;
us_capture_s *cap;
us_encoder_s *enc;
int last_as_blank;
@@ -68,11 +76,15 @@ typedef struct {
uint h264_gop;
char *h264_m2m_path;
# ifdef WITH_V4P
bool v4p;
# endif
us_stream_runtime_s *run;
} us_stream_s;
us_stream_s *us_stream_init(us_device_s *dev, us_encoder_s *enc);
us_stream_s *us_stream_init(us_capture_s *cap, us_encoder_s *enc);
void us_stream_destroy(us_stream_s *stream);
void us_stream_loop(us_stream_s *stream);

View File

@@ -39,11 +39,11 @@
#include "../libs/const.h"
#include "../libs/tools.h"
#include "../libs/logging.h"
#include "../libs/device.h"
#include "../libs/capture.h"
#include "../libs/signal.h"
#include "../libs/options.h"
#include "drm.h"
#include "../libs/drm/drm.h"
enum _OPT_VALUES {
@@ -160,16 +160,16 @@ static void _signal_handler(int signum) {
static void _main_loop(void) {
us_drm_s *drm = us_drm_init();
drm->port = "HDMI-A-2";
us_device_s *dev = us_device_init();
dev->path = "/dev/kvmd-video";
dev->n_bufs = 6;
dev->format = V4L2_PIX_FMT_RGB24;
dev->dv_timings = true;
dev->persistent = true;
dev->dma_export = true;
dev->dma_required = true;
us_capture_s *cap = us_capture_init();
cap->path = "/dev/kvmd-video";
cap->n_bufs = 6;
cap->format = V4L2_PIX_FMT_RGB24;
cap->format_swap_rgb = true;
cap->dv_timings = true;
cap->persistent = true;
cap->dma_export = true;
cap->dma_required = true;
int once = 0;
ldf blank_at_ts = 0;
@@ -177,22 +177,19 @@ static void _main_loop(void) {
while (!atomic_load(&_g_stop)) {
# define CHECK(x_arg) if ((x_arg) < 0) { goto close; }
if (atomic_load(&_g_ustreamer_online)) {
blank_at_ts = 0;
US_ONCE({ US_LOG_INFO("DRM: Online stream is active, pausing the service ..."); });
goto close;
}
if (drm_opened <= 0) {
blank_at_ts = 0;
CHECK(drm_opened = us_drm_open(drm, NULL));
}
assert(drm_opened > 0);
if (atomic_load(&_g_ustreamer_online)) {
blank_at_ts = 0;
US_ONCE({ US_LOG_INFO("DRM: Online stream is active, stopping capture ..."); });
CHECK(us_drm_wait_for_vsync(drm));
CHECK(us_drm_expose_stub(drm, US_DRM_STUB_BUSY, NULL));
_slowdown();
continue;
}
if (us_device_open(dev) < 0) {
if (us_capture_open(cap) < 0) {
ldf now_ts = us_get_now_monotonic();
if (blank_at_ts == 0) {
blank_at_ts = now_ts + 5;
@@ -211,9 +208,9 @@ static void _main_loop(void) {
once = 0;
blank_at_ts = 0;
us_drm_close(drm);
CHECK(drm_opened = us_drm_open(drm, dev));
CHECK(drm_opened = us_drm_open(drm, cap));
us_hw_buffer_s *prev_hw = NULL;
us_capture_hwbuf_s *prev_hw = NULL;
while (!atomic_load(&_g_stop)) {
if (atomic_load(&_g_ustreamer_online)) {
goto close;
@@ -222,12 +219,12 @@ static void _main_loop(void) {
CHECK(us_drm_wait_for_vsync(drm));
if (prev_hw != NULL) {
CHECK(us_device_release_buffer(dev, prev_hw));
CHECK(us_capture_release_buffer(cap, prev_hw));
prev_hw = NULL;
}
us_hw_buffer_s *hw;
switch (us_device_grab_buffer(dev, &hw)) {
us_capture_hwbuf_s *hw;
switch (us_capture_grab_buffer(cap, &hw)) {
case -2: continue; // Broken frame
case -1: goto close; // Any error
default: break; // Grabbed on >= 0
@@ -236,28 +233,26 @@ static void _main_loop(void) {
if (drm_opened == 0) {
CHECK(us_drm_expose_dma(drm, hw));
prev_hw = hw;
} else {
CHECK(us_drm_expose_stub(drm, drm_opened, dev));
CHECK(us_device_release_buffer(dev, hw));
continue;
}
if (drm_opened > 0) {
_slowdown();
}
CHECK(us_drm_expose_stub(drm, drm_opened, cap));
CHECK(us_capture_release_buffer(cap, hw));
_slowdown();
}
close:
us_drm_close(drm);
drm_opened = -1;
us_device_close(dev);
us_capture_close(cap);
_slowdown();
# undef CHECK
}
us_device_destroy(dev);
us_capture_destroy(cap);
us_drm_destroy(drm);
}