Compare commits

...

21 Commits
v6.41 ... v6.46

Author SHA1 Message Date
Maxim Devaev
efbb2aa7ba Bump version: 6.45 → 6.46 2026-01-21 08:26:04 +02:00
Maxim Devaev
5692d81e46 lint fix 2026-01-21 08:21:44 +02:00
Maxim Devaev
4cec824b13 fixed fps limit for h264 2026-01-21 07:57:44 +02:00
Maxim Devaev
ac1989451c added help for --h264-boost 2026-01-21 07:07:41 +02:00
Maxim Devaev
e39d27309a Merge branch 'h264-boost' 2026-01-21 07:04:06 +02:00
Maxim Devaev
b983b6c355 new fps limiter 2026-01-21 07:03:58 +02:00
Maxim Devaev
5204f00812 h264 boost mode 2026-01-21 03:16:20 +02:00
Maxim Devaev
9eb39bbfc3 grab_begin_ts and grab_end_ts 2026-01-21 00:07:40 +02:00
Maxim Devaev
6adbb93e57 fpsi: optional meta arg in us_fpsi_get() 2026-01-20 11:52:19 +02:00
Maxim Devaev
4bd1465a10 janus: apply zero_playout_delay 2026-01-20 11:49:46 +02:00
Maxim Devaev
cf7f8947ef always capture maximum possible fps 2026-01-20 05:16:02 +02:00
Maxim Devaev
ec2e6c313b removed old fps regulation for jpeg encoders 2026-01-20 02:48:39 +02:00
Maxim Devaev
de2cfa36e1 Bump version: 6.44 → 6.45 2026-01-16 23:31:31 +02:00
Maxim Devaev
6c1a8f75a1 bumped python 2026-01-16 23:29:54 +02:00
Maxim Devaev
26ee5143ee Bump version: 6.43 → 6.44 2026-01-04 16:43:12 +02:00
Maxim Devaev
e2890e5851 janus: removed sync between video and audio 2026-01-04 16:03:42 +02:00
Maxim Devaev
e2b01e4d79 Bump version: 6.42 → 6.43 2026-01-03 19:43:43 +02:00
Maxim Devaev
903bc45bee lint fixes 2026-01-03 19:21:10 +02:00
Maxim Devaev
b2b1989c5b reduced preallocated us_frame_s size 2026-01-03 18:54:56 +02:00
Maxim Devaev
36b539c275 Bump version: 6.41 → 6.42 2025-11-11 00:00:35 +02:00
Maxim Devaev
38c6917644 janus: pkg-config 2025-11-10 23:58:42 +02:00
31 changed files with 159 additions and 116 deletions

View File

@@ -1,7 +1,7 @@
[bumpversion]
commit = True
tag = True
current_version = 6.41
current_version = 6.46
parse = (?P<major>\d+)\.(?P<minor>\d+)
serialize =
{major}.{minor}

View File

@@ -10,8 +10,8 @@ LDFLAGS ?=
# =====
_PLUGIN = libjanus_ustreamer.so
_CFLAGS = -fPIC -MD -c -std=c17 -Wall -Wextra -D_GNU_SOURCE $(shell $(PKG_CONFIG) --cflags glib-2.0) $(CFLAGS)
_LDFLAGS = -shared -lm -pthread -lrt -ljansson -lopus -lasound -lspeexdsp $(shell $(PKG_CONFIG) --libs glib-2.0) $(LDFLAGS)
_CFLAGS = -fPIC -MD -c -std=c17 -Wall -Wextra -D_GNU_SOURCE $(shell $(PKG_CONFIG) --cflags janus-gateway) $(CFLAGS)
_LDFLAGS = -shared -lm -pthread -lrt -ljansson -lopus -lasound -lspeexdsp $(shell $(PKG_CONFIG) --libs janus-gateway) $(LDFLAGS)
_SRCS = $(shell ls src/uslibs/*.c src/*.c)

View File

@@ -193,16 +193,16 @@ static void *_video_or_acap_thread(void *v_client, bool video) {
};
janus_plugin_rtp_extensions_reset(&packet.extensions);
/*if (rtp->zero_playout_delay) {
if (rtp.zero_playout_delay) {
// https://github.com/pikvm/pikvm/issues/784
packet.extensions.min_delay = 0;
packet.extensions.max_delay = 0;
} else {
// These defaults are used in Chrome/Safari/Firefox.
// Everything works the same way because they share a common WebRTC codebase.
packet.extensions.min_delay = 0;
// 10s - Chromium/WebRTC default
// 3s - Firefox default
packet.extensions.max_delay = 300; // == 3s, i.e. 10ms granularity
}*/
packet.extensions.max_delay = 1000; // == 10s, i.e. 10ms granularity
}
if (rtp.video) {
uint video_orient = atomic_load(&client->video_orient);
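
The playout-delay extension values are expressed in 10 ms units, so the new max_delay = 1000 corresponds to 10 s (the Chromium/WebRTC default noted in the comments) and the previous 300 to 3 s. A tiny illustrative helper (not part of uStreamer) for that conversion:

    // Hypothetical helper: converts seconds to the 10 ms units carried by
    // the playout-delay RTP header extension (10 s -> 1000, 3 s -> 300).
    static inline unsigned playout_delay_units(unsigned seconds) {
        return seconds * 100;
    }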

View File

@@ -54,6 +54,8 @@ char *us_rtpa_make_sdp(us_rtpa_s *rtpa, bool mic) {
"a=rtcp-fb:%u nack" RN
"a=rtcp-fb:%u nack pli" RN
"a=rtcp-fb:%u goog-remb" RN
"a=mid:a" RN
"a=msid:audio a" RN
"a=ssrc:%" PRIu32 " cname:ustreamer" RN
"a=%s" RN,
pl, pl,

View File

@@ -69,6 +69,8 @@ char *us_rtpv_make_sdp(us_rtpv_s *rtpv) {
"a=rtcp-fb:%u nack" RN
"a=rtcp-fb:%u nack pli" RN
"a=rtcp-fb:%u goog-remb" RN
"a=mid:v" RN
"a=msid:video v" RN
"a=ssrc:%" PRIu32 " cname:ustreamer" RN
"a=extmap:1 http://www.webrtc.org/experiments/rtp-hdrext/playout-delay" RN
"a=extmap:2 urn:3gpp:video-orientation" RN

View File

@@ -1,5 +1,5 @@
[mypy]
python_version = 3.9
python_version = 3.14
ignore_missing_imports = true
disallow_untyped_defs = true
strict_optional = true

View File

@@ -3,7 +3,7 @@ envlist = cppcheck, flake8, pylint, mypy, vulture, htmlhint
skipsdist = true
[testenv]
basepython = python3.13
basepython = python3.14
changedir = /src
[testenv:cppcheck]

View File

@@ -1,6 +1,6 @@
.\" Manpage for ustreamer-dump.
.\" Open an issue or pull request to https://github.com/pikvm/ustreamer to correct errors or typos
.TH USTREAMER-DUMP 1 "version 6.41" "January 2021"
.TH USTREAMER-DUMP 1 "version 6.46" "January 2021"
.SH NAME
ustreamer-dump \- Dump uStreamer's memory sink to file

View File

@@ -1,6 +1,6 @@
.\" Manpage for ustreamer.
.\" Open an issue or pull request to https://github.com/pikvm/ustreamer to correct errors or typos
.TH USTREAMER 1 "version 6.41" "November 2020"
.TH USTREAMER 1 "version 6.46" "November 2020"
.SH NAME
ustreamer \- stream MJPEG video from any V4L2 device to the network
@@ -253,6 +253,9 @@ Interval between keyframes. Default: 30.
.TP
.BR \-\-h264\-m2m\-device\ \fI/dev/path
Path to V4L2 mem-to-mem encoder device. Default: auto-select.
.TP
.BR \-\-h264\-boost
Increase encoder performance on PiKVM V4. Default: disabled.
.SS "RAW sink options"
.TP

View File

@@ -3,7 +3,7 @@
pkgname=ustreamer
pkgver=6.41
pkgver=6.46
pkgrel=1
pkgdesc="Lightweight and fast MJPEG-HTTP streamer"
url="https://github.com/pikvm/ustreamer"
@@ -18,7 +18,7 @@ md5sums=(SKIP)
_options="WITH_GPIO=1 WITH_SYSTEMD=1"
if [ -e /usr/bin/python3 ]; then
_options="$_options WITH_PYTHON=1"
depends+=("python>=3.13" "python<3.14")
depends+=("python>=3.14" "python<3.15")
makedepends+=(python-setuptools python-pip python-build python-wheel)
fi
if [ -e /usr/include/janus/plugins/plugin.h ];then

View File

@@ -6,7 +6,7 @@
include $(TOPDIR)/rules.mk
PKG_NAME:=ustreamer
PKG_VERSION:=6.41
PKG_VERSION:=6.46
PKG_RELEASE:=1
PKG_MAINTAINER:=Maxim Devaev <mdevaev@gmail.com>

View File

@@ -34,7 +34,7 @@ def main() -> None:
flags = _find_flags()
setup(
name="ustreamer",
version="6.41",
version="6.46",
description="uStreamer tools",
author="Maxim Devaev",
author_email="mdevaev@gmail.com",

View File

@@ -241,7 +241,8 @@ static PyObject *_MemsinkObject_wait_frame(_MemsinkObject *self, PyObject *args,
SET_NUMBER(online, Long, Bool);
SET_NUMBER(key, Long, Bool);
SET_NUMBER(gop, Long, Long);
SET_NUMBER(grab_ts, Double, Float);
SET_NUMBER(grab_begin_ts, Double, Float);
SET_NUMBER(grab_end_ts, Double, Float);
SET_NUMBER(encode_begin_ts, Double, Float);
SET_NUMBER(encode_end_ts, Double, Float);
SET_VALUE("data", PyBytes_FromStringAndSize((const char*)self->frame->data, self->frame->used));

View File

@@ -53,11 +53,13 @@ void us_output_file_write(void *v_output, const us_frame_s *frame) {
fprintf(output->fp,
"{\"size\": %zu, \"width\": %u, \"height\": %u,"
" \"format\": %u, \"stride\": %u, \"online\": %u, \"key\": %u, \"gop\": %u,"
" \"grab_ts\": %.3Lf, \"encode_begin_ts\": %.3Lf, \"encode_end_ts\": %.3Lf,"
" \"grab_begin_ts\": %.3Lf, \"grab_end_ts\": %.3Lf,"
" \"encode_begin_ts\": %.3Lf, \"encode_end_ts\": %.3Lf,"
" \"data\": \"%s\"}\n",
frame->used, frame->width, frame->height,
frame->format, frame->stride, frame->online, frame->key, frame->gop,
frame->grab_ts, frame->encode_begin_ts, frame->encode_end_ts,
frame->grab_begin_ts, frame->grab_end_ts,
frame->encode_begin_ts, frame->encode_end_ts,
output->base64_data);
} else {
fwrite(frame->data, 1, frame->used, output->fp);

View File

@@ -240,16 +240,22 @@ static int _dump_sink(
const long double now = us_get_now_monotonic();
char fourcc_str[8];
US_LOG_VERBOSE("Frame: %s - %ux%u -- online=%d, key=%d, kr=%d, gop=%u, latency=%.3Lf, backlog=%.3Lf, size=%zu",
US_LOG_VERBOSE("%s %.3Lf - %s %ux%u - gop=%u, key=%u, kr=%u - GRAB=%.3Lf ~~%.3Lf~~ ENC=%.3Lf ~~> LAT=%.3Lf - size=%zu",
(frame->online ? " ON" : "OFF"),
(last_ts ? now - last_ts : 0),
us_fourcc_to_string(frame->format, fourcc_str, 8),
frame->width, frame->height,
frame->online, frame->key, key_requested, frame->gop,
now - frame->grab_ts, (last_ts ? now - last_ts : 0),
frame->width,
frame->height,
frame->gop,
frame->key,
key_requested,
frame->grab_end_ts - frame->grab_begin_ts,
frame->encode_begin_ts - frame->grab_end_ts,
frame->encode_end_ts - frame->encode_begin_ts,
now - frame->grab_begin_ts,
frame->used);
last_ts = now;
US_LOG_DEBUG(" stride=%u, grab_ts=%.3Lf, encode_begin_ts=%.3Lf, encode_end_ts=%.3Lf",
frame->stride, frame->grab_ts, frame->encode_begin_ts, frame->encode_end_ts);
last_ts = now;
us_fpsi_update(fpsi, true, NULL);

View File

@@ -447,10 +447,16 @@ int us_capture_hwbuf_grab(us_capture_s *cap, us_capture_hwbuf_s **hw) {
(*hw)->raw.stride = run->stride;
(*hw)->raw.online = true;
_v4l2_buffer_copy(&buf, &(*hw)->buf);
(*hw)->raw.grab_ts = (ldf)((buf.timestamp.tv_sec * (u64)1000) + (buf.timestamp.tv_usec / 1000)) / 1000;
(*hw)->raw.grab_begin_ts = (ldf)((buf.timestamp.tv_sec * (u64)1000) + (buf.timestamp.tv_usec / 1000)) / 1000;
(*hw)->raw.grab_end_ts = us_get_now_monotonic();
_LOG_DEBUG("Grabbed HW buffer=%u: bytesused=%u, grab_begin_ts=%.3Lf, grab_end_ts=%.3Lf, latency=%.3Lf, skipped=%u",
buf.index, buf.bytesused,
(*hw)->raw.grab_begin_ts,
(*hw)->raw.grab_end_ts,
(*hw)->raw.grab_end_ts - (*hw)->raw.grab_begin_ts,
skipped);
_LOG_DEBUG("Grabbed HW buffer=%u: bytesused=%u, grab_ts=%.3Lf, latency=%.3Lf, skipped=%u",
buf.index, buf.bytesused, (*hw)->raw.grab_ts, us_get_now_monotonic() - (*hw)->raw.grab_ts, skipped);
return buf.index;
}
@@ -825,10 +831,8 @@ static int _capture_open_format(us_capture_s *cap, bool first) {
return 0;
}
static void _capture_open_hw_fps(us_capture_s *cap) {
us_capture_runtime_s *const run = cap->run;
run->hw_fps = 0;
static void _capture_open_hw_fps(us_capture_s *cap) { // cppcheck-suppress constParameterPointer
const us_capture_runtime_s *const run = cap->run;
struct v4l2_streamparm setfps = {.type = run->capture_type};
_LOG_DEBUG("Querying HW FPS ...");
@@ -851,7 +855,7 @@ static void _capture_open_hw_fps(us_capture_s *cap) {
US_MEMSET_ZERO(setfps);
setfps.type = run->capture_type;
SETFPS_TPF(numerator) = 1;
SETFPS_TPF(denominator) = (cap->desired_fps == 0 ? 255 : cap->desired_fps);
SETFPS_TPF(denominator) = -1; // Request maximum possible FPS
if (us_xioctl(run->fd, VIDIOC_S_PARM, &setfps) < 0) {
_LOG_PERROR("Can't set HW FPS");
@@ -868,12 +872,7 @@ static void _capture_open_hw_fps(us_capture_s *cap) {
return;
}
run->hw_fps = SETFPS_TPF(denominator);
if (cap->desired_fps != run->hw_fps) {
_LOG_INFO("Using HW FPS: %u -> %u (coerced)", cap->desired_fps, run->hw_fps);
} else {
_LOG_INFO("Using HW FPS: %u", run->hw_fps);
}
_LOG_INFO("Using HW FPS: %u/%u", SETFPS_TPF(numerator), SETFPS_TPF(denominator));
# undef SETFPS_TPF
}
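
With the old per-encoder FPS regulation removed, the capture layer now always asks the driver for the shortest frame interval it supports and logs whatever interval the driver reports back. A minimal standalone sketch of that VIDIOC_S_PARM exchange, mirroring the change above but assuming a single-planar capture device that is already open (error handling trimmed):

    #include <linux/videodev2.h>
    #include <sys/ioctl.h>
    #include <stdio.h>

    static void request_max_fps(int fd) {
        struct v4l2_streamparm parm = {.type = V4L2_BUF_TYPE_VIDEO_CAPTURE};
        parm.parm.capture.timeperframe.numerator = 1;
        parm.parm.capture.timeperframe.denominator = (unsigned)-1; // shortest interval the driver allows
        if (ioctl(fd, VIDIOC_S_PARM, &parm) == 0) {
            // The driver rewrites timeperframe with the interval it actually applied.
            printf("Using HW FPS: %u/%u\n",
                parm.parm.capture.timeperframe.numerator,
                parm.parm.capture.timeperframe.denominator);
        }
    }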

View File

@@ -58,7 +58,6 @@ typedef struct {
uint format;
uint stride;
float hz;
uint hw_fps;
uint jpeg_quality;
uz raw_size;
uint n_bufs;
@@ -113,7 +112,6 @@ typedef struct {
uint n_bufs;
bool dma_export;
bool dma_required;
uint desired_fps;
uz min_frame_size;
bool allow_truncated_frames;
bool persistent;

View File

@@ -26,7 +26,7 @@
#define US_VERSION_MAJOR 6
#define US_VERSION_MINOR 41
#define US_VERSION_MINOR 46
#define US_MAKE_VERSION2(_major, _minor) #_major "." #_minor
#define US_MAKE_VERSION1(_major, _minor) US_MAKE_VERSION2(_major, _minor)

View File

@@ -85,8 +85,6 @@ void us_fpsi_update(us_fpsi_s *fpsi, bool bump, const us_fpsi_meta_s *meta) {
uint us_fpsi_get(us_fpsi_s *fpsi, us_fpsi_meta_s *meta) {
if (meta != NULL) {
assert(fpsi->with_meta);
} else {
assert(!fpsi->with_meta);
}
// There may be a race between reading the info and the time,
@@ -97,8 +95,7 @@ uint us_fpsi_get(us_fpsi_s *fpsi, us_fpsi_meta_s *meta) {
const ull state = atomic_load(&fpsi->state); // Then the info
uint current = state & 0xFFFF;
if (fpsi->with_meta) {
assert(meta != NULL);
if (fpsi->with_meta && meta != NULL) {
meta->width = (state >> 16) & 0xFFFF;
meta->height = (state >> 32) & 0xFFFF;
meta->online = (state >> 48) & 1;
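
us_fpsi_get() now tolerates a NULL meta argument even for meta-enabled counters instead of asserting. The 64-bit atomic it decodes packs the FPS counter and the frame meta into one word; a sketch of that layout, with a hypothetical packing helper mirroring the shifts above:

    #include <stdint.h>

    // Field order taken from the shifts in us_fpsi_get(); the helper name is illustrative.
    static uint64_t pack_state(uint16_t fps, uint16_t width, uint16_t height, int online) {
        return (uint64_t)fps
            | ((uint64_t)width << 16)
            | ((uint64_t)height << 32)
            | ((uint64_t)(online ? 1 : 0) << 48);
    }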

View File

@@ -36,7 +36,7 @@
us_frame_s *us_frame_init(void) {
us_frame_s *frame;
US_CALLOC(frame, 1);
us_frame_realloc_data(frame, 512 * 1024);
us_frame_realloc_data(frame, 32 * 1024);
frame->dma_fd = -1;
return frame;
}

View File

@@ -38,7 +38,8 @@
bool key; \
uint gop; \
\
ldf grab_ts; \
ldf grab_begin_ts; \
ldf grab_end_ts; \
ldf encode_begin_ts; \
ldf encode_end_ts;
@@ -62,7 +63,8 @@ typedef struct {
(x_dest)->key = (x_src)->key; \
(x_dest)->gop = (x_src)->gop; \
\
(x_dest)->grab_ts = (x_src)->grab_ts; \
(x_dest)->grab_begin_ts = (x_src)->grab_begin_ts; \
(x_dest)->grab_end_ts = (x_src)->grab_end_ts; \
(x_dest)->encode_begin_ts = (x_src)->encode_begin_ts; \
(x_dest)->encode_end_ts = (x_src)->encode_end_ts; \
}

View File

@@ -131,7 +131,7 @@ void us_encoder_open(us_encoder_s *enc, us_capture_s *cap) {
} else {
US_LOG_INFO("Switching to CPU encoder: the input format is not (M)JPEG ...");
type = US_ENCODER_TYPE_CPU;
quality = cap->jpeg_quality;
quality = cap->jpeg_quality; // cppcheck-suppress redundantAssignment
}
} else if (type == US_ENCODER_TYPE_M2M_VIDEO || type == US_ENCODER_TYPE_M2M_IMAGE) {
@@ -162,14 +162,8 @@ void us_encoder_open(us_encoder_s *enc, us_capture_s *cap) {
run->quality = quality;
US_MUTEX_UNLOCK(run->mutex);
const ldf desired_interval = (
cap->desired_fps > 0 && (cap->desired_fps < cap->run->hw_fps || cap->run->hw_fps == 0)
? (ldf)1 / cap->desired_fps
: 0
);
enc->run->pool = us_workers_pool_init(
"JPEG", "jw", n_workers, desired_interval,
"JPEG", "jw", n_workers,
_worker_job_init, (void*)enc,
_worker_job_destroy,
_worker_run_job);

View File

@@ -198,16 +198,8 @@ int us_server_listen(us_server_s *server) {
us_frame_copy(stream->run->blank->jpeg, ex->frame);
{
struct timeval interval = {0};
if (stream->cap->desired_fps > 0) {
interval.tv_usec = 1000000 / (stream->cap->desired_fps * 2);
} else {
interval.tv_usec = 16000; // ~60fps
}
assert((run->refresher = event_new(run->base, -1, EV_PERSIST, _http_refresher, server)) != NULL);
assert(!event_add(run->refresher, &interval));
}
assert((run->refresher = event_new(run->base, -1, 0, _http_refresher, server)) != NULL);
stream->run->http->jpeg_refresher = run->refresher;
evhttp_set_timeout(run->http, server->timeout);
@@ -519,7 +511,7 @@ static void _http_callback_state(struct evhttp_request *request, void *v_server)
(server->fake_width ? server->fake_width : captured_meta.width),
(server->fake_height ? server->fake_height : captured_meta.height),
us_bool_to_string(captured_meta.online),
stream->cap->desired_fps,
stream->desired_fps,
captured_fps,
us_fpsi_get(ex->queued_fpsi, NULL),
run->stream_clients_count
@@ -730,7 +722,8 @@ static void _http_callback_stream_write(struct bufferevent *buf_event, void *v_c
"X-UStreamer-Width: %u" RN
"X-UStreamer-Height: %u" RN
"X-UStreamer-Client-FPS: %u" RN
"X-UStreamer-Grab-Time: %.06Lf" RN
"X-UStreamer-Grab-Begin-Time: %.06Lf" RN
"X-UStreamer-Grab-End-Time: %.06Lf" RN
"X-UStreamer-Encode-Begin-Time: %.06Lf" RN
"X-UStreamer-Encode-End-Time: %.06Lf" RN
"X-UStreamer-Expose-Begin-Time: %.06Lf" RN
@@ -744,14 +737,15 @@ static void _http_callback_stream_write(struct bufferevent *buf_event, void *v_c
ex->frame->width,
ex->frame->height,
us_fpsi_get(client->fpsi, NULL),
ex->frame->grab_ts,
ex->frame->grab_begin_ts,
ex->frame->grab_end_ts,
ex->frame->encode_begin_ts,
ex->frame->encode_end_ts,
ex->expose_begin_ts,
ex->expose_cmp_ts,
ex->expose_end_ts,
now_ts,
now_ts - ex->frame->grab_ts
now_ts - ex->frame->grab_begin_ts
);
}
}
@@ -900,7 +894,8 @@ static void _http_send_snapshot(us_server_s *server) {
_A_ADD_HEADER(request, "X-UStreamer-Online", us_bool_to_string(frame->online));
ADD_UNSIGNED_HEADER("X-UStreamer-Width", frame->width);
ADD_UNSIGNED_HEADER("X-UStreamer-Height", frame->height);
ADD_TIME_HEADER("X-UStreamer-Grab-Timestamp", frame->grab_ts);
ADD_TIME_HEADER("X-UStreamer-Grab-Begin-Timestamp", frame->grab_begin_ts);
ADD_TIME_HEADER("X-UStreamer-Grab-End-Timestamp", frame->grab_end_ts);
ADD_TIME_HEADER("X-UStreamer-Encode-Begin-Timestamp", frame->encode_begin_ts);
ADD_TIME_HEADER("X-UStreamer-Encode-End-Timestamp", frame->encode_end_ts);
ADD_TIME_HEADER("X-UStreamer-Send-Timestamp", us_get_now_monotonic());
@@ -932,13 +927,15 @@ static void _http_refresher(int fd, short what, void *v_server) {
bool stream_updated = false;
bool frame_updated = false;
const int ri = us_ring_consumer_acquire(ring, 0);
if (ri >= 0) {
int ri;
while ((ri = us_ring_consumer_acquire(ring, 0)) >= 0) {
const us_frame_s *const frame = ring->items[ri];
frame_updated = _expose_frame(server, frame);
stream_updated = true;
us_ring_consumer_release(ring, ri);
} else if (ex->expose_end_ts + 1 < us_get_now_monotonic()) {
}
if (!stream_updated && (ex->expose_end_ts + 1 < us_get_now_monotonic())) {
_LOG_DEBUG("Repeating exposed ...");
ex->expose_begin_ts = us_get_now_monotonic();
ex->expose_cmp_ts = ex->expose_begin_ts;
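
The JPEG refresher is no longer a periodic timer sized from --desired-fps: the event is now created without EV_PERSIST or a timeout, drains the ring in a loop when it fires, and is triggered explicitly with event_active() each time a frame is published (see the event_active(run->http->jpeg_refresher, 0, 0) call in a later hunk). A minimal libevent sketch of that manual-activation pattern, with illustrative names:

    #include <event2/event.h>

    static void on_refresh(evutil_socket_t fd, short what, void *arg) {
        (void)fd; (void)what; (void)arg;
        // drain the frame ring here
    }

    static struct event *make_refresher(struct event_base *base) {
        // No fd, no timeout, no EV_PERSIST: the event only runs when activated.
        return event_new(base, -1, 0, on_refresh, NULL);
    }

    // Producer side, after pushing a frame into the ring:
    //   event_active(refresher, 0, 0);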

View File

@@ -43,7 +43,7 @@
static us_m2m_encoder_s *_m2m_encoder_init(
const char *name, const char *path, uint output_format,
uint bitrate, uint gop, uint quality, bool allow_dma);
uint bitrate, uint gop, uint quality, bool allow_dma, bool boost);
static void _m2m_encoder_ensure(us_m2m_encoder_s *enc, const us_frame_s *frame);
@@ -63,9 +63,9 @@ static int _m2m_encoder_compress_raw(us_m2m_encoder_s *enc, const us_frame_s *sr
#define _LOG_DEBUG(x_msg, ...) US_LOG_DEBUG("%s: " x_msg, enc->name, ##__VA_ARGS__)
us_m2m_encoder_s *us_m2m_h264_encoder_init(const char *name, const char *path, uint bitrate, uint gop) {
us_m2m_encoder_s *us_m2m_h264_encoder_init(const char *name, const char *path, uint bitrate, uint gop, bool boost) {
bitrate *= 1000; // From Kbps
return _m2m_encoder_init(name, path, V4L2_PIX_FMT_H264, bitrate, gop, 0, true);
return _m2m_encoder_init(name, path, V4L2_PIX_FMT_H264, bitrate, gop, 0, true, boost);
}
us_m2m_encoder_s *us_m2m_mjpeg_encoder_init(const char *name, const char *path, uint quality) {
@@ -76,12 +76,12 @@ us_m2m_encoder_s *us_m2m_mjpeg_encoder_init(const char *name, const char *path,
bitrate = step * round(bitrate / step);
bitrate *= 1000; // From Kbps
assert(bitrate > 0);
return _m2m_encoder_init(name, path, V4L2_PIX_FMT_MJPEG, bitrate, 0, 0, true);
return _m2m_encoder_init(name, path, V4L2_PIX_FMT_MJPEG, bitrate, 0, 0, true, false);
}
us_m2m_encoder_s *us_m2m_jpeg_encoder_init(const char *name, const char *path, uint quality) {
// FIXME: DMA doesn't work
return _m2m_encoder_init(name, path, V4L2_PIX_FMT_JPEG, 0, 0, quality, false);
return _m2m_encoder_init(name, path, V4L2_PIX_FMT_JPEG, 0, 0, quality, false, false);
}
void us_m2m_encoder_destroy(us_m2m_encoder_s *enc) {
@@ -139,7 +139,7 @@ int us_m2m_encoder_compress(us_m2m_encoder_s *enc, const us_frame_s *src, us_fra
static us_m2m_encoder_s *_m2m_encoder_init(
const char *name, const char *path, uint output_format,
uint bitrate, uint gop, uint quality, bool allow_dma) {
uint bitrate, uint gop, uint quality, bool allow_dma, bool boost) {
US_LOG_INFO("%s: Initializing encoder ...", name);
@@ -161,6 +161,7 @@ static us_m2m_encoder_s *_m2m_encoder_init(
enc->gop = gop;
enc->quality = quality;
enc->allow_dma = allow_dma;
enc->boost = boost;
enc->run = run;
return enc;
}
@@ -222,7 +223,11 @@ static void _m2m_encoder_ensure(us_m2m_encoder_s *enc, const us_frame_s *frame)
SET_OPTION(V4L2_CID_MPEG_VIDEO_H264_I_PERIOD, enc->gop);
SET_OPTION(V4L2_CID_MPEG_VIDEO_H264_PROFILE, V4L2_MPEG_VIDEO_H264_PROFILE_CONSTRAINED_BASELINE);
if (run->p_width * run->p_height <= 1920 * 1080) { // https://forums.raspberrypi.com/viewtopic.php?t=291447#p1762296
SET_OPTION(V4L2_CID_MPEG_VIDEO_H264_LEVEL, V4L2_MPEG_VIDEO_H264_LEVEL_4_0);
if (enc->boost) {
SET_OPTION(V4L2_CID_MPEG_VIDEO_H264_LEVEL, V4L2_MPEG_VIDEO_H264_LEVEL_4_2);
} else {
SET_OPTION(V4L2_CID_MPEG_VIDEO_H264_LEVEL, V4L2_MPEG_VIDEO_H264_LEVEL_4_0);
}
} else {
SET_OPTION(V4L2_CID_MPEG_VIDEO_H264_LEVEL, V4L2_MPEG_VIDEO_H264_LEVEL_5_1);
}
@@ -276,10 +281,13 @@ static void _m2m_encoder_ensure(us_m2m_encoder_s *enc, const us_frame_s *frame)
}
}
if (run->p_width * run->p_height <= 1280 * 720) {
if (
(run->p_width * run->p_height <= 1280 * 720)
|| ((enc->output_format == V4L2_PIX_FMT_H264) && enc->boost)
) {
// H264 requires some kind of limits. Anything above 30 is not supported, and at 0
// it starts producing broken frames after a while.
// Exceeding the FPS also sharply increases the encoding time.
run->fps_limit = 60;
} else {
run->fps_limit = 30;

View File

@@ -58,12 +58,13 @@ typedef struct {
uint gop;
uint quality;
bool allow_dma;
bool boost;
us_m2m_encoder_runtime_s *run;
} us_m2m_encoder_s;
us_m2m_encoder_s *us_m2m_h264_encoder_init(const char *name, const char *path, uint bitrate, uint gop);
us_m2m_encoder_s *us_m2m_h264_encoder_init(const char *name, const char *path, uint bitrate, uint gop, bool boost);
us_m2m_encoder_s *us_m2m_mjpeg_encoder_init(const char *name, const char *path, uint quality);
us_m2m_encoder_s *us_m2m_jpeg_encoder_init(const char *name, const char *path, uint quality);
void us_m2m_encoder_destroy(us_m2m_encoder_s *enc);

View File

@@ -100,6 +100,7 @@ enum _US_OPT_VALUES {
_O_H264_BITRATE,
_O_H264_GOP,
_O_H264_M2M_DEVICE,
_O_H264_BOOST,
# undef ADD_SINK
# ifdef WITH_V4P
@@ -206,6 +207,7 @@ static const struct option _LONG_OPTS[] = {
{"h264-bitrate", required_argument, NULL, _O_H264_BITRATE},
{"h264-gop", required_argument, NULL, _O_H264_GOP},
{"h264-m2m-device", required_argument, NULL, _O_H264_M2M_DEVICE},
{"h264-boost", no_argument, NULL, _O_H264_BOOST},
// Compatibility
{"sink", required_argument, NULL, _O_JPEG_SINK},
{"sink-mode", required_argument, NULL, _O_JPEG_SINK_MODE},
@@ -386,7 +388,7 @@ int options_parse(us_options_s *options, us_capture_s *cap, us_encoder_s *enc, u
case _O_FORMAT_SWAP_RGB: OPT_SET(cap->format_swap_rgb, true);
case _O_TV_STANDARD: OPT_PARSE_ENUM("TV standard", cap->standard, us_capture_parse_standard, US_STANDARDS_STR);
case _O_IO_METHOD: OPT_PARSE_ENUM("IO method", cap->io_method, us_capture_parse_io_method, US_IO_METHODS_STR);
case _O_DESIRED_FPS: OPT_NUMBER("--desired-fps", cap->desired_fps, 0, US_VIDEO_MAX_FPS, 0);
case _O_DESIRED_FPS: OPT_NUMBER("--desired-fps", stream->desired_fps, 0, US_VIDEO_MAX_FPS, 0);
case _O_MIN_FRAME_SIZE: OPT_NUMBER("--min-frame-size", cap->min_frame_size, 1, 8192, 0);
case _O_ALLOW_TRUNCATED_FRAMES: OPT_SET(cap->allow_truncated_frames, true);
case _O_PERSISTENT: OPT_SET(cap->persistent, true);
@@ -469,6 +471,7 @@ int options_parse(us_options_s *options, us_capture_s *cap, us_encoder_s *enc, u
case _O_H264_BITRATE: OPT_NUMBER("--h264-bitrate", stream->h264_bitrate, 25, 20000, 0);
case _O_H264_GOP: OPT_NUMBER("--h264-gop", stream->h264_gop, 0, 60, 0);
case _O_H264_M2M_DEVICE: OPT_SET(stream->h264_m2m_path, optarg);
case _O_H264_BOOST: OPT_SET(stream->h264_boost, true);
# ifdef WITH_V4P
case _O_V4P:
@@ -746,6 +749,7 @@ static void _help(FILE *fp, const us_capture_s *cap, const us_encoder_s *enc, co
SAY(" --h264-bitrate <kbps> ───────── H264 bitrate in Kbps. Default: %u.\n", stream->h264_bitrate);
SAY(" --h264-gop <N> ──────────────── Interval between keyframes. Default: %u.\n", stream->h264_gop);
SAY(" --h264-m2m-device </dev/path> ─ Path to V4L2 M2M encoder device. Default: auto select.\n");
SAY(" --h264-boost ────────────────── Increase encoder performance on PiKVM V4. Default: disabled.\n");
# ifdef WITH_V4P
SAY("Passthrough options for PiKVM V4:");
SAY("═════════════════════════════════");

View File

@@ -28,9 +28,12 @@
#include <unistd.h>
#include <errno.h>
#include <assert.h>
#include <math.h>
#include <pthread.h>
#include <event2/event.h> // jpeg_refresher
#include "../libs/types.h"
#include "../libs/errors.h"
#include "../libs/tools.h"
@@ -154,7 +157,13 @@ void us_stream_loop(us_stream_s *stream) {
atomic_store(&run->http->last_request_ts, us_get_now_monotonic());
if (stream->h264_sink != NULL) {
run->h264_enc = us_m2m_h264_encoder_init("H264", stream->h264_m2m_path, stream->h264_bitrate, stream->h264_gop);
run->h264_enc = us_m2m_h264_encoder_init(
"H264",
stream->h264_m2m_path,
stream->h264_bitrate,
stream->h264_gop,
stream->h264_boost);
run->h264_tmp_src = us_frame_init();
run->h264_dest = us_frame_init();
}
@@ -312,6 +321,9 @@ static void *_jpeg_thread(void *v_ctx) {
_worker_context_s *ctx = v_ctx;
us_stream_s *stream = ctx->stream;
uint take = 1;
uint step = 1;
ldf grab_after_ts = 0;
uint fluency_passed = 0;
@@ -330,7 +342,7 @@ static void *_jpeg_thread(void *v_ctx) {
atomic_fetch_sub(&stream->run->http->snapshot_requested, 1);
}
US_LOG_PERF("JPEG: ##### Encoded JPEG exposed; worker=%s, latency=%.3Lf",
wr->name, us_get_now_monotonic() - job->dest->grab_ts);
wr->name, us_get_now_monotonic() - job->dest->grab_begin_ts);
} else {
US_LOG_PERF("JPEG: ----- Encoded JPEG dropped; worker=%s", wr->name);
}
@@ -348,6 +360,19 @@ static void *_jpeg_thread(void *v_ctx) {
continue;
}
if (stream->desired_fps > 0) {
const uint captured_fps = us_fpsi_get(stream->run->http->captured_fpsi, NULL);
take = ceilf((float)captured_fps / (float)stream->desired_fps);
if (step < take) {
US_LOG_DEBUG("JPEG: Passed encoding for FPS limit: step=%u, take=%u", step, take);
++step;
us_capture_hwbuf_decref(hw);
continue;
} else {
step = 1;
}
}
const ldf now_ts = us_get_now_monotonic();
if (now_ts < grab_after_ts) {
fluency_passed += 1;
@@ -394,7 +419,9 @@ static void *_h264_thread(void *v_ctx) {
_worker_context_s *ctx = v_ctx;
us_stream_s *stream = ctx->stream;
ldf grab_after_ts = 0;
uint take = 1;
uint step = 1;
while (!atomic_load(ctx->stop)) {
us_capture_hwbuf_s *hw = _get_latest_hw(ctx->queue);
if (hw == NULL) {
@@ -405,23 +432,25 @@ static void *_h264_thread(void *v_ctx) {
US_LOG_VERBOSE("H264: Passed encoding because nobody is watching");
goto decref;
}
if (hw->raw.grab_ts < grab_after_ts) {
US_LOG_DEBUG("H264: Passed encoding for FPS limit");
goto decref;
uint fps_limit = stream->run->h264_enc->run->fps_limit;
if (stream->desired_fps > 0 && (fps_limit == 0 || stream->desired_fps < fps_limit)) {
fps_limit = stream->desired_fps;
}
if (fps_limit > 0) {
const uint captured_fps = us_fpsi_get(stream->run->http->captured_fpsi, NULL);
take = ceilf((float)captured_fps / (float)fps_limit);
if (step < take) {
US_LOG_DEBUG("H264: Passed encoding for FPS limit: step=%u, take=%u", step, take);
++step;
goto decref;
} else {
step = 1;
}
}
_stream_encode_expose_h264(ctx->stream, &hw->raw, false);
// The M2M encoder adds about 100 ms of latency at 1080p if it is fed more than 30 FPS.
// That is why there are two modes: 60 FPS for small videos and 30 for 1920x1080(1200).
// The next frame is grabbed no earlier than the FPS requires, minus a small
// tolerance (in case capture is uneven): slightly less than 1/60, and roughly a third of 1/30.
const uint fps_limit = stream->run->h264_enc->run->fps_limit;
if (fps_limit > 0) {
const ldf frame_interval = (ldf)1 / fps_limit;
grab_after_ts = hw->raw.grab_ts + frame_interval - 0.01;
}
decref:
us_capture_hwbuf_decref(hw);
}
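
Both encoder paths now throttle by decimating frames instead of comparing grab timestamps: they read the measured capture FPS, compute take = ceil(captured_fps / limit), and encode only every take-th frame, where the H264 limit is the smaller of --desired-fps and the encoder's own fps_limit. A small self-contained sketch of that arithmetic, with illustrative numbers:

    // Frame-decimation arithmetic used by the new limiter (build with -lm for ceilf()).
    #include <math.h>
    #include <stdio.h>

    int main(void) {
        const unsigned captured_fps = 50;  // what the capture thread measured
        const unsigned fps_limit = 30;     // --desired-fps or the encoder's own cap
        const unsigned take = (unsigned)ceilf((float)captured_fps / (float)fps_limit);
        // Every take-th frame is encoded, so the effective rate is:
        printf("take=%u -> ~%u FPS encoded\n", take, captured_fps / take);
        return 0;
    }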
@@ -696,6 +725,7 @@ static void _stream_expose_jpeg(us_stream_s *stream, const us_frame_s *frame) {
us_frame_s *const dest = run->http->jpeg_ring->items[ri];
us_frame_copy(frame, dest);
us_ring_producer_release(run->http->jpeg_ring, ri);
event_active(run->http->jpeg_refresher, 0, 0);
if (stream->jpeg_sink != NULL) {
us_memsink_server_put(stream->jpeg_sink, dest, NULL);
}

View File

@@ -26,6 +26,8 @@
#include <pthread.h>
#include <event2/event.h> // jpeg_refresher
#include "../libs/types.h"
#include "../libs/queue.h"
#include "../libs/ring.h"
@@ -51,6 +53,7 @@ typedef struct {
atomic_bool h264_online;
us_fpsi_s *h264_fpsi;
struct event *jpeg_refresher;
us_ring_s *jpeg_ring;
atomic_bool has_clients;
atomic_uint snapshot_requested;
@@ -77,6 +80,7 @@ typedef struct {
us_capture_s *cap;
us_encoder_s *enc;
uint desired_fps;
bool notify_parent;
bool slowdown;
uint error_delay;
@@ -90,6 +94,7 @@ typedef struct {
uint h264_bitrate;
uint h264_gop;
char *h264_m2m_path;
bool h264_boost;
# ifdef WITH_V4P
us_drm_s *drm;

View File

@@ -37,7 +37,7 @@ static void *_worker_thread(void *v_worker);
us_workers_pool_s *us_workers_pool_init(
const char *name, const char *wr_prefix, uint n_workers, ldf desired_interval,
const char *name, const char *wr_prefix, uint n_workers,
us_workers_pool_job_init_f job_init, void *job_init_arg,
us_workers_pool_job_destroy_f job_destroy,
us_workers_pool_run_job_f run_job) {
@@ -47,7 +47,6 @@ us_workers_pool_s *us_workers_pool_init(
us_workers_pool_s *pool;
US_CALLOC(pool, 1);
pool->name = name;
pool->desired_interval = desired_interval;
pool->job_destroy = job_destroy;
pool->run_job = run_job;
@@ -147,14 +146,8 @@ ldf us_workers_pool_get_fluency_delay(us_workers_pool_s *pool, const us_worker_s
pool->approx_job_time = approx_job_time;
const ldf min_delay = pool->approx_job_time / pool->n_workers; // The average job time is spread across N workers
if (pool->desired_interval > 0 && min_delay > 0 && pool->desired_interval > min_delay) {
// An artificial delay based on the desired FPS, if --desired-fps is enabled
// and the hardware FPS does not land exactly on the desired value
return pool->desired_interval;
}
return min_delay;
// The average job time is spread across N workers
return (pool->approx_job_time / pool->n_workers);
}
static void *_worker_thread(void *v_worker) {

View File

@@ -56,7 +56,6 @@ typedef bool (*us_workers_pool_run_job_f)(us_worker_s *wr);
typedef struct us_workers_pool_sx {
const char *name;
ldf desired_interval;
us_workers_pool_job_destroy_f job_destroy;
us_workers_pool_run_job_f run_job;
@@ -76,7 +75,7 @@ typedef struct us_workers_pool_sx {
us_workers_pool_s *us_workers_pool_init(
const char *name, const char *wr_prefix, uint n_workers, ldf desired_interval,
const char *name, const char *wr_prefix, uint n_workers,
us_workers_pool_job_init_f job_init, void *job_init_arg,
us_workers_pool_job_destroy_f job_destroy,
us_workers_pool_run_job_f run_job);

View File

@@ -242,7 +242,7 @@ static void _main_loop(void) {
us_drm_destroy(drm);
}
static void *_follower_thread(void *v_unix_follow) {
static void *_follower_thread(void *v_unix_follow) { // cppcheck-suppress constParameterCallback
US_THREAD_SETTLE("follower");
const char *path = v_unix_follow;
assert(path != NULL);