Compare commits

...

97 Commits
v6.17 ... v6.46

Author SHA1 Message Date
Maxim Devaev
efbb2aa7ba Bump version: 6.45 → 6.46 2026-01-21 08:26:04 +02:00
Maxim Devaev
5692d81e46 lint fix 2026-01-21 08:21:44 +02:00
Maxim Devaev
4cec824b13 fixed fps limit for h264 2026-01-21 07:57:44 +02:00
Maxim Devaev
ac1989451c added help for --h264-boost 2026-01-21 07:07:41 +02:00
Maxim Devaev
e39d27309a Merge branch 'h264-boost' 2026-01-21 07:04:06 +02:00
Maxim Devaev
b983b6c355 new fps limiter 2026-01-21 07:03:58 +02:00
Maxim Devaev
5204f00812 h264 boost mode 2026-01-21 03:16:20 +02:00
Maxim Devaev
9eb39bbfc3 grab_begin_ts and grab_end_ts 2026-01-21 00:07:40 +02:00
Maxim Devaev
6adbb93e57 fpsi: optional meta arg in us_fpsi_get() 2026-01-20 11:52:19 +02:00
Maxim Devaev
4bd1465a10 janus: apply zero_playout_delay 2026-01-20 11:49:46 +02:00
Maxim Devaev
cf7f8947ef always capture maximum possible fps 2026-01-20 05:16:02 +02:00
Maxim Devaev
ec2e6c313b removed old fps regulation for jpeg encoders 2026-01-20 02:48:39 +02:00
Maxim Devaev
de2cfa36e1 Bump version: 6.44 → 6.45 2026-01-16 23:31:31 +02:00
Maxim Devaev
6c1a8f75a1 bumped python 2026-01-16 23:29:54 +02:00
Maxim Devaev
26ee5143ee Bump version: 6.43 → 6.44 2026-01-04 16:43:12 +02:00
Maxim Devaev
e2890e5851 janus: removed sync between video and audio 2026-01-04 16:03:42 +02:00
Maxim Devaev
e2b01e4d79 Bump version: 6.42 → 6.43 2026-01-03 19:43:43 +02:00
Maxim Devaev
903bc45bee lint fixes 2026-01-03 19:21:10 +02:00
Maxim Devaev
b2b1989c5b reduced preallocated us_frame_s size 2026-01-03 18:54:56 +02:00
Maxim Devaev
36b539c275 Bump version: 6.41 → 6.42 2025-11-11 00:00:35 +02:00
Maxim Devaev
38c6917644 janus: pkg-config 2025-11-10 23:58:42 +02:00
Maxim Devaev
05a5d3fed4 Bump version: 6.40 → 6.41 2025-10-23 16:28:07 +03:00
Maxim Devaev
0e4bf31325 janus: non-tc358743 devices for acap supported
An alternative implementation of pikvm/ustreamer#304.
Thanks for the idea.
2025-10-23 00:50:19 +03:00
Maxim Devaev
9a5cce3b92 janus: deprecated aplay/check option 2025-10-22 21:35:56 +03:00
Maxim Devaev
c4ac67acba janus: plug audio devices dynamically 2025-10-22 19:35:35 +03:00
Maxim Devaev
472673ea90 Bump version: 6.39 → 6.40 2025-07-28 21:32:04 +03:00
Maxim Devaev
f7ebe31c71 refactoring 2025-07-28 21:29:27 +03:00
Maxim Devaev
3a831817f4 pikvm/pikvm#1558: Discard JPEGs with invalid headers 2025-07-28 21:26:08 +03:00
Maxim Devaev
913cdac7a6 Bump version: 6.38 → 6.39 2025-07-03 04:19:51 +03:00
Maxim Devaev
777697dc1e improved logging on --exit-on-device-error 2025-07-03 04:17:48 +03:00
Maxim Devaev
5f437b9a35 Bump version: 6.37 → 6.38 2025-07-03 03:51:05 +03:00
Maxim Devaev
b089f896da pikvm/pikvm#312: --exit-on-device-error 2025-07-03 03:49:02 +03:00
Maxim Devaev
0e521ad0c6 Bump version: 6.36 → 6.37 2025-05-27 19:42:34 +03:00
Maxim Devaev
620a0ec847 Fixed #290: improved blank diagnostics 2025-05-27 19:30:07 +03:00
Maxim Devaev
7a1d4816ed frametext: more improvements 2025-05-26 22:34:19 +03:00
Maxim Devaev
aec8431024 verbose on-screen error messages 2025-05-26 20:06:06 +03:00
Maxim Devaev
5b18e29555 frametext: improved proportions 2025-05-26 20:04:35 +03:00
Maxim Devaev
2717248581 Bump version: 6.35 → 6.36 2025-03-27 04:38:28 +02:00
Maxim Devaev
afd305e87d v4p: fix for some DOS device 2025-03-27 04:36:35 +02:00
Maxim Devaev
e3d8132237 fixed --format-swap-rgb 2025-03-27 04:33:17 +02:00
Maxim Devaev
1f32e875c3 openwrt: +libatomic 2025-03-09 06:45:37 +02:00
Maxim Devaev
2e88fb9294 Bump version: 6.34 → 6.35 2025-03-08 20:16:11 +02:00
Maxim Devaev
d68f8e6d86 added missing formats 2025-03-08 20:14:17 +02:00
gudvinr
b380beba6d Add GREY pixelformat (#171)
Fixes #170

Monochrome cameras send only Y component of YUV image
2025-03-08 20:01:49 +02:00
Maxim Devaev
3a06a484ce Bump version: 6.33 → 6.34 2025-03-05 17:34:18 +02:00
Maxim Devaev
0307d3bdb6 Issue #287: Don't add -latomic on FreeBSD 2025-03-05 17:32:01 +02:00
Maxim Devaev
f2dd9c3c5a pikvm/ustreamer#306: Added ifdef for linux 2025-02-28 22:24:31 +02:00
Maxim Devaev
4e3f873f0d Added pkg-config to README 2025-02-27 22:21:23 +02:00
Maxim Devaev
029440cf82 Bump version: 6.32 → 6.33 2025-02-24 18:47:04 +02:00
Maxim Devaev
df74f5cf18 janus: added default ICE url 2025-02-24 18:41:40 +02:00
Maxim Devaev
97494c3531 janus: replaces STUN variables with ICE_URL 2025-02-24 18:21:46 +02:00
Maxim Devaev
71544880d1 janus: changed env prefix 2025-02-24 17:16:24 +02:00
Maxim Devaev
83127e58ff Bump version: 6.31 → 6.32 2025-02-24 05:19:22 +02:00
Maxim Devaev
604a8f7cb4 janus: STUN env 2025-02-24 05:17:32 +02:00
Maxim Devaev
602c1747d5 Bump version: 6.30 → 6.31 2025-02-08 15:46:31 +02:00
Maxim Devaev
a2b8b35070 improved build system 2025-02-08 15:44:40 +02:00
Maxim Devaev
dd7701be38 Bump version: 6.29 → 6.30 2025-02-08 13:03:01 +02:00
Maxim Devaev
1c9bd91b31 lint fix 2025-02-08 13:01:32 +02:00
Maxim Devaev
e19a3ca7ff report about all WITH_* flags in --features 2025-02-08 02:21:26 +02:00
Maxim Devaev
b2d1a5612d manual WITH_PDEATHSIG 2025-02-08 01:56:59 +02:00
Maxim Devaev
f3e0613de3 python: expose FEATURES variable 2025-02-08 00:25:17 +02:00
Maxim Devaev
5baf921660 common WITH_* flags 2025-02-07 23:31:36 +02:00
Maxim Devaev
6cabcd39f1 python: fixed uninitialized fd 2025-02-07 23:24:05 +02:00
Maxim Devaev
3df3658e4f python: version constants 2025-02-07 23:20:45 +02:00
Maxim Devaev
f21fc5f6d3 added missing WITH_V4P flag to --features 2025-02-07 18:02:04 +02:00
Maxim Devaev
b70ed98af9 Bump version: 6.28 → 6.29 2025-02-03 08:55:13 +02:00
Maxim Devaev
52cdabe150 janus: counterclockwise video rotation 2025-02-03 08:52:42 +02:00
Maxim Devaev
fe86997d08 Bump version: 6.27 → 6.28 2025-01-28 15:59:57 +02:00
Maxim Devaev
df39b824c6 refactoring 2025-01-27 06:32:26 +02:00
Sam Listopad
db297db52e Add Support for YUV420 and YVU variants. (#276)
* Add Support for YUV420 and 410 and YVU variants.

* Add new formats to the help messaging

* Remove YUV410 support since M2M encoder on Pi cannot convert it

* Cleanups requested by @mdevaev

* Change to use u8 per @mdevaev
2025-01-27 06:14:18 +02:00
Jack Wilsdon
b304364af9 Allow overriding pkg-config (#301) 2025-01-27 02:53:39 +02:00
Maxim Devaev
ddec4e8478 Bump version: 6.26 → 6.27 2025-01-21 05:44:36 +02:00
Maxim Devaev
28ca658621 moved to python-3.13 2025-01-21 05:43:04 +02:00
Maxim Devaev
270d3ae3a9 Bump version: 6.25 → 6.26 2025-01-20 16:41:44 +02:00
Maxim Devaev
c1f080f29f check file flag for aplay 2025-01-20 16:39:50 +02:00
Maxim Devaev
b1e7c82131 Bump version: 6.24 → 6.25 2025-01-20 00:23:11 +02:00
Maxim Devaev
3d7685ac48 bunch of mic fixes 2025-01-20 00:21:36 +02:00
Maxim Devaev
37e79995fe Bump version: 6.23 → 6.24 2025-01-19 18:19:30 +02:00
Maxim Devaev
1ee096b17c mic support 2025-01-19 18:15:08 +02:00
Maxim Devaev
918688e91d refactoring 2025-01-18 18:32:41 +02:00
Maxim Devaev
a94ff667b0 refactoring, increased bitrate, reduced buffers 2025-01-18 17:16:55 +02:00
Maxim Devaev
10595a13e9 refactoring 2025-01-18 05:09:32 +02:00
Maxim Devaev
80ffc8b2bd Bump version: 6.22 → 6.23 2025-01-17 20:53:21 +02:00
Maxim Devaev
ba246d90c0 refactoring 2025-01-17 20:40:18 +02:00
Maxim Devaev
29c98e3908 Bump version: 6.21 → 6.22 2025-01-13 17:17:27 +02:00
Maxim Devaev
acc8cecbe4 lint fix 2025-01-13 17:15:55 +02:00
Maxim Devaev
8c31af2f03 janus: sendonly/sendrecv audio flag 2025-01-13 17:10:42 +02:00
Maxim Devaev
a727c9b7c5 Bump version: 6.20 → 6.21 2024-12-27 05:22:35 +02:00
Maxim Devaev
eabc8d8343 fixed bug with reversed logic of parent notification 2024-12-27 05:20:22 +02:00
Maxim Devaev
4e4ae21a83 Bump version: 6.19 → 6.20 2024-12-26 04:31:23 +02:00
Maxim Devaev
412a1775a6 hotfixed online flag 2024-12-26 04:29:15 +02:00
Maxim Devaev
c404c49c6d Bump version: 6.18 → 6.19 2024-12-26 04:09:49 +02:00
Maxim Devaev
481e359153 janus: reduces opus frame length to 20ms 2024-12-26 04:05:53 +02:00
Maxim Devaev
04114bba86 refactoring 2024-12-15 11:34:41 +02:00
Maxim Devaev
c848756d53 Bump version: 6.17 → 6.18 2024-11-29 22:26:02 +02:00
Maxim Devaev
2a8aaabe48 janus: Fixed return value of message handler + memory leak with transaction 2024-11-29 22:03:49 +02:00
Maxim Devaev
239db92a85 Issue #295: Fixed double json_decref() 2024-11-27 16:08:29 +02:00
63 changed files with 1697 additions and 933 deletions

View File

@@ -1,7 +1,7 @@
[bumpversion]
commit = True
tag = True
current_version = 6.17
current_version = 6.46
parse = (?P<major>\d+)\.(?P<minor>\d+)
serialize =
{major}.{minor}

View File

@@ -1,43 +1,64 @@
-include config.mk
# =====
DESTDIR ?=
PREFIX ?= /usr/local
MANPREFIX ?= $(PREFIX)/share/man
CC ?= gcc
PY ?= python3
PKG_CONFIG ?= pkg-config
CFLAGS ?= -O3
LDFLAGS ?=
R_DESTDIR = $(if $(DESTDIR),$(shell realpath "$(DESTDIR)"),)
WITH_PYTHON ?= 0
WITH_JANUS ?= 0
WITH_V4P ?= 0
WITH_GPIO ?= 0
WITH_SYSTEMD ?= 0
WITH_PTHREAD_NP ?= 1
WITH_SETPROCTITLE ?= 1
WITH_PDEATHSIG ?= 1
define optbool
$(filter $(shell echo $(1) | tr A-Z a-z), yes on 1)
endef
MK_WITH_PYTHON = $(call optbool,$(WITH_PYTHON))
MK_WITH_JANUS = $(call optbool,$(WITH_JANUS))
MK_WITH_V4P = $(call optbool,$(WITH_V4P))
MK_WITH_GPIO = $(call optbool,$(WITH_GPIO))
MK_WITH_SYSTEMD = $(call optbool,$(WITH_SYSTEMD))
MK_WITH_PTHREAD_NP = $(call optbool,$(WITH_PTHREAD_NP))
MK_WITH_SETPROCTITLE = $(call optbool,$(WITH_SETPROCTITLE))
MK_WITH_PDEATHSIG = $(call optbool,$(WITH_PDEATHSIG))
export
_LINTERS_IMAGE ?= ustreamer-linters
# =====
ifeq (__not_found__,$(shell which pkg-config 2>/dev/null || echo "__not_found__"))
$(error "No pkg-config found in $(PATH)")
ifeq (__not_found__,$(shell which $(PKG_CONFIG) 2>/dev/null || echo "__not_found__"))
$(error "No $(PKG_CONFIG) found in $(PATH)")
endif
# =====
define optbool
$(filter $(shell echo $(1) | tr A-Z a-z), yes on 1)
endef
ifeq ($(V),)
ECHO = @
endif
# =====
all:
+ $(MAKE) apps
ifneq ($(call optbool,$(WITH_PYTHON)),)
ifneq ($(MK_WITH_PYTHON),)
+ $(MAKE) python
endif
ifneq ($(call optbool,$(WITH_JANUS)),)
ifneq ($(MK_WITH_JANUS),)
+ $(MAKE) janus
endif
@@ -61,10 +82,10 @@ janus:
install: all
$(MAKE) -C src install
ifneq ($(call optbool,$(WITH_PYTHON)),)
ifneq ($(MK_WITH_PYTHON),)
$(MAKE) -C python install
endif
ifneq ($(call optbool,$(WITH_JANUS)),)
ifneq ($(MK_WITH_JANUS),)
$(MAKE) -C janus install
endif
mkdir -p $(R_DESTDIR)$(MANPREFIX)/man1
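
In the refactored Makefile above, each WITH_* switch is normalized exactly once: `optbool` lower-cases the value and keeps it only if it is `yes`, `on` or `1`, the result is stored in a matching MK_WITH_* variable, and the bare `export` makes those pre-computed values visible to the recursive `$(MAKE)` calls, so sub-makefiles such as janus/Makefile can test `MK_WITH_PTHREAD_NP` instead of re-declaring `optbool` locally. For example, `$(call optbool,YES)` expands to `yes` (non-empty, so `ifneq` treats it as enabled), while `$(call optbool,0)` expands to the empty string.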

View File

@@ -11,7 +11,7 @@
|----------|---------------|-------------------|
| Multithreaded JPEG encoding | ✔ | ✘ |
| Hardware image encoding<br>on Raspberry Pi | ✔ | ✘ |
| Behavior when the device<br>is disconnected while streaming | ✔ Shows a black screen<br>with ```NO SIGNAL``` on it<br>until reconnected | ✘ Stops the streaming <sup>1</sup> |
| Behavior when the device<br>is disconnected while streaming | ✔ Shows a black screen<br>with ```NO LIVE VIDEO``` on it<br>until reconnected | ✘ Stops the streaming <sup>1</sup> |
| [DV-timings](https://linuxtv.org/downloads/v4l-dvb-apis-new/userspace-api/v4l/dv-timings.html) support -<br>the ability to change resolution<br>on the fly by source signal | ✔ | ☹ Partially yes <sup>1</sup> |
| Option to skip frames when streaming<br>static images by HTTP to save the traffic | ✔ <sup>2</sup> | ✘ |
| Streaming via UNIX domain socket | ✔ | ✘ |
@@ -44,7 +44,7 @@ You need to download the µStreamer onto your system and build it from the sourc
* FreeBSD port: https://www.freshports.org/multimedia/ustreamer.
### Preconditions
You'll need ```make```, ```gcc```, ```libevent``` with ```pthreads``` support, ```libjpeg9```/```libjpeg-turbo``` and ```libbsd``` (only for Linux).
You'll need ```make```, ```gcc```, ```pkg-config```, ```libevent``` with ```pthreads``` support, ```libjpeg9```/```libjpeg-turbo``` and ```libbsd``` (only for Linux).
* Arch: `sudo pacman -S libevent libjpeg-turbo libutil-linux libbsd`.
* Raspberry OS Bullseye: `sudo apt install libevent-dev libjpeg62-turbo libbsd-dev`. Add `libgpiod-dev` for `WITH_GPIO=1` and `libsystemd-dev` for `WITH_SYSTEMD=1` and `libasound2-dev libspeex-dev libspeexdsp-dev libopus-dev` for `WITH_JANUS=1`.
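
With those packages installed, the build is driven by the Makefile shown above: something like `make WITH_JANUS=1 WITH_SYSTEMD=1` followed by `sudo make install` with the same flags is a representative invocation (illustrative here, not quoted from the README), and the new `PKG_CONFIG` variable can be set to point at an alternative pkg-config binary.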

View File

@@ -2,6 +2,7 @@ R_DESTDIR ?=
PREFIX ?= /usr/local
CC ?= gcc
PKG_CONFIG ?= pkg-config
CFLAGS ?= -O3
LDFLAGS ?=
@@ -9,21 +10,20 @@ LDFLAGS ?=
# =====
_PLUGIN = libjanus_ustreamer.so
_CFLAGS = -fPIC -MD -c -std=c17 -Wall -Wextra -D_GNU_SOURCE $(shell pkg-config --cflags glib-2.0) $(CFLAGS)
_LDFLAGS = -shared -lm -pthread -lrt -ljansson -lopus -lasound -lspeexdsp $(shell pkg-config --libs glib-2.0) $(LDFLAGS)
_CFLAGS = -fPIC -MD -c -std=c17 -Wall -Wextra -D_GNU_SOURCE $(shell $(PKG_CONFIG) --cflags janus-gateway) $(CFLAGS)
_LDFLAGS = -shared -lm -pthread -lrt -ljansson -lopus -lasound -lspeexdsp $(shell $(PKG_CONFIG) --libs janus-gateway) $(LDFLAGS)
_SRCS = $(shell ls src/uslibs/*.c src/*.c)
_BUILD = build
define optbool
$(filter $(shell echo $(1) | tr A-Z a-z), yes on 1)
endef
# =====
ifneq ($(shell sh -c 'uname 2>/dev/null || echo Unknown'),FreeBSD)
override _LDFLAGS += -latomic
endif
WITH_PTHREAD_NP ?= 1
ifneq ($(call optbool,$(WITH_PTHREAD_NP)),)
ifneq ($(MK_WITH_PTHREAD_NP),)
override _CFLAGS += -DWITH_PTHREAD_NP
endif

janus/src/acap.c (new file, 243 lines)
View File

@@ -0,0 +1,243 @@
/*****************************************************************************
# #
# uStreamer - Lightweight and fast MJPEG-HTTP streamer. #
# #
# Copyright (C) 2018-2024 Maxim Devaev <mdevaev@gmail.com> #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <https://www.gnu.org/licenses/>. #
# #
*****************************************************************************/
#include "acap.h"
#include <stdlib.h>
#include <stdatomic.h>
#include <assert.h>
#include <pthread.h>
#include <alsa/asoundlib.h>
#include <speex/speex_resampler.h>
#include <opus/opus.h>
#include "uslibs/types.h"
#include "uslibs/errors.h"
#include "uslibs/tools.h"
#include "uslibs/array.h"
#include "uslibs/ring.h"
#include "uslibs/threading.h"
#include "rtp.h"
#include "au.h"
#include "logging.h"
static void *_pcm_thread(void *v_acap);
static void *_encoder_thread(void *v_acap);
us_acap_s *us_acap_init(const char *name, uint pcm_hz) {
us_acap_s *acap;
US_CALLOC(acap, 1);
acap->pcm_hz = pcm_hz;
US_RING_INIT_WITH_ITEMS(acap->pcm_ring, 8, us_au_pcm_init);
US_RING_INIT_WITH_ITEMS(acap->enc_ring, 8, us_au_encoded_init);
atomic_init(&acap->stop, false);
int err;
{
if ((err = snd_pcm_open(&acap->dev, name, SND_PCM_STREAM_CAPTURE, 0)) < 0) {
acap->dev = NULL;
US_JLOG_PERROR_ALSA(err, "acap", "Can't open PCM capture");
goto error;
}
assert(!snd_pcm_hw_params_malloc(&acap->dev_params));
# define SET_PARAM(_msg, _func, ...) { \
if ((err = _func(acap->dev, acap->dev_params, ##__VA_ARGS__)) < 0) { \
US_JLOG_PERROR_ALSA(err, "acap", _msg); \
goto error; \
} \
}
SET_PARAM("Can't initialize PCM params", snd_pcm_hw_params_any);
SET_PARAM("Can't set PCM access type", snd_pcm_hw_params_set_access, SND_PCM_ACCESS_RW_INTERLEAVED);
SET_PARAM("Can't set PCM channels number", snd_pcm_hw_params_set_channels, US_RTP_OPUS_CH);
SET_PARAM("Can't set PCM sampling format", snd_pcm_hw_params_set_format, SND_PCM_FORMAT_S16_LE);
SET_PARAM("Can't set PCM sampling rate", snd_pcm_hw_params_set_rate_near, &acap->pcm_hz, 0);
if (acap->pcm_hz < US_AU_MIN_PCM_HZ || acap->pcm_hz > US_AU_MAX_PCM_HZ) {
US_JLOG_ERROR("acap", "Unsupported PCM freq: %u; should be: %u <= F <= %u",
acap->pcm_hz, US_AU_MIN_PCM_HZ, US_AU_MAX_PCM_HZ);
goto error;
}
acap->pcm_frames = US_AU_HZ_TO_FRAMES(acap->pcm_hz);
acap->pcm_size = US_AU_HZ_TO_BUF8(acap->pcm_hz);
SET_PARAM("Can't apply PCM params", snd_pcm_hw_params);
# undef SET_PARAM
}
if (acap->pcm_hz != US_RTP_OPUS_HZ) {
acap->res = speex_resampler_init(US_RTP_OPUS_CH, acap->pcm_hz, US_RTP_OPUS_HZ, SPEEX_RESAMPLER_QUALITY_DESKTOP, &err);
if (err < 0) {
acap->res = NULL;
US_JLOG_PERROR_RES(err, "acap", "Can't create resampler");
goto error;
}
}
{
// OPUS_APPLICATION_VOIP, OPUS_APPLICATION_RESTRICTED_LOWDELAY
acap->enc = opus_encoder_create(US_RTP_OPUS_HZ, US_RTP_OPUS_CH, OPUS_APPLICATION_AUDIO, &err);
assert(err == 0);
// https://github.com/meetecho/janus-gateway/blob/3cdd6ff/src/plugins/janus_audiobridge.c#L2272
// https://datatracker.ietf.org/doc/html/rfc7587#section-3.1.1
assert(!opus_encoder_ctl(acap->enc, OPUS_SET_BITRATE(128000)));
assert(!opus_encoder_ctl(acap->enc, OPUS_SET_MAX_BANDWIDTH(OPUS_BANDWIDTH_FULLBAND)));
assert(!opus_encoder_ctl(acap->enc, OPUS_SET_SIGNAL(OPUS_SIGNAL_MUSIC)));
// OPUS_SET_INBAND_FEC(1), OPUS_SET_PACKET_LOSS_PERC(10): see rtpa.c
}
US_JLOG_INFO("acap", "Capture configured on %uHz; capturing ...", acap->pcm_hz);
acap->tids_created = true;
US_THREAD_CREATE(acap->enc_tid, _encoder_thread, acap);
US_THREAD_CREATE(acap->pcm_tid, _pcm_thread, acap);
return acap;
error:
us_acap_destroy(acap);
return NULL;
}
void us_acap_destroy(us_acap_s *acap) {
if (acap->tids_created) {
atomic_store(&acap->stop, true);
US_THREAD_JOIN(acap->pcm_tid);
US_THREAD_JOIN(acap->enc_tid);
}
US_DELETE(acap->enc, opus_encoder_destroy);
US_DELETE(acap->res, speex_resampler_destroy);
US_DELETE(acap->dev, snd_pcm_close);
US_DELETE(acap->dev_params, snd_pcm_hw_params_free);
US_RING_DELETE_WITH_ITEMS(acap->enc_ring, us_au_encoded_destroy);
US_RING_DELETE_WITH_ITEMS(acap->pcm_ring, us_au_pcm_destroy);
if (acap->tids_created) {
US_JLOG_INFO("acap", "Capture closed");
}
free(acap);
}
int us_acap_get_encoded(us_acap_s *acap, u8 *data, uz *size, u64 *pts) {
if (atomic_load(&acap->stop)) {
return -1;
}
const int ri = us_ring_consumer_acquire(acap->enc_ring, 0.1);
if (ri < 0) {
return US_ERROR_NO_DATA;
}
const us_au_encoded_s *const buf = acap->enc_ring->items[ri];
if (buf->used == 0 || *size < buf->used) {
us_ring_consumer_release(acap->enc_ring, ri);
return US_ERROR_NO_DATA;
}
memcpy(data, buf->data, buf->used);
*size = buf->used;
*pts = buf->pts;
us_ring_consumer_release(acap->enc_ring, ri);
return 0;
}
static void *_pcm_thread(void *v_acap) {
US_THREAD_SETTLE("us_ac_pcm");
us_acap_s *const acap = v_acap;
u8 in[US_AU_MAX_BUF8];
while (!atomic_load(&acap->stop)) {
const int frames = snd_pcm_readi(acap->dev, in, acap->pcm_frames);
if (frames < 0) {
US_JLOG_PERROR_ALSA(frames, "acap", "Fatal: Can't capture PCM frames");
break;
} else if (frames < (int)acap->pcm_frames) {
US_JLOG_ERROR("acap", "Fatal: Too few PCM frames captured");
break;
}
const int ri = us_ring_producer_acquire(acap->pcm_ring, 0);
if (ri >= 0) {
us_au_pcm_s *const out = acap->pcm_ring->items[ri];
memcpy(out->data, in, acap->pcm_size);
us_ring_producer_release(acap->pcm_ring, ri);
} else {
US_JLOG_ERROR("acap", "PCM ring is full");
}
}
atomic_store(&acap->stop, true);
return NULL;
}
static void *_encoder_thread(void *v_acap) {
US_THREAD_SETTLE("us_ac_enc");
us_acap_s *const acap = v_acap;
s16 in_res[US_AU_MAX_BUF16];
while (!atomic_load(&acap->stop)) {
const int in_ri = us_ring_consumer_acquire(acap->pcm_ring, 0.1);
if (in_ri < 0) {
continue;
}
us_au_pcm_s *const in = acap->pcm_ring->items[in_ri];
s16 *in_ptr;
if (acap->res != NULL) {
assert(acap->pcm_hz != US_RTP_OPUS_HZ);
u32 in_count = acap->pcm_frames;
u32 out_count = US_AU_HZ_TO_FRAMES(US_RTP_OPUS_HZ);
speex_resampler_process_interleaved_int(acap->res, in->data, &in_count, in_res, &out_count);
in_ptr = in_res;
} else {
assert(acap->pcm_hz == US_RTP_OPUS_HZ);
in_ptr = in->data;
}
const int out_ri = us_ring_producer_acquire(acap->enc_ring, 0);
if (out_ri < 0) {
US_JLOG_ERROR("acap", "OPUS encoder queue is full");
us_ring_consumer_release(acap->pcm_ring, in_ri);
continue;
}
us_au_encoded_s *const out = acap->enc_ring->items[out_ri];
const int size = opus_encode(acap->enc, in_ptr, US_AU_HZ_TO_FRAMES(US_RTP_OPUS_HZ), out->data, US_ARRAY_LEN(out->data));
us_ring_consumer_release(acap->pcm_ring, in_ri);
if (size > 0) {
out->used = size;
out->pts = acap->pts;
// https://datatracker.ietf.org/doc/html/rfc7587#section-4.2
acap->pts += US_AU_HZ_TO_FRAMES(US_RTP_OPUS_HZ);
} else {
out->used = 0;
US_JLOG_PERROR_OPUS(size, "acap", "Fatal: Can't encode PCM frame to OPUS");
}
us_ring_producer_release(acap->enc_ring, out_ri);
}
atomic_store(&acap->stop, true);
return NULL;
}

View File

@@ -34,11 +34,11 @@
typedef struct {
snd_pcm_t *pcm;
snd_pcm_t *dev;
uint pcm_hz;
uint pcm_frames;
uz pcm_size;
snd_pcm_hw_params_t *pcm_params;
snd_pcm_hw_params_t *dev_params;
SpeexResamplerState *res;
OpusEncoder *enc;
@@ -50,12 +50,10 @@ typedef struct {
pthread_t enc_tid;
bool tids_created;
atomic_bool stop;
} us_audio_s;
} us_acap_s;
bool us_audio_probe(const char *name);
us_acap_s *us_acap_init(const char *name, uint pcm_hz);
void us_acap_destroy(us_acap_s *acap);
us_audio_s *us_audio_init(const char *name, uint pcm_hz);
void us_audio_destroy(us_audio_s *audio);
int us_audio_get_encoded(us_audio_s *audio, u8 *data, uz *size, u64 *pts);
int us_acap_get_encoded(us_acap_s *acap, u8 *data, uz *size, u64 *pts);

janus/src/au.c (new file, 148 lines)
View File

@@ -0,0 +1,148 @@
/*****************************************************************************
# #
# uStreamer - Lightweight and fast MJPEG-HTTP streamer. #
# #
# Copyright (C) 2018-2024 Maxim Devaev <mdevaev@gmail.com> #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <https://www.gnu.org/licenses/>. #
# #
*****************************************************************************/
#include "au.h"
#include <stdlib.h>
#include <string.h>
#include <ctype.h>
#include <sys/stat.h>
#include "uslibs/tools.h"
bool us_au_probe(const char *name) {
// This function is very limited. It takes something like:
// hw:0,0 or hw:tc358743,0 or plughw:UAC2Gadget,0
// parses card name (0, tc358743, UAC2Gadget) and checks
// the existence of it in /proc/asound/.
// It's enough for our case.
if (name == NULL) {
return false;
}
if (strchr(name, '/') || strchr(name, '.')) {
return false;
}
const char *begin = strchr(name, ':');
if (begin == NULL) {
return false;
}
begin += 1;
if (*begin == '\0') {
return false;
}
const char *end = strchr(begin, ',');
if (end == NULL) {
return false;
}
if (end - begin < 1) {
return false;
}
char *card = us_strdup(begin);
card[end - begin] = '\0';
bool numeric = true;
for (uz index = 0; card[index] != '\0'; ++index) {
if (!isdigit(card[index])) {
numeric = false;
break;
}
}
char *path;
if (numeric) {
US_ASPRINTF(path, "/proc/asound/card%s", card);
} else {
US_ASPRINTF(path, "/proc/asound/%s", card);
}
bool ok = false;
struct stat st;
if (lstat(path, &st) == 0) {
if (numeric && S_ISDIR(st.st_mode)) {
ok = true;
} else if (!numeric && S_ISLNK(st.st_mode)) {
ok = true;
}
}
free(path);
free(card);
return ok;
}
us_au_pcm_s *us_au_pcm_init(void) {
us_au_pcm_s *pcm;
US_CALLOC(pcm, 1);
return pcm;
}
void us_au_pcm_destroy(us_au_pcm_s *pcm) {
free(pcm);
}
void us_au_pcm_mix(us_au_pcm_s *dest, us_au_pcm_s *src) {
const uz size = src->frames * US_RTP_OPUS_CH * 2; // 2 for 16 bit
if (src->frames == 0) {
return;
} else if (dest->frames == 0) {
memcpy(dest->data, src->data, size);
dest->frames = src->frames;
} else if (dest->frames == src->frames) {
// https://stackoverflow.com/questions/12089662
for (uz index = 0; index < size; ++index) {
int a = dest->data[index];
int b = src->data[index];
int m;
a += 32768;
b += 32768;
if ((a < 32768) && (b < 32768)) {
m = a * b / 32768;
} else {
m = 2 * (a + b) - (a * b) / 32768 - 65536;
}
if (m == 65536) {
m = 65535;
}
m -= 32768;
dest->data[index] = m;
}
}
}
us_au_encoded_s *us_au_encoded_init(void) {
us_au_encoded_s *enc;
US_CALLOC(enc, 1);
return enc;
}
void us_au_encoded_destroy(us_au_encoded_s *enc) {
free(enc);
}
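
The sample-combining branch in us_au_pcm_mix() above follows the blending trick from the linked Stack Overflow answer: each pair of 16-bit samples is shifted into the unsigned 0..65535 range, pairs where both offset values sit below the 32768 midpoint are combined as m = a * b / 32768, the remaining pairs as m = 2 * (a + b) - a * b / 32768 - 65536, and the result is shifted back, so two overlapping streams are compressed together instead of being summed and hard-clipped. As a worked example, two half-scale samples of -16384 each become 16384 after the offset, take the first branch (16384 * 16384 / 32768 = 8192), and come out as 8192 - 32768 = -24576: louder than either input, but still well inside the s16 range.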

View File

@@ -22,10 +22,40 @@
#pragma once
#include <event2/keyvalq_struct.h>
#include "uslibs/types.h"
#include "../../libs/types.h"
#include "rtp.h"
// A number of frames per 1 channel:
// - https://github.com/xiph/opus/blob/7b05f44/src/opus_demo.c#L368
#define US_AU_FRAME_MS 20
// #define _HZ_TO_FRAMES(_hz) (6 * (_hz) / 50) // 120ms
#define US_AU_HZ_TO_FRAMES(_hz) ((_hz) / 50) // 20ms
#define US_AU_HZ_TO_BUF16(_hz) (US_AU_HZ_TO_FRAMES(_hz) * US_RTP_OPUS_CH) // ... * 2: One stereo frame = (16bit L) + (16bit R)
#define US_AU_HZ_TO_BUF8(_hz) (US_AU_HZ_TO_BUF16(_hz) * sizeof(s16))
#define US_AU_MIN_PCM_HZ 8000
#define US_AU_MAX_PCM_HZ 192000
#define US_AU_MAX_BUF16 US_AU_HZ_TO_BUF16(US_AU_MAX_PCM_HZ)
#define US_AU_MAX_BUF8 US_AU_HZ_TO_BUF8(US_AU_MAX_PCM_HZ)
bool us_uri_get_true(struct evkeyvalq *params, const char *key);
char *us_uri_get_string(struct evkeyvalq *params, const char *key);
typedef struct {
s16 data[US_AU_MAX_BUF16];
uz frames;
} us_au_pcm_s;
typedef struct {
u8 data[US_RTP_PAYLOAD_SIZE];
uz used;
u64 pts;
} us_au_encoded_s;
bool us_au_probe(const char *name);
us_au_pcm_s *us_au_pcm_init(void);
void us_au_pcm_destroy(us_au_pcm_s *pcm);
void us_au_pcm_mix(us_au_pcm_s *a, us_au_pcm_s *b);
us_au_encoded_s *us_au_encoded_init(void);
void us_au_encoded_destroy(us_au_encoded_s *enc);
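
For a sense of scale, here is a minimal standalone sketch (not part of the repo) of what the US_AU_* macros above evaluate to at the Opus RTP clock rate, assuming US_RTP_OPUS_HZ is 48000 and US_RTP_OPUS_CH is 2 (stereo) as the comments imply, with s16 defined as int16_t:

/* Illustrative sketch only: mirrors the US_AU_* macros from au.h. */
#include <stdint.h>
#include <stdio.h>

typedef int16_t s16;
#define US_RTP_OPUS_CH 2

#define US_AU_HZ_TO_FRAMES(_hz) ((_hz) / 50)                              // 20ms worth of frames
#define US_AU_HZ_TO_BUF16(_hz) (US_AU_HZ_TO_FRAMES(_hz) * US_RTP_OPUS_CH) // interleaved L+R samples
#define US_AU_HZ_TO_BUF8(_hz) (US_AU_HZ_TO_BUF16(_hz) * sizeof(s16))      // bytes

int main(void) {
	printf("frames per 20ms chunk: %d\n", US_AU_HZ_TO_FRAMES(48000)); // 960
	printf("s16 samples per chunk: %d\n", US_AU_HZ_TO_BUF16(48000));  // 1920
	printf("bytes per chunk:       %zu\n", US_AU_HZ_TO_BUF8(48000));  // 3840
	return 0;
}

In other words, once the optional Speex resampler in acap.c has brought the capture rate to 48 kHz, each opus_encode() call consumes 960 stereo frames (1920 samples, 3840 bytes), matching the 20 ms step described by US_AU_FRAME_MS.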

View File

@@ -1,294 +0,0 @@
/*****************************************************************************
# #
# uStreamer - Lightweight and fast MJPEG-HTTP streamer. #
# #
# Copyright (C) 2018-2024 Maxim Devaev <mdevaev@gmail.com> #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <https://www.gnu.org/licenses/>. #
# #
*****************************************************************************/
#include "audio.h"
#include <stdlib.h>
#include <stdatomic.h>
#include <assert.h>
#include <pthread.h>
#include <alsa/asoundlib.h>
#include <speex/speex_resampler.h>
#include <opus/opus.h>
#include "uslibs/types.h"
#include "uslibs/errors.h"
#include "uslibs/tools.h"
#include "uslibs/array.h"
#include "uslibs/ring.h"
#include "uslibs/threading.h"
#include "logging.h"
#define _JLOG_PERROR_ALSA(_err, _prefix, _msg, ...) US_JLOG_ERROR(_prefix, _msg ": %s", ##__VA_ARGS__, snd_strerror(_err))
#define _JLOG_PERROR_RES(_err, _prefix, _msg, ...) US_JLOG_ERROR(_prefix, _msg ": %s", ##__VA_ARGS__, speex_resampler_strerror(_err))
#define _JLOG_PERROR_OPUS(_err, _prefix, _msg, ...) US_JLOG_ERROR(_prefix, _msg ": %s", ##__VA_ARGS__, opus_strerror(_err))
// A number of frames per 1 channel:
// - https://github.com/xiph/opus/blob/7b05f44/src/opus_demo.c#L368
#define _HZ_TO_FRAMES(_hz) (6 * (_hz) / 50) // 120ms
#define _HZ_TO_BUF16(_hz) (_HZ_TO_FRAMES(_hz) * 2) // One stereo frame = (16bit L) + (16bit R)
#define _HZ_TO_BUF8(_hz) (_HZ_TO_BUF16(_hz) * sizeof(s16))
#define _MIN_PCM_HZ 8000
#define _MAX_PCM_HZ 192000
#define _MAX_BUF16 _HZ_TO_BUF16(_MAX_PCM_HZ)
#define _MAX_BUF8 _HZ_TO_BUF8(_MAX_PCM_HZ)
#define _ENCODER_INPUT_HZ 48000
typedef struct {
s16 data[_MAX_BUF16];
} _pcm_buffer_s;
typedef struct {
u8 data[_MAX_BUF8]; // Worst case
uz used;
u64 pts;
} _enc_buffer_s;
static _pcm_buffer_s *_pcm_buffer_init(void);
static _enc_buffer_s *_enc_buffer_init(void);
static void *_pcm_thread(void *v_audio);
static void *_encoder_thread(void *v_audio);
bool us_audio_probe(const char *name) {
snd_pcm_t *pcm;
int err;
US_JLOG_INFO("audio", "Probing PCM capture ...");
if ((err = snd_pcm_open(&pcm, name, SND_PCM_STREAM_CAPTURE, 0)) < 0) {
_JLOG_PERROR_ALSA(err, "audio", "Can't probe PCM capture");
return false;
}
snd_pcm_close(pcm);
US_JLOG_INFO("audio", "PCM capture is available");
return true;
}
us_audio_s *us_audio_init(const char *name, uint pcm_hz) {
us_audio_s *audio;
US_CALLOC(audio, 1);
audio->pcm_hz = pcm_hz;
US_RING_INIT_WITH_ITEMS(audio->pcm_ring, 8, _pcm_buffer_init);
US_RING_INIT_WITH_ITEMS(audio->enc_ring, 8, _enc_buffer_init);
atomic_init(&audio->stop, false);
int err;
{
if ((err = snd_pcm_open(&audio->pcm, name, SND_PCM_STREAM_CAPTURE, 0)) < 0) {
audio->pcm = NULL;
_JLOG_PERROR_ALSA(err, "audio", "Can't open PCM capture");
goto error;
}
assert(!snd_pcm_hw_params_malloc(&audio->pcm_params));
# define SET_PARAM(_msg, _func, ...) { \
if ((err = _func(audio->pcm, audio->pcm_params, ##__VA_ARGS__)) < 0) { \
_JLOG_PERROR_ALSA(err, "audio", _msg); \
goto error; \
} \
}
SET_PARAM("Can't initialize PCM params", snd_pcm_hw_params_any);
SET_PARAM("Can't set PCM access type", snd_pcm_hw_params_set_access, SND_PCM_ACCESS_RW_INTERLEAVED);
SET_PARAM("Can't set PCM channels numbre", snd_pcm_hw_params_set_channels, 2);
SET_PARAM("Can't set PCM sampling format", snd_pcm_hw_params_set_format, SND_PCM_FORMAT_S16_LE);
SET_PARAM("Can't set PCM sampling rate", snd_pcm_hw_params_set_rate_near, &audio->pcm_hz, 0);
if (audio->pcm_hz < _MIN_PCM_HZ || audio->pcm_hz > _MAX_PCM_HZ) {
US_JLOG_ERROR("audio", "Unsupported PCM freq: %u; should be: %u <= F <= %u",
audio->pcm_hz, _MIN_PCM_HZ, _MAX_PCM_HZ);
goto error;
}
audio->pcm_frames = _HZ_TO_FRAMES(audio->pcm_hz);
audio->pcm_size = _HZ_TO_BUF8(audio->pcm_hz);
SET_PARAM("Can't apply PCM params", snd_pcm_hw_params);
# undef SET_PARAM
}
if (audio->pcm_hz != _ENCODER_INPUT_HZ) {
audio->res = speex_resampler_init(2, audio->pcm_hz, _ENCODER_INPUT_HZ, SPEEX_RESAMPLER_QUALITY_DESKTOP, &err);
if (err < 0) {
audio->res = NULL;
_JLOG_PERROR_RES(err, "audio", "Can't create resampler");
goto error;
}
}
{
// OPUS_APPLICATION_VOIP, OPUS_APPLICATION_RESTRICTED_LOWDELAY
audio->enc = opus_encoder_create(_ENCODER_INPUT_HZ, 2, OPUS_APPLICATION_AUDIO, &err);
assert(err == 0);
assert(!opus_encoder_ctl(audio->enc, OPUS_SET_BITRATE(48000)));
assert(!opus_encoder_ctl(audio->enc, OPUS_SET_MAX_BANDWIDTH(OPUS_BANDWIDTH_FULLBAND)));
assert(!opus_encoder_ctl(audio->enc, OPUS_SET_SIGNAL(OPUS_SIGNAL_MUSIC)));
// OPUS_SET_INBAND_FEC(1), OPUS_SET_PACKET_LOSS_PERC(10): see rtpa.c
}
US_JLOG_INFO("audio", "Pipeline configured on %uHz; capturing ...", audio->pcm_hz);
audio->tids_created = true;
US_THREAD_CREATE(audio->enc_tid, _encoder_thread, audio);
US_THREAD_CREATE(audio->pcm_tid, _pcm_thread, audio);
return audio;
error:
us_audio_destroy(audio);
return NULL;
}
void us_audio_destroy(us_audio_s *audio) {
if (audio->tids_created) {
atomic_store(&audio->stop, true);
US_THREAD_JOIN(audio->pcm_tid);
US_THREAD_JOIN(audio->enc_tid);
}
US_DELETE(audio->enc, opus_encoder_destroy);
US_DELETE(audio->res, speex_resampler_destroy);
US_DELETE(audio->pcm, snd_pcm_close);
US_DELETE(audio->pcm_params, snd_pcm_hw_params_free);
US_RING_DELETE_WITH_ITEMS(audio->enc_ring, free);
US_RING_DELETE_WITH_ITEMS(audio->pcm_ring, free);
if (audio->tids_created) {
US_JLOG_INFO("audio", "Pipeline closed");
}
free(audio);
}
int us_audio_get_encoded(us_audio_s *audio, u8 *data, uz *size, u64 *pts) {
if (atomic_load(&audio->stop)) {
return -1;
}
const int ri = us_ring_consumer_acquire(audio->enc_ring, 0.1);
if (ri < 0) {
return US_ERROR_NO_DATA;
}
const _enc_buffer_s *const buf = audio->enc_ring->items[ri];
if (*size < buf->used) {
us_ring_consumer_release(audio->enc_ring, ri);
return US_ERROR_NO_DATA;
}
memcpy(data, buf->data, buf->used);
*size = buf->used;
*pts = buf->pts;
us_ring_consumer_release(audio->enc_ring, ri);
return 0;
}
static _pcm_buffer_s *_pcm_buffer_init(void) {
_pcm_buffer_s *buf;
US_CALLOC(buf, 1);
return buf;
}
static _enc_buffer_s *_enc_buffer_init(void) {
_enc_buffer_s *buf;
US_CALLOC(buf, 1);
return buf;
}
static void *_pcm_thread(void *v_audio) {
US_THREAD_SETTLE("us_a_pcm");
us_audio_s *const audio = v_audio;
u8 in[_MAX_BUF8];
while (!atomic_load(&audio->stop)) {
const int frames = snd_pcm_readi(audio->pcm, in, audio->pcm_frames);
if (frames < 0) {
_JLOG_PERROR_ALSA(frames, "audio", "Fatal: Can't capture PCM frames");
break;
} else if (frames < (int)audio->pcm_frames) {
US_JLOG_ERROR("audio", "Fatal: Too few PCM frames captured");
break;
}
const int ri = us_ring_producer_acquire(audio->pcm_ring, 0);
if (ri >= 0) {
_pcm_buffer_s *const out = audio->pcm_ring->items[ri];
memcpy(out->data, in, audio->pcm_size);
us_ring_producer_release(audio->pcm_ring, ri);
} else {
US_JLOG_ERROR("audio", "PCM ring is full");
}
}
atomic_store(&audio->stop, true);
return NULL;
}
static void *_encoder_thread(void *v_audio) {
US_THREAD_SETTLE("us_a_enc");
us_audio_s *const audio = v_audio;
s16 in_res[_MAX_BUF16];
while (!atomic_load(&audio->stop)) {
const int in_ri = us_ring_consumer_acquire(audio->pcm_ring, 0.1);
if (in_ri < 0) {
continue;
}
_pcm_buffer_s *const in = audio->pcm_ring->items[in_ri];
s16 *in_ptr;
if (audio->res != NULL) {
assert(audio->pcm_hz != _ENCODER_INPUT_HZ);
u32 in_count = audio->pcm_frames;
u32 out_count = _HZ_TO_FRAMES(_ENCODER_INPUT_HZ);
speex_resampler_process_interleaved_int(audio->res, in->data, &in_count, in_res, &out_count);
in_ptr = in_res;
} else {
assert(audio->pcm_hz == _ENCODER_INPUT_HZ);
in_ptr = in->data;
}
const int out_ri = us_ring_producer_acquire(audio->enc_ring, 0);
if (out_ri < 0) {
US_JLOG_ERROR("audio", "OPUS encoder queue is full");
us_ring_consumer_release(audio->pcm_ring, in_ri);
continue;
}
_enc_buffer_s *const out = audio->enc_ring->items[out_ri];
const int size = opus_encode(audio->enc, in_ptr, _HZ_TO_FRAMES(_ENCODER_INPUT_HZ), out->data, US_ARRAY_LEN(out->data));
us_ring_consumer_release(audio->pcm_ring, in_ri);
if (size >= 0) {
out->used = size;
out->pts = audio->pts;
// https://datatracker.ietf.org/doc/html/rfc7587#section-4.2
audio->pts += _HZ_TO_FRAMES(_ENCODER_INPUT_HZ);
} else {
_JLOG_PERROR_OPUS(size, "audio", "Fatal: Can't encode PCM frame to OPUS");
}
us_ring_producer_release(audio->enc_ring, out_ri);
}
atomic_store(&audio->stop, true);
return NULL;
}

View File

@@ -25,23 +25,29 @@
#include <stdlib.h>
#include <stdatomic.h>
#include <string.h>
#include <assert.h>
#include <pthread.h>
#include <janus/plugins/plugin.h>
#include <janus/rtp.h>
#include <opus/opus.h>
#include "uslibs/types.h"
#include "uslibs/tools.h"
#include "uslibs/threading.h"
#include "uslibs/array.h"
#include "uslibs/list.h"
#include "uslibs/ring.h"
#include "logging.h"
#include "au.h"
#include "rtp.h"
static void *_video_thread(void *v_client);
static void *_audio_thread(void *v_client);
static void *_common_thread(void *v_client, bool video);
static void *_acap_thread(void *v_client);
static void *_video_or_acap_thread(void *v_client, bool video);
static void *_aplay_thread(void *v_client);
us_janus_client_s *us_janus_client_init(janus_callbacks *gw, janus_plugin_session *session) {
@@ -50,7 +56,8 @@ us_janus_client_s *us_janus_client_init(janus_callbacks *gw, janus_plugin_sessio
client->gw = gw;
client->session = session;
atomic_init(&client->transmit, false);
atomic_init(&client->transmit_audio, false);
atomic_init(&client->transmit_acap, false);
atomic_init(&client->transmit_aplay, false);
atomic_init(&client->video_orient, 0);
atomic_init(&client->stop, false);
@@ -58,8 +65,12 @@ us_janus_client_s *us_janus_client_init(janus_callbacks *gw, janus_plugin_sessio
US_RING_INIT_WITH_ITEMS(client->video_ring, 2048, us_rtp_init);
US_THREAD_CREATE(client->video_tid, _video_thread, client);
US_RING_INIT_WITH_ITEMS(client->audio_ring, 64, us_rtp_init);
US_THREAD_CREATE(client->audio_tid, _audio_thread, client);
US_RING_INIT_WITH_ITEMS(client->acap_ring, 64, us_rtp_init);
US_THREAD_CREATE(client->acap_tid, _acap_thread, client);
US_RING_INIT_WITH_ITEMS(client->aplay_enc_ring, 64, us_au_encoded_init);
US_RING_INIT_WITH_ITEMS(client->aplay_pcm_ring, 64, us_au_pcm_init);
US_THREAD_CREATE(client->aplay_tid, _aplay_thread, client);
return client;
}
@@ -70,8 +81,12 @@ void us_janus_client_destroy(us_janus_client_s *client) {
US_THREAD_JOIN(client->video_tid);
US_RING_DELETE_WITH_ITEMS(client->video_ring, us_rtp_destroy);
US_THREAD_JOIN(client->audio_tid);
US_RING_DELETE_WITH_ITEMS(client->audio_ring, us_rtp_destroy);
US_THREAD_JOIN(client->acap_tid);
US_RING_DELETE_WITH_ITEMS(client->acap_ring, us_rtp_destroy);
US_THREAD_JOIN(client->aplay_tid);
US_RING_DELETE_WITH_ITEMS(client->aplay_enc_ring, us_au_encoded_destroy);
US_RING_DELETE_WITH_ITEMS(client->aplay_pcm_ring, us_au_pcm_destroy);
free(client);
}
@@ -79,13 +94,13 @@ void us_janus_client_destroy(us_janus_client_s *client) {
void us_janus_client_send(us_janus_client_s *client, const us_rtp_s *rtp) {
if (
atomic_load(&client->transmit)
&& (rtp->video || atomic_load(&client->transmit_audio))
&& (rtp->video || atomic_load(&client->transmit_acap))
) {
us_ring_s *const ring = (rtp->video ? client->video_ring : client->audio_ring);
us_ring_s *const ring = (rtp->video ? client->video_ring : client->acap_ring);
const int ri = us_ring_producer_acquire(ring, 0);
if (ri < 0) {
US_JLOG_ERROR("client", "Session %p %s ring is full",
client->session, (rtp->video ? "video" : "audio"));
client->session, (rtp->video ? "video" : "acap"));
return;
}
memcpy(ring->items[ri], rtp, sizeof(us_rtp_s));
@@ -93,20 +108,65 @@ void us_janus_client_send(us_janus_client_s *client, const us_rtp_s *rtp) {
}
}
void us_janus_client_recv(us_janus_client_s *client, janus_plugin_rtp *packet) {
if (
packet->video
|| packet->length < sizeof(janus_rtp_header)
|| !atomic_load(&client->transmit)
|| !atomic_load(&client->transmit_aplay)
) {
return;
}
const janus_rtp_header *const header = (janus_rtp_header*)packet->buffer;
if (header->type != US_RTP_OPUS_PAYLOAD) {
return;
}
const u16 seq = ntohs(header->seq_number);
if (
seq >= client->aplay_seq_next // In order or missing
|| (client->aplay_seq_next - seq) > 50 // In late sequence or sequence wrapped
) {
client->aplay_seq_next = seq + 1;
int size = 0;
const char *const data = janus_rtp_payload(packet->buffer, packet->length, &size);
if (data == NULL || size <= 0) {
return;
}
us_ring_s *const ring = client->aplay_enc_ring;
const int ri = us_ring_producer_acquire(ring, 0);
if (ri < 0) {
// US_JLOG_ERROR("client", "Session %p aplay ring is full", client->session);
return;
}
us_au_encoded_s *enc = ring->items[ri];
if ((uz)size < US_ARRAY_LEN(enc->data)) {
memcpy(enc->data, data, size);
enc->used = size;
} else {
enc->used = 0;
}
us_ring_producer_release(ring, ri);
}
}
static void *_video_thread(void *v_client) {
US_THREAD_SETTLE("us_c_video");
return _common_thread(v_client, true);
US_THREAD_SETTLE("us_cx_vid");
return _video_or_acap_thread(v_client, true);
}
static void *_audio_thread(void *v_client) {
US_THREAD_SETTLE("us_c_audio");
return _common_thread(v_client, false);
static void *_acap_thread(void *v_client) {
US_THREAD_SETTLE("us_cx_ac");
return _video_or_acap_thread(v_client, false);
}
static void *_common_thread(void *v_client, bool video) {
static void *_video_or_acap_thread(void *v_client, bool video) {
us_janus_client_s *const client = v_client;
us_ring_s *const ring = (video ? client->video_ring : client->audio_ring);
assert(ring != NULL); // Audio may be NULL
us_ring_s *const ring = (video ? client->video_ring : client->acap_ring);
assert(ring != NULL);
while (!atomic_load(&client->stop)) {
const int ri = us_ring_consumer_acquire(ring, 0.1);
@@ -119,7 +179,7 @@ static void *_common_thread(void *v_client, bool video) {
if (
atomic_load(&client->transmit)
&& (video || atomic_load(&client->transmit_audio))
&& (video || atomic_load(&client->transmit_acap))
) {
janus_plugin_rtp packet = {
.video = rtp.video,
@@ -133,20 +193,27 @@ static void *_common_thread(void *v_client, bool video) {
};
janus_plugin_rtp_extensions_reset(&packet.extensions);
/*if (rtp->zero_playout_delay) {
if (rtp.zero_playout_delay) {
// https://github.com/pikvm/pikvm/issues/784
packet.extensions.min_delay = 0;
packet.extensions.max_delay = 0;
} else {
// These defaults are used by Chrome/Safari/Firefox.
// They all behave the same because they share the common WebRTC codebase.
packet.extensions.min_delay = 0;
// 10s - Chromium/WebRTC default
// 3s - Firefox default
packet.extensions.max_delay = 300; // == 3s, i.e. 10ms granularity
}*/
packet.extensions.max_delay = 1000; // == 10s, i.e. 10ms granularity
}
if (rtp.video) {
const uint video_orient = atomic_load(&client->video_orient);
uint video_orient = atomic_load(&client->video_orient);
if (video_orient != 0) {
// The extension rotates the video clockwise, but we want it counterclockwise.
// It's more intuitive for people who have seen a protractor at least once in their life.
if (video_orient == 90) {
video_orient = 270;
} else if (video_orient == 270) {
video_orient = 90;
}
packet.extensions.video_rotation = video_orient;
}
}
@@ -156,3 +223,48 @@ static void *_common_thread(void *v_client, bool video) {
}
return NULL;
}
static void *_aplay_thread(void *v_client) {
US_THREAD_SETTLE("us_cx_ap");
us_janus_client_s *const client = v_client;
int err;
OpusDecoder *dec = opus_decoder_create(US_RTP_OPUS_HZ, US_RTP_OPUS_CH, &err);
assert(err == 0);
while (!atomic_load(&client->stop)) {
const int in_ri = us_ring_consumer_acquire(client->aplay_enc_ring, 0.1);
if (in_ri < 0) {
continue;
}
us_au_encoded_s *in = client->aplay_enc_ring->items[in_ri];
if (in->used == 0) {
us_ring_consumer_release(client->aplay_enc_ring, in_ri);
continue;
}
const int out_ri = us_ring_producer_acquire(client->aplay_pcm_ring, 0);
if (out_ri < 0) {
US_JLOG_ERROR("aplay", "OPUS decoder queue is full");
us_ring_consumer_release(client->aplay_enc_ring, in_ri);
continue;
}
us_au_pcm_s *out = client->aplay_pcm_ring->items[out_ri];
const int frames = opus_decode(dec, in->data, in->used, out->data, US_AU_HZ_TO_FRAMES(US_RTP_OPUS_HZ), 0);
us_ring_consumer_release(client->aplay_enc_ring, in_ri);
if (frames > 0) {
out->frames = frames;
} else {
out->frames = 0;
US_JLOG_PERROR_OPUS(frames, "aplay", "Fatal: Can't decode OPUS to PCM frame");
}
us_ring_producer_release(client->aplay_pcm_ring, out_ri);
}
opus_decoder_destroy(dec);
return NULL;
}

View File

@@ -38,15 +38,21 @@ typedef struct {
janus_callbacks *gw;
janus_plugin_session *session;
atomic_bool transmit;
atomic_bool transmit_audio;
atomic_bool transmit_acap;
atomic_bool transmit_aplay;
atomic_uint video_orient;
pthread_t video_tid;
pthread_t audio_tid;
pthread_t acap_tid;
pthread_t aplay_tid;
atomic_bool stop;
us_ring_s *video_ring;
us_ring_s *audio_ring;
us_ring_s *acap_ring;
us_ring_s *aplay_enc_ring;
u16 aplay_seq_next;
us_ring_s *aplay_pcm_ring;
US_LIST_DECLARE;
} us_janus_client_s;
@@ -56,3 +62,4 @@ us_janus_client_s *us_janus_client_init(janus_callbacks *gw, janus_plugin_sessio
void us_janus_client_destroy(us_janus_client_s *client);
void us_janus_client_send(us_janus_client_s *client, const us_rtp_s *rtp);
void us_janus_client_recv(us_janus_client_s *client, janus_plugin_rtp *packet);

View File

@@ -28,6 +28,7 @@
#include <janus/config.h>
#include <janus/plugins/plugin.h>
#include "uslibs/types.h"
#include "uslibs/tools.h"
#include "const.h"
@@ -35,6 +36,7 @@
static char *_get_value(janus_config *jcfg, const char *section, const char *option);
static uint _get_uint(janus_config *jcfg, const char *section, const char *option, uint def);
// static bool _get_bool(janus_config *jcfg, const char *section, const char *option, bool def);
@@ -55,18 +57,18 @@ us_config_s *us_config_init(const char *config_dir_path) {
}
janus_config_print(jcfg);
if (
(config->video_sink_name = _get_value(jcfg, "memsink", "object")) == NULL
&& (config->video_sink_name = _get_value(jcfg, "video", "sink")) == NULL
) {
US_JLOG_ERROR("config", "Missing config value: video.sink (ex. memsink.object)");
if ((config->video_sink_name = _get_value(jcfg, "video", "sink")) == NULL) {
US_JLOG_ERROR("config", "Missing config value: video.sink");
goto error;
}
if ((config->audio_dev_name = _get_value(jcfg, "audio", "device")) != NULL) {
if ((config->tc358743_dev_path = _get_value(jcfg, "audio", "tc358743")) == NULL) {
US_JLOG_INFO("config", "Missing config value: audio.tc358743");
if ((config->acap_dev_name = _get_value(jcfg, "acap", "device")) != NULL) {
config->acap_hz = _get_uint(jcfg, "acap", "sampling_rate", 0);
config->tc358743_dev_path = _get_value(jcfg, "acap", "tc358743");
if (config->acap_hz == 0 && config->tc358743_dev_path == NULL) {
US_JLOG_ERROR("config", "Either acap.sampling_rate or acap.tc358743 required");
goto error;
}
config->aplay_dev_name = _get_value(jcfg, "aplay", "device");
}
goto ok;
@@ -82,8 +84,9 @@ ok:
void us_config_destroy(us_config_s *config) {
US_DELETE(config->video_sink_name, free);
US_DELETE(config->audio_dev_name, free);
US_DELETE(config->acap_dev_name, free);
US_DELETE(config->tc358743_dev_path, free);
US_DELETE(config->aplay_dev_name, free);
free(config);
}
@@ -96,6 +99,20 @@ static char *_get_value(janus_config *jcfg, const char *section, const char *opt
return us_strdup(option_obj->value);
}
static uint _get_uint(janus_config *jcfg, const char *section, const char *option, uint def) {
char *const tmp = _get_value(jcfg, section, option);
uint value = def;
if (tmp != NULL) {
errno = 0;
value = (uint)strtoul(tmp, NULL, 10);
if (errno != 0) {
value = def;
}
free(tmp);
}
return value;
}
/*static bool _get_bool(janus_config *jcfg, const char *section, const char *option, bool def) {
char *const tmp = _get_value(jcfg, section, option);
bool value = def;

View File

@@ -23,11 +23,17 @@
#pragma once
#include "uslibs/types.h"
typedef struct {
char *video_sink_name;
char *audio_dev_name;
char *acap_dev_name;
uint acap_hz;
char *tc358743_dev_path;
char *aplay_dev_name;
} us_config_s;

View File

@@ -36,3 +36,8 @@
JANUS_LOG(LOG_ERR, "[%s/%-9s] " x_msg ": %s\n", US_PLUGIN_NAME, x_prefix, ##__VA_ARGS__, m_perror_str); \
free(m_perror_str); \
}
// We don't include alsa, speex and opus headers here
#define US_JLOG_PERROR_ALSA(_err, _prefix, _msg, ...) US_JLOG_ERROR(_prefix, _msg ": %s", ##__VA_ARGS__, snd_strerror(_err))
#define US_JLOG_PERROR_RES(_err, _prefix, _msg, ...) US_JLOG_ERROR(_prefix, _msg ": %s", ##__VA_ARGS__, speex_resampler_strerror(_err))
#define US_JLOG_PERROR_OPUS(_err, _prefix, _msg, ...) US_JLOG_ERROR(_prefix, _msg ": %s", ##__VA_ARGS__, opus_strerror(_err))

View File

@@ -25,6 +25,7 @@
#include <inttypes.h>
#include <unistd.h>
#include <fcntl.h>
#include <errno.h>
#include <assert.h>
#include <sys/mman.h>
@@ -33,7 +34,9 @@
#include <pthread.h>
#include <jansson.h>
#include <janus/plugins/plugin.h>
#include <janus/rtp.h>
#include <janus/rtcp.h>
#include <alsa/asoundlib.h>
#include "uslibs/types.h"
#include "uslibs/const.h"
@@ -48,7 +51,8 @@
#include "const.h"
#include "logging.h"
#include "client.h"
#include "audio.h"
#include "au.h"
#include "acap.h"
#include "rtp.h"
#include "rtpv.h"
#include "rtpa.h"
@@ -56,6 +60,8 @@
#include "config.h"
static const char *const default_ice_url = "stun:stun.l.google.com:19302";
static us_config_s *_g_config = NULL;
static const useconds_t _g_watchers_polling = 100000;
@@ -69,31 +75,41 @@ static pthread_t _g_video_rtp_tid;
static atomic_bool _g_video_rtp_tid_created = false;
static pthread_t _g_video_sink_tid;
static atomic_bool _g_video_sink_tid_created = false;
static pthread_t _g_audio_tid;
static atomic_bool _g_audio_tid_created = false;
static pthread_t _g_acap_tid;
static atomic_bool _g_acap_tid_created = false;
static pthread_t _g_aplay_tid;
static atomic_bool _g_aplay_tid_created = false;
static pthread_mutex_t _g_video_lock = PTHREAD_MUTEX_INITIALIZER;
static pthread_mutex_t _g_audio_lock = PTHREAD_MUTEX_INITIALIZER;
static pthread_mutex_t _g_acap_lock = PTHREAD_MUTEX_INITIALIZER;
static pthread_mutex_t _g_aplay_lock = PTHREAD_MUTEX_INITIALIZER;
static atomic_bool _g_ready = false;
static atomic_bool _g_stop = false;
static atomic_bool _g_has_watchers = false;
static atomic_bool _g_has_listeners = false;
static atomic_bool _g_has_speakers = false;
static atomic_bool _g_key_required = false;
#define _LOCK_VIDEO US_MUTEX_LOCK(_g_video_lock)
#define _UNLOCK_VIDEO US_MUTEX_UNLOCK(_g_video_lock)
#define _LOCK_AUDIO US_MUTEX_LOCK(_g_audio_lock)
#define _UNLOCK_AUDIO US_MUTEX_UNLOCK(_g_audio_lock)
#define _LOCK_ACAP US_MUTEX_LOCK(_g_acap_lock)
#define _UNLOCK_ACAP US_MUTEX_UNLOCK(_g_acap_lock)
#define _LOCK_ALL { _LOCK_VIDEO; _LOCK_AUDIO; }
#define _UNLOCK_ALL { _UNLOCK_AUDIO; _UNLOCK_VIDEO; }
#define _LOCK_APLAY US_MUTEX_LOCK(_g_aplay_lock)
#define _UNLOCK_APLAY US_MUTEX_UNLOCK(_g_aplay_lock)
#define _LOCK_ALL { _LOCK_VIDEO; _LOCK_ACAP; _LOCK_APLAY; }
#define _UNLOCK_ALL { _UNLOCK_APLAY; _UNLOCK_ACAP; _UNLOCK_VIDEO; }
#define _READY atomic_load(&_g_ready)
#define _STOP atomic_load(&_g_stop)
#define _HAS_WATCHERS atomic_load(&_g_has_watchers)
#define _HAS_LISTENERS atomic_load(&_g_has_listeners)
#define _HAS_SPEAKERS atomic_load(&_g_has_speakers)
#define _IF_DISABLED(...) { if (!_READY || _STOP) { __VA_ARGS__ } }
janus_plugin *create(void);
@@ -101,7 +117,7 @@ janus_plugin *create(void);
static void *_video_rtp_thread(void *arg) {
(void)arg;
US_THREAD_SETTLE("us_video_rtp");
US_THREAD_SETTLE("us_p_rtpv");
atomic_store(&_g_video_rtp_tid_created, true);
while (!_STOP) {
@@ -120,7 +136,7 @@ static void *_video_rtp_thread(void *arg) {
static void *_video_sink_thread(void *arg) {
(void)arg;
US_THREAD_SETTLE("us_video_sink");
US_THREAD_SETTLE("us_p_vsink");
atomic_store(&_g_video_sink_tid_created, true);
us_frame_s *drop = us_frame_init();
@@ -198,15 +214,23 @@ static void *_video_sink_thread(void *arg) {
return NULL;
}
static int _check_tc358743_audio(uint *audio_hz) {
int fd;
if ((fd = open(_g_config->tc358743_dev_path, O_RDWR)) < 0) {
US_JLOG_PERROR("audio", "Can't open TC358743 V4L2 device");
static int _get_acap_hz(uint *hz) {
if (_g_config->acap_hz != 0) {
*hz = _g_config->acap_hz;
return 0;
}
if (_g_config->tc358743_dev_path == NULL) {
US_JLOG_ERROR("acap", "No configured sampling rate");
return -1;
}
const int checked = us_tc358743_xioctl_get_audio_hz(fd, audio_hz);
int fd;
if ((fd = open(_g_config->tc358743_dev_path, O_RDWR)) < 0) {
US_JLOG_PERROR("acap", "Can't open TC358743 V4L2 device");
return -1;
}
const int checked = us_tc358743_xioctl_get_audio_hz(fd, hz);
if (checked < 0) {
US_JLOG_PERROR("audio", "Can't check TC358743 audio state (%d)", checked);
US_JLOG_PERROR("acap", "Can't check TC358743 audio state (%d)", checked);
close(fd);
return -1;
}
@@ -214,13 +238,13 @@ static int _check_tc358743_audio(uint *audio_hz) {
return 0;
}
static void *_audio_thread(void *arg) {
static void *_acap_thread(void *arg) {
(void)arg;
US_THREAD_SETTLE("us_audio");
atomic_store(&_g_audio_tid_created, true);
US_THREAD_SETTLE("us_p_ac");
atomic_store(&_g_acap_tid_created, true);
assert(_g_config->audio_dev_name != NULL);
assert(_g_config->tc358743_dev_path != NULL);
assert(_g_config->acap_dev_name != NULL);
assert(_g_rtpa != NULL);
int once = 0;
@@ -230,53 +254,175 @@ static void *_audio_thread(void *arg) {
continue;
}
uint audio_hz = 0;
us_audio_s *audio = NULL;
uint hz = 0;
us_acap_s *acap = NULL;
if (_check_tc358743_audio(&audio_hz) < 0) {
goto close_audio;
if (!us_au_probe(_g_config->acap_dev_name)) {
US_ONCE({ US_JLOG_ERROR("acap", "No PCM capture device"); });
goto close_acap;
}
if (audio_hz == 0) {
US_ONCE({ US_JLOG_INFO("audio", "No audio presented from the host"); });
goto close_audio;
if (_get_acap_hz(&hz) < 0) {
goto close_acap;
}
US_ONCE({ US_JLOG_INFO("audio", "Detected host audio"); });
if ((audio = us_audio_init(_g_config->audio_dev_name, audio_hz)) == NULL) {
goto close_audio;
if (hz == 0) {
US_ONCE({ US_JLOG_INFO("acap", "No audio presented from the host"); });
goto close_acap;
}
US_ONCE({ US_JLOG_INFO("acap", "Detected host audio"); });
if ((acap = us_acap_init(_g_config->acap_dev_name, hz)) == NULL) {
goto close_acap;
}
once = 0;
while (!_STOP && _HAS_WATCHERS && _HAS_LISTENERS) {
if (_check_tc358743_audio(&audio_hz) < 0 || audio->pcm_hz != audio_hz) {
goto close_audio;
if (_get_acap_hz(&hz) < 0 || acap->pcm_hz != hz) {
goto close_acap;
}
uz size = US_RTP_DATAGRAM_SIZE - US_RTP_HEADER_SIZE;
u8 data[size];
u64 pts;
const int result = us_audio_get_encoded(audio, data, &size, &pts);
const int result = us_acap_get_encoded(acap, data, &size, &pts);
if (result == 0) {
_LOCK_AUDIO;
_LOCK_ACAP;
us_rtpa_wrap(_g_rtpa, data, size, pts);
_UNLOCK_AUDIO;
_UNLOCK_ACAP;
} else if (result == -1) {
goto close_audio;
goto close_acap;
}
}
close_audio:
US_DELETE(audio, us_audio_destroy);
close_acap:
US_DELETE(acap, us_acap_destroy);
sleep(1); // error_delay
}
return NULL;
}
static void *_aplay_thread(void *arg) {
(void)arg;
US_THREAD_SETTLE("us_p_ap");
atomic_store(&_g_aplay_tid_created, true);
assert(_g_config->aplay_dev_name != NULL);
int once = 0;
while (!_STOP) {
snd_pcm_t *dev = NULL;
bool skip = true;
while (!_STOP) {
usleep((US_AU_FRAME_MS / 4) * 1000);
us_au_pcm_s mixed = {0};
_LOCK_APLAY;
US_LIST_ITERATE(_g_clients, client, {
us_au_pcm_s last = {0};
do {
const int ri = us_ring_consumer_acquire(client->aplay_pcm_ring, 0);
if (ri >= 0) {
const us_au_pcm_s *pcm = client->aplay_pcm_ring->items[ri];
memcpy(&last, pcm, sizeof(us_au_pcm_s));
us_ring_consumer_release(client->aplay_pcm_ring, ri);
} else {
break;
}
} while (skip && !_STOP);
us_au_pcm_mix(&mixed, &last);
// US_JLOG_INFO("++++++", "mixed %p", client);
});
_UNLOCK_APLAY;
// US_JLOG_INFO("++++++", "--------------");
if (skip) {
static uint skipped = 0;
if (skipped < (1000 / (US_AU_FRAME_MS / 4))) {
++skipped;
continue;
} else {
skipped = 0;
}
}
if (!_HAS_WATCHERS || !_HAS_LISTENERS || !_HAS_SPEAKERS) {
goto close_aplay;
}
if (dev == NULL) {
if (!us_au_probe(_g_config->aplay_dev_name)) {
US_ONCE({ US_JLOG_ERROR("aplay", "No PCM playback device"); });
goto close_aplay;
}
int err = snd_pcm_open(&dev, _g_config->aplay_dev_name, SND_PCM_STREAM_PLAYBACK, 0);
if (err < 0) {
US_ONCE({ US_JLOG_PERROR_ALSA(err, "aplay", "Can't open PCM playback"); });
goto close_aplay;
}
err = snd_pcm_set_params(dev, SND_PCM_FORMAT_S16_LE, SND_PCM_ACCESS_RW_INTERLEAVED,
US_RTP_OPUS_CH, US_RTP_OPUS_HZ, 1 /* soft resample */, 50000 /* 50000 = 0.05sec */
);
if (err < 0) {
US_ONCE({ US_JLOG_PERROR_ALSA(err, "aplay", "Can't configure PCM playback"); });
goto close_aplay;
}
US_JLOG_INFO("aplay", "Playback opened, playing ...");
once = 0;
}
if (dev != NULL && mixed.frames > 0) {
snd_pcm_sframes_t frames = snd_pcm_writei(dev, mixed.data, mixed.frames);
if (frames < 0) {
frames = snd_pcm_recover(dev, frames, 1);
} else {
if (once != 0) {
US_JLOG_INFO("aplay", "Playing resumed (snd_pcm_writei) ...");
}
once = 0;
skip = false;
}
if (frames < 0) {
US_ONCE({ US_JLOG_PERROR_ALSA(frames, "aplay", "Can't play to PCM playback"); });
if (frames == -ENODEV) {
goto close_aplay;
}
skip = true;
} else {
if (once != 0) {
US_JLOG_INFO("aplay", "Playing resumed (snd_pcm_recover) ...");
}
once = 0;
skip = false;
}
}
}
close_aplay:
if (dev != NULL) {
US_DELETE(dev, snd_pcm_close);
US_JLOG_INFO("aplay", "Playback closed");
}
}
return NULL;
}
static void _relay_rtp_clients(const us_rtp_s *rtp) {
US_LIST_ITERATE(_g_clients, client, {
us_janus_client_send(client, rtp);
});
}
static void _alsa_quiet(const char *file, int line, const char *func, int err, const char *fmt, ...) {
(void)file;
(void)line;
(void)func;
(void)err;
(void)fmt;
}
static int _plugin_init(janus_callbacks *gw, const char *config_dir_path) {
// https://groups.google.com/g/meetecho-janus/c/xoWIQfaoJm8
// sysctl -w net.core.rmem_default=500000
@@ -290,11 +436,16 @@ static int _plugin_init(janus_callbacks *gw, const char *config_dir_path) {
}
_g_gw = gw;
snd_lib_error_set_handler(_alsa_quiet);
US_RING_INIT_WITH_ITEMS(_g_video_ring, 64, us_frame_init);
_g_rtpv = us_rtpv_init(_relay_rtp_clients);
if (_g_config->audio_dev_name != NULL && us_audio_probe(_g_config->audio_dev_name)) {
if (_g_config->acap_dev_name != NULL) {
_g_rtpa = us_rtpa_init(_relay_rtp_clients);
US_THREAD_CREATE(_g_audio_tid, _audio_thread, NULL);
US_THREAD_CREATE(_g_acap_tid, _acap_thread, NULL);
if (_g_config->aplay_dev_name != NULL) {
US_THREAD_CREATE(_g_aplay_tid, _aplay_thread, NULL);
}
}
US_THREAD_CREATE(_g_video_rtp_tid, _video_rtp_thread, NULL);
US_THREAD_CREATE(_g_video_sink_tid, _video_sink_thread, NULL);
@@ -310,7 +461,8 @@ static void _plugin_destroy(void) {
# define JOIN(_tid) { if (atomic_load(&_tid##_created)) { US_THREAD_JOIN(_tid); } }
JOIN(_g_video_sink_tid);
JOIN(_g_video_rtp_tid);
JOIN(_g_audio_tid);
JOIN(_g_acap_tid);
JOIN(_g_aplay_tid);
# undef JOIN
US_LIST_ITERATE(_g_clients, client, {
@@ -325,8 +477,6 @@ static void _plugin_destroy(void) {
US_DELETE(_g_config, us_config_destroy);
}
#define _IF_DISABLED(...) { if (!_READY || _STOP) { __VA_ARGS__ } }
static void _plugin_create_session(janus_plugin_session *session, int *err) {
_IF_DISABLED({ *err = -1; return; });
_LOCK_ALL;
@@ -343,6 +493,7 @@ static void _plugin_destroy_session(janus_plugin_session* session, int *err) {
bool found = false;
bool has_watchers = false;
bool has_listeners = false;
bool has_speakers = false;
US_LIST_ITERATE(_g_clients, client, {
if (client->session == session) {
US_JLOG_INFO("main", "Removing session %p ...", session);
@@ -351,7 +502,8 @@ static void _plugin_destroy_session(janus_plugin_session* session, int *err) {
found = true;
} else {
has_watchers = (has_watchers || atomic_load(&client->transmit));
has_listeners = (has_listeners || atomic_load(&client->transmit_audio));
has_listeners = (has_listeners || atomic_load(&client->transmit_acap));
has_speakers = (has_speakers || atomic_load(&client->transmit_aplay));
}
});
if (!found) {
@@ -360,6 +512,7 @@ static void _plugin_destroy_session(janus_plugin_session* session, int *err) {
}
atomic_store(&_g_has_watchers, has_watchers);
atomic_store(&_g_has_listeners, has_listeners);
atomic_store(&_g_has_speakers, has_speakers);
_UNLOCK_ALL;
}
@@ -398,25 +551,19 @@ static void _set_transmit(janus_plugin_session *session, const char *msg, bool t
_UNLOCK_ALL;
}
#undef _IF_DISABLED
static void _plugin_setup_media(janus_plugin_session *session) { _set_transmit(session, "Unmuted", true); }
static void _plugin_hangup_media(janus_plugin_session *session) { _set_transmit(session, "Muted", false); }
static struct janus_plugin_result *_plugin_handle_message(
janus_plugin_session *session, char *transaction, json_t *msg, json_t *jsep) {
assert(transaction != NULL);
# define FREE_MSG_JSEP { \
US_DELETE(msg, json_decref); \
US_DELETE(jsep, json_decref); \
}
janus_plugin_result_type result_type = JANUS_PLUGIN_OK;
char *result_msg = NULL;
if (session == NULL || msg == NULL) {
free(transaction);
FREE_MSG_JSEP;
return janus_plugin_result_new(JANUS_PLUGIN_ERROR, (msg ? "No session" : "No message"), NULL);
result_type = JANUS_PLUGIN_ERROR;
result_msg = (msg ? "No session" : "No message");
goto done;
}
# define PUSH_ERROR(x_error, x_reason) { \
@@ -425,20 +572,20 @@ static struct janus_plugin_result *_plugin_handle_message(
json_object_set_new(m_event, "ustreamer", json_string("event")); \
json_object_set_new(m_event, "error_code", json_integer(x_error)); \
json_object_set_new(m_event, "error", json_string(x_reason)); \
_g_gw->push_event(session, create(), transaction, m_event, NULL); \
_g_gw->push_event(session, create(), NULL, m_event, NULL); \
json_decref(m_event); \
}
json_t *const request = json_object_get(msg, "request");
if (request == NULL) {
PUSH_ERROR(400, "Request missing");
goto ok_wait;
goto done;
}
const char *const request_str = json_string_value(request);
if (request_str == NULL) {
PUSH_ERROR(400, "Request not a string");
goto ok_wait;
goto done;
}
// US_JLOG_INFO("main", "Message: %s", request_str);
@@ -448,10 +595,10 @@ static struct janus_plugin_result *_plugin_handle_message(
json_t *const m_result = json_object(); \
json_object_set_new(m_result, "status", json_string(x_status)); \
if (x_payload != NULL) { \
json_object_set_new(m_result, x_status, x_payload); \
json_object_set(m_result, x_status, x_payload); \
} \
json_object_set_new(m_event, "result", m_result); \
_g_gw->push_event(session, create(), transaction, m_event, x_jsep); \
_g_gw->push_event(session, create(), NULL, m_event, x_jsep); \
json_decref(m_event); \
}
@@ -462,15 +609,22 @@ static struct janus_plugin_result *_plugin_handle_message(
PUSH_STATUS("stopped", NULL, NULL);
} else if (!strcmp(request_str, "watch")) {
bool with_audio = false;
uint video_orient = 0;
bool with_acap = false;
bool with_aplay = false;
{
json_t *const params = json_object_get(msg, "params");
if (params != NULL) {
{
json_t *const obj = json_object_get(params, "audio");
if (obj != NULL && json_is_boolean(obj)) {
with_audio = (_g_rtpa != NULL && json_boolean_value(obj));
with_acap = (us_au_probe(_g_config->acap_dev_name) && json_boolean_value(obj));
}
}
{
json_t *const obj = json_object_get(params, "mic");
if (obj != NULL && json_is_boolean(obj)) {
with_aplay = (us_au_probe(_g_config->aplay_dev_name) && json_boolean_value(obj));
}
}
{
@@ -489,7 +643,7 @@ static struct janus_plugin_result *_plugin_handle_message(
{
char *sdp;
char *const video_sdp = us_rtpv_make_sdp(_g_rtpv);
char *const audio_sdp = (with_audio ? us_rtpa_make_sdp(_g_rtpa) : us_strdup(""));
char *const audio_sdp = (with_acap ? us_rtpa_make_sdp(_g_rtpa, with_aplay) : us_strdup(""));
US_ASPRINTF(sdp,
"v=0" RN
"o=- %" PRIu64 " 1 IN IP4 0.0.0.0" RN
@@ -518,19 +672,30 @@ static struct janus_plugin_result *_plugin_handle_message(
{
_LOCK_ALL;
bool has_listeners = false;
bool has_speakers = false;
US_LIST_ITERATE(_g_clients, client, {
if (client->session == session) {
atomic_store(&client->transmit_audio, with_audio);
atomic_store(&client->transmit_acap, with_acap);
atomic_store(&client->transmit_aplay, with_aplay);
atomic_store(&client->video_orient, video_orient);
}
has_listeners = (has_listeners || atomic_load(&client->transmit_audio));
has_listeners = (has_listeners || atomic_load(&client->transmit_acap));
has_speakers = (has_speakers || atomic_load(&client->transmit_aplay));
});
atomic_store(&_g_has_listeners, has_listeners);
atomic_store(&_g_has_speakers, has_speakers);
_UNLOCK_ALL;
}
} else if (!strcmp(request_str, "features")) {
json_t *const features = json_pack("{sb}", "audio", (_g_rtpa != NULL));
const char *const ice_url = getenv("JANUS_USTREAMER_WEB_ICE_URL");
const bool acap_avail = us_au_probe(_g_config->acap_dev_name);
json_t *const features = json_pack(
"{s:b, s:b, s:{s:s?}}",
"audio", acap_avail,
"mic", (acap_avail && us_au_probe(_g_config->aplay_dev_name)),
"ice", "url", (ice_url != NULL ? ice_url : default_ice_url)
);
PUSH_STATUS("features", features, NULL);
json_decref(features);
@@ -542,19 +707,40 @@ static struct janus_plugin_result *_plugin_handle_message(
PUSH_ERROR(405, "Not implemented");
}
ok_wait:
FREE_MSG_JSEP;
return janus_plugin_result_new(JANUS_PLUGIN_OK_WAIT, NULL, NULL);
done:
US_DELETE(transaction, free);
US_DELETE(msg, json_decref);
US_DELETE(jsep, json_decref);
return janus_plugin_result_new(
result_type, result_msg,
(result_type == JANUS_PLUGIN_OK ? json_pack("{sb}", "ok", 1) : NULL));
# undef PUSH_STATUS
# undef PUSH_ERROR
# undef FREE_MSG_JSEP
}
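Net effect of the reshuffle above: every exit from _plugin_handle_message() now goes through the done: label, which frees the transaction and drops the msg/jsep references exactly once, and the plugin answers synchronously with a JANUS_PLUGIN_OK result carrying {"ok": 1} (or JANUS_PLUGIN_ERROR with a plain reason) instead of the old JANUS_PLUGIN_OK_WAIT; accordingly, the push_event() calls now pass NULL as the transaction.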
static void _plugin_incoming_rtcp(janus_plugin_session *handle, janus_plugin_rtcp *packet) {
(void)handle;
(void)packet;
if (packet->video && janus_rtcp_has_pli(packet->buffer, packet->length)) {
static void _plugin_incoming_rtp(janus_plugin_session *session, janus_plugin_rtp *packet) {
_IF_DISABLED({ return; });
if (session == NULL || packet == NULL || packet->video) {
return; // Accept only valid audio
}
_LOCK_APLAY;
US_LIST_ITERATE(_g_clients, client, {
if (client->session == session) {
us_janus_client_recv(client, packet);
break;
}
});
_UNLOCK_APLAY;
}
static void _plugin_incoming_rtcp(janus_plugin_session *session, janus_plugin_rtcp *packet) {
_IF_DISABLED({ return; });
if (session == NULL || packet == NULL || !packet->video) {
return; // Accept only valid video
}
if (janus_rtcp_has_pli(packet->buffer, packet->length)) {
// US_JLOG_INFO("main", "Got video PLI");
atomic_store(&_g_key_required, true);
}
@@ -595,6 +781,7 @@ janus_plugin *create(void) {
.get_author = _plugin_get_author,
.get_package = _plugin_get_package,
.incoming_rtp = _plugin_incoming_rtp,
.incoming_rtcp = _plugin_incoming_rtcp,
);
# pragma GCC diagnostic pop

View File

@@ -28,6 +28,13 @@
// https://stackoverflow.com/questions/47635545/why-webrtc-chose-rtp-max-packet-size-to-1200-bytes
#define US_RTP_DATAGRAM_SIZE 1200
#define US_RTP_HEADER_SIZE 12
#define US_RTP_PAYLOAD_SIZE (US_RTP_DATAGRAM_SIZE - US_RTP_HEADER_SIZE)
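// 1200-byte datagrams minus the 12-byte RTP header leave 1188 bytes of payload per packet.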
#define US_RTP_H264_PAYLOAD 96
#define US_RTP_OPUS_PAYLOAD 111
#define US_RTP_OPUS_HZ 48000
#define US_RTP_OPUS_CH 2
typedef struct {

View File

@@ -33,7 +33,7 @@ us_rtpa_s *us_rtpa_init(us_rtp_callback_f callback) {
us_rtpa_s *rtpa;
US_CALLOC(rtpa, 1);
rtpa->rtp = us_rtp_init();
us_rtp_assign(rtpa->rtp, 111, false);
us_rtp_assign(rtpa->rtp, US_RTP_OPUS_PAYLOAD, false);
rtpa->callback = callback;
return rtpa;
}
@@ -43,21 +43,26 @@ void us_rtpa_destroy(us_rtpa_s *rtpa) {
free(rtpa);
}
char *us_rtpa_make_sdp(us_rtpa_s *rtpa) {
char *us_rtpa_make_sdp(us_rtpa_s *rtpa, bool mic) {
const uint pl = rtpa->rtp->payload;
char *sdp;
US_ASPRINTF(sdp,
"m=audio 1 RTP/SAVPF %u" RN
"c=IN IP4 0.0.0.0" RN
"a=rtpmap:%u OPUS/48000/2" RN
"a=rtpmap:%u OPUS/%u/%u" RN
"a=fmtp:%u sprop-stereo=1" RN // useinbandfec=1
"a=rtcp-fb:%u nack" RN
"a=rtcp-fb:%u nack pli" RN
"a=rtcp-fb:%u goog-remb" RN
"a=mid:a" RN
"a=msid:audio a" RN
"a=ssrc:%" PRIu32 " cname:ustreamer" RN
"a=sendonly" RN,
pl, pl, pl, pl, pl, pl,
rtpa->rtp->ssrc
"a=%s" RN,
pl, pl,
US_RTP_OPUS_HZ, US_RTP_OPUS_CH,
pl, pl, pl, pl,
rtpa->rtp->ssrc,
(mic ? "sendrecv" : "sendonly")
);
return sdp;
}
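Substituting the constants (US_RTP_OPUS_PAYLOAD 111, US_RTP_OPUS_HZ 48000, US_RTP_OPUS_CH 2), the audio section generated for a client that asked for the mic comes out roughly as follows (ssrc elided):

m=audio 1 RTP/SAVPF 111
c=IN IP4 0.0.0.0
a=rtpmap:111 OPUS/48000/2
a=fmtp:111 sprop-stereo=1
a=rtcp-fb:111 nack
a=rtcp-fb:111 nack pli
a=rtcp-fb:111 goog-remb
a=mid:a
a=msid:audio a
a=ssrc:... cname:ustreamer
a=sendrecv

Without the mic request the last attribute stays a=sendonly, as before.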

View File

@@ -36,5 +36,5 @@ typedef struct {
us_rtpa_s *us_rtpa_init(us_rtp_callback_f callback);
void us_rtpa_destroy(us_rtpa_s *rtpa);
char *us_rtpa_make_sdp(us_rtpa_s *rtpa);
char *us_rtpa_make_sdp(us_rtpa_s *rtpa, bool mic);
void us_rtpa_wrap(us_rtpa_s *rtpa, const u8 *data, uz size, u32 pts);

View File

@@ -45,7 +45,7 @@ us_rtpv_s *us_rtpv_init(us_rtp_callback_f callback) {
us_rtpv_s *rtpv;
US_CALLOC(rtpv, 1);
rtpv->rtp = us_rtp_init();
us_rtp_assign(rtpv->rtp, 96, true);
us_rtp_assign(rtpv->rtp, US_RTP_H264_PAYLOAD, true);
rtpv->callback = callback;
return rtpv;
}
@@ -69,6 +69,8 @@ char *us_rtpv_make_sdp(us_rtpv_s *rtpv) {
"a=rtcp-fb:%u nack" RN
"a=rtcp-fb:%u nack pli" RN
"a=rtcp-fb:%u goog-remb" RN
"a=mid:v" RN
"a=msid:video v" RN
"a=ssrc:%" PRIu32 " cname:ustreamer" RN
"a=extmap:1 http://www.webrtc.org/experiments/rtp-hdrext/playout-delay" RN
"a=extmap:2 urn:3gpp:video-orientation" RN

View File

@@ -1,5 +1,5 @@
[mypy]
python_version = 3.9
python_version = 3.14
ignore_missing_imports = true
disallow_untyped_defs = true
strict_optional = true

View File

@@ -3,7 +3,7 @@ envlist = cppcheck, flake8, pylint, mypy, vulture, htmlhint
skipsdist = true
[testenv]
basepython = python3.12
basepython = python3.14
changedir = /src
[testenv:cppcheck]

View File

@@ -1,6 +1,6 @@
.\" Manpage for ustreamer-dump.
.\" Open an issue or pull request to https://github.com/pikvm/ustreamer to correct errors or typos
.TH USTREAMER-DUMP 1 "version 6.17" "January 2021"
.TH USTREAMER-DUMP 1 "version 6.46" "January 2021"
.SH NAME
ustreamer-dump \- Dump uStreamer's memory sink to file

View File

@@ -1,6 +1,6 @@
.\" Manpage for ustreamer.
.\" Open an issue or pull request to https://github.com/pikvm/ustreamer to correct errors or typos
.TH USTREAMER 1 "version 6.17" "November 2020"
.TH USTREAMER 1 "version 6.46" "November 2020"
.SH NAME
ustreamer \- stream MJPEG video from any V4L2 device to the network
@@ -52,7 +52,7 @@ Initial image resolution. Default: 640x480.
.TP
.BR \-m\ \fIfmt ", " \-\-format\ \fIfmt
Image format.
Available: YUYV, YVYU, UYVY, RGB565, RGB24, JPEG; default: YUYV.
Available: YUYV, YVYU, UYVY, YUV420, YVU420, RGB565, RGB24, GREY, MJPEG, JPEG; default: YUYV.
.TP
.BR \-a\ \fIstd ", " \-\-tv\-standard\ \fIstd
Force TV standard.
@@ -253,6 +253,9 @@ Interval between keyframes. Default: 30.
.TP
.BR \-\-h264\-m2m\-device\ \fI/dev/path
Path to V4L2 mem-to-mem encoder device. Default: auto-select.
.TP
.BR \-\-h264\-boost\-device
Increase encoder performance on PiKVM V4. Default: disabled.
.SS "RAW sink options"
.TP
@@ -274,7 +277,10 @@ Timeout for lock. Default: 1.
.SS "Process options"
.TP
.BR \-\-exit\-on\-parent\-death
Exit the program if the parent process is dead. Required \fBHAS_PDEATHSIG\fR feature. Default: disabled.
Exit the program if the parent process is dead. Requires the \fBWITH_PDEATHSIG\fR feature. Default: disabled.
.TP
.BR \-\-exit\-on\-device\-error
Exit on any device error instead of polling until success. Default: disabled.
.TP
.BR \-\-exit\-on\-no\-clients \fIsec
Exit the program if there have been no stream or sink clients or any HTTP requests in the last N seconds. Default: 0 (disabled).

View File

@@ -3,7 +3,7 @@
pkgname=ustreamer
pkgver=6.17
pkgver=6.46
pkgrel=1
pkgdesc="Lightweight and fast MJPEG-HTTP streamer"
url="https://github.com/pikvm/ustreamer"
@@ -18,7 +18,7 @@ md5sums=(SKIP)
_options="WITH_GPIO=1 WITH_SYSTEMD=1"
if [ -e /usr/bin/python3 ]; then
_options="$_options WITH_PYTHON=1"
depends+=(python)
depends+=("python>=3.14" "python<3.15")
makedepends+=(python-setuptools python-pip python-build python-wheel)
fi
if [ -e /usr/include/janus/plugins/plugin.h ];then

View File

@@ -6,7 +6,7 @@
include $(TOPDIR)/rules.mk
PKG_NAME:=ustreamer
PKG_VERSION:=6.17
PKG_VERSION:=6.46
PKG_RELEASE:=1
PKG_MAINTAINER:=Maxim Devaev <mdevaev@gmail.com>
@@ -25,7 +25,7 @@ define Package/ustreamer
SECTION:=multimedia
CATEGORY:=Multimedia
TITLE:=uStreamer
DEPENDS:=+libpthread +libjpeg +libv4l +libbsd +libevent2 +libevent2-core +libevent2-extra +libevent2-pthreads
DEPENDS:=+libatomic +libpthread +libjpeg +libv4l +libbsd +libevent2 +libevent2-core +libevent2-extra +libevent2-pthreads
URL:=https://github.com/pikvm/ustreamer
endef

View File

@@ -1,5 +1,3 @@
-include ../config.mk
R_DESTDIR ?=
PREFIX ?= /usr/local
@@ -8,7 +6,7 @@ PY ?= python3
# =====
all: root
root: $(shell find src -type f,l)
root: $(shell find src -type f,l) setup.py
$(info == PY_BUILD ustreamer-*.so)
rm -rf root
$(ECHO) $(PY) -m build --skip-dependency-check --no-isolation

View File

@@ -5,19 +5,36 @@ from setuptools import setup
# =====
def _find_sources(suffix: str) -> list[str]:
def _find_sources() -> list[str]:
sources: list[str] = []
for (root_path, _, names) in os.walk("src"):
for name in names:
if name.endswith(suffix):
if name.endswith(".c"):
sources.append(os.path.join(root_path, name))
return sources
if __name__ == "__main__":
def _find_flags() -> dict[str, bool]:
return {
key[3:]: (value.strip().lower() in ["true", "on", "1"])
for (key, value) in sorted(os.environ.items())
if key.startswith("MK_WITH_")
}
def _make_d_features(flags: dict[str, bool]) -> str:
features = " ".join([
f"{key}={int(value)}"
for (key, value) in flags.items()
])
return f"-DUS_FEATURES=\"{features}\""
def main() -> None:
flags = _find_flags()
setup(
name="ustreamer",
version="6.17",
version="6.46",
description="uStreamer tools",
author="Maxim Devaev",
author_email="mdevaev@gmail.com",
@@ -26,9 +43,16 @@ if __name__ == "__main__":
Extension(
"ustreamer",
libraries=["rt", "m", "pthread"],
extra_compile_args=["-std=c17", "-D_GNU_SOURCE"],
extra_compile_args=[
"-std=c17", "-D_GNU_SOURCE",
_make_d_features(flags),
],
undef_macros=["NDEBUG"],
sources=_find_sources(".c"),
sources=_find_sources(),
),
],
)
if __name__ == "__main__":
main()
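So a build invoked with, say, MK_WITH_JANUS=1 and MK_WITH_GPIO=0 in the environment compiles the extension with -DUS_FEATURES="WITH_GPIO=0 WITH_JANUS=1": the keys are sorted, the MK_ prefix is stripped, and values are normalized to 0/1 (only "true"/"on"/"1" count as enabled).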

python/src/uslibs/const.h Symbolic link
View File

@@ -0,0 +1 @@
../../../src/libs/const.h

View File

@@ -13,6 +13,7 @@
#include <Python.h>
#include "uslibs/const.h"
#include "uslibs/types.h"
#include "uslibs/errors.h"
#include "uslibs/tools.h"
@@ -48,6 +49,8 @@ static void _MemsinkObject_destroy_internals(_MemsinkObject *self) {
}
static int _MemsinkObject_init(_MemsinkObject *self, PyObject *args, PyObject *kwargs) {
self->fd = -1;
self->lock_timeout = 1;
self->wait_timeout = 1;
@@ -228,7 +231,8 @@ static PyObject *_MemsinkObject_wait_frame(_MemsinkObject *self, PyObject *args,
} \
Py_DECREF(m_tmp); \
}
# define SET_NUMBER(x_key, x_from, x_to) SET_VALUE(#x_key, Py##x_to##_From##x_from(self->frame->x_key))
# define SET_NUMBER(x_key, x_from, x_to) \
SET_VALUE(#x_key, Py##x_to##_From##x_from(self->frame->x_key))
SET_NUMBER(width, Long, Long);
SET_NUMBER(height, Long, Long);
@@ -237,7 +241,8 @@ static PyObject *_MemsinkObject_wait_frame(_MemsinkObject *self, PyObject *args,
SET_NUMBER(online, Long, Bool);
SET_NUMBER(key, Long, Bool);
SET_NUMBER(gop, Long, Long);
SET_NUMBER(grab_ts, Double, Float);
SET_NUMBER(grab_begin_ts, Double, Float);
SET_NUMBER(grab_end_ts, Double, Float);
SET_NUMBER(encode_begin_ts, Double, Float);
SET_NUMBER(encode_end_ts, Double, Float);
SET_VALUE("data", PyBytes_FromStringAndSize((const char*)self->frame->data, self->frame->used));
@@ -275,7 +280,8 @@ static PyMethodDef _MemsinkObject_methods[] = {
};
static PyGetSetDef _MemsinkObject_getsets[] = {
# define ADD_GETTER(x_field) {.name = #x_field, .get = (getter)_MemsinkObject_getter_##x_field}
# define ADD_GETTER(x_field) \
{.name = #x_field, .get = (getter)_MemsinkObject_getter_##x_field}
ADD_GETTER(obj),
ADD_GETTER(lock_timeout),
ADD_GETTER(wait_timeout),
@@ -304,20 +310,30 @@ static PyModuleDef _Module = {
};
PyMODINIT_FUNC PyInit_ustreamer(void) {
PyObject *module = PyModule_Create(&_Module);
if (module == NULL) {
return NULL;
}
PyObject *module = NULL;
if (PyType_Ready(&_MemsinkType) < 0) {
return NULL;
goto error;
}
Py_INCREF(&_MemsinkType);
if (PyModule_AddObject(module, "Memsink", (PyObject*)&_MemsinkType) < 0) {
return NULL;
if ((module = PyModule_Create(&_Module)) == NULL) {
goto error;
}
# define ADD(x_what, x_key, x_value) \
{ if (PyModule_Add##x_what(module, x_key, x_value) < 0) { goto error; } }
ADD(StringConstant, "__version__", US_VERSION);
ADD(StringConstant, "VERSION", US_VERSION);
ADD(IntConstant, "VERSION_MAJOR", US_VERSION_MAJOR);
ADD(IntConstant, "VERSION_MINOR", US_VERSION_MINOR);
ADD(StringConstant, "FEATURES", US_FEATURES); // Defined in setup.py
ADD(ObjectRef, "Memsink", (PyObject*)&_MemsinkType);
# undef ADD
return module;
error:
if (module != NULL) {
Py_DECREF(module);
}
return NULL;
}
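With this rework the module exposes __version__, VERSION, VERSION_MAJOR, VERSION_MINOR and the FEATURES string (injected from setup.py through -DUS_FEATURES) alongside the Memsink type, and a failure in any PyModule_Add* call unwinds through the shared error: path, dropping the module reference instead of returning NULL with it still alive.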

View File

@@ -2,6 +2,7 @@ R_DESTDIR ?=
PREFIX ?= /usr/local
CC ?= gcc
PKG_CONFIG ?= pkg-config
CFLAGS ?= -O3
LDFLAGS ?=
@@ -13,9 +14,9 @@ _V4P = ustreamer-v4p.bin
_CFLAGS = -MD -c -std=c17 -Wall -Wextra -D_GNU_SOURCE $(CFLAGS)
_USTR_LDFLAGS = $(LDFLAGS) -lm -ljpeg -pthread -lrt -latomic -levent -levent_pthreads
_DUMP_LDFLAGS = $(LDFLAGS) -lm -ljpeg -pthread -lrt -latomic
_V4P_LDFLAGS = $(LDFLAGS) -lm -ljpeg -pthread -lrt -latomic
_USTR_LDFLAGS = $(LDFLAGS) -lm -ljpeg -pthread -lrt -levent -levent_pthreads
_DUMP_LDFLAGS = $(LDFLAGS) -lm -ljpeg -pthread -lrt
_V4P_LDFLAGS = $(LDFLAGS) -lm -ljpeg -pthread -lrt
_USTR_SRCS = $(shell ls \
libs/*.c \
@@ -40,53 +41,59 @@ _V4P_SRCS = $(shell ls \
_BUILD = build
_TARGETS = $(_USTR) $(_DUMP)
_OBJS = $(_USTR_SRCS:%.c=$(_BUILD)/%.o) $(_DUMP_SRCS:%.c=$(_BUILD)/%.o)
define optbool
$(filter $(shell echo $(1) | tr A-Z a-z), yes on 1)
endef
# =====
ifneq ($(shell sh -c 'uname 2>/dev/null || echo Unknown'),FreeBSD)
override _USTR_LDFLAGS += -latomic
override _DUMP_LDFLAGS += -latomic
override _V4P_LDFLAGS += -latomic
endif
ifneq ($(MK_WITH_PYTHON),)
override _CFLAGS += -DMK_WITH_PYTHON
endif
ifneq ($(call optbool,$(WITH_GPIO)),)
override _CFLAGS += -DWITH_GPIO $(shell pkg-config --atleast-version=2 libgpiod 2> /dev/null && echo -DHAVE_GPIOD2)
ifneq ($(MK_WITH_JANUS),)
override _CFLAGS += -DMK_WITH_JANUS
endif
ifneq ($(MK_WITH_GPIO),)
override _CFLAGS += -DMK_WITH_GPIO -DWITH_GPIO $(shell $(PKG_CONFIG) --atleast-version=2 libgpiod 2> /dev/null && echo -DHAVE_GPIOD2)
override _USTR_LDFLAGS += -lgpiod
override _USTR_SRCS += $(shell ls ustreamer/gpio/*.c)
endif
ifneq ($(call optbool,$(WITH_SYSTEMD)),)
override _CFLAGS += -DWITH_SYSTEMD
ifneq ($(MK_WITH_SYSTEMD),)
override _CFLAGS += -DMK_WITH_SYSTEMD -DWITH_SYSTEMD
override _USTR_LDFLAGS += -lsystemd
override _USTR_SRCS += $(shell ls ustreamer/http/systemd/*.c)
endif
WITH_PTHREAD_NP ?= 1
ifneq ($(call optbool,$(WITH_PTHREAD_NP)),)
override _CFLAGS += -DWITH_PTHREAD_NP
ifneq ($(MK_WITH_PTHREAD_NP),)
override _CFLAGS += -DMK_WITH_PTHREAD_NP -DWITH_PTHREAD_NP
endif
WITH_SETPROCTITLE ?= 1
ifneq ($(call optbool,$(WITH_SETPROCTITLE)),)
override _CFLAGS += -DWITH_SETPROCTITLE
ifneq ($(MK_WITH_SETPROCTITLE),)
override _CFLAGS += -DMK_WITH_SETPROCTITLE -DWITH_SETPROCTITLE
ifeq ($(shell uname -s | tr A-Z a-z),linux)
override _USTR_LDFLAGS += -lbsd
endif
endif
ifneq ($(MK_WITH_PDEATHSIG),)
override _CFLAGS += -DMK_WITH_PDEATHSIG -DWITH_PDEATHSIG
endif
WITH_V4P ?= 0
ifneq ($(call optbool,$(WITH_V4P)),)
ifneq ($(MK_WITH_V4P),)
override _TARGETS += $(_V4P)
override _OBJS += $(_V4P_SRCS:%.c=$(_BUILD)/%.o)
override _CFLAGS += -DWITH_V4P $(shell pkg-config --cflags libdrm)
override _V4P_LDFLAGS += $(shell pkg-config --libs libdrm)
override _CFLAGS += -DMK_WITH_V4P -DWITH_V4P $(shell $(PKG_CONFIG) --cflags libdrm)
override _V4P_LDFLAGS += $(shell $(PKG_CONFIG) --libs libdrm)
override _USTR_SRCS += $(shell ls libs/drm/*.c)
override _USTR_LDFLAGS += $(shell pkg-config --libs libdrm)
override _USTR_LDFLAGS += $(shell $(PKG_CONFIG) --libs libdrm)
endif

View File

@@ -53,11 +53,13 @@ void us_output_file_write(void *v_output, const us_frame_s *frame) {
fprintf(output->fp,
"{\"size\": %zu, \"width\": %u, \"height\": %u,"
" \"format\": %u, \"stride\": %u, \"online\": %u, \"key\": %u, \"gop\": %u,"
" \"grab_ts\": %.3Lf, \"encode_begin_ts\": %.3Lf, \"encode_end_ts\": %.3Lf,"
" \"grab_begin_ts\": %.3Lf, \"grab_end_ts\": %.3Lf,"
" \"encode_begin_ts\": %.3Lf, \"encode_end_ts\": %.3Lf,"
" \"data\": \"%s\"}\n",
frame->used, frame->width, frame->height,
frame->format, frame->stride, frame->online, frame->key, frame->gop,
frame->grab_ts, frame->encode_begin_ts, frame->encode_end_ts,
frame->grab_begin_ts, frame->grab_end_ts,
frame->encode_begin_ts, frame->encode_end_ts,
output->base64_data);
} else {
fwrite(frame->data, 1, frame->used, output->fp);

View File

@@ -240,16 +240,22 @@ static int _dump_sink(
const long double now = us_get_now_monotonic();
char fourcc_str[8];
US_LOG_VERBOSE("Frame: %s - %ux%u -- online=%d, key=%d, kr=%d, gop=%u, latency=%.3Lf, backlog=%.3Lf, size=%zu",
US_LOG_VERBOSE("%s %.3Lf - %s %ux%u - gop=%u, key=%u, kr=%u - GRAB=%.3Lf ~~%.3Lf~~ ENC=%.3Lf ~~> LAT=%.3Lf - size=%zu",
(frame->online ? " ON" : "OFF"),
(last_ts ? now - last_ts : 0),
us_fourcc_to_string(frame->format, fourcc_str, 8),
frame->width, frame->height,
frame->online, frame->key, key_requested, frame->gop,
now - frame->grab_ts, (last_ts ? now - last_ts : 0),
frame->width,
frame->height,
frame->gop,
frame->key,
key_requested,
frame->grab_end_ts - frame->grab_begin_ts,
frame->encode_begin_ts - frame->grab_end_ts,
frame->encode_end_ts - frame->encode_begin_ts,
now - frame->grab_begin_ts,
frame->used);
last_ts = now;
US_LOG_DEBUG(" stride=%u, grab_ts=%.3Lf, encode_begin_ts=%.3Lf, encode_end_ts=%.3Lf",
frame->stride, frame->grab_ts, frame->encode_begin_ts, frame->encode_end_ts);
last_ts = now;
us_fpsi_update(fpsi, true, NULL);

View File

@@ -48,6 +48,7 @@
#include "threading.h"
#include "frame.h"
#include "xioctl.h"
#include "tc358743.h"
static const struct {
@@ -67,6 +68,9 @@ static const struct {
{"YUYV", V4L2_PIX_FMT_YUYV},
{"YVYU", V4L2_PIX_FMT_YVYU},
{"UYVY", V4L2_PIX_FMT_UYVY},
{"YUV420", V4L2_PIX_FMT_YUV420},
{"YVU420", V4L2_PIX_FMT_YVU420},
{"GREY", V4L2_PIX_FMT_GREY},
{"RGB565", V4L2_PIX_FMT_RGB565},
{"RGB24", V4L2_PIX_FMT_RGB24},
{"BGR24", V4L2_PIX_FMT_BGR24},
@@ -190,12 +194,18 @@ int us_capture_open(us_capture_s *cap) {
_LOG_DEBUG("Capture device fd=%d opened", run->fd);
if (cap->dv_timings && cap->persistent) {
struct v4l2_control ctl = {.id = V4L2_CID_DV_RX_POWER_PRESENT};
if (!us_xioctl(run->fd, VIDIOC_G_CTRL, &ctl)) {
if (!ctl.value) {
goto error_no_cable;
}
}
_LOG_DEBUG("Probing DV-timings or QuerySTD ...");
if (_capture_open_dv_timings(cap, false) < 0) {
US_ONCE_FOR(run->open_error_once, __LINE__, {
_LOG_ERROR("No signal from source");
});
goto error_no_signal;
switch (_capture_open_dv_timings(cap, false)) {
case 0: break;
case US_ERROR_NO_SIGNAL: goto error_no_signal;
case US_ERROR_NO_SYNC: goto error_no_sync;
default: goto error;
}
}
@@ -213,6 +223,15 @@ int us_capture_open(us_capture_s *cap) {
if (_capture_open_format(cap, true) < 0) {
goto error;
}
if (cap->dv_timings && cap->persistent) {
struct v4l2_control ctl = {.id = TC358743_CID_LANES_ENOUGH};
if (!us_xioctl(run->fd, VIDIOC_G_CTRL, &ctl)) {
if (!ctl.value) {
_LOG_ERROR("Not enough lanes, hardware can't handle this signal");
goto error_no_lanes;
}
}
}
_capture_open_hw_fps(cap);
_capture_open_jpeg_quality(cap);
if (_capture_open_io_method(cap) < 0) {
@@ -245,9 +264,23 @@ error_no_device:
us_capture_close(cap);
return US_ERROR_NO_DEVICE;
error_no_signal:
error_no_cable:
us_capture_close(cap);
return US_ERROR_NO_DATA;
return US_ERROR_NO_CABLE;
error_no_signal:
US_ONCE_FOR(run->open_error_once, __LINE__, { _LOG_ERROR("No signal from source"); });
us_capture_close(cap);
return US_ERROR_NO_SIGNAL;
error_no_sync:
US_ONCE_FOR(run->open_error_once, __LINE__, { _LOG_ERROR("No sync on signal"); });
us_capture_close(cap);
return US_ERROR_NO_SYNC;
error_no_lanes:
us_capture_close(cap);
return US_ERROR_NO_LANES;
error:
run->open_error_once = 0;
@@ -414,10 +447,16 @@ int us_capture_hwbuf_grab(us_capture_s *cap, us_capture_hwbuf_s **hw) {
(*hw)->raw.stride = run->stride;
(*hw)->raw.online = true;
_v4l2_buffer_copy(&buf, &(*hw)->buf);
(*hw)->raw.grab_ts = (ldf)((buf.timestamp.tv_sec * (u64)1000) + (buf.timestamp.tv_usec / 1000)) / 1000;
(*hw)->raw.grab_begin_ts = (ldf)((buf.timestamp.tv_sec * (u64)1000) + (buf.timestamp.tv_usec / 1000)) / 1000;
(*hw)->raw.grab_end_ts = us_get_now_monotonic();
_LOG_DEBUG("Grabbed HW buffer=%u: bytesused=%u, grab_begin_ts=%.3Lf, grab_end_ts=%.3Lf, latency=%.3Lf, skipped=%u",
buf.index, buf.bytesused,
(*hw)->raw.grab_begin_ts,
(*hw)->raw.grab_end_ts,
(*hw)->raw.grab_end_ts - (*hw)->raw.grab_begin_ts,
skipped);
_LOG_DEBUG("Grabbed HW buffer=%u: bytesused=%u, grab_ts=%.3Lf, latency=%.3Lf, skipped=%u",
buf.index, buf.bytesused, (*hw)->raw.grab_ts, us_get_now_monotonic() - (*hw)->raw.grab_ts, skipped);
return buf.index;
}
@@ -536,19 +575,28 @@ bool _capture_is_buffer_valid(const us_capture_s *cap, const struct v4l2_buffer
if (us_is_jpeg(cap->run->format)) {
if (buf->bytesused < 125) {
// https://stackoverflow.com/questions/2253404/what-is-the-smallest-valid-jpeg-file-size-in-bytes
_LOG_DEBUG("Discarding invalid frame, too small to be a valid JPEG: bytesused=%u", buf->bytesused);
_LOG_DEBUG("Discarding invalid frame, too small to be a valid JPEG: bytesused=%u",
buf->bytesused);
return false;
}
const u8 *const end_ptr = data + buf->bytesused;
const u8 *const eoi_ptr = end_ptr - 2;
const u16 eoi_marker = (((u16)(eoi_ptr[0]) << 8) | eoi_ptr[1]);
if (eoi_marker != 0xFFD9 && eoi_marker != 0xD900 && eoi_marker != 0x0000) {
const u16 begin_marker = (((u16)(data[0]) << 8) | data[1]);
if (begin_marker != 0xFFD8) {
_LOG_DEBUG("Discarding JPEG frame with invalid header: begin_marker=0x%04x, bytesused=%u",
begin_marker, buf->bytesused);
return false;
}
const u8 *const end_ptr = data + buf->bytesused - 2;
const u16 end_marker = (((u16)(end_ptr[0]) << 8) | end_ptr[1]);
if (end_marker != 0xFFD9 && end_marker != 0xD900 && end_marker != 0x0000) {
if (!cap->allow_truncated_frames) {
_LOG_DEBUG("Discarding truncated JPEG frame: eoi_marker=0x%04x, bytesused=%u", eoi_marker, buf->bytesused);
_LOG_DEBUG("Discarding truncated JPEG frame: end_marker=0x%04x, bytesused=%u",
end_marker, buf->bytesused);
return false;
}
_LOG_DEBUG("Got truncated JPEG frame: eoi_marker=0x%04x, bytesused=%u", eoi_marker, buf->bytesused);
_LOG_DEBUG("Got truncated JPEG frame: end_marker=0x%04x, bytesused=%u",
end_marker, buf->bytesused);
}
}
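In short, a capture buffer now has to start with the JPEG SOI marker FF D8 to be accepted at all; the tail check keeps tolerating 0xD900 and 0x0000 endings alongside the standard FF D9 EOI marker, and an otherwise truncated frame only passes when allow_truncated_frames is enabled.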
@@ -617,6 +665,10 @@ static int _capture_open_dv_timings(us_capture_s *cap, bool apply) {
// TC358743 errors here (see in the kernel: drivers/media/i2c/tc358743.c):
// - ENOLINK: No valid signal (SYS_STATUS & MASK_S_TMDS)
// - ENOLCK: No sync on signal (SYS_STATUS & MASK_S_SYNC)
switch (errno) {
case ENOLINK: return US_ERROR_NO_SIGNAL;
case ENOLCK: return US_ERROR_NO_SYNC;
}
dv_errno = errno;
goto querystd;
} else if (!apply) {
@@ -779,10 +831,8 @@ static int _capture_open_format(us_capture_s *cap, bool first) {
return 0;
}
static void _capture_open_hw_fps(us_capture_s *cap) {
us_capture_runtime_s *const run = cap->run;
run->hw_fps = 0;
static void _capture_open_hw_fps(us_capture_s *cap) { // cppcheck-suppress constParameterPointer
const us_capture_runtime_s *const run = cap->run;
struct v4l2_streamparm setfps = {.type = run->capture_type};
_LOG_DEBUG("Querying HW FPS ...");
@@ -805,7 +855,7 @@ static void _capture_open_hw_fps(us_capture_s *cap) {
US_MEMSET_ZERO(setfps);
setfps.type = run->capture_type;
SETFPS_TPF(numerator) = 1;
SETFPS_TPF(denominator) = (cap->desired_fps == 0 ? 255 : cap->desired_fps);
SETFPS_TPF(denominator) = -1; // Request maximum possible FPS
if (us_xioctl(run->fd, VIDIOC_S_PARM, &setfps) < 0) {
_LOG_PERROR("Can't set HW FPS");
@@ -822,12 +872,7 @@ static void _capture_open_hw_fps(us_capture_s *cap) {
return;
}
run->hw_fps = SETFPS_TPF(denominator);
if (cap->desired_fps != run->hw_fps) {
_LOG_INFO("Using HW FPS: %u -> %u (coerced)", cap->desired_fps, run->hw_fps);
} else {
_LOG_INFO("Using HW FPS: %u", run->hw_fps);
}
_LOG_INFO("Using HW FPS: %u/%u", SETFPS_TPF(numerator), SETFPS_TPF(denominator));
# undef SETFPS_TPF
}
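The timeperframe fields are unsigned 32-bit, so the -1 written into the denominator wraps to UINT32_MAX, i.e. the driver is asked for the shortest frame interval it can provide; whatever it coerces that to is what the "Using HW FPS: %u/%u" line then reports.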

View File

@@ -39,7 +39,7 @@
#define US_VIDEO_MAX_FPS ((uint)120)
#define US_STANDARDS_STR "PAL, NTSC, SECAM"
#define US_FORMATS_STR "YUYV, YVYU, UYVY, RGB565, RGB24, BGR24, MJPEG, JPEG"
#define US_FORMATS_STR "YUYV, YVYU, UYVY, YUV420, YVU420, RGB565, RGB24, BGR24, GREY, MJPEG, JPEG"
#define US_IO_METHODS_STR "MMAP, USERPTR"
@@ -58,7 +58,6 @@ typedef struct {
uint format;
uint stride;
float hz;
uint hw_fps;
uint jpeg_quality;
uz raw_size;
uint n_bufs;
@@ -113,7 +112,6 @@ typedef struct {
uint n_bufs;
bool dma_export;
bool dma_required;
uint desired_fps;
uz min_frame_size;
bool allow_truncated_frames;
bool persistent;

View File

@@ -26,7 +26,7 @@
#define US_VERSION_MAJOR 6
#define US_VERSION_MINOR 17
#define US_VERSION_MINOR 46
#define US_MAKE_VERSION2(_major, _minor) #_major "." #_minor
#define US_MAKE_VERSION1(_major, _minor) US_MAKE_VERSION2(_major, _minor)

View File

@@ -28,7 +28,9 @@
#include <sys/mman.h>
#include <sys/stat.h>
#include <sys/sysmacros.h>
#ifdef __linux__
# include <sys/sysmacros.h>
#endif
#include <linux/videodev2.h>
@@ -376,7 +378,7 @@ int us_drm_expose_stub(us_drm_s *drm, us_drm_stub_e stub, const us_capture_s *ca
DRAW_MSG("=== PiKVM ===\n \n< UNSUPPORTED CAPTURE FORMAT >");
break;
case US_DRM_STUB_NO_SIGNAL:
DRAW_MSG("=== PiKVM ===\n \n< NO SIGNAL >");
DRAW_MSG("=== PiKVM ===\n \n< NO LIVE VIDEO >");
break;
case US_DRM_STUB_BUSY:
DRAW_MSG("=== PiKVM ===\n \n< ONLINE IS ACTIVE >");
@@ -664,6 +666,15 @@ static drmModeModeInfo *_find_best_mode(drmModeConnector *conn, uint width, uint
continue; // Discard interlaced
}
const float mode_hz = _get_refresh_rate(mode);
if (width == 640 && height == 416 && mode->hdisplay == 640 && mode->vdisplay == 480) {
// A special case for some ancient DOS device with VGA converter.
// @CapnKirk in Discord
if (hz > 0 && mode_hz < hz) {
best = mode;
best->vdisplay = 416;
break;
}
}
if (mode->hdisplay == width && mode->vdisplay == height) {
best = mode; // Any mode with exact resolution
if (hz > 0 && mode_hz == hz) {

View File

@@ -24,4 +24,8 @@
#define US_ERROR_COMMON -1
#define US_ERROR_NO_DEVICE -2
#define US_ERROR_NO_DATA -3
#define US_ERROR_NO_CABLE -3
#define US_ERROR_NO_SIGNAL -4
#define US_ERROR_NO_SYNC -5
#define US_ERROR_NO_LANES -6
#define US_ERROR_NO_DATA -7

View File

@@ -85,8 +85,6 @@ void us_fpsi_update(us_fpsi_s *fpsi, bool bump, const us_fpsi_meta_s *meta) {
uint us_fpsi_get(us_fpsi_s *fpsi, us_fpsi_meta_s *meta) {
if (meta != NULL) {
assert(fpsi->with_meta);
} else {
assert(!fpsi->with_meta);
}
// There may be a race between reading the info and the timestamp,
@@ -97,8 +95,7 @@ uint us_fpsi_get(us_fpsi_s *fpsi, us_fpsi_meta_s *meta) {
const ull state = atomic_load(&fpsi->state); // Then the info
uint current = state & 0xFFFF;
if (fpsi->with_meta) {
assert(meta != NULL);
if (fpsi->with_meta && meta != NULL) {
meta->width = (state >> 16) & 0xFFFF;
meta->height = (state >> 32) & 0xFFFF;
meta->online = (state >> 48) & 1;

View File

@@ -36,7 +36,7 @@
us_frame_s *us_frame_init(void) {
us_frame_s *frame;
US_CALLOC(frame, 1);
us_frame_realloc_data(frame, 512 * 1024);
us_frame_realloc_data(frame, 32 * 1024);
frame->dma_fd = -1;
return frame;
}
@@ -82,16 +82,32 @@ bool us_frame_compare(const us_frame_s *a, const us_frame_s *b) {
uint us_frame_get_padding(const us_frame_s *frame) {
uint bytes_per_pixel = 0;
switch (frame->format) {
case V4L2_PIX_FMT_YUV420:
case V4L2_PIX_FMT_YVU420:
case V4L2_PIX_FMT_GREY:
bytes_per_pixel = 1;
break;
case V4L2_PIX_FMT_YUYV:
case V4L2_PIX_FMT_YVYU:
case V4L2_PIX_FMT_UYVY:
case V4L2_PIX_FMT_RGB565: bytes_per_pixel = 2; break;
case V4L2_PIX_FMT_RGB565:
bytes_per_pixel = 2;
break;
case V4L2_PIX_FMT_BGR24:
case V4L2_PIX_FMT_RGB24: bytes_per_pixel = 3; break;
case V4L2_PIX_FMT_RGB24:
bytes_per_pixel = 3;
break;
// case V4L2_PIX_FMT_H264:
case V4L2_PIX_FMT_MJPEG:
case V4L2_PIX_FMT_JPEG: bytes_per_pixel = 0; break;
default: assert(0 && "Unknown format");
case V4L2_PIX_FMT_JPEG:
bytes_per_pixel = 0;
break;
default:
assert(0 && "Unknown format");
}
if (bytes_per_pixel > 0 && frame->stride > frame->width) {
return (frame->stride - frame->width * bytes_per_pixel);

View File

@@ -38,7 +38,8 @@
bool key; \
uint gop; \
\
ldf grab_ts; \
ldf grab_begin_ts; \
ldf grab_end_ts; \
ldf encode_begin_ts; \
ldf encode_end_ts;
@@ -62,7 +63,8 @@ typedef struct {
(x_dest)->key = (x_src)->key; \
(x_dest)->gop = (x_src)->gop; \
\
(x_dest)->grab_ts = (x_src)->grab_ts; \
(x_dest)->grab_begin_ts = (x_src)->grab_begin_ts; \
(x_dest)->grab_end_ts = (x_src)->grab_end_ts; \
(x_dest)->encode_begin_ts = (x_src)->encode_begin_ts; \
(x_dest)->encode_end_ts = (x_src)->encode_end_ts; \
}

View File

@@ -121,9 +121,16 @@ void us_frametext_draw(us_frametext_s *ft, const char *text, uint width, uint he
if (block_width == 0 || block_height == 0) {
goto empty;
}
uint scale_x = frame->width / block_width / 2;
uint scale_y = frame->height / block_height / 3;
if (scale_x < scale_y / 1.5) {
// The text width should span from 75% down to half of the screen, depending on its length
const float div_x = US_MAX(US_MIN((100 / block_width * 2), 2.0), 1.5);
// The height should be tuned somehow as well
const float div_y = US_MAX(US_MIN((70 / block_height * 2), 2.0), 1.5);
uint scale_x = frame->width / block_width / div_x;
uint scale_y = frame->height / block_height / div_y;
if (scale_x < scale_y / 1.5) { // Keep proportions
scale_y = scale_x * 1.5;
} else if (scale_y < scale_x * 1.5) {
scale_x = scale_y / 1.5;

View File

@@ -25,14 +25,8 @@
#include <signal.h>
#include <unistd.h>
#if defined(__linux__)
# define HAS_PDEATHSIG
#elif defined(__FreeBSD__)
#if defined(__FreeBSD__)
# include <sys/param.h>
# if __FreeBSD_version >= 1102000
# define HAS_PDEATHSIG
# endif
#endif
@@ -49,20 +43,22 @@
# error setproctitle() not implemented, you can disable it using WITH_SETPROCTITLE=0
# endif
#endif
#ifdef HAS_PDEATHSIG
#ifdef WITH_PDEATHSIG
# if defined(__linux__)
# include <sys/prctl.h>
# elif defined(__FreeBSD__)
# elif defined(__FreeBSD__) && (__FreeBSD_version >= 1102000)
# include <sys/procctl.h>
# else
# error WITH_PDEATHSIG is not supported on your system
# endif
#endif
#include "types.h"
#ifdef WITH_SETPROCTITLE
# include "tools.h"
#endif
#ifdef HAS_PDEATHSIG
# include "logging.h"
#endif
#include "logging.h"
#ifdef WITH_SETPROCTITLE
@@ -70,7 +66,7 @@ extern char **environ;
#endif
#ifdef HAS_PDEATHSIG
#ifdef WITH_PDEATHSIG
INLINE int us_process_track_parent_death(void) {
const pid_t parent = getppid();
int signum = SIGTERM;
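For context, the Linux side of this feature is built on prctl(2). A minimal standalone sketch of the pattern (illustrative only, not the project's function, which also covers FreeBSD via procctl as the includes above show):

#include <signal.h>
#include <unistd.h>
#include <sys/prctl.h>

// Ask the kernel to deliver SIGTERM when the parent exits, then re-check
// getppid() to close the window where the parent died before prctl() ran.
static int track_parent_death_sketch(void) {
    const pid_t parent = getppid();
    if (prctl(PR_SET_PDEATHSIG, SIGTERM) < 0) {
        return -1;
    }
    if (getppid() != parent) {
        raise(SIGTERM); // The parent is already gone
    }
    return 0;
}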

View File

@@ -33,17 +33,6 @@
#include "xioctl.h"
#ifndef V4L2_CID_USER_TC358743_BASE
# define V4L2_CID_USER_TC358743_BASE (V4L2_CID_USER_BASE + 0x1080)
#endif
#ifndef TC358743_CID_AUDIO_PRESENT
# define TC358743_CID_AUDIO_PRESENT (V4L2_CID_USER_TC358743_BASE + 1)
#endif
#ifndef TC358743_CID_AUDIO_SAMPLING_RATE
# define TC358743_CID_AUDIO_SAMPLING_RATE (V4L2_CID_USER_TC358743_BASE + 0)
#endif
int us_tc358743_xioctl_get_audio_hz(int fd, uint *audio_hz) {
*audio_hz = 0;

View File

@@ -22,7 +22,26 @@
#pragma once
#include <linux/v4l2-controls.h>
#include "types.h"
#ifndef V4L2_CID_USER_TC358743_BASE
# define V4L2_CID_USER_TC358743_BASE (V4L2_CID_USER_BASE + 0x1080)
#endif
#ifndef TC358743_CID_AUDIO_SAMPLING_RATE
# define TC358743_CID_AUDIO_SAMPLING_RATE (V4L2_CID_USER_TC358743_BASE + 0)
#endif
#ifndef TC358743_CID_AUDIO_PRESENT
# define TC358743_CID_AUDIO_PRESENT (V4L2_CID_USER_TC358743_BASE + 1)
#endif
#ifndef TC358743_CID_LANES_ENOUGH
# define TC358743_CID_LANES_ENOUGH (V4L2_CID_USER_TC358743_BASE + 2)
#endif
int us_tc358743_xioctl_get_audio_hz(int fd, uint *audio_hz);

View File

@@ -36,7 +36,7 @@ us_blank_s *us_blank_init(void) {
blank->ft = us_frametext_init();
blank->raw = blank->ft->frame;
blank->jpeg = us_frame_init();
us_blank_draw(blank, "< NO SIGNAL >", 640, 480);
us_blank_draw(blank, "< NO LIVE VIDEO >", 640, 480);
return blank;
}

View File

@@ -131,7 +131,7 @@ void us_encoder_open(us_encoder_s *enc, us_capture_s *cap) {
} else {
US_LOG_INFO("Switching to CPU encoder: the input format is not (M)JPEG ...");
type = US_ENCODER_TYPE_CPU;
quality = cap->jpeg_quality;
quality = cap->jpeg_quality; // cppcheck-suppress redundantAssignment
}
} else if (type == US_ENCODER_TYPE_M2M_VIDEO || type == US_ENCODER_TYPE_M2M_IMAGE) {
@@ -162,14 +162,8 @@ void us_encoder_open(us_encoder_s *enc, us_capture_s *cap) {
run->quality = quality;
US_MUTEX_UNLOCK(run->mutex);
const ldf desired_interval = (
cap->desired_fps > 0 && (cap->desired_fps < cap->run->hw_fps || cap->run->hw_fps == 0)
? (ldf)1 / cap->desired_fps
: 0
);
enc->run->pool = us_workers_pool_init(
"JPEG", "jw", n_workers, desired_interval,
"JPEG", "jw", n_workers,
_worker_job_init, (void*)enc,
_worker_job_destroy,
_worker_run_job);

View File

@@ -38,6 +38,8 @@ typedef struct {
static void _jpeg_set_dest_frame(j_compress_ptr jpeg, us_frame_s *frame);
static void _jpeg_write_scanlines_yuv(struct jpeg_compress_struct *jpeg, const us_frame_s *frame);
static void _jpeg_write_scanlines_yuv_planar(struct jpeg_compress_struct *jpeg, const us_frame_s *frame);
static void _jpeg_write_scanlines_grey(struct jpeg_compress_struct *jpeg, const us_frame_s *frame);
static void _jpeg_write_scanlines_rgb565(struct jpeg_compress_struct *jpeg, const us_frame_s *frame);
static void _jpeg_write_scanlines_rgb24(struct jpeg_compress_struct *jpeg, const us_frame_s *frame);
#ifndef JCS_EXTENSIONS
@@ -50,7 +52,7 @@ static boolean _jpeg_empty_output_buffer(j_compress_ptr jpeg);
static void _jpeg_term_destination(j_compress_ptr jpeg);
void us_cpu_encoder_compress(const us_frame_s *src, us_frame_s *dest, unsigned quality) {
void us_cpu_encoder_compress(const us_frame_s *src, us_frame_s *dest, uint quality) {
// This function based on compress_image_to_jpeg() from mjpg-streamer
us_frame_encoding_begin(src, dest, V4L2_PIX_FMT_JPEG);
@@ -69,11 +71,23 @@ void us_cpu_encoder_compress(const us_frame_s *src, us_frame_s *dest, unsigned q
switch (src->format) {
case V4L2_PIX_FMT_YUYV:
case V4L2_PIX_FMT_YVYU:
case V4L2_PIX_FMT_UYVY: jpeg.in_color_space = JCS_YCbCr; break;
case V4L2_PIX_FMT_UYVY:
case V4L2_PIX_FMT_YUV420:
case V4L2_PIX_FMT_YVU420:
jpeg.in_color_space = JCS_YCbCr;
break;
case V4L2_PIX_FMT_GREY:
jpeg.input_components = 1;
jpeg.in_color_space = JCS_GRAYSCALE;
break;
# ifdef JCS_EXTENSIONS
case V4L2_PIX_FMT_BGR24: jpeg.in_color_space = JCS_EXT_BGR; break;
case V4L2_PIX_FMT_BGR24:
jpeg.in_color_space = JCS_EXT_BGR;
break;
# endif
default: jpeg.in_color_space = JCS_RGB; break;
default:
jpeg.in_color_space = JCS_RGB;
break;
}
jpeg_set_defaults(&jpeg);
@@ -85,9 +99,27 @@ void us_cpu_encoder_compress(const us_frame_s *src, us_frame_s *dest, unsigned q
// https://www.fourcc.org/yuv.php
case V4L2_PIX_FMT_YUYV:
case V4L2_PIX_FMT_YVYU:
case V4L2_PIX_FMT_UYVY: _jpeg_write_scanlines_yuv(&jpeg, src); break;
case V4L2_PIX_FMT_RGB565: _jpeg_write_scanlines_rgb565(&jpeg, src); break;
case V4L2_PIX_FMT_RGB24: _jpeg_write_scanlines_rgb24(&jpeg, src); break;
case V4L2_PIX_FMT_UYVY:
_jpeg_write_scanlines_yuv(&jpeg, src);
break;
case V4L2_PIX_FMT_YUV420:
case V4L2_PIX_FMT_YVU420:
_jpeg_write_scanlines_yuv_planar(&jpeg, src);
break;
case V4L2_PIX_FMT_GREY:
_jpeg_write_scanlines_grey(&jpeg, src);
break;
case V4L2_PIX_FMT_RGB565:
_jpeg_write_scanlines_rgb565(&jpeg, src);
break;
case V4L2_PIX_FMT_RGB24:
_jpeg_write_scanlines_rgb24(&jpeg, src);
break;
case V4L2_PIX_FMT_BGR24:
# ifdef JCS_EXTENSIONS
_jpeg_write_scanlines_rgb24(&jpeg, src); // Use native JCS_EXT_BGR
@@ -121,19 +153,19 @@ static void _jpeg_set_dest_frame(j_compress_ptr jpeg, us_frame_s *frame) {
}
static void _jpeg_write_scanlines_yuv(struct jpeg_compress_struct *jpeg, const us_frame_s *frame) {
uint8_t *line_buf;
u8 *line_buf;
US_CALLOC(line_buf, frame->width * 3);
const unsigned padding = us_frame_get_padding(frame);
const uint8_t *data = frame->data;
const uint padding = us_frame_get_padding(frame);
const u8 *data = frame->data;
while (jpeg->next_scanline < frame->height) {
uint8_t *ptr = line_buf;
u8 *ptr = line_buf;
for (unsigned x = 0; x < frame->width; ++x) {
for (uint x = 0; x < frame->width; ++x) {
// See also: https://www.kernel.org/doc/html/v4.8/media/uapi/v4l/pixfmt-uyvy.html
const bool is_odd_pixel = x & 1;
uint8_t y, u, v;
u8 y, u, v;
if (frame->format == V4L2_PIX_FMT_YUYV) {
y = data[is_odd_pixel ? 2 : 0];
u = data[1];
@@ -167,21 +199,104 @@ static void _jpeg_write_scanlines_yuv(struct jpeg_compress_struct *jpeg, const u
free(line_buf);
}
static void _jpeg_write_scanlines_rgb565(struct jpeg_compress_struct *jpeg, const us_frame_s *frame) {
uint8_t *line_buf;
static void _jpeg_write_scanlines_yuv_planar(struct jpeg_compress_struct *jpeg, const us_frame_s *frame) {
u8 *line_buf;
US_CALLOC(line_buf, frame->width * 3);
const unsigned padding = us_frame_get_padding(frame);
const uint8_t *data = frame->data;
const uint padding = us_frame_get_padding(frame);
const uint image_size = frame->width * frame->height;
const uint chroma_array_size = (frame->used - image_size) / 2;
const uint chroma_matrix_order = (image_size / chroma_array_size) == 16 ? 4 : 2;
const u8 *data = frame->data;
const u8 *chroma1_data = frame->data + image_size;
const u8 *chroma2_data = frame->data + image_size + chroma_array_size;
//US_LOG_DEBUG("Planar data: Image Size %u, Chroma Array Size %u, Chroma Matrix Order %u",
// image_size, chroma_array_size, chroma_matrix_order);
while (jpeg->next_scanline < frame->height) {
uint8_t *ptr = line_buf;
u8 *ptr = line_buf;
for (unsigned x = 0; x < frame->width; ++x) {
const unsigned int two_byte = (data[1] << 8) + data[0];
for (uint x = 0; x < frame->width; ++x) {
// See also: https://www.kernel.org/doc/html/v4.8/media/uapi/v4l/pixfmt-yuv420.html
u8 y = data[x];
u8 u;
u8 v;
uint chroma_position = x / chroma_matrix_order;
switch (frame->format) {
case V4L2_PIX_FMT_YUV420:
u = chroma1_data[chroma_position];
v = chroma2_data[chroma_position];
break;
case V4L2_PIX_FMT_YVU420:
u = chroma2_data[chroma_position];
v = chroma1_data[chroma_position];
break;
default:
assert(0 && "Unsupported pixel format");
return; // Makes linter happy
}
ptr[0] = y;
ptr[1] = u;
ptr[2] = v;
ptr += 3;
}
data += frame->width + padding;
if (jpeg->next_scanline > 0 && jpeg->next_scanline % chroma_matrix_order == 0) {
chroma1_data += (frame->width + padding) / chroma_matrix_order;
chroma2_data += (frame->width + padding) / chroma_matrix_order;
}
JSAMPROW scanlines[1] = {line_buf};
jpeg_write_scanlines(jpeg, scanlines, 1);
}
free(line_buf);
}
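Worked through for a 640x480 YUV420 frame: used = 460800 bytes, image_size = 307200, chroma_array_size = (460800 - 307200) / 2 = 76800, and image_size / chroma_array_size = 4, so chroma_matrix_order comes out as 2: every 2x2 block of luma pixels shares a single U and V sample, which is what the x / chroma_matrix_order indexing and the chroma pointer advance on every second scanline assume.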
static void _jpeg_write_scanlines_grey(struct jpeg_compress_struct *jpeg, const us_frame_s *frame) {
u8 *line_buf;
US_CALLOC(line_buf, frame->width);
const uint padding = us_frame_get_padding(frame);
const u8 *data = frame->data;
while (jpeg->next_scanline < frame->height) {
u8 *ptr = line_buf;
for (uint x = 0; x < frame->width; ++x) {
ptr[0] = data[x];
ptr += 1;
}
data += frame->width + padding;
JSAMPROW scanlines[1] = {line_buf};
jpeg_write_scanlines(jpeg, scanlines, 1);
}
free(line_buf);
}
static void _jpeg_write_scanlines_rgb565(struct jpeg_compress_struct *jpeg, const us_frame_s *frame) {
u8 *line_buf;
US_CALLOC(line_buf, frame->width * 3);
const uint padding = us_frame_get_padding(frame);
const u8 *data = frame->data;
while (jpeg->next_scanline < frame->height) {
u8 *ptr = line_buf;
for (uint x = 0; x < frame->width; ++x) {
const uint two_byte = (data[1] << 8) + data[0];
ptr[0] = data[1] & 248; // Red
ptr[1] = (uint8_t)((two_byte & 2016) >> 3); // Green
ptr[1] = (u8)((two_byte & 2016) >> 3); // Green
ptr[2] = (data[0] & 31) * 8; // Blue
ptr += 3;
@@ -197,8 +312,8 @@ static void _jpeg_write_scanlines_rgb565(struct jpeg_compress_struct *jpeg, cons
}
static void _jpeg_write_scanlines_rgb24(struct jpeg_compress_struct *jpeg, const us_frame_s *frame) {
const unsigned padding = us_frame_get_padding(frame);
uint8_t *data = frame->data;
const uint padding = us_frame_get_padding(frame);
u8 *data = frame->data;
while (jpeg->next_scanline < frame->height) {
JSAMPROW scanlines[1] = {data};
@@ -210,17 +325,17 @@ static void _jpeg_write_scanlines_rgb24(struct jpeg_compress_struct *jpeg, const
#ifndef JCS_EXTENSIONS
static void _jpeg_write_scanlines_bgr24(struct jpeg_compress_struct *jpeg, const us_frame_s *frame) {
uint8_t *line_buf;
u8 *line_buf;
US_CALLOC(line_buf, frame->width * 3);
const unsigned padding = us_frame_get_padding(frame);
uint8_t *data = frame->data;
const uint padding = us_frame_get_padding(frame);
u8 *data = frame->data;
while (jpeg->next_scanline < frame->height) {
uint8_t *ptr = line_buf;
u8 *ptr = line_buf;
// swap B and R values
for (unsigned x = 0; x < frame->width * 3; x += 3) {
for (uint x = 0; x < frame->width * 3; x += 3) {
ptr[0] = data[x + 2];
ptr[1] = data[x + 1];
ptr[2] = data[x];

View File

@@ -35,4 +35,4 @@
#include "../../../libs/frame.h"
void us_cpu_encoder_compress(const us_frame_s *src, us_frame_s *dest, unsigned quality);
void us_cpu_encoder_compress(const us_frame_s *src, us_frame_s *dest, uint quality);

View File

@@ -1,65 +0,0 @@
/*****************************************************************************
# #
# uStreamer - Lightweight and fast MJPEG-HTTP streamer. #
# #
# Copyright (C) 2018-2024 Maxim Devaev <mdevaev@gmail.com> #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <https://www.gnu.org/licenses/>. #
# #
*****************************************************************************/
#include "bev.h"
#include <string.h>
#include <errno.h>
#include <event2/util.h>
#include <event2/bufferevent.h>
#include "../../libs/tools.h"
char *us_bufferevent_format_reason(short what) {
char *reason;
US_CALLOC(reason, 2048);
// evutil_socket_error_to_string() is not thread-safe
char *const perror_str = us_errno_to_string(EVUTIL_SOCKET_ERROR());
bool first = true;
strncat(reason, perror_str, 1023);
free(perror_str);
strcat(reason, " (");
# define FILL_REASON(x_bev, x_name) { \
if (what & x_bev) { \
if (first) { \
first = false; \
} else { \
strcat(reason, ","); \
} \
strcat(reason, x_name); \
} \
}
FILL_REASON(BEV_EVENT_READING, "reading");
FILL_REASON(BEV_EVENT_WRITING, "writing");
FILL_REASON(BEV_EVENT_ERROR, "error");
FILL_REASON(BEV_EVENT_TIMEOUT, "timeout");
FILL_REASON(BEV_EVENT_EOF, "eof"); // cppcheck-suppress unreadVariable
# undef FILL_REASON
strcat(reason, ")");
return reason;
}

View File

@@ -1,26 +0,0 @@
/*****************************************************************************
# #
# uStreamer - Lightweight and fast MJPEG-HTTP streamer. #
# #
# Copyright (C) 2018-2024 Maxim Devaev <mdevaev@gmail.com> #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <https://www.gnu.org/licenses/>. #
# #
*****************************************************************************/
#pragma once
char *us_bufferevent_format_reason(short what);

View File

@@ -56,7 +56,6 @@
#include "../../libs/tools.h"
#include "../../libs/threading.h"
#include "../../libs/logging.h"
#include "../../libs/process.h"
#include "../../libs/frame.h"
#include "../../libs/base64.h"
#include "../../libs/list.h"
@@ -68,9 +67,7 @@
# include "../gpio/gpio.h"
#endif
#include "bev.h"
#include "unix.h"
#include "uri.h"
#include "tools.h"
#include "mime.h"
#include "static.h"
#ifdef WITH_SYSTEMD
@@ -98,9 +95,6 @@ static void _http_send_snapshot(us_server_s *server);
static bool _expose_frame(us_server_s *server, const us_frame_s *frame);
static const char *_http_get_header(struct evhttp_request *request, const char *key);
static char *_http_get_client_hostport(struct evhttp_request *request);
#define _LOG_ERROR(x_msg, ...) US_LOG_ERROR("HTTP: " x_msg, ##__VA_ARGS__)
#define _LOG_PERROR(x_msg, ...) US_LOG_PERROR("HTTP: " x_msg, ##__VA_ARGS__)
@@ -203,19 +197,9 @@ int us_server_listen(us_server_s *server) {
}
us_frame_copy(stream->run->blank->jpeg, ex->frame);
ex->notify_last_width = ex->frame->width;
ex->notify_last_height = ex->frame->height;
{
struct timeval interval = {0};
if (stream->cap->desired_fps > 0) {
interval.tv_usec = 1000000 / (stream->cap->desired_fps * 2);
} else {
interval.tv_usec = 16000; // ~60fps
}
assert((run->refresher = event_new(run->base, -1, EV_PERSIST, _http_refresher, server)) != NULL);
assert(!event_add(run->refresher, &interval));
}
assert((run->refresher = event_new(run->base, -1, 0, _http_refresher, server)) != NULL);
stream->run->http->jpeg_refresher = run->refresher;
evhttp_set_timeout(run->http, server->timeout);
@@ -282,8 +266,8 @@ static int _http_preprocess_request(struct evhttp_request *request, us_server_s
atomic_store(&server->stream->run->http->last_request_ts, us_get_now_monotonic());
if (server->allow_origin[0] != '\0') {
const char *const cors_headers = _http_get_header(request, "Access-Control-Request-Headers");
const char *const cors_method = _http_get_header(request, "Access-Control-Request-Method");
const char *const cors_headers = us_evhttp_get_header(request, "Access-Control-Request-Headers");
const char *const cors_method = us_evhttp_get_header(request, "Access-Control-Request-Method");
_A_ADD_HEADER(request, "Access-Control-Allow-Origin", server->allow_origin);
_A_ADD_HEADER(request, "Access-Control-Allow-Credentials", "true");
@@ -301,7 +285,7 @@ static int _http_preprocess_request(struct evhttp_request *request, us_server_s
}
if (run->auth_token != NULL) {
const char *const token = _http_get_header(request, "Authorization");
const char *const token = us_evhttp_get_header(request, "Authorization");
if (token == NULL || strcmp(token, run->auth_token) != 0) {
_A_ADD_HEADER(request, "WWW-Authenticate", "Basic realm=\"Restricted area\"");
evhttp_send_reply(request, 401, "Unauthorized", NULL);
@@ -527,7 +511,7 @@ static void _http_callback_state(struct evhttp_request *request, void *v_server)
(server->fake_width ? server->fake_width : captured_meta.width),
(server->fake_height ? server->fake_height : captured_meta.height),
us_bool_to_string(captured_meta.online),
stream->cap->desired_fps,
stream->desired_fps,
captured_fps,
us_fpsi_get(ex->queued_fpsi, NULL),
run->stream_clients_count
@@ -593,7 +577,7 @@ static void _http_callback_stream(struct evhttp_request *request, void *v_server
struct evkeyvalq params;
evhttp_parse_query(evhttp_request_get_uri(request), &params);
# define PARSE_PARAM(x_type, x_name) client->x_name = us_uri_get_##x_type(&params, #x_name)
# define PARSE_PARAM(x_type, x_name) client->x_name = us_evkeyvalq_get_##x_type(&params, #x_name)
PARSE_PARAM(string, key);
PARSE_PARAM(true, extra_headers);
PARSE_PARAM(true, advance_headers);
@@ -602,7 +586,7 @@ static void _http_callback_stream(struct evhttp_request *request, void *v_server
# undef PARSE_PARAM
evhttp_clear_headers(&params);
client->hostport = _http_get_client_hostport(request);
client->hostport = us_evhttp_get_hostport(request);
client->id = us_get_now_id();
{
@@ -682,8 +666,8 @@ static void _http_callback_stream_write(struct bufferevent *buf_event, void *v_c
_A_EVBUFFER_ADD_PRINTF(buf, "HTTP/1.0 200 OK" RN);
if (client->server->allow_origin[0] != '\0') {
const char *const cors_headers = _http_get_header(client->request, "Access-Control-Request-Headers");
const char *const cors_method = _http_get_header(client->request, "Access-Control-Request-Method");
const char *const cors_headers = us_evhttp_get_header(client->request, "Access-Control-Request-Headers");
const char *const cors_method = us_evhttp_get_header(client->request, "Access-Control-Request-Method");
_A_EVBUFFER_ADD_PRINTF(buf,
"Access-Control-Allow-Origin: %s" RN
@@ -738,7 +722,8 @@ static void _http_callback_stream_write(struct bufferevent *buf_event, void *v_c
"X-UStreamer-Width: %u" RN
"X-UStreamer-Height: %u" RN
"X-UStreamer-Client-FPS: %u" RN
"X-UStreamer-Grab-Time: %.06Lf" RN
"X-UStreamer-Grab-Begin-Time: %.06Lf" RN
"X-UStreamer-Grab-End-Time: %.06Lf" RN
"X-UStreamer-Encode-Begin-Time: %.06Lf" RN
"X-UStreamer-Encode-End-Time: %.06Lf" RN
"X-UStreamer-Expose-Begin-Time: %.06Lf" RN
@@ -752,14 +737,15 @@ static void _http_callback_stream_write(struct bufferevent *buf_event, void *v_c
ex->frame->width,
ex->frame->height,
us_fpsi_get(client->fpsi, NULL),
ex->frame->grab_ts,
ex->frame->grab_begin_ts,
ex->frame->grab_end_ts,
ex->frame->encode_begin_ts,
ex->frame->encode_end_ts,
ex->expose_begin_ts,
ex->expose_cmp_ts,
ex->expose_end_ts,
now_ts,
now_ts - ex->frame->grab_ts
now_ts - ex->frame->grab_begin_ts
);
}
}
@@ -888,7 +874,7 @@ static void _http_send_snapshot(us_server_s *server) {
if (!captured_meta.online) {
if (blank == NULL) {
blank = us_blank_init();
us_blank_draw(blank, "< NO SIGNAL >", captured_meta.width, captured_meta.height);
us_blank_draw(blank, "< NO LIVE VIDEO >", captured_meta.width, captured_meta.height);
}
frame = blank->jpeg;
}
@@ -908,7 +894,8 @@ static void _http_send_snapshot(us_server_s *server) {
_A_ADD_HEADER(request, "X-UStreamer-Online", us_bool_to_string(frame->online));
ADD_UNSIGNED_HEADER("X-UStreamer-Width", frame->width);
ADD_UNSIGNED_HEADER("X-UStreamer-Height", frame->height);
ADD_TIME_HEADER("X-UStreamer-Grab-Timestamp", frame->grab_ts);
ADD_TIME_HEADER("X-UStreamer-Grab-Begin-Timestamp", frame->grab_begin_ts);
ADD_TIME_HEADER("X-UStreamer-Grab-End-Timestamp", frame->grab_end_ts);
ADD_TIME_HEADER("X-UStreamer-Encode-Begin-Timestamp", frame->encode_begin_ts);
ADD_TIME_HEADER("X-UStreamer-Encode-End-Timestamp", frame->encode_end_ts);
ADD_TIME_HEADER("X-UStreamer-Send-Timestamp", us_get_now_monotonic());
@@ -940,13 +927,15 @@ static void _http_refresher(int fd, short what, void *v_server) {
bool stream_updated = false;
bool frame_updated = false;
const int ri = us_ring_consumer_acquire(ring, 0);
if (ri >= 0) {
int ri;
while ((ri = us_ring_consumer_acquire(ring, 0)) >= 0) {
const us_frame_s *const frame = ring->items[ri];
frame_updated = _expose_frame(server, frame);
stream_updated = true;
us_ring_consumer_release(ring, ri);
} else if (ex->expose_end_ts + 1 < us_get_now_monotonic()) {
}
if (!stream_updated && (ex->expose_end_ts + 1 < us_get_now_monotonic())) {
_LOG_DEBUG("Repeating exposed ...");
ex->expose_begin_ts = us_get_now_monotonic();
ex->expose_cmp_ts = ex->expose_begin_ts;
@@ -957,21 +946,6 @@ static void _http_refresher(int fd, short what, void *v_server) {
_http_send_stream(server, stream_updated, frame_updated);
_http_send_snapshot(server);
if (
frame_updated
&& server->notify_parent
&& (
ex->notify_last_online != ex->frame->online
|| ex->notify_last_width != ex->frame->width
|| ex->notify_last_height != ex->frame->height
)
) {
ex->notify_last_online = ex->frame->online;
ex->notify_last_width = ex->frame->width;
ex->notify_last_height = ex->frame->height;
us_process_notify_parent();
}
}
static bool _expose_frame(us_server_s *server, const us_frame_s *frame) {
@@ -1016,39 +990,3 @@ static bool _expose_frame(us_server_s *server, const us_frame_s *frame) {
ex->frame->online, (ex->expose_end_ts - ex->expose_begin_ts));
return true; // Updated
}
static const char *_http_get_header(struct evhttp_request *request, const char *key) {
return evhttp_find_header(evhttp_request_get_input_headers(request), key);
}
static char *_http_get_client_hostport(struct evhttp_request *request) {
char *addr = NULL;
unsigned short port = 0;
struct evhttp_connection *conn = evhttp_request_get_connection(request);
if (conn != NULL) {
char *peer;
evhttp_connection_get_peer(conn, &peer, &port);
addr = us_strdup(peer);
}
const char *xff = _http_get_header(request, "X-Forwarded-For");
if (xff != NULL) {
US_DELETE(addr, free);
assert((addr = strndup(xff, 1024)) != NULL);
for (uint index = 0; addr[index]; ++index) {
if (addr[index] == ',') {
addr[index] = '\0';
break;
}
}
}
if (addr == NULL) {
addr = us_strdup("???");
}
char *hostport;
US_ASPRINTF(hostport, "[%s]:%u", addr, port);
free(addr);
return hostport;
}
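Note: the refresher hunk above replaces the single ring read with a drain loop, so every frame queued since the last tick is exposed before the "Repeating exposed" fallback is considered. A minimal sketch of that consumer pattern, assuming the us_ring_s API shown in the diff (drain_ring() and handle_frame() are illustrative names, not part of the source):

#include "../../libs/ring.h"   // assumed project header for us_ring_s
#include "../../libs/frame.h"  // assumed project header for us_frame_s

// Drain every pending item from the ring without blocking; return true if
// at least one frame was handled, so the caller knows whether it may instead
// repeat the previously exposed frame.
static bool drain_ring(us_ring_s *ring, bool (*handle_frame)(const us_frame_s *frame)) {
	bool updated = false;
	int ri;
	while ((ri = us_ring_consumer_acquire(ring, 0)) >= 0) { // 0 = do not wait
		const us_frame_s *const frame = ring->items[ri];
		updated = handle_frame(frame) || updated;
		us_ring_consumer_release(ring, ri);
	}
	return updated;
}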

View File

@@ -72,10 +72,6 @@ typedef struct {
ldf expose_begin_ts;
ldf expose_cmp_ts;
ldf expose_end_ts;
bool notify_last_online;
uint notify_last_width;
uint notify_last_height;
} us_server_exposed_s;
typedef struct {
@@ -121,8 +117,6 @@ typedef struct us_server_sx {
uint fake_width;
uint fake_height;
bool notify_parent;
us_server_runtime_s *run;
} us_server_s;

View File

@@ -20,7 +20,7 @@
*****************************************************************************/
#include "unix.h"
#include "tools.h"
#include <string.h>
#include <unistd.h>
@@ -33,6 +33,8 @@
#include <event2/http.h>
#include <event2/util.h>
#include <event2/keyvalq_struct.h>
#include <event2/bufferevent.h>
#include "../../libs/types.h"
#include "../../libs/tools.h"
@@ -79,3 +81,94 @@ evutil_socket_t us_evhttp_bind_unix(struct evhttp *http, const char *path, bool
}
return fd;
}
const char *us_evhttp_get_header(struct evhttp_request *request, const char *key) {
return evhttp_find_header(evhttp_request_get_input_headers(request), key);
}
char *us_evhttp_get_hostport(struct evhttp_request *request) {
char *addr = NULL;
unsigned short port = 0;
struct evhttp_connection *conn = evhttp_request_get_connection(request);
if (conn != NULL) {
char *peer;
evhttp_connection_get_peer(conn, &peer, &port);
addr = us_strdup(peer);
}
const char *xff = us_evhttp_get_header(request, "X-Forwarded-For");
if (xff != NULL) {
US_DELETE(addr, free);
assert((addr = strndup(xff, 1024)) != NULL);
for (uint index = 0; addr[index]; ++index) {
if (addr[index] == ',') {
addr[index] = '\0';
break;
}
}
}
if (addr == NULL) {
addr = us_strdup("???");
}
char *hostport;
US_ASPRINTF(hostport, "[%s]:%u", addr, port);
free(addr);
return hostport;
}
bool us_evkeyvalq_get_true(struct evkeyvalq *params, const char *key) {
const char *value_str = evhttp_find_header(params, key);
if (value_str != NULL) {
if (
value_str[0] == '1'
|| !evutil_ascii_strcasecmp(value_str, "true")
|| !evutil_ascii_strcasecmp(value_str, "yes")
) {
return true;
}
}
return false;
}
char *us_evkeyvalq_get_string(struct evkeyvalq *params, const char *key) {
const char *const value_str = evhttp_find_header(params, key);
if (value_str != NULL) {
return evhttp_encode_uri(value_str);
}
return NULL;
}
char *us_bufferevent_format_reason(short what) {
char *reason;
US_CALLOC(reason, 2048);
// evutil_socket_error_to_string() is not thread-safe
char *const perror_str = us_errno_to_string(EVUTIL_SOCKET_ERROR());
bool first = true;
strncat(reason, perror_str, 1023);
free(perror_str);
strcat(reason, " (");
# define FILL_REASON(x_bev, x_name) { \
if (what & x_bev) { \
if (first) { \
first = false; \
} else { \
strcat(reason, ","); \
} \
strcat(reason, x_name); \
} \
}
FILL_REASON(BEV_EVENT_READING, "reading");
FILL_REASON(BEV_EVENT_WRITING, "writing");
FILL_REASON(BEV_EVENT_ERROR, "error");
FILL_REASON(BEV_EVENT_TIMEOUT, "timeout");
FILL_REASON(BEV_EVENT_EOF, "eof"); // cppcheck-suppress unreadVariable
# undef FILL_REASON
strcat(reason, ")");
return reason;
}
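These helpers consolidate what previously lived in uri.c (removed below) and in server.c. A brief usage sketch inside an evhttp request callback (the callback itself and the "extra_headers" parameter are illustrative, not part of the source):

#include <stdlib.h>
#include <event2/http.h>
#include <event2/keyvalq_struct.h>
#include "unix.h" // assumed: declares the us_evhttp_*/us_evkeyvalq_* helpers above

static void _on_request(struct evhttp_request *request, void *v_arg) {
	(void)v_arg;
	struct evkeyvalq params;
	evhttp_parse_query(evhttp_request_get_uri(request), &params); // parse ?key=value pairs
	const bool extra = us_evkeyvalq_get_true(&params, "extra_headers"); // "1"/"true"/"yes"
	char *const hostport = us_evhttp_get_hostport(request); // honors X-Forwarded-For
	// ... serve the request, optionally adding extra headers and logging hostport ...
	(void)extra;
	free(hostport);
	evhttp_clear_headers(&params);
}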

View File

@@ -25,9 +25,17 @@
#include <sys/stat.h>
#include <event2/http.h>
#include <event2/util.h>
#include <event2/keyvalq_struct.h>
#include "../../libs/types.h"
evutil_socket_t us_evhttp_bind_unix(struct evhttp *http, const char *path, bool rm, mode_t mode);
const char *us_evhttp_get_header(struct evhttp_request *request, const char *key);
char *us_evhttp_get_hostport(struct evhttp_request *request);
bool us_evkeyvalq_get_true(struct evkeyvalq *params, const char *key);
char *us_evkeyvalq_get_string(struct evkeyvalq *params, const char *key);
char *us_bufferevent_format_reason(short what);

View File

@@ -1,52 +0,0 @@
/*****************************************************************************
# #
# uStreamer - Lightweight and fast MJPEG-HTTP streamer. #
# #
# Copyright (C) 2018-2024 Maxim Devaev <mdevaev@gmail.com> #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <https://www.gnu.org/licenses/>. #
# #
*****************************************************************************/
#include "uri.h"
#include <event2/util.h>
#include <event2/http.h>
#include <event2/keyvalq_struct.h>
#include "../../libs/types.h"
bool us_uri_get_true(struct evkeyvalq *params, const char *key) {
const char *value_str = evhttp_find_header(params, key);
if (value_str != NULL) {
if (
value_str[0] == '1'
|| !evutil_ascii_strcasecmp(value_str, "true")
|| !evutil_ascii_strcasecmp(value_str, "yes")
) {
return true;
}
}
return false;
}
char *us_uri_get_string(struct evkeyvalq *params, const char *key) {
const char *const value_str = evhttp_find_header(params, key);
if (value_str != NULL) {
return evhttp_encode_uri(value_str);
}
return NULL;
}

View File

@@ -43,7 +43,7 @@
static us_m2m_encoder_s *_m2m_encoder_init(
const char *name, const char *path, uint output_format,
uint bitrate, uint gop, uint quality, bool allow_dma);
uint bitrate, uint gop, uint quality, bool allow_dma, bool boost);
static void _m2m_encoder_ensure(us_m2m_encoder_s *enc, const us_frame_s *frame);
@@ -63,9 +63,9 @@ static int _m2m_encoder_compress_raw(us_m2m_encoder_s *enc, const us_frame_s *sr
#define _LOG_DEBUG(x_msg, ...) US_LOG_DEBUG("%s: " x_msg, enc->name, ##__VA_ARGS__)
us_m2m_encoder_s *us_m2m_h264_encoder_init(const char *name, const char *path, uint bitrate, uint gop) {
us_m2m_encoder_s *us_m2m_h264_encoder_init(const char *name, const char *path, uint bitrate, uint gop, bool boost) {
bitrate *= 1000; // From Kbps
return _m2m_encoder_init(name, path, V4L2_PIX_FMT_H264, bitrate, gop, 0, true);
return _m2m_encoder_init(name, path, V4L2_PIX_FMT_H264, bitrate, gop, 0, true, boost);
}
us_m2m_encoder_s *us_m2m_mjpeg_encoder_init(const char *name, const char *path, uint quality) {
@@ -76,12 +76,12 @@ us_m2m_encoder_s *us_m2m_mjpeg_encoder_init(const char *name, const char *path,
bitrate = step * round(bitrate / step);
bitrate *= 1000; // From Kbps
assert(bitrate > 0);
return _m2m_encoder_init(name, path, V4L2_PIX_FMT_MJPEG, bitrate, 0, 0, true);
return _m2m_encoder_init(name, path, V4L2_PIX_FMT_MJPEG, bitrate, 0, 0, true, false);
}
us_m2m_encoder_s *us_m2m_jpeg_encoder_init(const char *name, const char *path, uint quality) {
// FIXME: DMA does not work
return _m2m_encoder_init(name, path, V4L2_PIX_FMT_JPEG, 0, 0, quality, false);
return _m2m_encoder_init(name, path, V4L2_PIX_FMT_JPEG, 0, 0, quality, false, false);
}
void us_m2m_encoder_destroy(us_m2m_encoder_s *enc) {
@@ -139,7 +139,7 @@ int us_m2m_encoder_compress(us_m2m_encoder_s *enc, const us_frame_s *src, us_fra
static us_m2m_encoder_s *_m2m_encoder_init(
const char *name, const char *path, uint output_format,
uint bitrate, uint gop, uint quality, bool allow_dma) {
uint bitrate, uint gop, uint quality, bool allow_dma, bool boost) {
US_LOG_INFO("%s: Initializing encoder ...", name);
@@ -161,6 +161,7 @@ static us_m2m_encoder_s *_m2m_encoder_init(
enc->gop = gop;
enc->quality = quality;
enc->allow_dma = allow_dma;
enc->boost = boost;
enc->run = run;
return enc;
}
@@ -222,7 +223,11 @@ static void _m2m_encoder_ensure(us_m2m_encoder_s *enc, const us_frame_s *frame)
SET_OPTION(V4L2_CID_MPEG_VIDEO_H264_I_PERIOD, enc->gop);
SET_OPTION(V4L2_CID_MPEG_VIDEO_H264_PROFILE, V4L2_MPEG_VIDEO_H264_PROFILE_CONSTRAINED_BASELINE);
if (run->p_width * run->p_height <= 1920 * 1080) { // https://forums.raspberrypi.com/viewtopic.php?t=291447#p1762296
SET_OPTION(V4L2_CID_MPEG_VIDEO_H264_LEVEL, V4L2_MPEG_VIDEO_H264_LEVEL_4_0);
if (enc->boost) {
SET_OPTION(V4L2_CID_MPEG_VIDEO_H264_LEVEL, V4L2_MPEG_VIDEO_H264_LEVEL_4_2);
} else {
SET_OPTION(V4L2_CID_MPEG_VIDEO_H264_LEVEL, V4L2_MPEG_VIDEO_H264_LEVEL_4_0);
}
} else {
SET_OPTION(V4L2_CID_MPEG_VIDEO_H264_LEVEL, V4L2_MPEG_VIDEO_H264_LEVEL_5_1);
}
@@ -276,10 +281,13 @@ static void _m2m_encoder_ensure(us_m2m_encoder_s *enc, const us_frame_s *frame)
}
}
if (run->p_width * run->p_height <= 1280 * 720) {
if (
(run->p_width * run->p_height <= 1280 * 720)
|| ((enc->output_format == V4L2_PIX_FMT_H264) && enc->boost)
) {
// H264 requires some kind of limit. More than 30 is not supported, and at 0
// it starts producing broken frames after a while.
// Exceeding the fps, in turn, sharply increases the encoding time.
run->fps_limit = 60;
} else {
run->fps_limit = 30;
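In short, --h264-boost raises the negotiated H.264 level from 4.0 to 4.2 for frames up to 1920x1080 and keeps the 60 FPS limit even at full HD. A condensed sketch of the two decisions above (values mirror the diff; the helper names are illustrative):

#include <stdbool.h>
#include <linux/videodev2.h>

// Level selection as in _m2m_encoder_ensure() above: boost only matters at <= 1080p.
static int pick_h264_level(unsigned width, unsigned height, bool boost) {
	if (width * height <= 1920 * 1080) {
		return boost ? V4L2_MPEG_VIDEO_H264_LEVEL_4_2 : V4L2_MPEG_VIDEO_H264_LEVEL_4_0;
	}
	return V4L2_MPEG_VIDEO_H264_LEVEL_5_1;
}

// FPS limit selection: 60 for small frames, or for H264 when boost is enabled.
static unsigned pick_fps_limit(unsigned width, unsigned height, unsigned format, bool boost) {
	if ((width * height <= 1280 * 720) || (format == V4L2_PIX_FMT_H264 && boost)) {
		return 60;
	}
	return 30;
}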

View File

@@ -58,12 +58,13 @@ typedef struct {
uint gop;
uint quality;
bool allow_dma;
bool boost;
us_m2m_encoder_runtime_s *run;
} us_m2m_encoder_s;
us_m2m_encoder_s *us_m2m_h264_encoder_init(const char *name, const char *path, uint bitrate, uint gop);
us_m2m_encoder_s *us_m2m_h264_encoder_init(const char *name, const char *path, uint bitrate, uint gop, bool boost);
us_m2m_encoder_s *us_m2m_mjpeg_encoder_init(const char *name, const char *path, uint quality);
us_m2m_encoder_s *us_m2m_jpeg_encoder_init(const char *name, const char *path, uint quality);
void us_m2m_encoder_destroy(us_m2m_encoder_s *enc);

View File

@@ -100,6 +100,7 @@ enum _US_OPT_VALUES {
_O_H264_BITRATE,
_O_H264_GOP,
_O_H264_M2M_DEVICE,
_O_H264_BOOST,
# undef ADD_SINK
# ifdef WITH_V4P
@@ -114,9 +115,10 @@ enum _US_OPT_VALUES {
_O_GPIO_HAS_HTTP_CLIENTS,
# endif
# ifdef HAS_PDEATHSIG
# ifdef WITH_PDEATHSIG
_O_EXIT_ON_PARENT_DEATH,
# endif
_O_EXIT_ON_DEVICE_ERROR,
_O_EXIT_ON_NO_CLIENTS,
# ifdef WITH_SETPROCTITLE
_O_PROCESS_NAME_PREFIX,
@@ -138,7 +140,7 @@ static const struct option _LONG_OPTS[] = {
{"input", required_argument, NULL, _O_INPUT},
{"resolution", required_argument, NULL, _O_RESOLUTION},
{"format", required_argument, NULL, _O_FORMAT},
{"format-swap-rgb", required_argument, NULL, _O_FORMAT_SWAP_RGB},
{"format-swap-rgb", no_argument, NULL, _O_FORMAT_SWAP_RGB},
{"tv-standard", required_argument, NULL, _O_TV_STANDARD},
{"io-method", required_argument, NULL, _O_IO_METHOD},
{"desired-fps", required_argument, NULL, _O_DESIRED_FPS},
@@ -205,6 +207,7 @@ static const struct option _LONG_OPTS[] = {
{"h264-bitrate", required_argument, NULL, _O_H264_BITRATE},
{"h264-gop", required_argument, NULL, _O_H264_GOP},
{"h264-m2m-device", required_argument, NULL, _O_H264_M2M_DEVICE},
{"h264-boost", no_argument, NULL, _O_H264_BOOST},
// Compatibility
{"sink", required_argument, NULL, _O_JPEG_SINK},
{"sink-mode", required_argument, NULL, _O_JPEG_SINK_MODE},
@@ -224,9 +227,10 @@ static const struct option _LONG_OPTS[] = {
{"gpio-has-http-clients", required_argument, NULL, _O_GPIO_HAS_HTTP_CLIENTS},
# endif
# ifdef HAS_PDEATHSIG
# ifdef WITH_PDEATHSIG
{"exit-on-parent-death", no_argument, NULL, _O_EXIT_ON_PARENT_DEATH},
# endif
{"exit-on-device-error", no_argument, NULL, _O_EXIT_ON_DEVICE_ERROR},
{"exit-on-no-clients", required_argument, NULL, _O_EXIT_ON_NO_CLIENTS},
# ifdef WITH_SETPROCTITLE
{"process-name-prefix", required_argument, NULL, _O_PROCESS_NAME_PREFIX},
@@ -384,7 +388,7 @@ int options_parse(us_options_s *options, us_capture_s *cap, us_encoder_s *enc, u
case _O_FORMAT_SWAP_RGB: OPT_SET(cap->format_swap_rgb, true);
case _O_TV_STANDARD: OPT_PARSE_ENUM("TV standard", cap->standard, us_capture_parse_standard, US_STANDARDS_STR);
case _O_IO_METHOD: OPT_PARSE_ENUM("IO method", cap->io_method, us_capture_parse_io_method, US_IO_METHODS_STR);
case _O_DESIRED_FPS: OPT_NUMBER("--desired-fps", cap->desired_fps, 0, US_VIDEO_MAX_FPS, 0);
case _O_DESIRED_FPS: OPT_NUMBER("--desired-fps", stream->desired_fps, 0, US_VIDEO_MAX_FPS, 0);
case _O_MIN_FRAME_SIZE: OPT_NUMBER("--min-frame-size", cap->min_frame_size, 1, 8192, 0);
case _O_ALLOW_TRUNCATED_FRAMES: OPT_SET(cap->allow_truncated_frames, true);
case _O_PERSISTENT: OPT_SET(cap->persistent, true);
@@ -467,6 +471,7 @@ int options_parse(us_options_s *options, us_capture_s *cap, us_encoder_s *enc, u
case _O_H264_BITRATE: OPT_NUMBER("--h264-bitrate", stream->h264_bitrate, 25, 20000, 0);
case _O_H264_GOP: OPT_NUMBER("--h264-gop", stream->h264_gop, 0, 60, 0);
case _O_H264_M2M_DEVICE: OPT_SET(stream->h264_m2m_path, optarg);
case _O_H264_BOOST: OPT_SET(stream->h264_boost, true);
# ifdef WITH_V4P
case _O_V4P:
@@ -483,18 +488,19 @@ int options_parse(us_options_s *options, us_capture_s *cap, us_encoder_s *enc, u
case _O_GPIO_HAS_HTTP_CLIENTS: OPT_NUMBER("--gpio-has-http-clients", us_g_gpio.has_http_clients.pin, 0, 256, 0);
# endif
# ifdef HAS_PDEATHSIG
# ifdef WITH_PDEATHSIG
case _O_EXIT_ON_PARENT_DEATH:
if (us_process_track_parent_death() < 0) {
return -1;
};
break;
# endif
case _O_EXIT_ON_DEVICE_ERROR: OPT_SET(stream->exit_on_device_error, true);
case _O_EXIT_ON_NO_CLIENTS: OPT_NUMBER("--exit-on-no-clients", stream->exit_on_no_clients, 0, 86400, 0);
# ifdef WITH_SETPROCTITLE
case _O_PROCESS_NAME_PREFIX: OPT_SET(process_name_prefix, optarg);
# endif
case _O_NOTIFY_PARENT: OPT_SET(server->notify_parent, true);
case _O_NOTIFY_PARENT: OPT_SET(stream->notify_parent, true);
case _O_LOG_LEVEL: OPT_NUMBER("--log-level", us_g_log_level, US_LOG_LEVEL_INFO, US_LOG_LEVEL_DEBUG, 0);
case _O_PERF: OPT_SET(us_g_log_level, US_LOG_LEVEL_PERF);
@@ -581,34 +587,52 @@ static int _check_instance_id(const char *str) {
}
static void _features(void) {
# ifdef WITH_GPIO
# ifdef MK_WITH_PYTHON
puts("+ WITH_PYTHON");
# else
puts("- WITH_PYTHON");
# endif
# ifdef MK_WITH_JANUS
puts("+ WITH_JANUS");
# else
puts("- WITH_JANUS");
# endif
# ifdef MK_WITH_V4P
puts("+ WITH_V4P");
# else
puts("- WITH_V4P");
# endif
# ifdef MK_WITH_GPIO
puts("+ WITH_GPIO");
# else
puts("- WITH_GPIO");
# endif
# ifdef WITH_SYSTEMD
# ifdef MK_WITH_SYSTEMD
puts("+ WITH_SYSTEMD");
# else
puts("- WITH_SYSTEMD");
# endif
# ifdef WITH_PTHREAD_NP
# ifdef MK_WITH_PTHREAD_NP
puts("+ WITH_PTHREAD_NP");
# else
puts("- WITH_PTHREAD_NP");
# endif
# ifdef WITH_SETPROCTITLE
# ifdef MK_WITH_SETPROCTITLE
puts("+ WITH_SETPROCTITLE");
# else
puts("- WITH_SETPROCTITLE");
# endif
# ifdef HAS_PDEATHSIG
puts("+ HAS_PDEATHSIG");
# ifdef MK_WITH_PDEATHSIG
puts("+ WITH_PDEATHSIG");
# else
puts("- HAS_PDEATHSIG");
puts("- WITH_PDEATHSIG");
# endif
}
@@ -725,6 +749,7 @@ static void _help(FILE *fp, const us_capture_s *cap, const us_encoder_s *enc, co
SAY(" --h264-bitrate <kbps> ───────── H264 bitrate in Kbps. Default: %u.\n", stream->h264_bitrate);
SAY(" --h264-gop <N> ──────────────── Interval between keyframes. Default: %u.\n", stream->h264_gop);
SAY(" --h264-m2m-device </dev/path> ─ Path to V4L2 M2M encoder device. Default: auto select.\n");
SAY(" --h264-boost ────────────────── Increase encoder performance on PiKVM V4. Default: disabled.\n");
# ifdef WITH_V4P
SAY("Passthrough options for PiKVM V4:");
SAY("═════════════════════════════════");
@@ -740,11 +765,11 @@ static void _help(FILE *fp, const us_capture_s *cap, const us_encoder_s *enc, co
SAY(" --gpio-stream-online <pin> ──── Set 1 while streaming. Default: disabled.\n");
SAY(" --gpio-has-http-clients <pin> ─ Set 1 while stream has at least one client. Default: disabled.\n");
# endif
# if (defined(HAS_PDEATHSIG) || defined(WITH_SETPROCTITLE))
# if (defined(WITH_PDEATHSIG) || defined(WITH_SETPROCTITLE))
SAY("Process options:");
SAY("════════════════");
# endif
# ifdef HAS_PDEATHSIG
# ifdef WITH_PDEATHSIG
SAY(" --exit-on-parent-death ─────── Exit the program if the parent process is dead. Default: disabled.\n");
# endif
SAY(" --exit-on-no-clients <sec> ──── Exit the program if there have been no stream or sink clients");

View File

@@ -28,9 +28,12 @@
#include <unistd.h>
#include <errno.h>
#include <assert.h>
#include <math.h>
#include <pthread.h>
#include <event2/event.h> // jpeg_refresher
#include "../libs/types.h"
#include "../libs/errors.h"
#include "../libs/tools.h"
@@ -85,6 +88,7 @@ static us_capture_hwbuf_s *_get_latest_hw(us_queue_s *queue);
static bool _stream_has_jpeg_clients_cached(us_stream_s *stream);
static bool _stream_has_any_clients_cached(us_stream_s *stream);
static int _stream_init_loop(us_stream_s *stream);
static void _stream_update_captured_fpsi(us_stream_s *stream, const us_frame_s *frame, bool bump);
#ifdef WITH_V4P
static void _stream_drm_ensure_no_signal(us_stream_s *stream);
#endif
@@ -122,15 +126,15 @@ us_stream_s *us_stream_init(us_capture_s *cap, us_encoder_s *enc) {
stream->h264_gop = 30;
stream->run = run;
us_blank_draw(run->blank, "< NO SIGNAL >", cap->width, cap->height);
us_fpsi_meta_s meta = {0};
us_fpsi_frame_to_meta(run->blank->raw, &meta);
us_fpsi_update(http->captured_fpsi, false, &meta);
us_stream_update_blank(stream, cap); // Init blank
return stream;
}
void us_stream_update_blank(us_stream_s *stream, const us_capture_s *cap) {
us_blank_draw(stream->run->blank, "< NO SIGNAL >", cap->width, cap->height);
us_stream_runtime_s *const run = stream->run;
us_blank_draw(run->blank, "< NO LIVE VIDEO >", cap->width, cap->height);
us_fpsi_frame_to_meta(run->blank->raw, &run->notify_meta); // Initial "unchanged" meta
_stream_update_captured_fpsi(stream, run->blank->raw, false);
}
void us_stream_destroy(us_stream_s *stream) {
@@ -153,7 +157,13 @@ void us_stream_loop(us_stream_s *stream) {
atomic_store(&run->http->last_request_ts, us_get_now_monotonic());
if (stream->h264_sink != NULL) {
run->h264_enc = us_m2m_h264_encoder_init("H264", stream->h264_m2m_path, stream->h264_bitrate, stream->h264_gop);
run->h264_enc = us_m2m_h264_encoder_init(
"H264",
stream->h264_m2m_path,
stream->h264_bitrate,
stream->h264_gop,
stream->h264_boost);
run->h264_tmp_src = us_frame_init();
run->h264_dest = us_frame_init();
}
@@ -204,9 +214,7 @@ void us_stream_loop(us_stream_s *stream) {
default: goto close; // Any error
}
us_fpsi_meta_s meta = {0};
us_fpsi_frame_to_meta(&hw->raw, &meta);
us_fpsi_update(run->http->captured_fpsi, true, &meta);
_stream_update_captured_fpsi(stream, &hw->raw, true);
# ifdef WITH_GPIO
us_gpio_set_stream_online(true);
@@ -313,6 +321,9 @@ static void *_jpeg_thread(void *v_ctx) {
_worker_context_s *ctx = v_ctx;
us_stream_s *stream = ctx->stream;
uint take = 1;
uint step = 1;
ldf grab_after_ts = 0;
uint fluency_passed = 0;
@@ -331,7 +342,7 @@ static void *_jpeg_thread(void *v_ctx) {
atomic_fetch_sub(&stream->run->http->snapshot_requested, 1);
}
US_LOG_PERF("JPEG: ##### Encoded JPEG exposed; worker=%s, latency=%.3Lf",
wr->name, us_get_now_monotonic() - job->dest->grab_ts);
wr->name, us_get_now_monotonic() - job->dest->grab_begin_ts);
} else {
US_LOG_PERF("JPEG: ----- Encoded JPEG dropped; worker=%s", wr->name);
}
@@ -349,6 +360,19 @@ static void *_jpeg_thread(void *v_ctx) {
continue;
}
if (stream->desired_fps > 0) {
const uint captured_fps = us_fpsi_get(stream->run->http->captured_fpsi, NULL);
take = ceilf((float)captured_fps / (float)stream->desired_fps);
if (step < take) {
US_LOG_DEBUG("JPEG: Passed encoding for FPS limit: step=%u, take=%u", step, take);
++step;
us_capture_hwbuf_decref(hw);
continue;
} else {
step = 1;
}
}
const ldf now_ts = us_get_now_monotonic();
if (now_ts < grab_after_ts) {
fluency_passed += 1;
@@ -395,7 +419,9 @@ static void *_h264_thread(void *v_ctx) {
_worker_context_s *ctx = v_ctx;
us_stream_s *stream = ctx->stream;
ldf grab_after_ts = 0;
uint take = 1;
uint step = 1;
while (!atomic_load(ctx->stop)) {
us_capture_hwbuf_s *hw = _get_latest_hw(ctx->queue);
if (hw == NULL) {
@@ -406,23 +432,25 @@ static void *_h264_thread(void *v_ctx) {
US_LOG_VERBOSE("H264: Passed encoding because nobody is watching");
goto decref;
}
if (hw->raw.grab_ts < grab_after_ts) {
US_LOG_DEBUG("H264: Passed encoding for FPS limit");
goto decref;
uint fps_limit = stream->run->h264_enc->run->fps_limit;
if (stream->desired_fps > 0 && (fps_limit == 0 || stream->desired_fps < fps_limit)) {
fps_limit = stream->desired_fps;
}
if (fps_limit > 0) {
const uint captured_fps = us_fpsi_get(stream->run->http->captured_fpsi, NULL);
take = ceilf((float)captured_fps / (float)fps_limit);
if (step < take) {
US_LOG_DEBUG("H264: Passed encoding for FPS limit: step=%u, take=%u", step, take);
++step;
goto decref;
} else {
step = 1;
}
}
_stream_encode_expose_h264(ctx->stream, &hw->raw, false);
// The M2M encoder adds about 100 milliseconds of latency at 1080p if it is fed more than 30 FPS.
// That is why there are two modes: 60 FPS for small frames and 30 for 1920x1080(1200).
// The next frame is grabbed no earlier than the FPS interval requires, minus a small
// margin (in case the capture is uneven) - slightly less than 1/60, and roughly a third of 1/30.
const uint fps_limit = stream->run->h264_enc->run->fps_limit;
if (fps_limit > 0) {
const ldf frame_interval = (ldf)1 / fps_limit;
grab_after_ts = hw->raw.grab_ts + frame_interval - 0.01;
}
decref:
us_capture_hwbuf_decref(hw);
}
@@ -530,6 +558,8 @@ static int _stream_init_loop(us_stream_s *stream) {
int once = 0;
while (!atomic_load(&stream->run->stop)) {
const char *blank_reason = "< NO LIVE VIDEO >";
# ifdef WITH_GPIO
us_gpio_set_stream_online(false);
# endif
@@ -555,17 +585,68 @@ static int _stream_init_loop(us_stream_s *stream) {
switch (us_capture_open(stream->cap)) {
case 0: break;
case US_ERROR_NO_DEVICE:
case US_ERROR_NO_DATA:
US_ONCE({ US_LOG_INFO("Waiting for the capture device ..."); });
goto offline_and_retry;
blank_reason = (
"< NO CAPTURE DEVICE >\n \n"
" Possible reasons: \n \n"
" - Device unplugged \n \n"
" - Bad config \n \n"
" - Malfunction "
);
goto silent_error;
case US_ERROR_NO_CABLE:
blank_reason = (
"< NO VIDEO SOURCE >\n \n"
" Possible reasons: \n \n"
" - Source is off \n \n"
" - Cable problems "
);
goto silent_error;
case US_ERROR_NO_SIGNAL:
blank_reason = (
"< NO SIGNAL DETECTED >\n \n"
" Possible reasons: \n \n"
" - Video suspended \n \n"
" - Cable problems "
);
goto silent_error;
case US_ERROR_NO_SYNC:
blank_reason = (
"< NO SYNC WITH SIGNAL >\n \n"
" Possible reasons: \n \n"
" - Source is crazy \n \n"
" - Cable problems "
);
goto silent_error;
case US_ERROR_NO_LANES:
blank_reason = (
"< UNSUPPORTED SIGNAL TIMINGS >\n \n"
" Possible reasons: \n \n"
" - Too high frequency \n \n"
" - Source ignores EDID \n \n"
" - Invalid EDID "
);
goto verbose_error;
default:
once = 0;
goto offline_and_retry;
goto verbose_error;
}
us_encoder_open(stream->enc, stream->cap);
return 0;
silent_error:
if (!stream->exit_on_device_error) {
US_ONCE({ US_LOG_INFO("Waiting for the capture device ..."); });
}
goto offline_and_retry;
verbose_error:
once = 0;
goto offline_and_retry;
offline_and_retry:
if (stream->exit_on_device_error) {
US_LOG_INFO("Device error, exiting ...");
us_process_suicide();
}
for (uint count = 0; count < stream->error_delay * 10; ++count) {
if (atomic_load(&run->stop)) {
break;
@@ -578,12 +659,9 @@ static int _stream_init_loop(us_stream_s *stream) {
width = stream->cap->width;
height = stream->cap->height;
}
us_blank_draw(run->blank, "< NO SIGNAL >", width, height);
us_fpsi_meta_s meta = {0};
us_fpsi_frame_to_meta(run->blank->raw, &meta);
us_fpsi_update(run->http->captured_fpsi, false, &meta);
us_blank_draw(run->blank, blank_reason, width, height);
_stream_update_captured_fpsi(stream, run->blank->raw, false);
_stream_expose_jpeg(stream, run->blank->jpeg);
_stream_expose_raw(stream, run->blank->raw);
_stream_encode_expose_h264(stream, run->blank->raw, true);
@@ -598,6 +676,19 @@ static int _stream_init_loop(us_stream_s *stream) {
return -1;
}
static void _stream_update_captured_fpsi(us_stream_s *stream, const us_frame_s *frame, bool bump) {
us_stream_runtime_s *const run = stream->run;
us_fpsi_meta_s meta = {0};
us_fpsi_frame_to_meta(frame, &meta);
us_fpsi_update(run->http->captured_fpsi, bump, &meta);
if (stream->notify_parent && memcmp(&run->notify_meta, &meta, sizeof(us_fpsi_meta_s))) {
memcpy(&run->notify_meta, &meta, sizeof(us_fpsi_meta_s));
us_process_notify_parent();
}
}
#ifdef WITH_V4P
static void _stream_drm_ensure_no_signal(us_stream_s *stream) {
if (stream->drm == NULL) {
@@ -634,6 +725,7 @@ static void _stream_expose_jpeg(us_stream_s *stream, const us_frame_s *frame) {
us_frame_s *const dest = run->http->jpeg_ring->items[ri];
us_frame_copy(frame, dest);
us_ring_producer_release(run->http->jpeg_ring, ri);
event_active(run->http->jpeg_refresher, 0, 0);
if (stream->jpeg_sink != NULL) {
us_memsink_server_put(stream->jpeg_sink, dest, NULL);
}
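The new FPS limiter in both encoder threads decimates frames by count rather than by timestamp: it computes take = ceil(captured_fps / limit) and encodes one frame out of every take. For example, at 60 FPS captured with --desired-fps 25, take is 3, so roughly every third frame is encoded (about 20 FPS). A minimal standalone sketch of that step/take pattern (should_encode() is an illustrative helper; the source uses the project's uint alias):

#include <math.h>
#include <stdbool.h>

// Returns true when the current frame should be encoded. *step is the caller's
// persistent per-thread counter starting at 1, exactly as in the diff above.
static bool should_encode(unsigned captured_fps, unsigned fps_limit, unsigned *step) {
	if (fps_limit == 0) {
		return true; // no limit configured
	}
	const unsigned take = ceilf((float)captured_fps / (float)fps_limit);
	if (*step < take) {
		++*step;   // skip this frame
		return false;
	}
	*step = 1;     // encode this one, restart the cycle
	return true;
}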

View File

@@ -26,6 +26,8 @@
#include <pthread.h>
#include <event2/event.h> // jpeg_refresher
#include "../libs/types.h"
#include "../libs/queue.h"
#include "../libs/ring.h"
@@ -51,6 +53,7 @@ typedef struct {
atomic_bool h264_online;
us_fpsi_s *h264_fpsi;
struct event *jpeg_refresher;
us_ring_s *jpeg_ring;
atomic_bool has_clients;
atomic_uint snapshot_requested;
@@ -68,6 +71,8 @@ typedef struct {
us_blank_s *blank;
us_fpsi_meta_s notify_meta;
atomic_bool stop;
} us_stream_runtime_s;
@@ -75,8 +80,11 @@ typedef struct {
us_capture_s *cap;
us_encoder_s *enc;
uint desired_fps;
bool notify_parent;
bool slowdown;
uint error_delay;
bool exit_on_device_error;
uint exit_on_no_clients;
us_memsink_s *jpeg_sink;
@@ -86,6 +94,7 @@ typedef struct {
uint h264_bitrate;
uint h264_gop;
char *h264_m2m_path;
bool h264_boost;
# ifdef WITH_V4P
us_drm_s *drm;

View File

@@ -37,7 +37,7 @@ static void *_worker_thread(void *v_worker);
us_workers_pool_s *us_workers_pool_init(
const char *name, const char *wr_prefix, uint n_workers, ldf desired_interval,
const char *name, const char *wr_prefix, uint n_workers,
us_workers_pool_job_init_f job_init, void *job_init_arg,
us_workers_pool_job_destroy_f job_destroy,
us_workers_pool_run_job_f run_job) {
@@ -47,7 +47,6 @@ us_workers_pool_s *us_workers_pool_init(
us_workers_pool_s *pool;
US_CALLOC(pool, 1);
pool->name = name;
pool->desired_interval = desired_interval;
pool->job_destroy = job_destroy;
pool->run_job = run_job;
@@ -147,14 +146,8 @@ ldf us_workers_pool_get_fluency_delay(us_workers_pool_s *pool, const us_worker_s
pool->approx_job_time = approx_job_time;
const ldf min_delay = pool->approx_job_time / pool->n_workers; // The average job time is spread across N workers
if (pool->desired_interval > 0 && min_delay > 0 && pool->desired_interval > min_delay) {
// Artificial delay based on the desired FPS, used when --desired-fps is enabled
// and the hardware fps does not hit the desired value exactly
return pool->desired_interval;
}
return min_delay;
// The average job time is spread across N workers
return (pool->approx_job_time / pool->n_workers);
}
static void *_worker_thread(void *v_worker) {
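With the --desired-fps regulation moved into the stream threads, the pool's fluency delay is now simply the average job time spread across the workers: for example, an approximate job time of 40 ms with 4 workers gives a 10 ms pacing delay, i.e. the pool itself can sustain roughly 100 FPS and no longer throttles below that on its own.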

View File

@@ -56,7 +56,6 @@ typedef bool (*us_workers_pool_run_job_f)(us_worker_s *wr);
typedef struct us_workers_pool_sx {
const char *name;
ldf desired_interval;
us_workers_pool_job_destroy_f job_destroy;
us_workers_pool_run_job_f run_job;
@@ -76,7 +75,7 @@ typedef struct us_workers_pool_sx {
us_workers_pool_s *us_workers_pool_init(
const char *name, const char *wr_prefix, uint n_workers, ldf desired_interval,
const char *name, const char *wr_prefix, uint n_workers,
us_workers_pool_job_init_f job_init, void *job_init_arg,
us_workers_pool_job_destroy_f job_destroy,
us_workers_pool_run_job_f run_job);

View File

@@ -242,7 +242,7 @@ static void _main_loop(void) {
us_drm_destroy(drm);
}
static void *_follower_thread(void *v_unix_follow) {
static void *_follower_thread(void *v_unix_follow) { // cppcheck-suppress constParameterCallback
US_THREAD_SETTLE("follower");
const char *path = v_unix_follow;
assert(path != NULL);