Compare commits

...

26 Commits
v5.43 ... v5.49

Author SHA1 Message Date
Maxim Devaev
b2ebcf99c8 Bump version: 5.48 → 5.49 2024-02-02 22:53:48 +02:00
Maxim Devaev
6a6b910869 refactoring 2024-02-02 22:41:45 +02:00
Maxim Devaev
4e8acf371f Issue #252: YVYU support 2024-02-02 22:41:45 +02:00
Thomer Gil
c4cb8288c7 Fix it's vs its typo (#255)
Co-authored-by: Thomer Gil <thomer@thomer.com>
2024-01-30 18:28:32 +02:00
Maxim Devaev
2dddb879bc Bump version: 5.47 → 5.48 2024-01-09 00:12:23 +02:00
Maxim Devaev
4d92dc662c removed some checks in gpio 2024-01-09 00:11:11 +02:00
Maxim Devaev
3cb649d97c Bump version: 5.46 → 5.47 2024-01-07 04:48:07 +02:00
Maxim Devaev
d9b5f2b03d Issue #255: libgpiod 2.0 API supported
Thanks to @jpalus for #249 with the compatibility code
2024-01-07 04:36:53 +02:00
Jan Palus
2997906d98 add option to make verbose builds (#251)
if V=1 is passed to make, echo invoked commands verbosely
2024-01-02 02:27:12 +02:00
Jan Palus
bcd3deaa13 remove remainings of rpi vc paths (#250)
accidentally left after the Makefile split and the subsequent switch to v4l2-based encoding
2024-01-02 02:24:37 +02:00
Maxim Devaev
f8ed7d7b3b Bump version: 5.45 → 5.46 2023-12-14 13:29:50 +02:00
Maxim Devaev
622f5cf1eb janus plugin: increased video queue 2023-12-14 12:41:29 +02:00
Maxim Devaev
81756811f3 update for bookworm 2023-11-15 14:59:34 +02:00
Maxim Devaev
bd403593a0 using libjpeg62-turbo on raspbian 2023-11-15 14:52:11 +02:00
Jason Huggins
fc3e0232e1 '-e' should be '--encoder' (#241) 2023-11-03 17:39:18 +02:00
Maxim Devaev
89fd83bfc1 added info for v2 camera 2023-10-28 11:05:55 +03:00
Maxim Devaev
83a77ea898 doc about youtube streaming 2023-10-24 20:18:07 +03:00
Maxim Devaev
33fdf9bf43 Bump version: 5.44 → 5.45 2023-10-24 14:14:00 +03:00
Ed Maste
6bd4ef59c0 avoid duplicate increment of slc (#239)
Clang reported "warning: variable 'slc' is incremented both in the loop
header and in the loop body".
2023-10-24 05:11:36 +03:00
Maxim Devaev
79987da1bf Bump version: 5.43 → 5.44 2023-10-14 01:47:24 +03:00
Maxim Devaev
05e5db09e4 fix 2023-10-12 04:19:23 +03:00
Maxim Devaev
55e432a529 Merge branch 'mp' 2023-10-12 04:13:10 +03:00
chr
4732c85ec4 Optimize JPEG scanline copy of yuv format (#235)
* opt jpeg scanline copy with yuv format

* remove unused macro
2023-10-12 04:11:34 +03:00
Michael Lynch
0ce7f28754 Correct typo on 'interval' (#236)
This fixes a minor typo on the word 'interval'.
2023-10-10 20:19:24 +03:00
Maxim Devaev
a2641dfcb6 some multiplane fixes 2023-10-10 20:13:57 +03:00
Artem
ec33425c05 Multi Planar device support (#233)
* added multi planar device support (RK3588 HDMI IN)

* sync with upstream version

* fix use of a local variable after free

Signed-off-by: Artem Mamonov <artyom.mamonov@gmail.com>

* request buffer length = VIDEO_MAX_PLANES for multi-planar devices

---------

Signed-off-by: Artem Mamonov <artyom.mamonov@gmail.com>
Co-authored-by: hongruichen <chraac@gmail.com>
2023-10-08 19:27:17 +03:00
22 changed files with 416 additions and 187 deletions

View File

@@ -1,7 +1,7 @@
[bumpversion]
commit = True
tag = True
current_version = 5.43
current_version = 5.49
parse = (?P<major>\d+)\.(?P<minor>\d+)
serialize =
{major}.{minor}

View File

@@ -9,9 +9,6 @@ PY ?= python3
CFLAGS ?= -O3
LDFLAGS ?=
RPI_VC_HEADERS ?= /opt/vc/include
RPI_VC_LIBS ?= /opt/vc/lib
export
_LINTERS_IMAGE ?= ustreamer-linters
@@ -22,6 +19,9 @@ define optbool
$(filter $(shell echo $(1) | tr A-Z a-z), yes on 1)
endef
ifeq ($(V),)
ECHO = @
endif
# =====
all:
@@ -36,18 +36,18 @@ endif
apps:
$(MAKE) -C src
@ ln -sf src/ustreamer.bin ustreamer
@ ln -sf src/ustreamer-dump.bin ustreamer-dump
$(ECHO) ln -sf src/ustreamer.bin ustreamer
$(ECHO) ln -sf src/ustreamer-dump.bin ustreamer-dump
python:
$(MAKE) -C python
@ ln -sf python/build/lib.*/*.so .
$(ECHO) ln -sf python/build/lib.*/*.so .
janus:
$(MAKE) -C janus
@ ln -sf janus/*.so .
$(ECHO) ln -sf janus/*.so .
install: all

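In effect, `$(ECHO)` expands to `@` (which suppresses command echoing) unless `V=1` is passed, so for example `make V=1 apps` prints every `ln -sf` and compiler invocation in addition to the usual `$(info ...)` progress lines.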
View File

@@ -47,15 +47,13 @@ You need to download the µStreamer onto your system and build it from the sourc
You'll need ```make```, ```gcc```, ```libevent``` with ```pthreads``` support, ```libjpeg9```/```libjpeg-turbo``` and ```libbsd``` (only for Linux).
* Arch: `sudo pacman -S libevent libjpeg-turbo libutil-linux libbsd`.
* Raspbian: `sudo apt install libevent-dev libjpeg9-dev libbsd-dev`. Add `libgpiod-dev` for `WITH_GPIO=1` and `libsystemd-dev` for `WITH_SYSTEMD=1` and `libasound2-dev libspeex-dev libspeexdsp-dev libopus-dev` for `WITH_JANUS=1`.
* Raspberry OS Bullseye: `sudo apt install libevent-dev libjpeg62-turbo libbsd-dev`. Add `libgpiod-dev` for `WITH_GPIO=1` and `libsystemd-dev` for `WITH_SYSTEMD=1` and `libasound2-dev libspeex-dev libspeexdsp-dev libopus-dev` for `WITH_JANUS=1`.
* Raspberry OS Bookworm: same as the previous, but replace `libjpeg62-turbo` with `libjpeg62-turbo-dev`.
* Debian/Ubuntu: `sudo apt install build-essential libevent-dev libjpeg-dev libbsd-dev`.
* Alpine: `sudo apk add libevent-dev libbsd-dev libjpeg-turbo-dev musl-dev`. Build with `WITH_PTHREAD_NP=0`.
To enable GPIO support install [libgpiod](https://git.kernel.org/pub/scm/libs/libgpiod/libgpiod.git/about) and pass option ```WITH_GPIO=1```. If the compiler reports a missing function ```pthread_get_name_np()``` (or similar), add option ```WITH_PTHREAD_NP=0``` (it's enabled by default). For a similar error with ```setproctitle()```, add option ```WITH_SETPROCTITLE=0```.
> **Note**
> Raspbian: In case your version of Raspbian is too old to have a libjpeg9 package (`E: Package 'libjpeg9-dev' has no installation candidate`), use `libjpeg8-dev` instead.
### Make
The most convenient process is to clone the µStreamer Git repository onto your system. If you don't have Git installed and don't want to install it either, you can download and unzip the sources from GitHub using `wget https://github.com/pikvm/ustreamer/archive/refs/heads/master.zip`.
@@ -156,9 +154,10 @@ Add `-e EDID=1` to set HDMI EDID before starting ustreamer. Use together with `-
Example usage for the Raspberry Pi v3 camera (requires `libcamerify`, which is located in the `libcamera-tools` package on Raspbian):
```
$ sudo modprobe bcm2835-v4l2
$ libcamerify ./ustreamer --host :: -e m2m-image
$ libcamerify ./ustreamer --host :: --encoder=m2m-image
```
For the v2 camera you can use the same trick with `libcamerify`, but enable legacy camera mode in `raspi-config`.
Example usage for the Raspberry Pi v1 camera:
```
@@ -181,7 +180,7 @@ $ modprobe bcm2835-v4l2 max_video_width=2592 max_video_height=1944
µStreamer supports bandwidth-efficient streaming using [H.264 compression](https://en.wikipedia.org/wiki/Advanced_Video_Coding) and the Janus WebRTC server. See the [Janus integration guide](docs/h264.md) for full details.
## Nginx
When uStreamer is behind an Nginx proxy, it's buffering behavior introduces latency into the video stream. It's possible to disable Nginx's buffering to eliminate the additional latency:
When uStreamer is behind an Nginx proxy, its buffering behavior introduces latency into the video stream. It's possible to disable Nginx's buffering to eliminate the additional latency:
```nginx
location /stream {

docs/youtube.md Normal file
View File

@@ -0,0 +1,97 @@
## Streaming to third-party services
This method provides a way to stream a USB webcam, including audio, to large audiences.
It uses two machines. One is a Raspberry Pi; the other is a more capable machine that performs
the encoding of the video and audio streamed to a third-party service such as YouTube.
Another benefit of using a browser (HTTP stream) is that the video can have overlays added in the custom ustreamer webpage:
for example, a cron process that retrieves weather information and updates a file included on the page, announcements,
or other creative ideas. The audio stream can also be something other than the webcam mic (music, voice files, etc.)
and is easily changed in the second machine's setup. In the following example, filtering is applied in ffmpeg to
improve the sound of the webcam mic, making vocals clearer and more intelligible.
* Machine 1:
* USB webcam on the machine (a Pi, for example) running ustreamer (video) and VLC (audio). Remember to make any needed firewall changes if machine 2 is on a separate network, so it can reach the ports for the video and audio.
* To stream audio from the Pi.
```
/usr/bin/vlc -I dummy -vvv alsa://hw:2,0 --sout '#transcode{acodec=mp3,ab=128}:standard{access=http,mux=ts,dst=:[PickAPort]}'
```
* Machine 2:
* On a more capable box, run the video stream in a browser and use ffmpeg to combine the video (captured from the browser) with the audio, streaming the result to YouTube or other services. In this example, a VM with two virtual monitors is used, with the browser running full screen on one of the monitors.
Script to stream the combination to YouTube:
```bash
#!/bin/bash
KEY=$1
echo
echo Cleanup -------------------------------------------------
source live-yt.key
killall -9 ffmpeg
killall -9 chromium
sleep 3
echo Setup General--------------------------------------------
cd /home/[USER]
rm -f nohup.out
export DISPLAY=:0.0
export $(dbus-launch)
echo Setup Chromium-------------------------------------------
CHROMIUM_TEMP=/home/[USER]/tmp/chromium
rm -rf $CHROMIUM_TEMP.bak
mv $CHROMIUM_TEMP $CHROMIUM_TEMP.bak
mkdir -p $CHROMIUM_TEMP
echo Start Chromium ------------------------------------------
nohup /usr/lib/chromium/chromium \
--new-window "http://[ustreamerURL]" \
--start-fullscreen \
--disable \
--disable-translate \
--disable-infobars \
--disable-suggestions-service \
--disable-save-password-bubble \
--disable-new-tab-first-run \
--disable-session-crashed-bubble \
--disable-bundled-ppapi-flash \
--disable-gpu \
--enable-javascript \
--enable-user-scripts \
--disk-cache-dir=$CHROMIUM_TEMP/cache/ustreamer/ \
--user-data-dir=$CHROMIUM_TEMP/user_data/ustreamer/ \
--window-position=1440,12 \
>/dev/null 2>&1 &
sleep 5
echo Start FFMpeg---------------------------------------------
nohup /usr/bin/ffmpeg \
-loglevel level+warning \
-thread_queue_size 512 \
-framerate 30 \
-f x11grab \
-s 1920x1080 \
-probesize 42M \
-i :0.0+1024,0 \
-i http://[VLCaudioURL] \
-filter:a "volume=10, highpass=f=300, lowpass=f=2800" \
-c:v libx264 \
-pix_fmt yuv420p \
-g 60 \
-b:v 2500k \
-c:a libmp3lame \
-ar 44100 \
-b:a 32k \
-preset ultrafast \
-maxrate 5000k \
-bufsize 2500k \
-flvflags no_duration_filesize \
-f flv "rtmp://a.rtmp.youtube.com/live2/$KEY" \
>/home/[USER]/ff-audio.log 2>&1 &
echo Done ----------------------------------------------------
echo
```
*PS: Recipe by David Klippel*

View File

@@ -31,13 +31,13 @@ endif
# =====
$(_PLUGIN): $(_SRCS:%.c=$(_BUILD)/%.o)
$(info == SO $@)
@ $(CC) $^ -o $@ $(_LDFLAGS)
$(ECHO) $(CC) $^ -o $@ $(_LDFLAGS)
$(_BUILD)/%.o: %.c
$(info -- CC $<)
@ mkdir -p $(dir $@) || true
@ $(CC) $< -o $@ $(_CFLAGS)
$(ECHO) mkdir -p $(dir $@) || true
$(ECHO) $(CC) $< -o $@ $(_CFLAGS)

View File

@@ -38,7 +38,7 @@ us_janus_client_s *us_janus_client_init(janus_callbacks *gw, janus_plugin_sessio
atomic_init(&client->stop, false);
client->video_queue = us_queue_init(1024);
client->video_queue = us_queue_init(2048);
US_THREAD_CREATE(client->video_tid, _video_thread, client);
client->audio_queue = us_queue_init(64);

View File

@@ -1,6 +1,6 @@
.\" Manpage for ustreamer-dump.
.\" Open an issue or pull request to https://github.com/pikvm/ustreamer to correct errors or typos
.TH USTREAMER-DUMP 1 "version 5.43" "January 2021"
.TH USTREAMER-DUMP 1 "version 5.49" "January 2021"
.SH NAME
ustreamer-dump \- Dump uStreamer's memory sink to file

View File

@@ -1,6 +1,6 @@
.\" Manpage for ustreamer.
.\" Open an issue or pull request to https://github.com/pikvm/ustreamer to correct errors or typos
.TH USTREAMER 1 "version 5.43" "November 2020"
.TH USTREAMER 1 "version 5.49" "November 2020"
.SH NAME
ustreamer \- stream MJPEG video from any V4L2 device to the network
@@ -52,7 +52,7 @@ Initial image resolution. Default: 640x480.
.TP
.BR \-m\ \fIfmt ", " \-\-format\ \fIfmt
Image format.
Available: YUYV, UYVY, RGB565, RGB24, JPEG; default: YUYV.
Available: YUYV, YVYU, UYVY, RGB565, RGB24, JPEG; default: YUYV.
.TP
.BR \-a\ \fIstd ", " \-\-tv\-standard\ \fIstd
Force TV standard.
@@ -248,7 +248,7 @@ Timeout for lock. Default: 1.
H264 bitrate in Kbps. Default: 5000.
.TP
.BR \-\-h264\-gop\ \fIN
Intarval between keyframes. Default: 30.
Interval between keyframes. Default: 30.
.TP
.BR \-\-h264\-m2m\-device\ \fI/dev/path
Path to V4L2 mem-to-mem encoder device. Default: auto-select.

View File

@@ -3,7 +3,7 @@
pkgname=ustreamer
pkgver=5.43
pkgver=5.49
pkgrel=1
pkgdesc="Lightweight and fast MJPEG-HTTP streamer"
url="https://github.com/pikvm/ustreamer"

View File

@@ -6,7 +6,7 @@
include $(TOPDIR)/rules.mk
PKG_NAME:=ustreamer
PKG_VERSION:=5.43
PKG_VERSION:=5.49
PKG_RELEASE:=1
PKG_MAINTAINER:=Maxim Devaev <mdevaev@gmail.com>

View File

@@ -9,7 +9,7 @@ PY ?= python3
# =====
all:
$(info == PY_BUILD ustreamer-*.so)
@ $(PY) setup.py build
$(ECHO) $(PY) setup.py build
install:

View File

@@ -17,7 +17,7 @@ def _find_sources(suffix: str) -> list[str]:
if __name__ == "__main__":
setup(
name="ustreamer",
version="5.43",
version="5.49",
description="uStreamer tools",
author="Maxim Devaev",
author_email="mdevaev@gmail.com",

View File

@@ -42,7 +42,7 @@ endef
ifneq ($(call optbool,$(WITH_GPIO)),)
_USTR_LIBS += -lgpiod
override _CFLAGS += -DWITH_GPIO
override _CFLAGS += -DWITH_GPIO $(shell pkg-config --atleast-version=2 libgpiod 2> /dev/null && echo -DHAVE_GPIOD2)
_USTR_SRCS += $(shell ls ustreamer/gpio/*.c)
endif
@@ -86,18 +86,18 @@ install-strip: install
$(_USTR): $(_USTR_SRCS:%.c=$(_BUILD)/%.o)
$(info == LD $@)
@ $(CC) $^ -o $@ $(_LDFLAGS) $(_USTR_LIBS)
$(ECHO) $(CC) $^ -o $@ $(_LDFLAGS) $(_USTR_LIBS)
$(_DUMP): $(_DUMP_SRCS:%.c=$(_BUILD)/%.o)
$(info == LD $@)
@ $(CC) $^ -o $@ $(_LDFLAGS) $(_DUMP_LIBS)
$(ECHO) $(CC) $^ -o $@ $(_LDFLAGS) $(_DUMP_LIBS)
$(_BUILD)/%.o: %.c
$(info -- CC $<)
@ mkdir -p $(dir $@) || true
@ $(CC) $< -o $@ $(_CFLAGS)
$(ECHO) mkdir -p $(dir $@) || true
$(ECHO) $(CC) $< -o $@ $(_CFLAGS)
clean:

View File

@@ -23,7 +23,7 @@
#pragma once
#define US_VERSION_MAJOR 5
#define US_VERSION_MINOR 43
#define US_VERSION_MINOR 49
#define US_MAKE_VERSION2(_major, _minor) #_major "." #_minor
#define US_MAKE_VERSION1(_major, _minor) US_MAKE_VERSION2(_major, _minor)

View File

@@ -73,8 +73,10 @@ unsigned us_frame_get_padding(const us_frame_s *frame) {
unsigned bytes_per_pixel = 0;
switch (frame->format) {
case V4L2_PIX_FMT_YUYV:
case V4L2_PIX_FMT_YVYU:
case V4L2_PIX_FMT_UYVY:
case V4L2_PIX_FMT_RGB565: bytes_per_pixel = 2; break;
case V4L2_PIX_FMT_BGR24:
case V4L2_PIX_FMT_RGB24: bytes_per_pixel = 3; break;
// case V4L2_PIX_FMT_H264:
case V4L2_PIX_FMT_MJPEG:

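For context, `us_frame_get_padding()` derives the per-line padding from the driver-reported stride and the per-pixel byte counts shown in this table. A minimal sketch of that arithmetic, with a hypothetical helper (not µStreamer's API):

```c
#include <assert.h>

// Hypothetical illustration: padding is whatever the stride adds beyond
// the visible pixels. E.g. a 1000-pixel-wide YUYV frame (2 bytes/pixel)
// probed with a 32-pixel-aligned stride of 2048 bytes carries
// 2048 - 1000 * 2 = 48 padding bytes per line.
static unsigned line_padding(unsigned width, unsigned bytes_per_pixel, unsigned stride) {
	assert(stride >= width * bytes_per_pixel);
	return stride - width * bytes_per_pixel;
}
```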
View File

@@ -38,9 +38,11 @@ static const struct {
const unsigned format; // cppcheck-suppress unusedStructMember
} _FORMATS[] = {
{"YUYV", V4L2_PIX_FMT_YUYV},
{"YVYU", V4L2_PIX_FMT_YVYU},
{"UYVY", V4L2_PIX_FMT_UYVY},
{"RGB565", V4L2_PIX_FMT_RGB565},
{"RGB24", V4L2_PIX_FMT_RGB24},
{"BGR24", V4L2_PIX_FMT_BGR24},
{"MJPEG", V4L2_PIX_FMT_MJPEG},
{"JPEG", V4L2_PIX_FMT_JPEG},
};
@@ -54,6 +56,7 @@ static const struct {
};
static void _v4l2_buffer_copy(const struct v4l2_buffer *src, struct v4l2_buffer *dest);
static bool _device_is_buffer_valid(us_device_s *dev, const struct v4l2_buffer *buf, const uint8_t *data);
static int _device_open_check_cap(us_device_s *dev);
static int _device_open_dv_timings(us_device_s *dev);
@@ -83,6 +86,7 @@ static const char *_io_method_to_string_supported(enum v4l2_memory io_method);
#define _RUN(x_next) dev->run->x_next
#define _D_XIOCTL(...) us_xioctl(_RUN(fd), __VA_ARGS__)
#define _D_IS_MPLANE (_RUN(capture_type) == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
us_device_s *us_device_init(void) {
@@ -195,6 +199,10 @@ void us_device_close(us_device_s *dev) {
US_DELETE(HW(raw.data), free);
}
if (_D_IS_MPLANE) {
free(HW(buf.m.planes));
}
# undef HW
}
_RUN(n_bufs) = 0;
@@ -218,7 +226,7 @@ int us_device_export_to_dma(us_device_s *dev) {
for (unsigned index = 0; index < _RUN(n_bufs); ++index) {
struct v4l2_exportbuffer exp = {0};
exp.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
exp.type = _RUN(capture_type);
exp.index = index;
US_LOG_DEBUG("Exporting device buffer=%u to DMA ...", index);
@@ -245,7 +253,7 @@ int us_device_export_to_dma(us_device_s *dev) {
int us_device_switch_capturing(us_device_s *dev, bool enable) {
if (enable != _RUN(capturing)) {
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
enum v4l2_buf_type type = _RUN(capture_type);
US_LOG_DEBUG("%s device capturing ...", (enable ? "Starting" : "Stopping"));
if (_D_XIOCTL((enable ? VIDIOC_STREAMON : VIDIOC_STREAMOFF), &type) < 0) {
@@ -311,6 +319,12 @@ int us_device_grab_buffer(us_device_s *dev, us_hw_buffer_s **hw) {
*hw = NULL;
struct v4l2_buffer buf = {0};
struct v4l2_plane buf_planes[VIDEO_MAX_PLANES] = {0};
if (_D_IS_MPLANE) {
// Just for _v4l2_buffer_copy(), buf.length is not needed here
buf.m.planes = buf_planes;
}
bool buf_got = false;
unsigned skipped = 0;
bool broken = false;
@@ -319,8 +333,14 @@ int us_device_grab_buffer(us_device_s *dev, us_hw_buffer_s **hw) {
do {
struct v4l2_buffer new = {0};
new.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
struct v4l2_plane new_planes[VIDEO_MAX_PLANES] = {0};
new.type = _RUN(capture_type);
new.memory = dev->io_method;
if (_D_IS_MPLANE) {
new.length = VIDEO_MAX_PLANES;
new.m.planes = new_planes;
}
const bool new_got = (_D_XIOCTL(VIDIOC_DQBUF, &new) >= 0);
if (new_got) {
@@ -338,6 +358,10 @@ int us_device_grab_buffer(us_device_s *dev, us_hw_buffer_s **hw) {
}
GRABBED(new) = true;
if (_D_IS_MPLANE) {
new.bytesused = new.m.planes[0].bytesused;
}
broken = !_device_is_buffer_valid(dev, &new, FRAME_DATA(new));
if (broken) {
US_LOG_DEBUG("Releasing device buffer=%u (broken frame) ...", new.index);
@@ -362,7 +386,7 @@ int us_device_grab_buffer(us_device_s *dev, us_hw_buffer_s **hw) {
# undef GRABBED
# undef FRAME_DATA
memcpy(&buf, &new, sizeof(struct v4l2_buffer));
_v4l2_buffer_copy(&new, &buf);
buf_got = true;
} else {
@@ -386,8 +410,8 @@ int us_device_grab_buffer(us_device_s *dev, us_hw_buffer_s **hw) {
HW(raw.format) = _RUN(format);
HW(raw.stride) = _RUN(stride);
HW(raw.online) = true;
memcpy(&HW(buf), &buf, sizeof(struct v4l2_buffer));
HW(raw.grab_ts)= (long double)((buf.timestamp.tv_sec * (uint64_t)1000) + (buf.timestamp.tv_usec / 1000)) / 1000;
_v4l2_buffer_copy(&buf, &HW(buf));
HW(raw.grab_ts) = (long double)((buf.timestamp.tv_sec * (uint64_t)1000) + (buf.timestamp.tv_usec / 1000)) / 1000;
US_LOG_DEBUG("Grabbed new frame: buffer=%u, bytesused=%u, grab_ts=%.3Lf, latency=%.3Lf, skipped=%u",
buf.index, buf.bytesused, HW(raw.grab_ts), us_get_now_monotonic() - HW(raw.grab_ts), skipped);
# undef HW
@@ -427,6 +451,16 @@ int us_device_consume_event(us_device_s *dev) {
return 0;
}
static void _v4l2_buffer_copy(const struct v4l2_buffer *src, struct v4l2_buffer *dest) {
struct v4l2_plane *dest_planes = dest->m.planes;
memcpy(dest, src, sizeof(struct v4l2_buffer));
if (src->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
assert(dest_planes);
dest->m.planes = dest_planes;
memcpy(dest->m.planes, src->m.planes, sizeof(struct v4l2_plane) * VIDEO_MAX_PLANES);
}
}
bool _device_is_buffer_valid(us_device_s *dev, const struct v4l2_buffer *buf, const uint8_t *data) {
// Workaround for broken, corrupted frames:
// Under low light conditions corrupted frames may get captured.
@@ -475,7 +509,13 @@ static int _device_open_check_cap(us_device_s *dev) {
return -1;
}
if (!(cap.capabilities & V4L2_CAP_VIDEO_CAPTURE)) {
if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE) {
_RUN(capture_type) = V4L2_BUF_TYPE_VIDEO_CAPTURE;
US_LOG_INFO("Using capture type: single-planar");
} else if (cap.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE) {
_RUN(capture_type) = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
US_LOG_INFO("Using capture type: multi-planar");
} else {
US_LOG_ERROR("Video capture is not supported by device");
return -1;
}
@@ -485,11 +525,13 @@ static int _device_open_check_cap(us_device_s *dev) {
return -1;
}
int input = dev->input; // Needs a pointer to int for ioctl()
US_LOG_INFO("Using input channel: %d", input);
if (_D_XIOCTL(VIDIOC_S_INPUT, &input) < 0) {
US_LOG_ERROR("Can't set input channel");
return -1;
if (!_D_IS_MPLANE) {
int input = dev->input; // Needs a pointer to int for ioctl()
US_LOG_INFO("Using input channel: %d", input);
if (_D_XIOCTL(VIDIOC_S_INPUT, &input) < 0) {
US_LOG_ERROR("Can't set input channel");
return -1;
}
}
if (dev->standard != V4L2_STD_UNKNOWN) {
@@ -567,16 +609,25 @@ static int _device_apply_dv_timings(us_device_s *dev) {
return 0;
}
static int _device_open_format(us_device_s *dev, bool first) {
static int _device_open_format(us_device_s *dev, bool first) { // FIXME
const unsigned stride = us_align_size(_RUN(width), 32) << 1;
struct v4l2_format fmt = {0};
fmt.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
fmt.fmt.pix.width = _RUN(width);
fmt.fmt.pix.height = _RUN(height);
fmt.fmt.pix.pixelformat = dev->format;
fmt.fmt.pix.field = V4L2_FIELD_ANY;
fmt.fmt.pix.bytesperline = stride;
fmt.type = _RUN(capture_type);
if (_D_IS_MPLANE) {
fmt.fmt.pix_mp.width = _RUN(width);
fmt.fmt.pix_mp.height = _RUN(height);
fmt.fmt.pix_mp.pixelformat = dev->format;
fmt.fmt.pix_mp.field = V4L2_FIELD_ANY;
fmt.fmt.pix_mp.flags = 0;
fmt.fmt.pix_mp.num_planes = 1;
} else {
fmt.fmt.pix.width = _RUN(width);
fmt.fmt.pix.height = _RUN(height);
fmt.fmt.pix.pixelformat = dev->format;
fmt.fmt.pix.field = V4L2_FIELD_ANY;
fmt.fmt.pix.bytesperline = stride;
}
// Set format
US_LOG_DEBUG("Probing device format=%s, stride=%u, resolution=%ux%u ...",
@@ -586,13 +637,21 @@ static int _device_open_format(us_device_s *dev, bool first) {
return -1;
}
if (fmt.type != _RUN(capture_type)) {
US_LOG_ERROR("Capture format mismatch, please report to the developer");
return -1;
}
# define FMT(x_next) (_D_IS_MPLANE ? fmt.fmt.pix_mp.x_next : fmt.fmt.pix.x_next)
# define FMTS(x_next) (_D_IS_MPLANE ? fmt.fmt.pix_mp.plane_fmt[0].x_next : fmt.fmt.pix.x_next)
// Check resolution
bool retry = false;
if (fmt.fmt.pix.width != _RUN(width) || fmt.fmt.pix.height != _RUN(height)) {
if (FMT(width) != _RUN(width) || FMT(height) != _RUN(height)) {
US_LOG_ERROR("Requested resolution=%ux%u is unavailable", _RUN(width), _RUN(height));
retry = true;
}
if (_device_apply_resolution(dev, fmt.fmt.pix.width, fmt.fmt.pix.height) < 0) {
if (_device_apply_resolution(dev, FMT(width), FMT(height)) < 0) {
return -1;
}
if (first && retry) {
@@ -601,27 +660,32 @@ static int _device_open_format(us_device_s *dev, bool first) {
US_LOG_INFO("Using resolution: %ux%u", _RUN(width), _RUN(height));
// Check format
if (fmt.fmt.pix.pixelformat != dev->format) {
if (FMT(pixelformat) != dev->format) {
US_LOG_ERROR("Could not obtain the requested format=%s; driver gave us %s",
_format_to_string_supported(dev->format),
_format_to_string_supported(fmt.fmt.pix.pixelformat));
_format_to_string_supported(FMT(pixelformat)));
char *format_str;
if ((format_str = (char *)_format_to_string_nullable(fmt.fmt.pix.pixelformat)) != NULL) {
if ((format_str = (char *)_format_to_string_nullable(FMT(pixelformat))) != NULL) {
US_LOG_INFO("Falling back to format=%s", format_str);
} else {
char fourcc_str[8];
US_LOG_ERROR("Unsupported format=%s (fourcc)",
us_fourcc_to_string(fmt.fmt.pix.pixelformat, fourcc_str, 8));
us_fourcc_to_string(FMT(pixelformat), fourcc_str, 8));
return -1;
}
}
_RUN(format) = fmt.fmt.pix.pixelformat;
_RUN(format) = FMT(pixelformat);
US_LOG_INFO("Using format: %s", _format_to_string_supported(_RUN(format)));
_RUN(stride) = fmt.fmt.pix.bytesperline;
_RUN(raw_size) = fmt.fmt.pix.sizeimage; // Only for userptr
_RUN(stride) = FMTS(bytesperline);
_RUN(raw_size) = FMTS(sizeimage); // Only for userptr
# undef FMTS
# undef FMT
return 0;
}
@@ -629,7 +693,7 @@ static void _device_open_hw_fps(us_device_s *dev) {
_RUN(hw_fps) = 0;
struct v4l2_streamparm setfps = {0};
setfps.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
setfps.type = _RUN(capture_type);
US_LOG_DEBUG("Querying HW FPS ...");
if (_D_XIOCTL(VIDIOC_G_PARM, &setfps) < 0) {
@@ -649,7 +713,7 @@ static void _device_open_hw_fps(us_device_s *dev) {
# define SETFPS_TPF(x_next) setfps.parm.capture.timeperframe.x_next
US_MEMSET_ZERO(setfps);
setfps.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
setfps.type = _RUN(capture_type);
SETFPS_TPF(numerator) = 1;
SETFPS_TPF(denominator) = (dev->desired_fps == 0 ? 255 : dev->desired_fps);
@@ -712,7 +776,7 @@ static int _device_open_io_method(us_device_s *dev) {
static int _device_open_io_method_mmap(us_device_s *dev) {
struct v4l2_requestbuffers req = {0};
req.count = dev->n_bufs;
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.type = _RUN(capture_type);
req.memory = V4L2_MEMORY_MMAP;
US_LOG_DEBUG("Requesting %u device buffers for MMAP ...", req.count);
@@ -733,9 +797,14 @@ static int _device_open_io_method_mmap(us_device_s *dev) {
US_CALLOC(_RUN(hw_bufs), req.count);
for (_RUN(n_bufs) = 0; _RUN(n_bufs) < req.count; ++_RUN(n_bufs)) {
struct v4l2_buffer buf = {0};
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
struct v4l2_plane planes[VIDEO_MAX_PLANES] = {0};
buf.type = _RUN(capture_type);
buf.memory = V4L2_MEMORY_MMAP;
buf.index = _RUN(n_bufs);
if (_D_IS_MPLANE) {
buf.m.planes = planes;
buf.length = VIDEO_MAX_PLANES;
}
US_LOG_DEBUG("Calling us_xioctl(VIDIOC_QUERYBUF) for device buffer=%u ...", _RUN(n_bufs));
if (_D_XIOCTL(VIDIOC_QUERYBUF, &buf) < 0) {
@@ -747,20 +816,28 @@ static int _device_open_io_method_mmap(us_device_s *dev) {
HW(dma_fd) = -1;
const size_t buf_size = (_D_IS_MPLANE ? buf.m.planes[0].length : buf.length);
const off_t buf_offset = (_D_IS_MPLANE ? buf.m.planes[0].m.mem_offset : buf.m.offset);
US_LOG_DEBUG("Mapping device buffer=%u ...", _RUN(n_bufs));
if ((HW(raw.data) = mmap(
NULL,
buf.length,
buf_size,
PROT_READ | PROT_WRITE,
MAP_SHARED,
_RUN(fd),
buf.m.offset
buf_offset
)) == MAP_FAILED) {
US_LOG_PERROR("Can't map device buffer=%u", _RUN(n_bufs));
return -1;
}
assert(HW(raw.data) != NULL);
HW(raw.allocated) = buf.length;
HW(raw.allocated) = buf_size;
if (_D_IS_MPLANE) {
US_CALLOC(HW(buf.m.planes), VIDEO_MAX_PLANES);
}
# undef HW
}
@@ -770,7 +847,7 @@ static int _device_open_io_method_mmap(us_device_s *dev) {
static int _device_open_io_method_userptr(us_device_s *dev) {
struct v4l2_requestbuffers req = {0};
req.count = dev->n_bufs;
req.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
req.type = _RUN(capture_type);
req.memory = V4L2_MEMORY_USERPTR;
US_LOG_DEBUG("Requesting %u device buffers for USERPTR ...", req.count);
@@ -798,6 +875,9 @@ static int _device_open_io_method_userptr(us_device_s *dev) {
assert((HW(raw.data) = aligned_alloc(page_size, buf_size)) != NULL);
memset(HW(raw.data), 0, buf_size);
HW(raw.allocated) = buf_size;
if (_D_IS_MPLANE) {
US_CALLOC(HW(buf.m.planes), VIDEO_MAX_PLANES);
}
# undef HW
}
return 0;
@@ -806,10 +886,18 @@ static int _device_open_io_method_userptr(us_device_s *dev) {
static int _device_open_queue_buffers(us_device_s *dev) {
for (unsigned index = 0; index < _RUN(n_bufs); ++index) {
struct v4l2_buffer buf = {0};
buf.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
struct v4l2_plane planes[VIDEO_MAX_PLANES] = {0};
buf.type = _RUN(capture_type);
buf.memory = dev->io_method;
buf.index = index;
if (_D_IS_MPLANE) {
buf.m.planes = planes;
buf.length = 1;
}
if (dev->io_method == V4L2_MEMORY_USERPTR) {
// I am not sure, maybe this is incorrect for an mplane device,
// but I don't have one which supports V4L2_MEMORY_USERPTR
buf.m.userptr = (unsigned long)_RUN(hw_bufs)[index].raw.data;
buf.length = _RUN(hw_bufs)[index].raw.allocated;
}

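The pattern running through this file: attach a caller-owned `v4l2_plane` array before every buffer ioctl on a multi-planar device, then mirror `planes[0].bytesused` into `bytesused` so the rest of the code stays plane-agnostic. A condensed sketch of just the dequeue step, using plain V4L2 calls (`fd`, `capture_type`, and `io_method` stand in for the `_RUN(...)` runtime fields):

```c
#include <string.h>
#include <sys/ioctl.h>
#include <linux/videodev2.h>

// Dequeue one buffer from a single- or multi-planar capture device.
// For MPLANE the kernel fills the caller-provided planes array, and the
// payload size lives in planes[0].bytesused instead of bytesused.
static int dequeue_buffer(int fd, enum v4l2_buf_type capture_type,
		enum v4l2_memory io_method, struct v4l2_buffer *buf) {
	struct v4l2_plane planes[VIDEO_MAX_PLANES];
	memset(buf, 0, sizeof(*buf));
	memset(planes, 0, sizeof(planes));
	buf->type = capture_type;
	buf->memory = io_method;
	if (capture_type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
		buf->length = VIDEO_MAX_PLANES; // Room for however many planes the driver reports
		buf->m.planes = planes;
	}
	if (ioctl(fd, VIDIOC_DQBUF, buf) < 0) {
		return -1;
	}
	if (capture_type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE) {
		buf->bytesused = buf->m.planes[0].bytesused; // Mirror the payload size
		buf->m.planes = NULL; // The stack array goes out of scope here
	}
	return 0;
}
```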
View File

@@ -61,7 +61,7 @@
#define US_STANDARDS_STR "PAL, NTSC, SECAM"
#define US_FORMAT_UNKNOWN -1
#define US_FORMATS_STR "YUYV, UYVY, RGB565, RGB24, MJPEG, JPEG"
#define US_FORMATS_STR "YUYV, YVYU, UYVY, RGB565, RGB24, BGR24, MJPEG, JPEG"
#define US_IO_METHOD_UNKNOWN -1
#define US_IO_METHODS_STR "MMAP, USERPTR"
@@ -75,18 +75,19 @@ typedef struct {
} us_hw_buffer_s;
typedef struct {
int fd;
unsigned width;
unsigned height;
unsigned format;
unsigned stride;
unsigned hw_fps;
unsigned jpeg_quality;
size_t raw_size;
unsigned n_bufs;
us_hw_buffer_s *hw_bufs;
bool capturing;
bool persistent_timeout_reported;
int fd;
unsigned width;
unsigned height;
unsigned format;
unsigned stride;
unsigned hw_fps;
unsigned jpeg_quality;
size_t raw_size;
unsigned n_bufs;
us_hw_buffer_s *hw_bufs;
enum v4l2_buf_type capture_type;
bool capturing;
bool persistent_timeout_reported;
} us_device_runtime_s;
typedef enum {
@@ -132,9 +133,7 @@ typedef struct {
size_t min_frame_size;
bool persistent;
unsigned timeout;
us_controls_s ctl;
us_device_runtime_s *run;
} us_device_s;

View File

@@ -37,10 +37,10 @@ typedef struct {
static void _jpeg_set_dest_frame(j_compress_ptr jpeg, us_frame_s *frame);
static void _jpeg_write_scanlines_yuyv(struct jpeg_compress_struct *jpeg, const us_frame_s *frame);
static void _jpeg_write_scanlines_uyvy(struct jpeg_compress_struct *jpeg, const us_frame_s *frame);
static void _jpeg_write_scanlines_yuv(struct jpeg_compress_struct *jpeg, const us_frame_s *frame);
static void _jpeg_write_scanlines_rgb565(struct jpeg_compress_struct *jpeg, const us_frame_s *frame);
static void _jpeg_write_scanlines_rgb24(struct jpeg_compress_struct *jpeg, const us_frame_s *frame);
static void _jpeg_write_scanlines_bgr24(struct jpeg_compress_struct *jpeg, const us_frame_s *frame);
static void _jpeg_init_destination(j_compress_ptr jpeg);
static boolean _jpeg_empty_output_buffer(j_compress_ptr jpeg);
@@ -63,27 +63,24 @@ void us_cpu_encoder_compress(const us_frame_s *src, us_frame_s *dest, unsigned q
jpeg.image_width = src->width;
jpeg.image_height = src->height;
jpeg.input_components = 3;
jpeg.in_color_space = JCS_RGB;
jpeg.in_color_space = ((src->format == V4L2_PIX_FMT_YUYV || src->format == V4L2_PIX_FMT_YVYU || src->format == V4L2_PIX_FMT_UYVY) ? JCS_YCbCr : JCS_RGB);
jpeg_set_defaults(&jpeg);
jpeg_set_quality(&jpeg, quality, TRUE);
jpeg_start_compress(&jpeg, TRUE);
# define WRITE_SCANLINES(x_format, x_func) \
case x_format: { x_func(&jpeg, src); break; }
switch (src->format) {
// https://www.fourcc.org/yuv.php
WRITE_SCANLINES(V4L2_PIX_FMT_YUYV, _jpeg_write_scanlines_yuyv);
WRITE_SCANLINES(V4L2_PIX_FMT_UYVY, _jpeg_write_scanlines_uyvy);
WRITE_SCANLINES(V4L2_PIX_FMT_RGB565, _jpeg_write_scanlines_rgb565);
WRITE_SCANLINES(V4L2_PIX_FMT_RGB24, _jpeg_write_scanlines_rgb24);
default: assert(0 && "Unsupported input format for CPU encoder");
case V4L2_PIX_FMT_YUYV:
case V4L2_PIX_FMT_YVYU:
case V4L2_PIX_FMT_UYVY: _jpeg_write_scanlines_yuv(&jpeg, src); break;
case V4L2_PIX_FMT_RGB565: _jpeg_write_scanlines_rgb565(&jpeg, src); break;
case V4L2_PIX_FMT_RGB24: _jpeg_write_scanlines_rgb24(&jpeg, src); break;
case V4L2_PIX_FMT_BGR24: _jpeg_write_scanlines_bgr24(&jpeg, src); break;
default: assert(0 && "Unsupported input format for CPU encoder"); return;
}
# undef WRITE_SCANLINES
jpeg_finish_compress(&jpeg);
jpeg_destroy_compress(&jpeg);
@@ -106,39 +103,43 @@ static void _jpeg_set_dest_frame(j_compress_ptr jpeg, us_frame_s *frame) {
frame->used = 0;
}
#define YUV_R(_y, _, _v) (((_y) + (359 * (_v))) >> 8)
#define YUV_G(_y, _u, _v) (((_y) - (88 * (_u)) - (183 * (_v))) >> 8)
#define YUV_B(_y, _u, _) (((_y) + (454 * (_u))) >> 8)
#define NORM_COMPONENT(_x) (((_x) > 255) ? 255 : (((_x) < 0) ? 0 : (_x)))
static void _jpeg_write_scanlines_yuyv(struct jpeg_compress_struct *jpeg, const us_frame_s *frame) {
static void _jpeg_write_scanlines_yuv(struct jpeg_compress_struct *jpeg, const us_frame_s *frame) {
uint8_t *line_buf;
US_CALLOC(line_buf, frame->width * 3);
const unsigned padding = us_frame_get_padding(frame);
const uint8_t *data = frame->data;
unsigned z = 0;
while (jpeg->next_scanline < frame->height) {
uint8_t *ptr = line_buf;
for (unsigned x = 0; x < frame->width; ++x) {
const int y = (!z ? data[0] << 8 : data[2] << 8);
const int u = data[1] - 128;
const int v = data[3] - 128;
const int r = YUV_R(y, u, v);
const int g = YUV_G(y, u, v);
const int b = YUV_B(y, u, v);
*(ptr++) = NORM_COMPONENT(r);
*(ptr++) = NORM_COMPONENT(g);
*(ptr++) = NORM_COMPONENT(b);
if (z++) {
z = 0;
data += 4;
// See also: https://www.kernel.org/doc/html/v4.8/media/uapi/v4l/pixfmt-uyvy.html
const bool is_odd_pixel = x & 1;
uint8_t y, u, v;
if (frame->format == V4L2_PIX_FMT_YUYV) {
y = data[is_odd_pixel ? 2 : 0];
u = data[1];
v = data[3];
} else if (frame->format == V4L2_PIX_FMT_YVYU) {
y = data[is_odd_pixel ? 2 : 0];
u = data[3];
v = data[1];
} else if (frame->format == V4L2_PIX_FMT_UYVY) {
y = data[is_odd_pixel ? 3 : 1];
u = data[0];
v = data[2];
} else {
assert(0 && "Unsupported pixel format");
return; // Makes linter happy
}
ptr[0] = y;
ptr[1] = u;
ptr[2] = v;
ptr += 3;
data += (is_odd_pixel ? 4 : 0);
}
data += padding;
@@ -149,49 +150,6 @@ static void _jpeg_write_scanlines_yuyv(struct jpeg_compress_struct *jpeg, const
free(line_buf);
}
static void _jpeg_write_scanlines_uyvy(struct jpeg_compress_struct *jpeg, const us_frame_s *frame) {
uint8_t *line_buf;
US_CALLOC(line_buf, frame->width * 3);
const unsigned padding = us_frame_get_padding(frame);
const uint8_t *data = frame->data;
unsigned z = 0;
while (jpeg->next_scanline < frame->height) {
uint8_t *ptr = line_buf;
for (unsigned x = 0; x < frame->width; ++x) {
const int y = (!z ? data[1] << 8 : data[3] << 8);
const int u = data[0] - 128;
const int v = data[2] - 128;
const int r = YUV_R(y, u, v);
const int g = YUV_G(y, u, v);
const int b = YUV_B(y, u, v);
*(ptr++) = NORM_COMPONENT(r);
*(ptr++) = NORM_COMPONENT(g);
*(ptr++) = NORM_COMPONENT(b);
if (z++) {
z = 0;
data += 4;
}
}
data += padding;
JSAMPROW scanlines[1] = {line_buf};
jpeg_write_scanlines(jpeg, scanlines, 1);
}
free(line_buf);
}
#undef NORM_COMPONENT
#undef YUV_B
#undef YUV_G
#undef YUV_R
static void _jpeg_write_scanlines_rgb565(struct jpeg_compress_struct *jpeg, const us_frame_s *frame) {
uint8_t *line_buf;
US_CALLOC(line_buf, frame->width * 3);
@@ -205,9 +163,10 @@ static void _jpeg_write_scanlines_rgb565(struct jpeg_compress_struct *jpeg, cons
for (unsigned x = 0; x < frame->width; ++x) {
const unsigned int two_byte = (data[1] << 8) + data[0];
*(ptr++) = data[1] & 248; // Red
*(ptr++) = (uint8_t)((two_byte & 2016) >> 3); // Green
*(ptr++) = (data[0] & 31) * 8; // Blue
ptr[0] = data[1] & 248; // Red
ptr[1] = (uint8_t)((two_byte & 2016) >> 3); // Green
ptr[2] = (data[0] & 31) * 8; // Blue
ptr += 3;
data += 2;
}
@@ -232,6 +191,33 @@ static void _jpeg_write_scanlines_rgb24(struct jpeg_compress_struct *jpeg, const
}
}
static void _jpeg_write_scanlines_bgr24(struct jpeg_compress_struct *jpeg, const us_frame_s *frame) {
uint8_t *line_buf;
US_CALLOC(line_buf, frame->width * 3);
const unsigned padding = us_frame_get_padding(frame);
uint8_t *data = frame->data;
while (jpeg->next_scanline < frame->height) {
uint8_t *ptr = line_buf;
// swap B and R values
for (unsigned x = 0; x < frame->width * 3; x += 3) {
ptr[0] = data[x + 2];
ptr[1] = data[x + 1];
ptr[2] = data[x];
ptr += 3;
}
JSAMPROW scanlines[1] = {line_buf};
jpeg_write_scanlines(jpeg, scanlines, 1);
data += (frame->width * 3) + padding;
}
free(line_buf);
}
#define JPEG_OUTPUT_BUFFER_SIZE ((size_t)4096)
static void _jpeg_init_destination(j_compress_ptr jpeg) {

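The unified `_jpeg_write_scanlines_yuv()` replaces two near-identical per-format loops with one indexing scheme over packed 4:2:2 data, where every 4-byte group holds two pixels sharing a single U and V. A minimal sketch of just that byte-order logic (the `Pixel` holder type is hypothetical):

```c
#include <stdint.h>
#include <linux/videodev2.h>

typedef struct { uint8_t y, u, v; } Pixel; // Hypothetical holder for one unpacked pixel

// Packed 4:2:2 byte orders within each 4-byte, 2-pixel group:
//   YUYV: Y0 U Y1 V     YVYU: Y0 V Y1 U     UYVY: U Y0 V Y1
static Pixel unpack_yuv422(const uint8_t group[4], uint32_t format, int odd) {
	Pixel p = {0, 0, 0};
	switch (format) {
		case V4L2_PIX_FMT_YUYV: p.y = group[odd ? 2 : 0]; p.u = group[1]; p.v = group[3]; break;
		case V4L2_PIX_FMT_YVYU: p.y = group[odd ? 2 : 0]; p.v = group[1]; p.u = group[3]; break;
		case V4L2_PIX_FMT_UYVY: p.y = group[odd ? 3 : 1]; p.u = group[0]; p.v = group[2]; break;
	}
	return p;
}
```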
View File

@@ -34,24 +34,26 @@ us_gpio_s us_g_gpio = {
.line = NULL, \
.state = false \
}
.prog_running = MAKE_OUTPUT("prog-running"),
.stream_online = MAKE_OUTPUT("stream-online"),
.has_http_clients = MAKE_OUTPUT("has-http-clients"),
# undef MAKE_OUTPUT
// mutex uninitialized
.chip = NULL
# ifndef HAVE_GPIOD2
.chip = NULL,
# endif
.initialized = false,
};
static void _gpio_output_init(us_gpio_output_s *output);
static void _gpio_output_init(us_gpio_output_s *output, struct gpiod_chip *chip);
static void _gpio_output_destroy(us_gpio_output_s *output);
void us_gpio_init(void) {
# ifndef HAVE_GPIOD2
assert(us_g_gpio.chip == NULL);
# endif
if (
us_g_gpio.prog_running.pin >= 0
|| us_g_gpio.stream_online.pin >= 0
@@ -59,10 +61,17 @@ void us_gpio_init(void) {
) {
US_MUTEX_INIT(us_g_gpio.mutex);
US_LOG_INFO("GPIO: Using chip device: %s", us_g_gpio.path);
if ((us_g_gpio.chip = gpiod_chip_open(us_g_gpio.path)) != NULL) {
_gpio_output_init(&us_g_gpio.prog_running);
_gpio_output_init(&us_g_gpio.stream_online);
_gpio_output_init(&us_g_gpio.has_http_clients);
struct gpiod_chip *chip;
if ((chip = gpiod_chip_open(us_g_gpio.path)) != NULL) {
_gpio_output_init(&us_g_gpio.prog_running, chip);
_gpio_output_init(&us_g_gpio.stream_online, chip);
_gpio_output_init(&us_g_gpio.has_http_clients, chip);
# ifdef HAVE_GPIOD2
gpiod_chip_close(chip);
# else
us_g_gpio.chip = chip;
# endif
us_g_gpio.initialized = true;
} else {
US_LOG_PERROR("GPIO: Can't initialize chip device %s", us_g_gpio.path);
}
@@ -73,23 +82,32 @@ void us_gpio_destroy(void) {
_gpio_output_destroy(&us_g_gpio.prog_running);
_gpio_output_destroy(&us_g_gpio.stream_online);
_gpio_output_destroy(&us_g_gpio.has_http_clients);
if (us_g_gpio.chip != NULL) {
if (us_g_gpio.initialized) {
# ifndef HAVE_GPIOD2
gpiod_chip_close(us_g_gpio.chip);
us_g_gpio.chip = NULL;
# endif
US_MUTEX_DESTROY(us_g_gpio.mutex);
us_g_gpio.initialized = false;
}
}
int us_gpio_inner_set(us_gpio_output_s *output, bool state) {
int retval = 0;
# ifndef HAVE_GPIOD2
assert(us_g_gpio.chip != NULL);
# endif
assert(output->line != NULL);
assert(output->state != state); // Must be checked in macro for the performance
US_MUTEX_LOCK(us_g_gpio.mutex);
if (gpiod_line_set_value(output->line, (int)state) < 0) { \
US_LOG_PERROR("GPIO: Can't write value %d to line %s (will be disabled)", state, output->consumer); \
# ifdef HAVE_GPIOD2
if (gpiod_line_request_set_value(output->line, output->pin, state) < 0) {
# else
if (gpiod_line_set_value(output->line, (int)state) < 0) {
# endif
US_LOG_PERROR("GPIO: Can't write value %d to line %s", state, output->consumer); \
_gpio_output_destroy(output);
retval = -1;
}
@@ -98,14 +116,42 @@ int us_gpio_inner_set(us_gpio_output_s *output, bool state) {
return retval;
}
static void _gpio_output_init(us_gpio_output_s *output) {
assert(us_g_gpio.chip != NULL);
static void _gpio_output_init(us_gpio_output_s *output, struct gpiod_chip *chip) {
assert(output->line == NULL);
US_ASPRINTF(output->consumer, "%s::%s", us_g_gpio.consumer_prefix, output->role);
if (output->pin >= 0) {
if ((output->line = gpiod_chip_get_line(us_g_gpio.chip, output->pin)) != NULL) {
# ifdef HAVE_GPIOD2
struct gpiod_line_settings *line_settings;
assert(line_settings = gpiod_line_settings_new());
assert(!gpiod_line_settings_set_direction(line_settings, GPIOD_LINE_DIRECTION_OUTPUT));
assert(!gpiod_line_settings_set_output_value(line_settings, false));
struct gpiod_line_config *line_config;
assert(line_config = gpiod_line_config_new());
const unsigned offset = output->pin;
assert(!gpiod_line_config_add_line_settings(line_config, &offset, 1, line_settings));
struct gpiod_request_config *request_config;
assert(request_config = gpiod_request_config_new());
gpiod_request_config_set_consumer(request_config, output->consumer);
if ((output->line = gpiod_chip_request_lines(chip, request_config, line_config)) == NULL) {
US_LOG_PERROR("GPIO: Can't request pin=%d as %s", output->pin, output->consumer);
}
gpiod_request_config_free(request_config);
gpiod_line_config_free(line_config);
gpiod_line_settings_free(line_settings);
if (output->line == NULL) {
_gpio_output_destroy(output);
}
# else
if ((output->line = gpiod_chip_get_line(chip, output->pin)) != NULL) {
if (gpiod_line_request_output(output->line, output->consumer, 0) < 0) {
US_LOG_PERROR("GPIO: Can't request pin=%d as %s", output->pin, output->consumer);
_gpio_output_destroy(output);
@@ -113,12 +159,17 @@ static void _gpio_output_init(us_gpio_output_s *output) {
} else {
US_LOG_PERROR("GPIO: Can't get pin=%d as %s", output->pin, output->consumer);
}
# endif
}
}
static void _gpio_output_destroy(us_gpio_output_s *output) {
if (output->line != NULL) {
# ifdef HAVE_GPIOD2
gpiod_line_request_release(output->line);
# else
gpiod_line_release(output->line);
# endif
output->line = NULL;
}
if (output->consumer != NULL) {

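Across the two libgpiod major versions, the difference boils down to which object a value is written through: the requested line (1.x) or the line request addressed by offset (2.0). A condensed sketch of that split, assuming the same `HAVE_GPIOD2` define that the Makefile's pkg-config probe produces:

```c
#include <stdbool.h>
#include <gpiod.h>

#ifdef HAVE_GPIOD2
// libgpiod >= 2.0: values are written through the line *request*,
// addressed by the line's offset on the chip.
static int set_output(struct gpiod_line_request *req, unsigned offset, bool state) {
	return gpiod_line_request_set_value(req, offset,
		state ? GPIOD_LINE_VALUE_ACTIVE : GPIOD_LINE_VALUE_INACTIVE);
}
#else
// libgpiod 1.x: values are written directly on the requested line.
static int set_output(struct gpiod_line *line, bool state) {
	return gpiod_line_set_value(line, (int)state);
}
#endif
```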
View File

@@ -36,11 +36,15 @@
typedef struct {
int pin;
const char *role;
char *consumer;
struct gpiod_line *line;
bool state;
int pin;
const char *role;
char *consumer;
# ifdef HAVE_GPIOD2
struct gpiod_line_request *line;
# else
struct gpiod_line *line;
# endif
bool state;
} us_gpio_output_s;
typedef struct {
@@ -52,7 +56,11 @@ typedef struct {
us_gpio_output_s has_http_clients;
pthread_mutex_t mutex;
# ifndef HAVE_GPIOD2
struct gpiod_chip *chip;
# endif
bool initialized;
} us_gpio_s;

View File

@@ -696,7 +696,7 @@ static void _help(FILE *fp, const us_device_s *dev, const us_encoder_s *enc, con
ADD_SINK("RAW", "raw-")
ADD_SINK("H264", "h264-")
SAY(" --h264-bitrate <kbps> ───────── H264 bitrate in Kbps. Default: %u.\n", stream->h264_bitrate);
SAY(" --h264-gop <N> ──────────────── Intarval between keyframes. Default: %u.\n", stream->h264_gop);
SAY(" --h264-gop <N> ──────────────── Interval between keyframes. Default: %u.\n", stream->h264_gop);
SAY(" --h264-m2m-device </dev/path> ─ Path to V4L2 M2M encoder device. Default: auto select.\n");
# undef ADD_SINK
# ifdef WITH_GPIO

View File

@@ -127,7 +127,6 @@ void us_stream_loop(us_stream_s *stream) {
unsigned slc = 0;
for (; slc < 10 && !atomic_load(&_RUN(stop)) && !us_stream_has_clients(stream); ++slc) {
usleep(100000);
++slc;
}
h264_force_key = (slc == 10);
}
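With the duplicate increment, `slc` advanced by two per pass, so the no-client wait ran only five iterations (about 0.5 s) instead of the intended ten (about 1 s); removing the body increment restores the full wait while the `slc == 10` check still forces a keyframe once it expires.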