Compare commits

...

6 Commits
v6.8 ... v6.9

Author SHA1 Message Date
Maxim Devaev d64077c2d5 Bump version: 6.8 → 6.9 2024-03-27 21:39:03 +02:00
Maxim Devaev 83f12baa61 refactoring 2024-03-27 19:27:28 +02:00
Maxim Devaev b6fac2608d ustreamer-v4p: bring back busy message 2024-03-27 19:22:21 +02:00
Maxim Devaev e6ebc12505 replaced comment 2024-03-27 02:14:36 +02:00
Maxim Devaev 8c92ab6f47 ustreamer: blank drm output by timeout 2024-03-26 22:20:08 +02:00
Maxim Devaev 7dc492d875 refactoring 2024-03-26 21:51:47 +02:00
15 changed files with 323 additions and 307 deletions

View File

@@ -1,7 +1,7 @@
[bumpversion]
commit = True
tag = True
current_version = 6.8
current_version = 6.9
parse = (?P<major>\d+)\.(?P<minor>\d+)
serialize =
{major}.{minor}

View File

@@ -1,6 +1,6 @@
.\" Manpage for ustreamer-dump.
.\" Open an issue or pull request to https://github.com/pikvm/ustreamer to correct errors or typos
.TH USTREAMER-DUMP 1 "version 6.8" "January 2021"
.TH USTREAMER-DUMP 1 "version 6.9" "January 2021"
.SH NAME
ustreamer-dump \- Dump uStreamer's memory sink to file

View File

@@ -1,6 +1,6 @@
.\" Manpage for ustreamer.
.\" Open an issue or pull request to https://github.com/pikvm/ustreamer to correct errors or typos
.TH USTREAMER 1 "version 6.8" "November 2020"
.TH USTREAMER 1 "version 6.9" "November 2020"
.SH NAME
ustreamer \- stream MJPEG video from any V4L2 device to the network

View File

@@ -3,7 +3,7 @@
pkgname=ustreamer
pkgver=6.8
pkgver=6.9
pkgrel=1
pkgdesc="Lightweight and fast MJPEG-HTTP streamer"
url="https://github.com/pikvm/ustreamer"

View File

@@ -6,7 +6,7 @@
include $(TOPDIR)/rules.mk
PKG_NAME:=ustreamer
PKG_VERSION:=6.8
PKG_VERSION:=6.9
PKG_RELEASE:=1
PKG_MAINTAINER:=Maxim Devaev <mdevaev@gmail.com>

View File

@@ -17,7 +17,7 @@ def _find_sources(suffix: str) -> list[str]:
if __name__ == "__main__":
setup(
name="ustreamer",
version="6.8",
version="6.9",
description="uStreamer tools",
author="Maxim Devaev",
author_email="mdevaev@gmail.com",

View File

@@ -111,11 +111,11 @@ static const char *_standard_to_string(v4l2_std_id standard);
static const char *_io_method_to_string_supported(enum v4l2_memory io_method);
#define _D_LOG_ERROR(x_msg, ...) US_LOG_ERROR("CAP: " x_msg, ##__VA_ARGS__)
#define _D_LOG_PERROR(x_msg, ...) US_LOG_PERROR("CAP: " x_msg, ##__VA_ARGS__)
#define _D_LOG_INFO(x_msg, ...) US_LOG_INFO("CAP: " x_msg, ##__VA_ARGS__)
#define _D_LOG_VERBOSE(x_msg, ...) US_LOG_VERBOSE("CAP: " x_msg, ##__VA_ARGS__)
#define _D_LOG_DEBUG(x_msg, ...) US_LOG_DEBUG("CAP: " x_msg, ##__VA_ARGS__)
#define _LOG_ERROR(x_msg, ...) US_LOG_ERROR("CAP: " x_msg, ##__VA_ARGS__)
#define _LOG_PERROR(x_msg, ...) US_LOG_PERROR("CAP: " x_msg, ##__VA_ARGS__)
#define _LOG_INFO(x_msg, ...) US_LOG_INFO("CAP: " x_msg, ##__VA_ARGS__)
#define _LOG_VERBOSE(x_msg, ...) US_LOG_VERBOSE("CAP: " x_msg, ##__VA_ARGS__)
#define _LOG_DEBUG(x_msg, ...) US_LOG_DEBUG("CAP: " x_msg, ##__VA_ARGS__)
us_capture_s *us_capture_init(void) {
@@ -182,20 +182,20 @@ int us_capture_open(us_capture_s *cap) {
goto tmp_error;
}
_D_LOG_DEBUG("Opening capture device ...");
_LOG_DEBUG("Opening capture device ...");
if ((run->fd = open(cap->path, O_RDWR | O_NONBLOCK)) < 0) {
_D_LOG_PERROR("Can't capture open device");
_LOG_PERROR("Can't capture open device");
goto error;
}
_D_LOG_DEBUG("Capture device fd=%d opened", run->fd);
_LOG_DEBUG("Capture device fd=%d opened", run->fd);
if (cap->dv_timings && cap->persistent) {
_D_LOG_DEBUG("Probing DV-timings or QuerySTD ...");
_LOG_DEBUG("Probing DV-timings or QuerySTD ...");
if (_capture_open_dv_timings(cap, false) < 0) {
const int line = __LINE__;
if (run->open_error_reported != line) {
run->open_error_reported = line;
_D_LOG_ERROR("No signal from source");
_LOG_ERROR("No signal from source");
}
goto tmp_error;
}
@@ -232,13 +232,13 @@ int us_capture_open(us_capture_s *cap) {
enum v4l2_buf_type type = run->capture_type;
if (us_xioctl(run->fd, VIDIOC_STREAMON, &type) < 0) {
_D_LOG_PERROR("Can't start capturing");
_LOG_PERROR("Can't start capturing");
goto error;
}
run->streamon = true;
run->open_error_reported = 0;
_D_LOG_INFO("Capturing started");
_LOG_INFO("Capturing started");
return 0;
tmp_error:
@@ -258,17 +258,17 @@ void us_capture_close(us_capture_s *cap) {
if (run->streamon) {
say = true;
_D_LOG_DEBUG("Calling VIDIOC_STREAMOFF ...");
_LOG_DEBUG("Calling VIDIOC_STREAMOFF ...");
enum v4l2_buf_type type = run->capture_type;
if (us_xioctl(run->fd, VIDIOC_STREAMOFF, &type) < 0) {
_D_LOG_PERROR("Can't stop capturing");
_LOG_PERROR("Can't stop capturing");
}
run->streamon = false;
}
if (run->bufs != NULL) {
say = true;
_D_LOG_DEBUG("Releasing HW buffers ...");
_LOG_DEBUG("Releasing HW buffers ...");
for (uint index = 0; index < run->n_bufs; ++index) {
us_capture_hwbuf_s *hw = &run->bufs[index];
@@ -277,7 +277,7 @@ void us_capture_close(us_capture_s *cap) {
if (cap->io_method == V4L2_MEMORY_MMAP) {
if (hw->raw.allocated > 0 && hw->raw.data != NULL) {
if (munmap(hw->raw.data, hw->raw.allocated) < 0) {
_D_LOG_PERROR("Can't unmap HW buffer=%u", index);
_LOG_PERROR("Can't unmap HW buffer=%u", index);
}
}
} else { // V4L2_MEMORY_USERPTR
@@ -295,11 +295,11 @@ void us_capture_close(us_capture_s *cap) {
US_CLOSE_FD(run->fd);
if (say) {
_D_LOG_INFO("Capturing stopped");
_LOG_INFO("Capturing stopped");
}
}
int us_capture_grab_buffer(us_capture_s *cap, us_capture_hwbuf_s **hw) {
int us_capture_hwbuf_grab(us_capture_s *cap, us_capture_hwbuf_s **hw) {
// This is a complex function that does many things at once in order to get a new frame.
// - _capture_wait_buffer() with select() inside is called to wait for a new frame
//   or a V4L2 event. Event handling takes priority over frame handling.
@@ -327,7 +327,7 @@ int us_capture_grab_buffer(us_capture_s *cap, us_capture_hwbuf_s **hw) {
uint skipped = 0;
bool broken = false;
_D_LOG_DEBUG("Grabbing hw buffer ...");
_LOG_DEBUG("Grabbing hw buffer ...");
do {
struct v4l2_buffer new = {0};
@@ -343,7 +343,7 @@ int us_capture_grab_buffer(us_capture_s *cap, us_capture_hwbuf_s **hw) {
if (new_got) {
if (new.index >= run->n_bufs) {
_D_LOG_ERROR("V4L2 error: grabbed invalid HW buffer=%u, n_bufs=%u", new.index, run->n_bufs);
_LOG_ERROR("V4L2 error: grabbed invalid HW buffer=%u, n_bufs=%u", new.index, run->n_bufs);
return -1;
}
@@ -351,7 +351,7 @@ int us_capture_grab_buffer(us_capture_s *cap, us_capture_hwbuf_s **hw) {
# define FRAME_DATA(x_buf) run->bufs[x_buf.index].raw.data
if (GRABBED(new)) {
_D_LOG_ERROR("V4L2 error: grabbed HW buffer=%u is already used", new.index);
_LOG_ERROR("V4L2 error: grabbed HW buffer=%u is already used", new.index);
return -1;
}
GRABBED(new) = true;
@@ -362,9 +362,9 @@ int us_capture_grab_buffer(us_capture_s *cap, us_capture_hwbuf_s **hw) {
broken = !_capture_is_buffer_valid(cap, &new, FRAME_DATA(new));
if (broken) {
_D_LOG_DEBUG("Releasing HW buffer=%u (broken frame) ...", new.index);
_LOG_DEBUG("Releasing HW buffer=%u (broken frame) ...", new.index);
if (us_xioctl(run->fd, VIDIOC_QBUF, &new) < 0) {
_D_LOG_PERROR("Can't release HW buffer=%u (broken frame)", new.index);
_LOG_PERROR("Can't release HW buffer=%u (broken frame)", new.index);
return -1;
}
GRABBED(new) = false;
@@ -373,7 +373,7 @@ int us_capture_grab_buffer(us_capture_s *cap, us_capture_hwbuf_s **hw) {
if (buf_got) {
if (us_xioctl(run->fd, VIDIOC_QBUF, &buf) < 0) {
_D_LOG_PERROR("Can't release HW buffer=%u (skipped frame)", buf.index);
_LOG_PERROR("Can't release HW buffer=%u (skipped frame)", buf.index);
return -1;
}
GRABBED(buf) = false;
@@ -395,7 +395,7 @@ int us_capture_grab_buffer(us_capture_s *cap, us_capture_hwbuf_s **hw) {
return -2; // If we have only broken frames on this capture session
}
}
_D_LOG_PERROR("Can't grab HW buffer");
_LOG_PERROR("Can't grab HW buffer");
return -1;
}
} while (true);
@@ -412,29 +412,29 @@ int us_capture_grab_buffer(us_capture_s *cap, us_capture_hwbuf_s **hw) {
_v4l2_buffer_copy(&buf, &(*hw)->buf);
(*hw)->raw.grab_ts = (ldf)((buf.timestamp.tv_sec * (u64)1000) + (buf.timestamp.tv_usec / 1000)) / 1000;
_D_LOG_DEBUG("Grabbed HW buffer=%u: bytesused=%u, grab_ts=%.3Lf, latency=%.3Lf, skipped=%u",
_LOG_DEBUG("Grabbed HW buffer=%u: bytesused=%u, grab_ts=%.3Lf, latency=%.3Lf, skipped=%u",
buf.index, buf.bytesused, (*hw)->raw.grab_ts, us_get_now_monotonic() - (*hw)->raw.grab_ts, skipped);
return buf.index;
}
int us_capture_release_buffer(us_capture_s *cap, us_capture_hwbuf_s *hw) {
int us_capture_hwbuf_release(us_capture_s *cap, us_capture_hwbuf_s *hw) {
assert(atomic_load(&hw->refs) == 0);
const uint index = hw->buf.index;
_D_LOG_DEBUG("Releasing HW buffer=%u ...", index);
_LOG_DEBUG("Releasing HW buffer=%u ...", index);
if (us_xioctl(cap->run->fd, VIDIOC_QBUF, &hw->buf) < 0) {
_D_LOG_PERROR("Can't release HW buffer=%u", index);
_LOG_PERROR("Can't release HW buffer=%u", index);
return -1;
}
hw->grabbed = false;
_D_LOG_DEBUG("HW buffer=%u released", index);
_LOG_DEBUG("HW buffer=%u released", index);
return 0;
}
void us_capture_buffer_incref(us_capture_hwbuf_s *hw) {
void us_capture_hwbuf_incref(us_capture_hwbuf_s *hw) {
atomic_fetch_add(&hw->refs, 1);
}
void us_capture_buffer_decref(us_capture_hwbuf_s *hw) {
void us_capture_hwbuf_decref(us_capture_hwbuf_s *hw) {
atomic_fetch_sub(&hw->refs, 1);
}
@@ -455,7 +455,7 @@ int _capture_wait_buffer(us_capture_s *cap) {
timeout.tv_sec = cap->timeout;
timeout.tv_usec = 0;
_D_LOG_DEBUG("Calling select() on video device ...");
_LOG_DEBUG("Calling select() on video device ...");
bool has_read = false;
bool has_error = false;
@@ -464,15 +464,15 @@ int _capture_wait_buffer(us_capture_s *cap) {
has_read = FD_ISSET(run->fd, &read_fds);
has_error = FD_ISSET(run->fd, &error_fds);
}
_D_LOG_DEBUG("Device select() --> %d; has_read=%d, has_error=%d", selected, has_read, has_error);
_LOG_DEBUG("Device select() --> %d; has_read=%d, has_error=%d", selected, has_read, has_error);
if (selected < 0) {
if (errno != EINTR) {
_D_LOG_PERROR("Device select() error");
_LOG_PERROR("Device select() error");
}
return -1;
} else if (selected == 0) {
_D_LOG_ERROR("Device select() timeout");
_LOG_ERROR("Device select() timeout");
return -1;
} else {
if (has_error && _capture_consume_event(cap) < 0) {
@@ -485,15 +485,15 @@ int _capture_wait_buffer(us_capture_s *cap) {
static int _capture_consume_event(us_capture_s *cap) {
struct v4l2_event event;
if (us_xioctl(cap->run->fd, VIDIOC_DQEVENT, &event) < 0) {
_D_LOG_PERROR("Can't consume V4L2 event");
_LOG_PERROR("Can't consume V4L2 event");
return -1;
}
switch (event.type) {
case V4L2_EVENT_SOURCE_CHANGE:
_D_LOG_INFO("Got V4L2_EVENT_SOURCE_CHANGE: Source changed");
_LOG_INFO("Got V4L2_EVENT_SOURCE_CHANGE: Source changed");
return -1;
case V4L2_EVENT_EOS:
_D_LOG_INFO("Got V4L2_EVENT_EOS: End of stream");
_LOG_INFO("Got V4L2_EVENT_EOS: End of stream");
return -1;
}
return 0;
@@ -516,7 +516,7 @@ bool _capture_is_buffer_valid(us_capture_s *cap, const struct v4l2_buffer *buf,
// For example a VGA (640x480) webcam frame is normally >= 8kByte large,
// corrupted frames are smaller.
if (buf->bytesused < cap->min_frame_size) {
_D_LOG_DEBUG("Dropped too small frame, assuming it was broken: buffer=%u, bytesused=%u",
_LOG_DEBUG("Dropped too small frame, assuming it was broken: buffer=%u, bytesused=%u",
buf->index, buf->bytesused);
return false;
}
@@ -532,7 +532,7 @@ bool _capture_is_buffer_valid(us_capture_s *cap, const struct v4l2_buffer *buf,
if (us_is_jpeg(cap->run->format)) {
if (buf->bytesused < 125) {
// https://stackoverflow.com/questions/2253404/what-is-the-smallest-valid-jpeg-file-size-in-bytes
_D_LOG_DEBUG("Discarding invalid frame, too small to be a valid JPEG: bytesused=%u", buf->bytesused);
_LOG_DEBUG("Discarding invalid frame, too small to be a valid JPEG: bytesused=%u", buf->bytesused);
return false;
}
@@ -540,7 +540,7 @@ bool _capture_is_buffer_valid(us_capture_s *cap, const struct v4l2_buffer *buf,
const u8 *const eoi_ptr = end_ptr - 2;
const u16 eoi_marker = (((u16)(eoi_ptr[0]) << 8) | eoi_ptr[1]);
if (eoi_marker != 0xFFD9 && eoi_marker != 0xD900 && eoi_marker != 0x0000) {
_D_LOG_DEBUG("Discarding truncated JPEG frame: eoi_marker=0x%04x, bytesused=%u", eoi_marker, buf->bytesused);
_LOG_DEBUG("Discarding truncated JPEG frame: eoi_marker=0x%04x, bytesused=%u", eoi_marker, buf->bytesused);
return false;
}
}
@@ -552,47 +552,47 @@ static int _capture_open_check_cap(us_capture_s *cap) {
us_capture_runtime_s *const run = cap->run;
struct v4l2_capability cpb = {0};
_D_LOG_DEBUG("Querying device capabilities ...");
_LOG_DEBUG("Querying device capabilities ...");
if (us_xioctl(run->fd, VIDIOC_QUERYCAP, &cpb) < 0) {
_D_LOG_PERROR("Can't query device capabilities");
_LOG_PERROR("Can't query device capabilities");
return -1;
}
if (cpb.capabilities & V4L2_CAP_VIDEO_CAPTURE) {
run->capture_type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
run->capture_mplane = false;
_D_LOG_INFO("Using capture type: single-planar");
_LOG_INFO("Using capture type: single-planar");
} else if (cpb.capabilities & V4L2_CAP_VIDEO_CAPTURE_MPLANE) {
run->capture_type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
run->capture_mplane = true;
_D_LOG_INFO("Using capture type: multi-planar");
_LOG_INFO("Using capture type: multi-planar");
} else {
_D_LOG_ERROR("Video capture is not supported by device");
_LOG_ERROR("Video capture is not supported by device");
return -1;
}
if (!(cpb.capabilities & V4L2_CAP_STREAMING)) {
_D_LOG_ERROR("Device doesn't support streaming IO");
_LOG_ERROR("Device doesn't support streaming IO");
return -1;
}
if (!run->capture_mplane) {
int input = cap->input; // Needs a pointer to int for ioctl()
_D_LOG_INFO("Using input channel: %d", input);
_LOG_INFO("Using input channel: %d", input);
if (us_xioctl(run->fd, VIDIOC_S_INPUT, &input) < 0) {
_D_LOG_ERROR("Can't set input channel");
_LOG_ERROR("Can't set input channel");
return -1;
}
}
if (cap->standard != V4L2_STD_UNKNOWN) {
_D_LOG_INFO("Using TV standard: %s", _standard_to_string(cap->standard));
_LOG_INFO("Using TV standard: %s", _standard_to_string(cap->standard));
if (us_xioctl(run->fd, VIDIOC_S_STD, &cap->standard) < 0) {
_D_LOG_ERROR("Can't set video standard");
_LOG_ERROR("Can't set video standard");
return -1;
}
} else {
_D_LOG_DEBUG("Using TV standard: DEFAULT");
_LOG_DEBUG("Using TV standard: DEFAULT");
}
return 0;
}
@@ -605,7 +605,7 @@ static int _capture_open_dv_timings(us_capture_s *cap, bool apply) {
int dv_errno = 0;
struct v4l2_dv_timings dv = {0};
_D_LOG_DEBUG("Querying DV-timings (apply=%u) ...", apply);
_LOG_DEBUG("Querying DV-timings (apply=%u) ...", apply);
if (us_xioctl(run->fd, VIDIOC_QUERY_DV_TIMINGS, &dv) < 0) {
// TC358743 errors here (see in the kernel: drivers/media/i2c/tc358743.c):
// - ENOLINK: No valid signal (SYS_STATUS & MASK_S_TMDS)
@@ -623,18 +623,18 @@ static int _capture_open_dv_timings(us_capture_s *cap, bool apply) {
const uint vtot = V4L2_DV_BT_FRAME_HEIGHT(&dv.bt) / (dv.bt.interlaced ? 2 : 1);
const uint fps = ((htot * vtot) > 0 ? ((100 * (u64)dv.bt.pixelclock)) / (htot * vtot) : 0);
hz = (fps / 100) + (fps % 100) / 100.0;
_D_LOG_INFO("Detected DV-timings: %ux%u%s%.02f, pixclk=%llu, vsync=%u, hsync=%u",
_LOG_INFO("Detected DV-timings: %ux%u%s%.02f, pixclk=%llu, vsync=%u, hsync=%u",
dv.bt.width, dv.bt.height, (dv.bt.interlaced ? "i" : "p"), hz,
(ull)dv.bt.pixelclock, dv.bt.vsync, dv.bt.hsync); // See #11 about %llu
} else {
_D_LOG_INFO("Detected DV-timings: %ux%u, pixclk=%llu, vsync=%u, hsync=%u",
_LOG_INFO("Detected DV-timings: %ux%u, pixclk=%llu, vsync=%u, hsync=%u",
dv.bt.width, dv.bt.height,
(ull)dv.bt.pixelclock, dv.bt.vsync, dv.bt.hsync);
}
_D_LOG_DEBUG("Applying DV-timings ...");
_LOG_DEBUG("Applying DV-timings ...");
if (us_xioctl(run->fd, VIDIOC_S_DV_TIMINGS, &dv) < 0) {
_D_LOG_PERROR("Failed to apply DV-timings");
_LOG_PERROR("Failed to apply DV-timings");
return -1;
}
if (_capture_apply_resolution(cap, dv.bt.width, dv.bt.height, hz) < 0) {
@@ -643,12 +643,12 @@ static int _capture_open_dv_timings(us_capture_s *cap, bool apply) {
goto subscribe;
querystd:
_D_LOG_DEBUG("Failed to query DV-timings, trying QuerySTD ...");
_LOG_DEBUG("Failed to query DV-timings, trying QuerySTD ...");
if (us_xioctl(run->fd, VIDIOC_QUERYSTD, &cap->standard) < 0) {
if (apply) {
char *std_error = us_errno_to_string(errno); // Read the errno first
char *dv_error = us_errno_to_string(dv_errno);
_D_LOG_ERROR("Failed to query DV-timings (%s) and QuerySTD (%s)", dv_error, std_error);
_LOG_ERROR("Failed to query DV-timings (%s) and QuerySTD (%s)", dv_error, std_error);
free(dv_error);
free(std_error);
}
@@ -657,17 +657,17 @@ querystd:
goto probe_only;
}
if (us_xioctl(run->fd, VIDIOC_S_STD, &cap->standard) < 0) {
_D_LOG_PERROR("Can't set apply standard: %s", _standard_to_string(cap->standard));
_LOG_PERROR("Can't set apply standard: %s", _standard_to_string(cap->standard));
return -1;
}
_D_LOG_DEBUG("Applied new video standard: %s", _standard_to_string(cap->standard));
_LOG_DEBUG("Applied new video standard: %s", _standard_to_string(cap->standard));
subscribe:
; // Empty statement for the goto label above
struct v4l2_event_subscription sub = {.type = V4L2_EVENT_SOURCE_CHANGE};
_D_LOG_DEBUG("Subscribing to V4L2_EVENT_SOURCE_CHANGE ...")
_LOG_DEBUG("Subscribing to V4L2_EVENT_SOURCE_CHANGE ...")
if (us_xioctl(cap->run->fd, VIDIOC_SUBSCRIBE_EVENT, &sub) < 0) {
_D_LOG_PERROR("Can't subscribe to V4L2_EVENT_SOURCE_CHANGE");
_LOG_PERROR("Can't subscribe to V4L2_EVENT_SOURCE_CHANGE");
return -1;
}
@@ -698,15 +698,15 @@ static int _capture_open_format(us_capture_s *cap, bool first) {
}
// Set format
_D_LOG_DEBUG("Probing device format=%s, stride=%u, resolution=%ux%u ...",
_LOG_DEBUG("Probing device format=%s, stride=%u, resolution=%ux%u ...",
_format_to_string_supported(cap->format), stride, run->width, run->height);
if (us_xioctl(run->fd, VIDIOC_S_FMT, &fmt) < 0) {
_D_LOG_PERROR("Can't set device format");
_LOG_PERROR("Can't set device format");
return -1;
}
if (fmt.type != run->capture_type) {
_D_LOG_ERROR("Capture format mismatch, please report to the developer");
_LOG_ERROR("Capture format mismatch, please report to the developer");
return -1;
}
@@ -716,7 +716,7 @@ static int _capture_open_format(us_capture_s *cap, bool first) {
// Check resolution
bool retry = false;
if (FMT(width) != run->width || FMT(height) != run->height) {
_D_LOG_ERROR("Requested resolution=%ux%u is unavailable", run->width, run->height);
_LOG_ERROR("Requested resolution=%ux%u is unavailable", run->width, run->height);
retry = true;
}
if (_capture_apply_resolution(cap, FMT(width), FMT(height), run->hz) < 0) {
@@ -725,27 +725,27 @@ static int _capture_open_format(us_capture_s *cap, bool first) {
if (first && retry) {
return _capture_open_format(cap, false);
}
_D_LOG_INFO("Using resolution: %ux%u", run->width, run->height);
_LOG_INFO("Using resolution: %ux%u", run->width, run->height);
// Check format
if (FMT(pixelformat) != cap->format) {
_D_LOG_ERROR("Could not obtain the requested format=%s; driver gave us %s",
_LOG_ERROR("Could not obtain the requested format=%s; driver gave us %s",
_format_to_string_supported(cap->format),
_format_to_string_supported(FMT(pixelformat)));
char *format_str;
if ((format_str = (char*)_format_to_string_nullable(FMT(pixelformat))) != NULL) {
_D_LOG_INFO("Falling back to format=%s", format_str);
_LOG_INFO("Falling back to format=%s", format_str);
} else {
char fourcc_str[8];
_D_LOG_ERROR("Unsupported format=%s (fourcc)",
_LOG_ERROR("Unsupported format=%s (fourcc)",
us_fourcc_to_string(FMT(pixelformat), fourcc_str, 8));
return -1;
}
}
run->format = FMT(pixelformat);
_D_LOG_INFO("Using format: %s", _format_to_string_supported(run->format));
_LOG_INFO("Using format: %s", _format_to_string_supported(run->format));
if (cap->format_swap_rgb) {
// Userspace workaround for TC358743 RGB/BGR bug:
@@ -756,7 +756,7 @@ static int _capture_open_format(us_capture_s *cap, bool first) {
case V4L2_PIX_FMT_BGR24: swapped = V4L2_PIX_FMT_RGB24; break;
}
if (swapped > 0) {
_D_LOG_INFO("Using format swap: %s -> %s",
_LOG_INFO("Using format swap: %s -> %s",
_format_to_string_supported(run->format),
_format_to_string_supported(swapped));
run->format = swapped;
@@ -778,18 +778,18 @@ static void _capture_open_hw_fps(us_capture_s *cap) {
run->hw_fps = 0;
struct v4l2_streamparm setfps = {.type = run->capture_type};
_D_LOG_DEBUG("Querying HW FPS ...");
_LOG_DEBUG("Querying HW FPS ...");
if (us_xioctl(run->fd, VIDIOC_G_PARM, &setfps) < 0) {
if (errno == ENOTTY) { // Quiet message for TC358743
_D_LOG_INFO("Querying HW FPS changing is not supported");
_LOG_INFO("Querying HW FPS changing is not supported");
} else {
_D_LOG_PERROR("Can't query HW FPS changing");
_LOG_PERROR("Can't query HW FPS changing");
}
return;
}
if (!(setfps.parm.capture.capability & V4L2_CAP_TIMEPERFRAME)) {
_D_LOG_INFO("Changing HW FPS is not supported");
_LOG_INFO("Changing HW FPS is not supported");
return;
}
@@ -801,25 +801,25 @@ static void _capture_open_hw_fps(us_capture_s *cap) {
SETFPS_TPF(denominator) = (cap->desired_fps == 0 ? 255 : cap->desired_fps);
if (us_xioctl(run->fd, VIDIOC_S_PARM, &setfps) < 0) {
_D_LOG_PERROR("Can't set HW FPS");
_LOG_PERROR("Can't set HW FPS");
return;
}
if (SETFPS_TPF(numerator) != 1) {
_D_LOG_ERROR("Invalid HW FPS numerator: %u != 1", SETFPS_TPF(numerator));
_LOG_ERROR("Invalid HW FPS numerator: %u != 1", SETFPS_TPF(numerator));
return;
}
if (SETFPS_TPF(denominator) == 0) { // Not sure if this can ever happen, but check just in case
_D_LOG_ERROR("Invalid HW FPS denominator: 0");
_LOG_ERROR("Invalid HW FPS denominator: 0");
return;
}
run->hw_fps = SETFPS_TPF(denominator);
if (cap->desired_fps != run->hw_fps) {
_D_LOG_INFO("Using HW FPS: %u -> %u (coerced)", cap->desired_fps, run->hw_fps);
_LOG_INFO("Using HW FPS: %u -> %u (coerced)", cap->desired_fps, run->hw_fps);
} else {
_D_LOG_INFO("Using HW FPS: %u", run->hw_fps);
_LOG_INFO("Using HW FPS: %u", run->hw_fps);
}
# undef SETFPS_TPF
@@ -831,11 +831,11 @@ static void _capture_open_jpeg_quality(us_capture_s *cap) {
if (us_is_jpeg(run->format)) {
struct v4l2_jpegcompression comp = {0};
if (us_xioctl(run->fd, VIDIOC_G_JPEGCOMP, &comp) < 0) {
_D_LOG_ERROR("Device doesn't support setting of HW encoding quality parameters");
_LOG_ERROR("Device doesn't support setting of HW encoding quality parameters");
} else {
comp.quality = cap->jpeg_quality;
if (us_xioctl(run->fd, VIDIOC_S_JPEGCOMP, &comp) < 0) {
_D_LOG_ERROR("Can't change MJPEG quality for JPEG source with HW pass-through encoder");
_LOG_ERROR("Can't change MJPEG quality for JPEG source with HW pass-through encoder");
} else {
quality = cap->jpeg_quality;
}
@@ -845,7 +845,7 @@ static void _capture_open_jpeg_quality(us_capture_s *cap) {
}
static int _capture_open_io_method(us_capture_s *cap) {
_D_LOG_INFO("Using IO method: %s", _io_method_to_string_supported(cap->io_method));
_LOG_INFO("Using IO method: %s", _io_method_to_string_supported(cap->io_method));
switch (cap->io_method) {
case V4L2_MEMORY_MMAP: return _capture_open_io_method_mmap(cap);
case V4L2_MEMORY_USERPTR: return _capture_open_io_method_userptr(cap);
@@ -862,20 +862,20 @@ static int _capture_open_io_method_mmap(us_capture_s *cap) {
.type = run->capture_type,
.memory = V4L2_MEMORY_MMAP,
};
_D_LOG_DEBUG("Requesting %u device buffers for MMAP ...", req.count);
_LOG_DEBUG("Requesting %u device buffers for MMAP ...", req.count);
if (us_xioctl(run->fd, VIDIOC_REQBUFS, &req) < 0) {
_D_LOG_PERROR("Device '%s' doesn't support MMAP method", cap->path);
_LOG_PERROR("Device '%s' doesn't support MMAP method", cap->path);
return -1;
}
if (req.count < 1) {
_D_LOG_ERROR("Insufficient buffer memory: %u", req.count);
_LOG_ERROR("Insufficient buffer memory: %u", req.count);
return -1;
} else {
_D_LOG_INFO("Requested %u device buffers, got %u", cap->n_bufs, req.count);
_LOG_INFO("Requested %u device buffers, got %u", cap->n_bufs, req.count);
}
_D_LOG_DEBUG("Allocating device buffers ...");
_LOG_DEBUG("Allocating device buffers ...");
US_CALLOC(run->bufs, req.count);
@@ -890,9 +890,9 @@ static int _capture_open_io_method_mmap(us_capture_s *cap) {
buf.length = VIDEO_MAX_PLANES;
}
_D_LOG_DEBUG("Calling us_xioctl(VIDIOC_QUERYBUF) for device buffer=%u ...", run->n_bufs);
_LOG_DEBUG("Calling us_xioctl(VIDIOC_QUERYBUF) for device buffer=%u ...", run->n_bufs);
if (us_xioctl(run->fd, VIDIOC_QUERYBUF, &buf) < 0) {
_D_LOG_PERROR("Can't VIDIOC_QUERYBUF");
_LOG_PERROR("Can't VIDIOC_QUERYBUF");
return -1;
}
@@ -901,13 +901,13 @@ static int _capture_open_io_method_mmap(us_capture_s *cap) {
const uz buf_size = (run->capture_mplane ? buf.m.planes[0].length : buf.length);
const off_t buf_offset = (run->capture_mplane ? buf.m.planes[0].m.mem_offset : buf.m.offset);
_D_LOG_DEBUG("Mapping device buffer=%u ...", run->n_bufs);
_LOG_DEBUG("Mapping device buffer=%u ...", run->n_bufs);
if ((hw->raw.data = mmap(
NULL, buf_size,
PROT_READ | PROT_WRITE, MAP_SHARED,
run->fd, buf_offset
)) == MAP_FAILED) {
_D_LOG_PERROR("Can't map device buffer=%u", run->n_bufs);
_LOG_PERROR("Can't map device buffer=%u", run->n_bufs);
return -1;
}
assert(hw->raw.data != NULL);
@@ -930,20 +930,20 @@ static int _capture_open_io_method_userptr(us_capture_s *cap) {
.type = run->capture_type,
.memory = V4L2_MEMORY_USERPTR,
};
_D_LOG_DEBUG("Requesting %u device buffers for USERPTR ...", req.count);
_LOG_DEBUG("Requesting %u device buffers for USERPTR ...", req.count);
if (us_xioctl(run->fd, VIDIOC_REQBUFS, &req) < 0) {
_D_LOG_PERROR("Device '%s' doesn't support USERPTR method", cap->path);
_LOG_PERROR("Device '%s' doesn't support USERPTR method", cap->path);
return -1;
}
if (req.count < 1) {
_D_LOG_ERROR("Insufficient buffer memory: %u", req.count);
_LOG_ERROR("Insufficient buffer memory: %u", req.count);
return -1;
} else {
_D_LOG_INFO("Requested %u device buffers, got %u", cap->n_bufs, req.count);
_LOG_INFO("Requested %u device buffers, got %u", cap->n_bufs, req.count);
}
_D_LOG_DEBUG("Allocating device buffers ...");
_LOG_DEBUG("Allocating device buffers ...");
US_CALLOC(run->bufs, req.count);
@@ -983,9 +983,9 @@ static int _capture_open_queue_buffers(us_capture_s *cap) {
buf.length = run->bufs[index].raw.allocated;
}
_D_LOG_DEBUG("Calling us_xioctl(VIDIOC_QBUF) for buffer=%u ...", index);
_LOG_DEBUG("Calling us_xioctl(VIDIOC_QBUF) for buffer=%u ...", index);
if (us_xioctl(run->fd, VIDIOC_QBUF, &buf) < 0) {
_D_LOG_PERROR("Can't VIDIOC_QBUF");
_LOG_PERROR("Can't VIDIOC_QBUF");
return -1;
}
}
@@ -1000,9 +1000,9 @@ static int _capture_open_export_to_dma(us_capture_s *cap) {
.type = run->capture_type,
.index = index,
};
_D_LOG_DEBUG("Exporting device buffer=%u to DMA ...", index);
_LOG_DEBUG("Exporting device buffer=%u to DMA ...", index);
if (us_xioctl(run->fd, VIDIOC_EXPBUF, &exp) < 0) {
_D_LOG_PERROR("Can't export device buffer=%u to DMA", index);
_LOG_PERROR("Can't export device buffer=%u to DMA", index);
goto error;
}
run->bufs[index].dma_fd = exp.fd;
@@ -1023,7 +1023,7 @@ static int _capture_apply_resolution(us_capture_s *cap, uint width, uint height,
width == 0 || width > US_VIDEO_MAX_WIDTH
|| height == 0 || height > US_VIDEO_MAX_HEIGHT
) {
_D_LOG_ERROR("Requested forbidden resolution=%ux%u: min=1x1, max=%ux%u",
_LOG_ERROR("Requested forbidden resolution=%ux%u: min=1x1, max=%ux%u",
width, height, US_VIDEO_MAX_WIDTH, US_VIDEO_MAX_HEIGHT);
return -1;
}
@@ -1099,7 +1099,7 @@ static int _capture_query_control(
if (us_xioctl(cap->run->fd, VIDIOC_QUERYCTRL, query) < 0 || query->flags & V4L2_CTRL_FLAG_DISABLED) {
if (!quiet) {
_D_LOG_ERROR("Changing control %s is unsupported", name);
_LOG_ERROR("Changing control %s is unsupported", name);
}
return -1;
}
@@ -1112,7 +1112,7 @@ static void _capture_set_control(
if (value < query->minimum || value > query->maximum || value % query->step != 0) {
if (!quiet) {
_D_LOG_ERROR("Invalid value %d of control %s: min=%d, max=%d, default=%d, step=%u",
_LOG_ERROR("Invalid value %d of control %s: min=%d, max=%d, default=%d, step=%u",
value, name, query->minimum, query->maximum, query->default_value, query->step);
}
return;
@@ -1124,10 +1124,10 @@ static void _capture_set_control(
};
if (us_xioctl(cap->run->fd, VIDIOC_S_CTRL, &ctl) < 0) {
if (!quiet) {
_D_LOG_PERROR("Can't set control %s", name);
_LOG_PERROR("Can't set control %s", name);
}
} else if (!quiet) {
_D_LOG_INFO("Applying control %s: %d", name, ctl.value);
_LOG_INFO("Applying control %s: %d", name, ctl.value);
}
}
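For context, the _D_LOG_* to _LOG_* change throughout this file is a pure rename of the per-module logging wrappers. A minimal standalone sketch of how such wrappers work is shown below; US_LOG_INFO here is a simplified stand-in for the project's real logging macro, and main() exists only for illustration:

// Illustrative sketch, not part of this diff: how the renamed _LOG_* wrappers work.
// The "CAP: " prefix is joined to x_msg by C string-literal concatenation, and the
// GNU ##__VA_ARGS__ extension drops the trailing comma when no arguments are passed.
#include <stdio.h>

#define US_LOG_INFO(fmt, ...) printf("-- INFO -- " fmt "\n", ##__VA_ARGS__) // simplified stand-in
#define _LOG_INFO(x_msg, ...) US_LOG_INFO("CAP: " x_msg, ##__VA_ARGS__)

int main(void) {
	_LOG_INFO("Capturing started");          // expands without variadic arguments
	_LOG_INFO("Using input channel: %d", 0); // expands with arguments
	return 0;
}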

View File

@@ -132,8 +132,8 @@ int us_capture_parse_io_method(const char *str);
int us_capture_open(us_capture_s *cap);
void us_capture_close(us_capture_s *cap);
int us_capture_grab_buffer(us_capture_s *cap, us_capture_hwbuf_s **hw);
int us_capture_release_buffer(us_capture_s *cap, us_capture_hwbuf_s *hw);
int us_capture_hwbuf_grab(us_capture_s *cap, us_capture_hwbuf_s **hw);
int us_capture_hwbuf_release(us_capture_s *cap, us_capture_hwbuf_s *hw);
void us_capture_buffer_incref(us_capture_hwbuf_s *hw);
void us_capture_buffer_decref(us_capture_hwbuf_s *hw);
void us_capture_hwbuf_incref(us_capture_hwbuf_s *hw);
void us_capture_hwbuf_decref(us_capture_hwbuf_s *hw);
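A minimal caller-side sketch of the renamed hwbuf API follows; the stream_loop() function, its loop structure, and the error handling are hypothetical and only illustrate how the old names map onto the new ones:

// Hypothetical usage sketch for the renamed buffer API; not taken from this diff.
static void stream_loop(us_capture_s *cap) {
	if (us_capture_open(cap) < 0) {
		return;
	}
	us_capture_hwbuf_s *hw = NULL;
	while (us_capture_hwbuf_grab(cap, &hw) >= 0) { // was us_capture_grab_buffer()
		us_capture_hwbuf_incref(hw);               // was us_capture_buffer_incref()
		// ... hand the frame to an encoder or sink here ...
		us_capture_hwbuf_decref(hw);               // was us_capture_buffer_decref()
		us_capture_hwbuf_release(cap, hw);         // was us_capture_release_buffer()
	}
	us_capture_close(cap);
}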

View File

@@ -26,7 +26,7 @@
#define US_VERSION_MAJOR 6
#define US_VERSION_MINOR 8
#define US_VERSION_MINOR 9
#define US_MAKE_VERSION2(_major, _minor) #_major "." #_minor
#define US_MAKE_VERSION1(_major, _minor) US_MAKE_VERSION2(_major, _minor)
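The two macro levels above exist so the numeric defines are expanded before stringification; a small illustration follows (the us_version variable is hypothetical, added only to show the expansion):

// Illustration of the two-step stringification; not part of this diff.
// US_MAKE_VERSION1 expands its arguments first, so the result is "6" "." "9", i.e. "6.9".
// Calling US_MAKE_VERSION2 directly would stringify the macro names instead,
// yielding "US_VERSION_MAJOR.US_VERSION_MINOR".
static const char *const us_version = US_MAKE_VERSION1(US_VERSION_MAJOR, US_VERSION_MINOR);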

View File

@@ -58,11 +58,11 @@ static const char *_connector_type_to_string(u32 type);
static float _get_refresh_rate(const drmModeModeInfo *mode);
#define _D_LOG_ERROR(x_msg, ...) US_LOG_ERROR("DRM: " x_msg, ##__VA_ARGS__)
#define _D_LOG_PERROR(x_msg, ...) US_LOG_PERROR("DRM: " x_msg, ##__VA_ARGS__)
#define _D_LOG_INFO(x_msg, ...) US_LOG_INFO("DRM: " x_msg, ##__VA_ARGS__)
#define _D_LOG_VERBOSE(x_msg, ...) US_LOG_VERBOSE("DRM: " x_msg, ##__VA_ARGS__)
#define _D_LOG_DEBUG(x_msg, ...) US_LOG_DEBUG("DRM: " x_msg, ##__VA_ARGS__)
#define _LOG_ERROR(x_msg, ...) US_LOG_ERROR("DRM: " x_msg, ##__VA_ARGS__)
#define _LOG_PERROR(x_msg, ...) US_LOG_PERROR("DRM: " x_msg, ##__VA_ARGS__)
#define _LOG_INFO(x_msg, ...) US_LOG_INFO("DRM: " x_msg, ##__VA_ARGS__)
#define _LOG_VERBOSE(x_msg, ...) US_LOG_VERBOSE("DRM: " x_msg, ##__VA_ARGS__)
#define _LOG_DEBUG(x_msg, ...) US_LOG_DEBUG("DRM: " x_msg, ##__VA_ARGS__)
us_drm_s *us_drm_init(void) {
@@ -102,13 +102,13 @@ int us_drm_open(us_drm_s *drm, const us_capture_s *cap) {
default: goto error;
}
_D_LOG_INFO("Configuring DRM device for %s ...", (cap == NULL ? "STUB" : "DMA"));
_LOG_INFO("Configuring DRM device for %s ...", (cap == NULL ? "STUB" : "DMA"));
if ((run->fd = open(drm->path, O_RDWR | O_CLOEXEC | O_NONBLOCK)) < 0) {
_D_LOG_PERROR("Can't open DRM device");
_LOG_PERROR("Can't open DRM device");
goto error;
}
_D_LOG_DEBUG("DRM device fd=%d opened", run->fd);
_LOG_DEBUG("DRM device fd=%d opened", run->fd);
int stub = 0; // Open the real device with DMA
if (cap == NULL) {
@@ -117,18 +117,18 @@ int us_drm_open(us_drm_s *drm, const us_capture_s *cap) {
stub = US_DRM_STUB_BAD_FORMAT;
char fourcc_str[8];
us_fourcc_to_string(cap->run->format, fourcc_str, 8);
_D_LOG_ERROR("Input format %s is not supported, forcing to STUB ...", fourcc_str);
_LOG_ERROR("Input format %s is not supported, forcing to STUB ...", fourcc_str);
}
# define CHECK_CAP(x_cap) { \
_D_LOG_DEBUG("Checking %s ...", #x_cap); \
_LOG_DEBUG("Checking %s ...", #x_cap); \
u64 m_check; \
if (drmGetCap(run->fd, x_cap, &m_check) < 0) { \
_D_LOG_PERROR("Can't check " #x_cap); \
_LOG_PERROR("Can't check " #x_cap); \
goto error; \
} \
if (!m_check) { \
_D_LOG_ERROR(#x_cap " is not supported"); \
_LOG_ERROR(#x_cap " is not supported"); \
goto error; \
} \
}
@@ -149,7 +149,7 @@ int us_drm_open(us_drm_s *drm, const us_capture_s *cap) {
if ((stub == 0) && (width != run->mode.hdisplay || height < run->mode.vdisplay)) {
// We'll try to show something instead of nothing if height != vdisplay
stub = US_DRM_STUB_BAD_RESOLUTION;
_D_LOG_ERROR("There is no appropriate modes for the capture, forcing to STUB ...");
_LOG_ERROR("There is no appropriate modes for the capture, forcing to STUB ...");
}
if (_drm_init_buffers(drm, (stub > 0 ? NULL : cap)) < 0) {
@@ -157,16 +157,16 @@ int us_drm_open(us_drm_s *drm, const us_capture_s *cap) {
}
run->saved_crtc = drmModeGetCrtc(run->fd, run->crtc_id);
_D_LOG_DEBUG("Setting up CRTC ...");
_LOG_DEBUG("Setting up CRTC ...");
if (drmModeSetCrtc(run->fd, run->crtc_id, run->bufs[0].id, 0, 0, &run->conn_id, 1, &run->mode) < 0) {
_D_LOG_PERROR("Can't set CRTC");
_LOG_PERROR("Can't set CRTC");
goto error;
}
run->opened_for_stub = (stub > 0);
run->exposing_dma_fd = -1;
run->unplugged_reported = false;
_D_LOG_INFO("Opened for %s ...", (run->opened_for_stub ? "STUB" : "DMA"));
_LOG_INFO("Opened for %s ...", (run->opened_for_stub ? "STUB" : "DMA"));
return stub;
error:
@@ -175,7 +175,7 @@ error:
unplugged:
if (!run->unplugged_reported) {
_D_LOG_ERROR("Display is not plugged");
_LOG_ERROR("Display is not plugged");
run->unplugged_reported = true;
}
us_drm_close(drm);
@@ -194,33 +194,33 @@ void us_drm_close(us_drm_s *drm) {
}
if (run->saved_crtc != NULL) {
_D_LOG_DEBUG("Restoring CRTC ...");
_LOG_DEBUG("Restoring CRTC ...");
if (drmModeSetCrtc(run->fd,
run->saved_crtc->crtc_id, run->saved_crtc->buffer_id,
run->saved_crtc->x, run->saved_crtc->y,
&run->conn_id, 1, &run->saved_crtc->mode
) < 0 && errno != ENOENT) {
_D_LOG_PERROR("Can't restore CRTC");
_LOG_PERROR("Can't restore CRTC");
}
drmModeFreeCrtc(run->saved_crtc);
run->saved_crtc = NULL;
}
if (run->bufs != NULL) {
_D_LOG_DEBUG("Releasing buffers ...");
_LOG_DEBUG("Releasing buffers ...");
for (uint n_buf = 0; n_buf < run->n_bufs; ++n_buf) {
us_drm_buffer_s *const buf = &run->bufs[n_buf];
if (buf->fb_added && drmModeRmFB(run->fd, buf->id) < 0) {
_D_LOG_PERROR("Can't remove buffer=%u", n_buf);
_LOG_PERROR("Can't remove buffer=%u", n_buf);
}
if (buf->dumb_created) {
struct drm_mode_destroy_dumb destroy = {.handle = buf->handle};
if (drmIoctl(run->fd, DRM_IOCTL_MODE_DESTROY_DUMB, &destroy) < 0) {
_D_LOG_PERROR("Can't destroy dumb buffer=%u", n_buf);
_LOG_PERROR("Can't destroy dumb buffer=%u", n_buf);
}
}
if (buf->data != NULL && munmap(buf->data, buf->allocated)) {
_D_LOG_PERROR("Can't unmap buffer=%u", n_buf);
_LOG_PERROR("Can't unmap buffer=%u", n_buf);
}
}
US_DELETE(run->bufs, free);
@@ -237,7 +237,7 @@ void us_drm_close(us_drm_s *drm) {
run->stub_n_buf = 0;
if (say) {
_D_LOG_INFO("Closed");
_LOG_INFO("Closed");
}
}
@@ -276,13 +276,13 @@ int us_drm_wait_for_vsync(us_drm_s *drm) {
FD_ZERO(&fds);
FD_SET(run->fd, &fds);
_D_LOG_DEBUG("Calling select() for VSync ...");
_LOG_DEBUG("Calling select() for VSync ...");
const int result = select(run->fd + 1, &fds, NULL, NULL, &timeout);
if (result < 0) {
_D_LOG_PERROR("Can't select(%d) device for VSync", run->fd);
_LOG_PERROR("Can't select(%d) device for VSync", run->fd);
return -1;
} else if (result == 0) {
_D_LOG_ERROR("Device timeout while waiting VSync");
_LOG_ERROR("Device timeout while waiting VSync");
return -1;
}
@@ -290,9 +290,9 @@ int us_drm_wait_for_vsync(us_drm_s *drm) {
.version = DRM_EVENT_CONTEXT_VERSION,
.page_flip_handler = _drm_vsync_callback,
};
_D_LOG_DEBUG("Handling DRM event (maybe VSync) ...");
_LOG_DEBUG("Handling DRM event (maybe VSync) ...");
if (drmHandleEvent(run->fd, &ctx) < 0) {
_D_LOG_PERROR("Can't handle DRM event");
_LOG_PERROR("Can't handle DRM event");
return -1;
}
return 0;
@@ -306,7 +306,7 @@ static void _drm_vsync_callback(int fd, uint n_frame, uint sec, uint usec, void
us_drm_buffer_s *const buf = v_buf;
*buf->ctx.has_vsync = true;
*buf->ctx.exposing_dma_fd = -1;
_D_LOG_DEBUG("Got VSync signal");
_LOG_DEBUG("Got VSync signal");
}
int us_drm_expose_stub(us_drm_s *drm, us_drm_stub_e stub, const us_capture_s *cap) {
@@ -337,12 +337,7 @@ int us_drm_expose_stub(us_drm_s *drm, us_drm_stub_e stub, const us_capture_s *ca
break;
};
case US_DRM_STUB_BAD_FORMAT:
DRAW_MSG(
"=== PiKVM ==="
"\n \n< UNSUPPORTED CAPTURE FORMAT >"
"\n \nIt shouldn't happen ever."
"\n \nPlease check the logs and report a bug:"
"\n \n- https://github.com/pikvm/pikvm -");
DRAW_MSG("=== PiKVM ===\n \n< UNSUPPORTED CAPTURE FORMAT >");
break;
case US_DRM_STUB_NO_SIGNAL:
DRAW_MSG("=== PiKVM ===\n \n< NO SIGNAL >");
@@ -360,18 +355,18 @@ int us_drm_expose_stub(us_drm_s *drm, us_drm_stub_e stub, const us_capture_s *ca
run->has_vsync = false;
_D_LOG_DEBUG("Copying STUB frame ...")
_LOG_DEBUG("Copying STUB frame ...")
memcpy(buf->data, run->ft->frame->data, US_MIN(run->ft->frame->used, buf->allocated));
_D_LOG_DEBUG("Exposing STUB framebuffer n_buf=%u ...", run->stub_n_buf);
_LOG_DEBUG("Exposing STUB framebuffer n_buf=%u ...", run->stub_n_buf);
const int retval = drmModePageFlip(
run->fd, run->crtc_id, buf->id,
DRM_MODE_PAGE_FLIP_EVENT | DRM_MODE_PAGE_FLIP_ASYNC,
buf);
if (retval < 0) {
_D_LOG_PERROR("Can't expose STUB framebuffer n_buf=%u ...", run->stub_n_buf);
_LOG_PERROR("Can't expose STUB framebuffer n_buf=%u ...", run->stub_n_buf);
}
_D_LOG_DEBUG("Exposed STUB framebuffer n_buf=%u", run->stub_n_buf);
_LOG_DEBUG("Exposed STUB framebuffer n_buf=%u", run->stub_n_buf);
run->stub_n_buf = (run->stub_n_buf + 1) % run->n_bufs;
return retval;
@@ -393,15 +388,15 @@ int us_drm_expose_dma(us_drm_s *drm, const us_capture_hwbuf_s *hw) {
run->has_vsync = false;
_D_LOG_DEBUG("Exposing DMA framebuffer n_buf=%u ...", hw->buf.index);
_LOG_DEBUG("Exposing DMA framebuffer n_buf=%u ...", hw->buf.index);
const int retval = drmModePageFlip(
run->fd, run->crtc_id, buf->id,
DRM_MODE_PAGE_FLIP_EVENT | DRM_MODE_PAGE_FLIP_ASYNC,
buf);
if (retval < 0) {
_D_LOG_PERROR("Can't expose DMA framebuffer n_buf=%u ...", run->stub_n_buf);
_LOG_PERROR("Can't expose DMA framebuffer n_buf=%u ...", run->stub_n_buf);
}
_D_LOG_DEBUG("Exposed DMA framebuffer n_buf=%u", run->stub_n_buf);
_LOG_DEBUG("Exposed DMA framebuffer n_buf=%u", run->stub_n_buf);
run->exposing_dma_fd = hw->dma_fd;
return retval;
}
@@ -410,35 +405,35 @@ static int _drm_check_status(us_drm_s *drm) {
us_drm_runtime_s *run = drm->run;
if (run->status_fd < 0) {
_D_LOG_DEBUG("Trying to find status file ...");
_LOG_DEBUG("Trying to find status file ...");
struct stat st;
if (stat(drm->path, &st) < 0) {
_D_LOG_PERROR("Can't stat() DRM device");
_LOG_PERROR("Can't stat() DRM device");
goto error;
}
const uint mi = minor(st.st_rdev);
_D_LOG_DEBUG("DRM device minor(st_rdev)=%u", mi);
_LOG_DEBUG("DRM device minor(st_rdev)=%u", mi);
char path[128];
US_SNPRINTF(path, 127, "/sys/class/drm/card%u-%s/status", mi, drm->port);
_D_LOG_DEBUG("Opening status file %s ...", path);
_LOG_DEBUG("Opening status file %s ...", path);
if ((run->status_fd = open(path, O_RDONLY | O_CLOEXEC)) < 0) {
_D_LOG_PERROR("Can't open status file: %s", path);
_LOG_PERROR("Can't open status file: %s", path);
goto error;
}
_D_LOG_DEBUG("Status file fd=%d opened", run->status_fd);
_LOG_DEBUG("Status file fd=%d opened", run->status_fd);
}
char status_ch;
if (read(run->status_fd, &status_ch, 1) != 1) {
_D_LOG_PERROR("Can't read status file");
_LOG_PERROR("Can't read status file");
goto error;
}
if (lseek(run->status_fd, 0, SEEK_SET) != 0) {
_D_LOG_PERROR("Can't rewind status file");
_LOG_PERROR("Can't rewind status file");
goto error;
}
_D_LOG_DEBUG("Current display status: %c", status_ch);
_LOG_DEBUG("Current display status: %c", status_ch);
return (status_ch == 'd' ? -2 : 0);
error:
@@ -449,12 +444,12 @@ error:
static void _drm_ensure_dpms_power(us_drm_s *drm, bool on) {
us_drm_runtime_s *const run = drm->run;
if (run->dpms_id > 0 && run->dpms_state != (int)on) {
_D_LOG_INFO("Changing DPMS power mode: %d -> %u ...", run->dpms_state, on);
_LOG_INFO("Changing DPMS power mode: %d -> %u ...", run->dpms_state, on);
if (drmModeConnectorSetProperty(
run->fd, run->conn_id, run->dpms_id,
(on ? DRM_MODE_DPMS_ON : DRM_MODE_DPMS_OFF)
) < 0) {
_D_LOG_PERROR("Can't set DPMS power=%u (ignored)", on);
_LOG_PERROR("Can't set DPMS power=%u (ignored)", on);
}
}
run->dpms_state = (int)on;
@@ -466,7 +461,7 @@ static int _drm_init_buffers(us_drm_s *drm, const us_capture_s *cap) {
const uint n_bufs = (cap == NULL ? 4 : cap->run->n_bufs);
const char *name = (cap == NULL ? "STUB" : "DMA");
_D_LOG_DEBUG("Initializing %u %s buffers ...", n_bufs, name);
_LOG_DEBUG("Initializing %u %s buffers ...", n_bufs, name);
uint format = DRM_FORMAT_RGB888;
@@ -489,7 +484,7 @@ static int _drm_init_buffers(us_drm_s *drm, const us_capture_s *cap) {
.bpp = 24,
};
if (drmIoctl(run->fd, DRM_IOCTL_MODE_CREATE_DUMB, &create) < 0) {
_D_LOG_PERROR("Can't create %s buffer=%u", name, n_buf);
_LOG_PERROR("Can't create %s buffer=%u", name, n_buf);
return -1;
}
buf->handle = create.handle;
@@ -497,7 +492,7 @@ static int _drm_init_buffers(us_drm_s *drm, const us_capture_s *cap) {
struct drm_mode_map_dumb map = {.handle = create.handle};
if (drmIoctl(run->fd, DRM_IOCTL_MODE_MAP_DUMB, &map) < 0) {
_D_LOG_PERROR("Can't prepare dumb buffer=%u to mapping", n_buf);
_LOG_PERROR("Can't prepare dumb buffer=%u to mapping", n_buf);
return -1;
}
if ((buf->data = mmap(
@@ -505,7 +500,7 @@ static int _drm_init_buffers(us_drm_s *drm, const us_capture_s *cap) {
PROT_READ | PROT_WRITE, MAP_SHARED,
run->fd, map.offset
)) == MAP_FAILED) {
_D_LOG_PERROR("Can't map buffer=%u", n_buf);
_LOG_PERROR("Can't map buffer=%u", n_buf);
return -1;
}
memset(buf->data, 0, create.size);
@@ -516,7 +511,7 @@ static int _drm_init_buffers(us_drm_s *drm, const us_capture_s *cap) {
} else {
if (drmPrimeFDToHandle(run->fd, cap->run->bufs[n_buf].dma_fd, &buf->handle) < 0) {
_D_LOG_PERROR("Can't import DMA buffer=%u from capture device", n_buf);
_LOG_PERROR("Can't import DMA buffer=%u from capture device", n_buf);
return -1;
}
handles[0] = buf->handle;
@@ -533,7 +528,7 @@ static int _drm_init_buffers(us_drm_s *drm, const us_capture_s *cap) {
run->mode.hdisplay, run->mode.vdisplay, format,
handles, strides, offsets, &buf->id, 0
)) {
_D_LOG_PERROR("Can't setup buffer=%u", n_buf);
_LOG_PERROR("Can't setup buffer=%u", n_buf);
return -1;
}
buf->fb_added = true;
@@ -546,22 +541,22 @@ static int _drm_find_sink(us_drm_s *drm, uint width, uint height, float hz) {
run->crtc_id = 0;
_D_LOG_DEBUG("Trying to find the appropriate sink ...");
_LOG_DEBUG("Trying to find the appropriate sink ...");
drmModeRes *res = drmModeGetResources(run->fd);
if (res == NULL) {
_D_LOG_PERROR("Can't get resources info");
_LOG_PERROR("Can't get resources info");
goto done;
}
if (res->count_connectors <= 0) {
_D_LOG_ERROR("Can't find any connectors");
_LOG_ERROR("Can't find any connectors");
goto done;
}
for (int ci = 0; ci < res->count_connectors; ++ci) {
drmModeConnector *conn = drmModeGetConnector(run->fd, res->connectors[ci]);
if (conn == NULL) {
_D_LOG_PERROR("Can't get connector index=%d", ci);
_LOG_PERROR("Can't get connector index=%d", ci);
goto done;
}
@@ -573,37 +568,37 @@ static int _drm_find_sink(us_drm_s *drm, uint width, uint height, float hz) {
drmModeFreeConnector(conn);
continue;
}
_D_LOG_INFO("Using connector %s: conn_type=%d, conn_type_id=%d",
_LOG_INFO("Using connector %s: conn_type=%d, conn_type_id=%d",
drm->port, conn->connector_type, conn->connector_type_id);
if (conn->connection != DRM_MODE_CONNECTED) {
_D_LOG_ERROR("Connector for port %s has !DRM_MODE_CONNECTED", drm->port);
_LOG_ERROR("Connector for port %s has !DRM_MODE_CONNECTED", drm->port);
drmModeFreeConnector(conn);
goto done;
}
drmModeModeInfo *best;
if ((best = _find_best_mode(conn, width, height, hz)) == NULL) {
_D_LOG_ERROR("Can't find any appropriate display modes");
_LOG_ERROR("Can't find any appropriate display modes");
drmModeFreeConnector(conn);
goto unplugged;
}
_D_LOG_INFO("Using best mode: %ux%up%.02f",
_LOG_INFO("Using best mode: %ux%up%.02f",
best->hdisplay, best->vdisplay, _get_refresh_rate(best));
if ((run->dpms_id = _find_dpms(run->fd, conn)) > 0) {
_D_LOG_INFO("Using DPMS: id=%u", run->dpms_id);
_LOG_INFO("Using DPMS: id=%u", run->dpms_id);
} else {
_D_LOG_INFO("Using DPMS: None");
_LOG_INFO("Using DPMS: None");
}
u32 taken_crtcs = 0; // Unused here
if ((run->crtc_id = _find_crtc(run->fd, res, conn, &taken_crtcs)) == 0) {
_D_LOG_ERROR("Can't find CRTC");
_LOG_ERROR("Can't find CRTC");
drmModeFreeConnector(conn);
goto done;
}
_D_LOG_INFO("Using CRTC: id=%u", run->crtc_id);
_LOG_INFO("Using CRTC: id=%u", run->crtc_id);
run->conn_id = conn->connector_id;
memcpy(&run->mode, best, sizeof(drmModeModeInfo));

View File

@@ -102,11 +102,11 @@ static const char *_http_get_header(struct evhttp_request *request, const char *
static char *_http_get_client_hostport(struct evhttp_request *request);
#define _S_LOG_ERROR(x_msg, ...) US_LOG_ERROR("HTTP: " x_msg, ##__VA_ARGS__)
#define _S_LOG_PERROR(x_msg, ...) US_LOG_PERROR("HTTP: " x_msg, ##__VA_ARGS__)
#define _S_LOG_INFO(x_msg, ...) US_LOG_INFO("HTTP: " x_msg, ##__VA_ARGS__)
#define _S_LOG_VERBOSE(x_msg, ...) US_LOG_VERBOSE("HTTP: " x_msg, ##__VA_ARGS__)
#define _S_LOG_DEBUG(x_msg, ...) US_LOG_DEBUG("HTTP: " x_msg, ##__VA_ARGS__)
#define _LOG_ERROR(x_msg, ...) US_LOG_ERROR("HTTP: " x_msg, ##__VA_ARGS__)
#define _LOG_PERROR(x_msg, ...) US_LOG_PERROR("HTTP: " x_msg, ##__VA_ARGS__)
#define _LOG_INFO(x_msg, ...) US_LOG_INFO("HTTP: " x_msg, ##__VA_ARGS__)
#define _LOG_VERBOSE(x_msg, ...) US_LOG_VERBOSE("HTTP: " x_msg, ##__VA_ARGS__)
#define _LOG_DEBUG(x_msg, ...) US_LOG_DEBUG("HTTP: " x_msg, ##__VA_ARGS__)
#define _A_EVBUFFER_NEW(x_buf) assert((x_buf = evbuffer_new()) != NULL)
#define _A_EVBUFFER_ADD(x_buf, x_data, x_size) assert(!evbuffer_add(x_buf, x_data, x_size))
@@ -188,7 +188,7 @@ int us_server_listen(us_server_s *server) {
{
if (server->static_path[0] != '\0') {
_S_LOG_INFO("Enabling the file server: %s", server->static_path);
_LOG_INFO("Enabling the file server: %s", server->static_path);
evhttp_set_gencb(run->http, _http_callback_static, (void*)server);
} else {
assert(!evhttp_set_cb(run->http, "/", _http_callback_root, (void*)server));
@@ -227,11 +227,11 @@ int us_server_listen(us_server_s *server) {
US_ASPRINTF(run->auth_token, "Basic %s", encoded_token);
free(encoded_token);
_S_LOG_INFO("Using HTTP basic auth");
_LOG_INFO("Using HTTP basic auth");
}
if (server->unix_path[0] != '\0') {
_S_LOG_DEBUG("Binding server to UNIX socket '%s' ...", server->unix_path);
_LOG_DEBUG("Binding server to UNIX socket '%s' ...", server->unix_path);
if ((run->ext_fd = us_evhttp_bind_unix(
run->http,
server->unix_path,
@@ -240,33 +240,33 @@ int us_server_listen(us_server_s *server) {
) {
return -1;
}
_S_LOG_INFO("Listening HTTP on UNIX socket '%s'", server->unix_path);
_LOG_INFO("Listening HTTP on UNIX socket '%s'", server->unix_path);
# ifdef WITH_SYSTEMD
} else if (server->systemd) {
_S_LOG_DEBUG("Binding HTTP to systemd socket ...");
_LOG_DEBUG("Binding HTTP to systemd socket ...");
if ((run->ext_fd = us_evhttp_bind_systemd(run->http)) < 0) {
return -1;
}
_S_LOG_INFO("Listening systemd socket ...");
_LOG_INFO("Listening systemd socket ...");
# endif
} else {
_S_LOG_DEBUG("Binding HTTP to [%s]:%u ...", server->host, server->port);
_LOG_DEBUG("Binding HTTP to [%s]:%u ...", server->host, server->port);
if (evhttp_bind_socket(run->http, server->host, server->port) < 0) {
_S_LOG_PERROR("Can't bind HTTP on [%s]:%u", server->host, server->port)
_LOG_PERROR("Can't bind HTTP on [%s]:%u", server->host, server->port)
return -1;
}
_S_LOG_INFO("Listening HTTP on [%s]:%u", server->host, server->port);
_LOG_INFO("Listening HTTP on [%s]:%u", server->host, server->port);
}
return 0;
}
void us_server_loop(us_server_s *server) {
_S_LOG_INFO("Starting eventloop ...");
_LOG_INFO("Starting eventloop ...");
event_base_dispatch(server->run->base);
_S_LOG_INFO("Eventloop stopped");
_LOG_INFO("Eventloop stopped");
}
void us_server_loop_break(us_server_s *server) {
@@ -407,18 +407,18 @@ static void _http_callback_static(struct evhttp_request *request, void *v_server
}
if ((fd = open(static_path, O_RDONLY)) < 0) {
_S_LOG_PERROR("Can't open found static file %s", static_path);
_LOG_PERROR("Can't open found static file %s", static_path);
goto not_found;
}
{
struct stat st;
if (fstat(fd, &st) < 0) {
_S_LOG_PERROR("Can't stat() found static file %s", static_path);
_LOG_PERROR("Can't stat() found static file %s", static_path);
goto not_found;
}
if (st.st_size > 0 && evbuffer_add_file(buf, fd, 0, st.st_size) < 0) {
_S_LOG_ERROR("Can't serve static file %s", static_path);
_LOG_ERROR("Can't serve static file %s", static_path);
goto not_found;
}
@@ -599,17 +599,17 @@ static void _http_callback_stream(struct evhttp_request *request, void *v_server
# endif
}
_S_LOG_INFO("NEW client (now=%u): %s, id=%" PRIx64,
_LOG_INFO("NEW client (now=%u): %s, id=%" PRIx64,
run->stream_clients_count, client->hostport, client->id);
struct bufferevent *const buf_event = evhttp_connection_get_bufferevent(conn);
if (server->tcp_nodelay && run->ext_fd >= 0) {
_S_LOG_DEBUG("Setting up TCP_NODELAY to the client %s ...", client->hostport);
_LOG_DEBUG("Setting up TCP_NODELAY to the client %s ...", client->hostport);
const evutil_socket_t fd = bufferevent_getfd(buf_event);
assert(fd >= 0);
int on = 1;
if (setsockopt(fd, IPPROTO_TCP, TCP_NODELAY, (void*)&on, sizeof(on)) != 0) {
_S_LOG_PERROR("Can't set TCP_NODELAY to the client %s", client->hostport);
_LOG_PERROR("Can't set TCP_NODELAY to the client %s", client->hostport);
}
}
bufferevent_setcb(buf_event, NULL, NULL, _http_callback_stream_error, (void*)client);
@@ -786,7 +786,7 @@ static void _http_callback_stream_error(struct bufferevent *buf_event, short wha
}
char *const reason = us_bufferevent_format_reason(what);
_S_LOG_INFO("DEL client (now=%u): %s, id=%" PRIx64 ", %s",
_LOG_INFO("DEL client (now=%u): %s, id=%" PRIx64 ", %s",
run->stream_clients_count, client->hostport, client->id, reason);
free(reason);
@@ -942,7 +942,7 @@ static void _http_refresher(int fd, short what, void *v_server) {
stream_updated = true;
us_ring_consumer_release(ring, ri);
} else if (ex->expose_end_ts + 1 < us_get_now_monotonic()) {
_S_LOG_DEBUG("Repeating exposed ...");
_LOG_DEBUG("Repeating exposed ...");
ex->expose_begin_ts = us_get_now_monotonic();
ex->expose_cmp_ts = ex->expose_begin_ts;
ex->expose_end_ts = ex->expose_begin_ts;
@@ -972,7 +972,7 @@ static void _http_refresher(int fd, short what, void *v_server) {
static bool _expose_frame(us_server_s *server, const us_frame_s *frame) {
us_server_exposed_s *const ex = server->run->exposed;
_S_LOG_DEBUG("Updating exposed frame (online=%d) ...", frame->online);
_LOG_DEBUG("Updating exposed frame (online=%d) ...", frame->online);
ex->expose_begin_ts = us_get_now_monotonic();
if (server->drop_same_frames && frame->online) {
@@ -984,13 +984,13 @@ static bool _expose_frame(us_server_s *server, const us_frame_s *frame) {
) {
ex->expose_cmp_ts = us_get_now_monotonic();
ex->expose_end_ts = ex->expose_cmp_ts;
_S_LOG_VERBOSE("Dropped same frame number %u; cmp_time=%.06Lf",
_LOG_VERBOSE("Dropped same frame number %u; cmp_time=%.06Lf",
ex->dropped, (ex->expose_cmp_ts - ex->expose_begin_ts));
ex->dropped += 1;
return false; // Not updated
} else {
ex->expose_cmp_ts = us_get_now_monotonic();
_S_LOG_VERBOSE("Passed same frame check (need_drop=%d, maybe_same=%d); cmp_time=%.06Lf",
_LOG_VERBOSE("Passed same frame check (need_drop=%d, maybe_same=%d); cmp_time=%.06Lf",
need_drop, maybe_same, (ex->expose_cmp_ts - ex->expose_begin_ts));
}
}
@@ -1007,7 +1007,7 @@ static bool _expose_frame(us_server_s *server, const us_frame_s *frame) {
ex->expose_cmp_ts = ex->expose_begin_ts;
ex->expose_end_ts = us_get_now_monotonic();
_S_LOG_VERBOSE("Exposed frame: online=%d, exp_time=%.06Lf",
_LOG_VERBOSE("Exposed frame: online=%d, exp_time=%.06Lf",
ex->frame->online, (ex->expose_end_ts - ex->expose_begin_ts));
return true; // Updated
}

View File

@@ -56,11 +56,11 @@ static void _m2m_encoder_cleanup(us_m2m_encoder_s *enc);
static int _m2m_encoder_compress_raw(us_m2m_encoder_s *enc, const us_frame_s *src, us_frame_s *dest, bool force_key);
#define _E_LOG_ERROR(x_msg, ...) US_LOG_ERROR("%s: " x_msg, enc->name, ##__VA_ARGS__)
#define _E_LOG_PERROR(x_msg, ...) US_LOG_PERROR("%s: " x_msg, enc->name, ##__VA_ARGS__)
#define _E_LOG_INFO(x_msg, ...) US_LOG_INFO("%s: " x_msg, enc->name, ##__VA_ARGS__)
#define _E_LOG_VERBOSE(x_msg, ...) US_LOG_VERBOSE("%s: " x_msg, enc->name, ##__VA_ARGS__)
#define _E_LOG_DEBUG(x_msg, ...) US_LOG_DEBUG("%s: " x_msg, enc->name, ##__VA_ARGS__)
#define _LOG_ERROR(x_msg, ...) US_LOG_ERROR("%s: " x_msg, enc->name, ##__VA_ARGS__)
#define _LOG_PERROR(x_msg, ...) US_LOG_PERROR("%s: " x_msg, enc->name, ##__VA_ARGS__)
#define _LOG_INFO(x_msg, ...) US_LOG_INFO("%s: " x_msg, enc->name, ##__VA_ARGS__)
#define _LOG_VERBOSE(x_msg, ...) US_LOG_VERBOSE("%s: " x_msg, enc->name, ##__VA_ARGS__)
#define _LOG_DEBUG(x_msg, ...) US_LOG_DEBUG("%s: " x_msg, enc->name, ##__VA_ARGS__)
us_m2m_encoder_s *us_m2m_h264_encoder_init(const char *name, const char *path, uint bitrate, uint gop) {
@@ -85,7 +85,7 @@ us_m2m_encoder_s *us_m2m_jpeg_encoder_init(const char *name, const char *path, u
}
void us_m2m_encoder_destroy(us_m2m_encoder_s *enc) {
_E_LOG_INFO("Destroying encoder ...");
_LOG_INFO("Destroying encoder ...");
_m2m_encoder_cleanup(enc);
free(enc->path);
free(enc->name);
@@ -104,17 +104,17 @@ int us_m2m_encoder_compress(us_m2m_encoder_s *enc, const us_frame_s *src, us_fra
force_key = (enc->output_format == V4L2_PIX_FMT_H264 && (force_key || run->last_online != src->online));
_E_LOG_DEBUG("Compressing new frame; force_key=%d ...", force_key);
_LOG_DEBUG("Compressing new frame; force_key=%d ...", force_key);
if (_m2m_encoder_compress_raw(enc, src, dest, force_key) < 0) {
_m2m_encoder_cleanup(enc);
_E_LOG_ERROR("Encoder destroyed due an error (compress)");
_LOG_ERROR("Encoder destroyed due an error (compress)");
return -1;
}
us_frame_encoding_end(dest);
_E_LOG_VERBOSE("Compressed new frame: size=%zu, time=%0.3Lf, force_key=%d",
_LOG_VERBOSE("Compressed new frame: size=%zu, time=%0.3Lf, force_key=%d",
dest->used, dest->encode_end_ts - dest->encode_begin_ts, force_key);
run->last_online = src->online;
@@ -151,7 +151,7 @@ static us_m2m_encoder_s *_m2m_encoder_init(
#define _E_XIOCTL(x_request, x_value, x_msg, ...) { \
if (us_xioctl(run->fd, x_request, x_value) < 0) { \
_E_LOG_PERROR(x_msg, ##__VA_ARGS__); \
_LOG_PERROR(x_msg, ##__VA_ARGS__); \
goto error; \
} \
}
@@ -170,9 +170,9 @@ static void _m2m_encoder_ensure(us_m2m_encoder_s *enc, const us_frame_s *frame)
return; // Configured already
}
_E_LOG_INFO("Configuring encoder: DMA=%d ...", dma);
_LOG_INFO("Configuring encoder: DMA=%d ...", dma);
_E_LOG_DEBUG("Encoder changes: width=%u->%u, height=%u->%u, input_format=%u->%u, stride=%u->%u, dma=%u->%u",
_LOG_DEBUG("Encoder changes: width=%u->%u, height=%u->%u, input_format=%u->%u, stride=%u->%u, dma=%u->%u",
run->p_width, frame->width,
run->p_height, frame->height,
run->p_input_format, frame->format,
@@ -187,18 +187,18 @@ static void _m2m_encoder_ensure(us_m2m_encoder_s *enc, const us_frame_s *frame)
run->p_stride = frame->stride;
run->p_dma = dma;
_E_LOG_DEBUG("Opening encoder device ...");
_LOG_DEBUG("Opening encoder device ...");
if ((run->fd = open(enc->path, O_RDWR)) < 0) {
_E_LOG_PERROR("Can't open encoder device");
_LOG_PERROR("Can't open encoder device");
goto error;
}
_E_LOG_DEBUG("Encoder device fd=%d opened", run->fd);
_LOG_DEBUG("Encoder device fd=%d opened", run->fd);
# define SET_OPTION(x_cid, x_value) { \
struct v4l2_control m_ctl = {0}; \
m_ctl.id = x_cid; \
m_ctl.value = x_value; \
_E_LOG_DEBUG("Configuring option " #x_cid " ..."); \
_LOG_DEBUG("Configuring option " #x_cid " ..."); \
_E_XIOCTL(VIDIOC_S_CTRL, &m_ctl, "Can't set option " #x_cid); \
}
if (enc->output_format == V4L2_PIX_FMT_H264) {
@@ -227,10 +227,10 @@ static void _m2m_encoder_ensure(us_m2m_encoder_s *enc, const us_frame_s *frame)
fmt.fmt.pix_mp.height = run->p_height;
fmt.fmt.pix_mp.pixelformat = run->p_input_format;
fmt.fmt.pix_mp.field = V4L2_FIELD_ANY;
fmt.fmt.pix_mp.colorspace = V4L2_COLORSPACE_JPEG; // libcamera currently has no means to request the right colour space
fmt.fmt.pix_mp.colorspace = V4L2_COLORSPACE_JPEG; // FIXME: Wrong colors
fmt.fmt.pix_mp.num_planes = 1;
// fmt.fmt.pix_mp.plane_fmt[0].bytesperline = run->p_stride;
_E_LOG_DEBUG("Configuring INPUT format ...");
_LOG_DEBUG("Configuring INPUT format ...");
_E_XIOCTL(VIDIOC_S_FMT, &fmt, "Can't set INPUT format");
}
@@ -249,13 +249,13 @@ static void _m2m_encoder_ensure(us_m2m_encoder_s *enc, const us_frame_s *frame)
// https://github.com/raspberrypi/linux/pull/5232
fmt.fmt.pix_mp.plane_fmt[0].sizeimage = (1024 + 512) << 10; // 1.5Mb
}
_E_LOG_DEBUG("Configuring OUTPUT format ...");
_LOG_DEBUG("Configuring OUTPUT format ...");
_E_XIOCTL(VIDIOC_S_FMT, &fmt, "Can't set OUTPUT format");
if (fmt.fmt.pix_mp.pixelformat != enc->output_format) {
char fourcc_str[8];
_E_LOG_ERROR("The OUTPUT format can't be configured as %s",
_LOG_ERROR("The OUTPUT format can't be configured as %s",
us_fourcc_to_string(enc->output_format, fourcc_str, 8));
_E_LOG_ERROR("In case of Raspberry Pi, try to append 'start_x=1' to /boot/config.txt");
_LOG_ERROR("In case of Raspberry Pi, try to append 'start_x=1' to /boot/config.txt");
goto error;
}
}
@@ -277,7 +277,7 @@ static void _m2m_encoder_ensure(us_m2m_encoder_s *enc, const us_frame_s *frame)
setfps.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
setfps.parm.output.timeperframe.numerator = 1;
setfps.parm.output.timeperframe.denominator = run->fps_limit;
_E_LOG_DEBUG("Configuring INPUT FPS ...");
_LOG_DEBUG("Configuring INPUT FPS ...");
_E_XIOCTL(VIDIOC_S_PARM, &setfps, "Can't set INPUT FPS");
}
@@ -296,21 +296,21 @@ static void _m2m_encoder_ensure(us_m2m_encoder_s *enc, const us_frame_s *frame)
{
enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
_E_LOG_DEBUG("Starting INPUT ...");
_LOG_DEBUG("Starting INPUT ...");
_E_XIOCTL(VIDIOC_STREAMON, &type, "Can't start INPUT");
type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
_E_LOG_DEBUG("Starting OUTPUT ...");
_LOG_DEBUG("Starting OUTPUT ...");
_E_XIOCTL(VIDIOC_STREAMON, &type, "Can't start OUTPUT");
}
run->ready = true;
_E_LOG_INFO("Encoder is ready");
_LOG_INFO("Encoder is ready");
return;
error:
_m2m_encoder_cleanup(enc);
_E_LOG_ERROR("Encoder destroyed due an error (prepare)");
_LOG_ERROR("Encoder destroyed due an error (prepare)");
}
static int _m2m_encoder_init_buffers(
@@ -319,20 +319,20 @@ static int _m2m_encoder_init_buffers(
us_m2m_encoder_runtime_s *const run = enc->run;
_E_LOG_DEBUG("Initializing %s buffers ...", name);
_LOG_DEBUG("Initializing %s buffers ...", name);
struct v4l2_requestbuffers req = {0};
req.count = 1;
req.type = type;
req.memory = (dma ? V4L2_MEMORY_DMABUF : V4L2_MEMORY_MMAP);
_E_LOG_DEBUG("Requesting %u %s buffers ...", req.count, name);
_LOG_DEBUG("Requesting %u %s buffers ...", req.count, name);
_E_XIOCTL(VIDIOC_REQBUFS, &req, "Can't request %s buffers", name);
if (req.count < 1) {
_E_LOG_ERROR("Insufficient %s buffer memory: %u", name, req.count);
_LOG_ERROR("Insufficient %s buffer memory: %u", name, req.count);
goto error;
}
_E_LOG_DEBUG("Got %u %s buffers", req.count, name);
_LOG_DEBUG("Got %u %s buffers", req.count, name);
if (dma) {
*n_bufs_ptr = req.count;
@@ -349,25 +349,25 @@ static int _m2m_encoder_init_buffers(
buf.length = 1;
buf.m.planes = &plane;
_E_LOG_DEBUG("Querying %s buffer=%u ...", name, *n_bufs_ptr);
_LOG_DEBUG("Querying %s buffer=%u ...", name, *n_bufs_ptr);
_E_XIOCTL(VIDIOC_QUERYBUF, &buf, "Can't query %s buffer=%u", name, *n_bufs_ptr);
_E_LOG_DEBUG("Mapping %s buffer=%u ...", name, *n_bufs_ptr);
_LOG_DEBUG("Mapping %s buffer=%u ...", name, *n_bufs_ptr);
if (((*bufs_ptr)[*n_bufs_ptr].data = mmap(
NULL, plane.length,
PROT_READ | PROT_WRITE, MAP_SHARED,
run->fd, plane.m.mem_offset
)) == MAP_FAILED) {
_E_LOG_PERROR("Can't map %s buffer=%u", name, *n_bufs_ptr);
_LOG_PERROR("Can't map %s buffer=%u", name, *n_bufs_ptr);
goto error;
}
assert((*bufs_ptr)[*n_bufs_ptr].data != NULL);
(*bufs_ptr)[*n_bufs_ptr].allocated = plane.length;
_E_LOG_DEBUG("Queuing %s buffer=%u ...", name, *n_bufs_ptr);
_LOG_DEBUG("Queuing %s buffer=%u ...", name, *n_bufs_ptr);
_E_XIOCTL(VIDIOC_QBUF, &buf, "Can't queue %s buffer=%u", name, *n_bufs_ptr);
}
_E_LOG_DEBUG("All %s buffers are ready", name);
_LOG_DEBUG("All %s buffers are ready", name);
return 0;
error: // Mostly for _E_XIOCTL
@@ -383,9 +383,9 @@ static void _m2m_encoder_cleanup(us_m2m_encoder_s *enc) {
say = true;
# define STOP_STREAM(x_name, x_type) { \
enum v4l2_buf_type m_type_var = x_type; \
_E_LOG_DEBUG("Stopping %s ...", x_name); \
_LOG_DEBUG("Stopping %s ...", x_name); \
if (us_xioctl(run->fd, VIDIOC_STREAMOFF, &m_type_var) < 0) { \
_E_LOG_PERROR("Can't stop %s", x_name); \
_LOG_PERROR("Can't stop %s", x_name); \
} \
}
STOP_STREAM("OUTPUT", V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE);
@@ -400,7 +400,7 @@ static void _m2m_encoder_cleanup(us_m2m_encoder_s *enc) {
us_m2m_buffer_s *m_buf = &run->x_target##_bufs[m_index]; \
if (m_buf->allocated > 0 && m_buf->data != NULL) { \
if (munmap(m_buf->data, m_buf->allocated) < 0) { \
_E_LOG_PERROR("Can't unmap %s buffer=%u", #x_name, m_index); \
_LOG_PERROR("Can't unmap %s buffer=%u", #x_name, m_index); \
} \
} \
} \
@@ -415,7 +415,7 @@ static void _m2m_encoder_cleanup(us_m2m_encoder_s *enc) {
if (run->fd >= 0) {
say = true;
if (close(run->fd) < 0) {
_E_LOG_PERROR("Can't close encoder device");
_LOG_PERROR("Can't close encoder device");
}
run->fd = -1;
}
@@ -424,7 +424,7 @@ static void _m2m_encoder_cleanup(us_m2m_encoder_s *enc) {
run->ready = false;
if (say) {
_E_LOG_INFO("Encoder closed");
_LOG_INFO("Encoder closed");
}
}
@@ -437,7 +437,7 @@ static int _m2m_encoder_compress_raw(us_m2m_encoder_s *enc, const us_frame_s *sr
struct v4l2_control ctl = {0};
ctl.id = V4L2_CID_MPEG_VIDEO_FORCE_KEY_FRAME;
ctl.value = 1;
_E_LOG_DEBUG("Forcing keyframe ...")
_LOG_DEBUG("Forcing keyframe ...")
_E_XIOCTL(VIDIOC_S_CTRL, &ctl, "Can't force keyframe");
}
@@ -452,17 +452,17 @@ static int _m2m_encoder_compress_raw(us_m2m_encoder_s *enc, const us_frame_s *sr
input_buf.memory = V4L2_MEMORY_DMABUF;
input_buf.field = V4L2_FIELD_NONE;
input_plane.m.fd = src->dma_fd;
_E_LOG_DEBUG("Using INPUT-DMA buffer=%u", input_buf.index);
_LOG_DEBUG("Using INPUT-DMA buffer=%u", input_buf.index);
} else {
input_buf.memory = V4L2_MEMORY_MMAP;
_E_LOG_DEBUG("Grabbing INPUT buffer ...");
_LOG_DEBUG("Grabbing INPUT buffer ...");
_E_XIOCTL(VIDIOC_DQBUF, &input_buf, "Can't grab INPUT buffer");
if (input_buf.index >= run->n_input_bufs) {
_E_LOG_ERROR("V4L2 error: grabbed invalid INPUT: buffer=%u, n_bufs=%u",
_LOG_ERROR("V4L2 error: grabbed invalid INPUT: buffer=%u, n_bufs=%u",
input_buf.index, run->n_input_bufs);
goto error;
}
_E_LOG_DEBUG("Grabbed INPUT buffer=%u", input_buf.index);
_LOG_DEBUG("Grabbed INPUT buffer=%u", input_buf.index);
}
const u64 now_ts = us_get_now_monotonic_u64();
@@ -481,7 +481,7 @@ static int _m2m_encoder_compress_raw(us_m2m_encoder_s *enc, const us_frame_s *sr
const char *input_name = (run->p_dma ? "INPUT-DMA" : "INPUT");
_E_LOG_DEBUG("Sending%s %s buffer ...", (!run->p_dma ? " (releasing)" : ""), input_name);
_LOG_DEBUG("Sending%s %s buffer ...", (!run->p_dma ? " (releasing)" : ""), input_name);
_E_XIOCTL(VIDIOC_QBUF, &input_buf, "Can't send %s buffer", input_name);
// For non-DMA, sending the buffer back is effectively what releases it
@@ -493,20 +493,20 @@ static int _m2m_encoder_compress_raw(us_m2m_encoder_s *enc, const us_frame_s *sr
while (true) {
if (us_get_now_monotonic() > deadline_ts) {
_E_LOG_ERROR("Waiting for the encoder is too long");
_LOG_ERROR("Waiting for the encoder is too long");
goto error;
}
struct pollfd enc_poll = {run->fd, POLLIN, 0};
_E_LOG_DEBUG("Polling encoder ...");
_LOG_DEBUG("Polling encoder ...");
if (poll(&enc_poll, 1, 1000) < 0 && errno != EINTR) {
_E_LOG_PERROR("Can't poll encoder");
_LOG_PERROR("Can't poll encoder");
goto error;
}
if (enc_poll.revents & POLLIN) {
if (!input_released) {
_E_LOG_DEBUG("Releasing %s buffer=%u ...", input_name, input_buf.index);
_LOG_DEBUG("Releasing %s buffer=%u ...", input_name, input_buf.index);
_E_XIOCTL(VIDIOC_DQBUF, &input_buf, "Can't release %s buffer=%u",
input_name, input_buf.index);
input_released = true;
@@ -518,7 +518,7 @@ static int _m2m_encoder_compress_raw(us_m2m_encoder_s *enc, const us_frame_s *sr
output_buf.memory = V4L2_MEMORY_MMAP;
output_buf.length = 1;
output_buf.m.planes = &output_plane;
_E_LOG_DEBUG("Fetching OUTPUT buffer ...");
_LOG_DEBUG("Fetching OUTPUT buffer ...");
_E_XIOCTL(VIDIOC_DQBUF, &output_buf, "Can't fetch OUTPUT buffer");
bool done = false;
@@ -526,7 +526,7 @@ static int _m2m_encoder_compress_raw(us_m2m_encoder_s *enc, const us_frame_s *sr
// The first time around, the encoder may produce a buffer with garbage and a zero timestamp,
// so we need to make sure we are reading the output buffer that corresponds
// to the input one (with the same timestamp).
_E_LOG_DEBUG("Need to retry OUTPUT buffer due timestamp mismatch");
_LOG_DEBUG("Need to retry OUTPUT buffer due timestamp mismatch");
} else {
us_frame_set_data(dest, run->output_bufs[output_buf.index].data, output_plane.bytesused);
dest->key = output_buf.flags & V4L2_BUF_FLAG_KEYFRAME;
@@ -534,7 +534,7 @@ static int _m2m_encoder_compress_raw(us_m2m_encoder_s *enc, const us_frame_s *sr
done = true;
}
_E_LOG_DEBUG("Releasing OUTPUT buffer=%u ...", output_buf.index);
_LOG_DEBUG("Releasing OUTPUT buffer=%u ...", output_buf.index);
_E_XIOCTL(VIDIOC_QBUF, &output_buf, "Can't release OUTPUT buffer=%u", output_buf.index);
if (done) {
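The hunk above depends on pairing the dequeued OUTPUT buffer with the queued INPUT frame by timestamp: the first buffer after (re)configuration may hold garbage with a zero timestamp, so a mismatch is requeued and polling continues. A minimal standalone sketch of that pairing loop; dq_output/rq_output and the buffer struct are hypothetical stand-ins for the VIDIOC_DQBUF/VIDIOC_QBUF calls, not the real uStreamer code:

// Hypothetical sketch: match encoder OUTPUT buffers to the queued INPUT frame by timestamp.
#include <stdbool.h>
#include <stddef.h>
#include <stdint.h>
#include <stdio.h>

typedef struct {
	uint64_t ts; // timestamp the encoder copied from the INPUT buffer
	size_t bytesused; // encoded payload size
} out_buf_s;

// Simulated VIDIOC_DQBUF: first a garbage buffer with ts=0, then the real one.
static bool dq_output(out_buf_s *buf) {
	static const out_buf_s seq[] = {{0, 0}, {1000, 4096}};
	static unsigned calls = 0;
	if (calls >= 2) {
		return false;
	}
	*buf = seq[calls++];
	return true;
}

static void rq_output(const out_buf_s *buf) {
	(void)buf; // the real code requeues the buffer with VIDIOC_QBUF
}

int main(void) {
	const uint64_t want_ts = 1000; // timestamp we stamped on the INPUT buffer
	out_buf_s buf;
	while (dq_output(&buf)) {
		const bool done = (buf.ts == want_ts);
		if (!done) {
			printf("retry: stale OUTPUT buffer ts=%llu\n", (unsigned long long)buf.ts);
		} else {
			printf("got %zu encoded bytes for ts=%llu\n", buf.bytesused, (unsigned long long)buf.ts);
		}
		rq_output(&buf); // always give the buffer back to the encoder
		if (done) {
			break;
		}
	}
	return 0;
}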

View File

@@ -204,7 +204,7 @@ void us_stream_loop(us_stream_s *stream) {
uint slowdown_count = 0;
while (!atomic_load(&run->stop) && !atomic_load(&threads_stop)) {
us_capture_hwbuf_s *hw;
switch (us_capture_grab_buffer(cap, &hw)) {
switch (us_capture_hwbuf_grab(cap, &hw)) {
case -2: continue; // Broken frame
case -1: goto close; // Error
default: break; // Grabbed on >= 0
@@ -224,19 +224,19 @@ void us_stream_loop(us_stream_s *stream) {
us_gpio_set_stream_online(true);
# endif
us_capture_buffer_incref(hw); // JPEG
us_capture_hwbuf_incref(hw); // JPEG
us_queue_put(jpeg_ctx.queue, hw, 0);
if (run->h264 != NULL) {
us_capture_buffer_incref(hw); // H264
us_capture_hwbuf_incref(hw); // H264
us_queue_put(h264_ctx.queue, hw, 0);
}
if (stream->raw_sink != NULL) {
us_capture_buffer_incref(hw); // RAW
us_capture_hwbuf_incref(hw); // RAW
us_queue_put(raw_ctx.queue, hw, 0);
}
# ifdef WITH_V4P
if (stream->v4p) {
us_capture_buffer_incref(hw); // DRM
us_capture_hwbuf_incref(hw); // DRM
us_queue_put(drm_ctx.queue, hw, 0);
}
# endif
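The incref calls above hand the same grabbed hardware buffer to several consumer queues (JPEG, H264, RAW and, with WITH_V4P, DRM); each worker drops its reference when it is done, and the releaser thread returns the buffer to the driver once nobody holds it. A minimal sketch of that fan-out pattern using a plain C11 atomic counter; the hwbuf_* names are hypothetical, not the real us_capture_hwbuf API:

// Hypothetical sketch of per-consumer reference counting for one shared capture buffer.
#include <stdatomic.h>
#include <stdio.h>

typedef struct {
	atomic_int refs;
	int index; // driver buffer index
} hwbuf_s;

static void hwbuf_incref(hwbuf_s *hw) {
	atomic_fetch_add(&hw->refs, 1);
}

static void hwbuf_decref(hwbuf_s *hw) {
	if (atomic_fetch_sub(&hw->refs, 1) == 1) {
		// Last holder is done; the real code re-queues the buffer to the V4L2 driver here.
		printf("buffer=%d returned to the driver\n", hw->index);
	}
}

int main(void) {
	hwbuf_s hw = {.index = 0};
	atomic_init(&hw.refs, 1); // one reference held by the grabbing loop

	for (int consumer = 0; consumer < 3; ++consumer) {
		hwbuf_incref(&hw); // JPEG, H264, RAW in the real loop
	}
	hwbuf_decref(&hw); // the grabbing loop itself is done with the frame

	for (int consumer = 0; consumer < 3; ++consumer) {
		hwbuf_decref(&hw); // each worker finishes at its own pace
	}
	return 0;
}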
@@ -339,7 +339,7 @@ static void *_releaser_thread(void *v_ctx) {
}
US_MUTEX_LOCK(*ctx->mutex);
const int released = us_capture_release_buffer(ctx->cap, hw);
const int released = us_capture_hwbuf_release(ctx->cap, hw);
US_MUTEX_UNLOCK(*ctx->mutex);
if (released < 0) {
goto done;
@@ -364,7 +364,7 @@ static void *_jpeg_thread(void *v_ctx) {
us_encoder_job_s *const ready_job = ready_wr->job;
if (ready_job->hw != NULL) {
us_capture_buffer_decref(ready_job->hw);
us_capture_hwbuf_decref(ready_job->hw);
ready_job->hw = NULL;
if (ready_wr->job_failed) {
// pass
@@ -388,7 +388,7 @@ static void *_jpeg_thread(void *v_ctx) {
const bool update_required = (stream->jpeg_sink != NULL && us_memsink_server_check(stream->jpeg_sink, NULL));
if (!update_required && !_stream_has_jpeg_clients_cached(stream)) {
US_LOG_VERBOSE("JPEG: Passed encoding because nobody is watching");
us_capture_buffer_decref(hw);
us_capture_hwbuf_decref(hw);
continue;
}
@@ -397,7 +397,7 @@ static void *_jpeg_thread(void *v_ctx) {
fluency_passed += 1;
US_LOG_VERBOSE("JPEG: Passed %u frames for fluency: now=%.03Lf, grab_after=%.03Lf",
fluency_passed, now_ts, grab_after_ts);
us_capture_buffer_decref(hw);
us_capture_hwbuf_decref(hw);
continue;
}
fluency_passed = 0;
@@ -428,13 +428,13 @@ static void *_h264_thread(void *v_ctx) {
}
if (!us_memsink_server_check(h264->sink, NULL)) {
us_capture_buffer_decref(hw);
us_capture_hwbuf_decref(hw);
US_LOG_VERBOSE("H264: Passed encoding because nobody is watching");
continue;
}
if (hw->raw.grab_ts < grab_after_ts) {
us_capture_buffer_decref(hw);
us_capture_hwbuf_decref(hw);
US_LOG_VERBOSE("H264: Passed encoding for FPS limit: %u", h264->enc->run->fps_limit);
continue;
}
@@ -452,7 +452,7 @@ static void *_h264_thread(void *v_ctx) {
const ldf frame_interval = (ldf)1 / h264->enc->run->fps_limit;
grab_after_ts = hw->raw.grab_ts + frame_interval - 0.01;
us_capture_buffer_decref(hw);
us_capture_hwbuf_decref(hw);
}
return NULL;
}
@@ -468,13 +468,13 @@ static void *_raw_thread(void *v_ctx) {
}
if (!us_memsink_server_check(ctx->stream->raw_sink, NULL)) {
us_capture_buffer_decref(hw);
us_capture_hwbuf_decref(hw);
US_LOG_VERBOSE("RAW: Passed publishing because nobody is watching");
continue;
}
us_memsink_server_put(ctx->stream->raw_sink, &hw->raw, false);
us_capture_buffer_decref(hw);
us_capture_hwbuf_decref(hw);
}
return NULL;
}
@@ -497,7 +497,7 @@ static void *_drm_thread(void *v_ctx) {
while (!atomic_load(ctx->stop) && us_get_now_monotonic() < m_next_ts) { \
us_capture_hwbuf_s *m_pass_hw = _get_latest_hw(ctx->queue); \
if (m_pass_hw != NULL) { \
us_capture_buffer_decref(m_pass_hw); \
us_capture_hwbuf_decref(m_pass_hw); \
} \
} \
}
@@ -506,7 +506,7 @@ static void *_drm_thread(void *v_ctx) {
while (!atomic_load(ctx->stop)) {
CHECK(us_drm_wait_for_vsync(run->drm));
US_DELETE(prev_hw, us_capture_buffer_decref);
US_DELETE(prev_hw, us_capture_hwbuf_decref);
us_capture_hwbuf_s *hw = _get_latest_hw(ctx->queue);
if (hw == NULL) {
@@ -520,7 +520,7 @@ static void *_drm_thread(void *v_ctx) {
}
CHECK(us_drm_expose_stub(run->drm, run->drm_opened, ctx->stream->cap));
us_capture_buffer_decref(hw);
us_capture_hwbuf_decref(hw);
SLOWDOWN;
}
@@ -528,7 +528,7 @@ static void *_drm_thread(void *v_ctx) {
close:
us_drm_close(run->drm);
run->drm_opened = -1;
US_DELETE(prev_hw, us_capture_buffer_decref);
US_DELETE(prev_hw, us_capture_hwbuf_decref);
SLOWDOWN;
# undef SLOWDOWN
@@ -544,7 +544,7 @@ static us_capture_hwbuf_s *_get_latest_hw(us_queue_s *queue) {
return NULL;
}
while (!us_queue_is_empty(queue)) { // Take only the most recent frame
us_capture_buffer_decref(hw);
us_capture_hwbuf_decref(hw);
assert(!us_queue_get(queue, (void**)&hw, 0));
}
return hw;
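_get_latest_hw() drains the queue and keeps only the newest frame so the DRM output never falls behind capture; everything older is decref'd immediately. A standalone sketch of the same drain-to-latest idea on a tiny array-backed queue (all names hypothetical, not the real us_queue API):

// Hypothetical sketch: drain a frame queue and keep only the newest entry.
#include <stddef.h>
#include <stdio.h>

typedef struct { int id; } frame_s;

typedef struct {
	frame_s *items[8];
	size_t head;
	size_t count;
} queue_s;

static frame_s *queue_get(queue_s *q) {
	if (q->count == 0) {
		return NULL;
	}
	frame_s *frame = q->items[q->head];
	q->head = (q->head + 1) % 8;
	q->count -= 1;
	return frame;
}

static void frame_decref(frame_s *frame) {
	printf("dropped stale frame %d\n", frame->id); // real code returns the buffer to the driver
}

static frame_s *get_latest(queue_s *q) {
	frame_s *latest = queue_get(q);
	if (latest == NULL) {
		return NULL;
	}
	frame_s *next;
	while ((next = queue_get(q)) != NULL) {
		frame_decref(latest); // an older frame always loses to a newer one
		latest = next;
	}
	return latest;
}

int main(void) {
	frame_s a = {1}, b = {2}, c = {3};
	queue_s q = {.items = {&a, &b, &c}, .head = 0, .count = 3};
	frame_s *latest = get_latest(&q);
	printf("showing frame %d\n", (latest != NULL ? latest->id : -1));
	return 0;
}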
@@ -652,18 +652,36 @@ static int _stream_init_loop(us_stream_s *stream) {
#ifdef WITH_V4P
static void _stream_drm_ensure_no_signal(us_stream_s *stream) {
us_stream_runtime_s *const run = stream->run;
if (!stream->v4p) {
return;
}
# define CHECK(x_arg) if ((x_arg) < 0) { goto close; }
if (run->drm_opened <= 0) {
us_drm_close(run->drm);
run->drm_opened = us_drm_open(run->drm, NULL);
run->drm_blank_at_ts = 0;
CHECK(run->drm_opened = us_drm_open(run->drm, NULL));
}
if (run->drm_opened > 0) {
if (us_drm_wait_for_vsync(run->drm) == 0) {
us_drm_expose_stub(run->drm, US_DRM_STUB_NO_SIGNAL, NULL);
}
ldf now_ts = us_get_now_monotonic();
if (run->drm_blank_at_ts == 0) {
run->drm_blank_at_ts = now_ts + 5;
}
if (now_ts <= run->drm_blank_at_ts) {
CHECK(us_drm_wait_for_vsync(run->drm));
CHECK(us_drm_expose_stub(run->drm, US_DRM_STUB_NO_SIGNAL, NULL));
} else {
// US_ONCE({ US_LOG_INFO("DRM: Turning off the display by timeout ..."); });
CHECK(us_drm_dpms_power_off(run->drm));
}
return;
# undef CHECK
close:
us_drm_close(run->drm);
run->drm_opened = -1;
}
#endif
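The rewritten _stream_drm_ensure_no_signal() keeps exposing the NO_SIGNAL stub for a grace period after the capture signal disappears and then powers the display off via DPMS; the deadline is re-armed whenever the device is reopened. A compact sketch of that timeout state machine; the 5-second window matches the diff, while the helper names are hypothetical stand-ins for the us_drm_* calls:

// Hypothetical sketch: show a NO_SIGNAL stub for a grace period, then blank the display by DPMS.
#include <stdio.h>
#include <time.h>

static double now_s(void) { // monotonic clock, seconds
	struct timespec ts;
	clock_gettime(CLOCK_MONOTONIC, &ts);
	return (double)ts.tv_sec + (double)ts.tv_nsec / 1e9;
}

static void display_show_no_signal(void) { puts("expose NO_SIGNAL stub"); } // stand-in for us_drm_expose_stub()
static void display_power_off(void) { puts("DPMS power off"); } // stand-in for us_drm_dpms_power_off()

static double blank_at_ts = 0; // 0 means the deadline is not armed yet

static void ensure_no_signal(void) {
	const double now = now_s();
	if (blank_at_ts == 0) {
		blank_at_ts = now + 5; // arm the 5-second grace period
	}
	if (now <= blank_at_ts) {
		display_show_no_signal(); // refreshed once per vsync in the real loop
	} else {
		display_power_off(); // grace period expired: blank the output
	}
}

int main(void) {
	ensure_no_signal(); // first call arms the deadline and shows the stub
	blank_at_ts = now_s() - 1; // pretend the grace period has already expired
	ensure_no_signal(); // now the display gets powered off
	return 0;
}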

View File

@@ -46,6 +46,7 @@ typedef struct {
# ifdef WITH_V4P
us_drm_s *drm;
int drm_opened;
ldf drm_blank_at_ts;
# endif
us_ring_s *http_jpeg_ring;

View File

@@ -177,17 +177,19 @@ static void _main_loop(void) {
while (!atomic_load(&_g_stop)) {
# define CHECK(x_arg) if ((x_arg) < 0) { goto close; }
if (atomic_load(&_g_ustreamer_online)) {
blank_at_ts = 0;
US_ONCE({ US_LOG_INFO("DRM: Online stream is active, pausing the service ..."); });
goto close;
}
if (drm_opened <= 0) {
blank_at_ts = 0;
CHECK(drm_opened = us_drm_open(drm, NULL));
}
assert(drm_opened > 0);
if (atomic_load(&_g_ustreamer_online)) {
blank_at_ts = 0;
US_ONCE({ US_LOG_INFO("DRM: Online stream is active, pausing the service ..."); });
CHECK(us_drm_wait_for_vsync(drm));
CHECK(us_drm_expose_stub(drm, US_DRM_STUB_BUSY, NULL));
_slowdown();
continue;
}
if (us_capture_open(cap) < 0) {
ldf now_ts = us_get_now_monotonic();
@@ -219,12 +221,12 @@ static void _main_loop(void) {
CHECK(us_drm_wait_for_vsync(drm));
if (prev_hw != NULL) {
CHECK(us_capture_release_buffer(cap, prev_hw));
CHECK(us_capture_hwbuf_release(cap, prev_hw));
prev_hw = NULL;
}
us_capture_hwbuf_s *hw;
switch (us_capture_grab_buffer(cap, &hw)) {
switch (us_capture_hwbuf_grab(cap, &hw)) {
case -2: continue; // Broken frame
case -1: goto close; // Any error
default: break; // Grabbed on >= 0
@@ -237,7 +239,7 @@ static void _main_loop(void) {
}
CHECK(us_drm_expose_stub(drm, drm_opened, cap));
CHECK(us_capture_release_buffer(cap, hw));
CHECK(us_capture_hwbuf_release(cap, hw));
_slowdown();
}