| Line | Branch | Exec | Source |
|---|---|---|---|
| 1 | /* | ||
| 2 | * V4L2 context helper functions. | ||
| 3 | * | ||
| 4 | * Copyright (C) 2017 Alexis Ballier <aballier@gentoo.org> | ||
| 5 | * Copyright (C) 2017 Jorge Ramirez <jorge.ramirez-ortiz@linaro.org> | ||
| 6 | * | ||
| 7 | * This file is part of FFmpeg. | ||
| 8 | * | ||
| 9 | * FFmpeg is free software; you can redistribute it and/or | ||
| 10 | * modify it under the terms of the GNU Lesser General Public | ||
| 11 | * License as published by the Free Software Foundation; either | ||
| 12 | * version 2.1 of the License, or (at your option) any later version. | ||
| 13 | * | ||
| 14 | * FFmpeg is distributed in the hope that it will be useful, | ||
| 15 | * but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 16 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 17 | * Lesser General Public License for more details. | ||
| 18 | * | ||
| 19 | * You should have received a copy of the GNU Lesser General Public | ||
| 20 | * License along with FFmpeg; if not, write to the Free Software | ||
| 21 | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA | ||
| 22 | */ | ||
| 23 | |||
| 24 | #include <linux/videodev2.h> | ||
| 25 | #include <sys/ioctl.h> | ||
| 26 | #include <sys/mman.h> | ||
| 27 | #include <unistd.h> | ||
| 28 | #include <fcntl.h> | ||
| 29 | #include <poll.h> | ||
| 30 | #include "libavutil/mem.h" | ||
| 31 | #include "libavcodec/avcodec.h" | ||
| 32 | #include "decode.h" | ||
| 33 | #include "v4l2_buffers.h" | ||
| 34 | #include "v4l2_fmt.h" | ||
| 35 | #include "v4l2_m2m.h" | ||
| 36 | |||
/* Pending format change: records which format fields should be written
 * into the V4L2Context by v4l2_save_to_context(). */
struct v4l2_format_update {
    uint32_t v4l2_fmt;          /* V4L2 fourcc to apply when update_v4l2 is set */
    int update_v4l2;            /* nonzero: write v4l2_fmt (and a compressed buffer size) into ctx->format */

    enum AVPixelFormat av_fmt;  /* AVPixelFormat to apply when update_avfmt is set */
    int update_avfmt;           /* nonzero: write av_fmt into ctx->av_pix_fmt */
};
| 44 | |||
| 45 | ✗ | static inline V4L2m2mContext *ctx_to_m2mctx(V4L2Context *ctx) | |
| 46 | { | ||
| 47 | ✗ | return V4L2_TYPE_IS_OUTPUT(ctx->type) ? | |
| 48 | ✗ | container_of(ctx, V4L2m2mContext, output) : | |
| 49 | ✗ | container_of(ctx, V4L2m2mContext, capture); | |
| 50 | } | ||
| 51 | |||
| 52 | ✗ | static inline AVCodecContext *logger(V4L2Context *ctx) | |
| 53 | { | ||
| 54 | ✗ | return ctx_to_m2mctx(ctx)->avctx; | |
| 55 | } | ||
| 56 | |||
/* Width of the negotiated format, for both single- and multi-planar types. */
static inline unsigned int v4l2_get_width(struct v4l2_format *fmt)
{
    if (V4L2_TYPE_IS_MULTIPLANAR(fmt->type))
        return fmt->fmt.pix_mp.width;
    return fmt->fmt.pix.width;
}
| 61 | |||
/* Height of the negotiated format, for both single- and multi-planar types. */
static inline unsigned int v4l2_get_height(struct v4l2_format *fmt)
{
    if (V4L2_TYPE_IS_MULTIPLANAR(fmt->type))
        return fmt->fmt.pix_mp.height;
    return fmt->fmt.pix.height;
}
| 66 | |||
| 67 | ✗ | static AVRational v4l2_get_sar(V4L2Context *ctx) | |
| 68 | { | ||
| 69 | ✗ | struct AVRational sar = { 0, 1 }; | |
| 70 | struct v4l2_cropcap cropcap; | ||
| 71 | int ret; | ||
| 72 | |||
| 73 | ✗ | memset(&cropcap, 0, sizeof(cropcap)); | |
| 74 | ✗ | cropcap.type = ctx->type; | |
| 75 | |||
| 76 | ✗ | ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_CROPCAP, &cropcap); | |
| 77 | ✗ | if (ret) | |
| 78 | ✗ | return sar; | |
| 79 | |||
| 80 | ✗ | sar.num = cropcap.pixelaspect.numerator; | |
| 81 | ✗ | sar.den = cropcap.pixelaspect.denominator; | |
| 82 | ✗ | return sar; | |
| 83 | } | ||
| 84 | |||
| 85 | ✗ | static inline unsigned int v4l2_resolution_changed(V4L2Context *ctx, struct v4l2_format *fmt2) | |
| 86 | { | ||
| 87 | ✗ | struct v4l2_format *fmt1 = &ctx->format; | |
| 88 | ✗ | int ret = V4L2_TYPE_IS_MULTIPLANAR(ctx->type) ? | |
| 89 | ✗ | fmt1->fmt.pix_mp.width != fmt2->fmt.pix_mp.width || | |
| 90 | ✗ | fmt1->fmt.pix_mp.height != fmt2->fmt.pix_mp.height | |
| 91 | ✗ | : | |
| 92 | ✗ | fmt1->fmt.pix.width != fmt2->fmt.pix.width || | |
| 93 | ✗ | fmt1->fmt.pix.height != fmt2->fmt.pix.height; | |
| 94 | |||
| 95 | ✗ | if (ret) | |
| 96 | ✗ | av_log(logger(ctx), AV_LOG_DEBUG, "%s changed (%dx%d) -> (%dx%d)\n", | |
| 97 | ctx->name, | ||
| 98 | v4l2_get_width(fmt1), v4l2_get_height(fmt1), | ||
| 99 | v4l2_get_width(fmt2), v4l2_get_height(fmt2)); | ||
| 100 | |||
| 101 | ✗ | return ret; | |
| 102 | } | ||
| 103 | |||
| 104 | ✗ | static inline int v4l2_type_supported(V4L2Context *ctx) | |
| 105 | { | ||
| 106 | ✗ | return ctx->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE || | |
| 107 | ✗ | ctx->type == V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE || | |
| 108 | ✗ | ctx->type == V4L2_BUF_TYPE_VIDEO_CAPTURE || | |
| 109 | ✗ | ctx->type == V4L2_BUF_TYPE_VIDEO_OUTPUT; | |
| 110 | } | ||
| 111 | |||
| 112 | ✗ | static inline int v4l2_get_framesize_compressed(V4L2Context* ctx, int width, int height) | |
| 113 | { | ||
| 114 | ✗ | V4L2m2mContext *s = ctx_to_m2mctx(ctx); | |
| 115 | ✗ | const int SZ_4K = 0x1000; | |
| 116 | int size; | ||
| 117 | |||
| 118 | ✗ | if (s->avctx && av_codec_is_decoder(s->avctx->codec)) | |
| 119 | ✗ | return ((width * height * 3 / 2) / 2) + 128; | |
| 120 | |||
| 121 | /* encoder */ | ||
| 122 | ✗ | size = FFALIGN(height, 32) * FFALIGN(width, 32) * 3 / 2 / 2; | |
| 123 | ✗ | return FFALIGN(size, SZ_4K); | |
| 124 | } | ||
| 125 | |||
| 126 | ✗ | static inline void v4l2_save_to_context(V4L2Context* ctx, struct v4l2_format_update *fmt) | |
| 127 | { | ||
| 128 | ✗ | ctx->format.type = ctx->type; | |
| 129 | |||
| 130 | ✗ | if (fmt->update_avfmt) | |
| 131 | ✗ | ctx->av_pix_fmt = fmt->av_fmt; | |
| 132 | |||
| 133 | ✗ | if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) { | |
| 134 | /* update the sizes to handle the reconfiguration of the capture stream at runtime */ | ||
| 135 | ✗ | ctx->format.fmt.pix_mp.height = ctx->height; | |
| 136 | ✗ | ctx->format.fmt.pix_mp.width = ctx->width; | |
| 137 | ✗ | if (fmt->update_v4l2) { | |
| 138 | ✗ | ctx->format.fmt.pix_mp.pixelformat = fmt->v4l2_fmt; | |
| 139 | |||
| 140 | /* s5p-mfc requires the user to specify a buffer size */ | ||
| 141 | ✗ | ctx->format.fmt.pix_mp.plane_fmt[0].sizeimage = | |
| 142 | ✗ | v4l2_get_framesize_compressed(ctx, ctx->width, ctx->height); | |
| 143 | } | ||
| 144 | } else { | ||
| 145 | ✗ | ctx->format.fmt.pix.height = ctx->height; | |
| 146 | ✗ | ctx->format.fmt.pix.width = ctx->width; | |
| 147 | ✗ | if (fmt->update_v4l2) { | |
| 148 | ✗ | ctx->format.fmt.pix.pixelformat = fmt->v4l2_fmt; | |
| 149 | |||
| 150 | /* s5p-mfc requires the user to specify a buffer size */ | ||
| 151 | ✗ | ctx->format.fmt.pix.sizeimage = | |
| 152 | ✗ | v4l2_get_framesize_compressed(ctx, ctx->width, ctx->height); | |
| 153 | } | ||
| 154 | } | ||
| 155 | ✗ | } | |
| 156 | |||
| 157 | ✗ | static int v4l2_start_decode(V4L2Context *ctx) | |
| 158 | { | ||
| 159 | ✗ | struct v4l2_decoder_cmd cmd = { | |
| 160 | .cmd = V4L2_DEC_CMD_START, | ||
| 161 | .flags = 0, | ||
| 162 | }; | ||
| 163 | int ret; | ||
| 164 | |||
| 165 | ✗ | ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_DECODER_CMD, &cmd); | |
| 166 | ✗ | if (ret) | |
| 167 | ✗ | return AVERROR(errno); | |
| 168 | |||
| 169 | ✗ | return 0; | |
| 170 | } | ||
| 171 | |||
| 172 | /** | ||
| 173 | * handle resolution change event and end of stream event | ||
| 174 | * returns 1 if reinit was successful, negative if it failed | ||
| 175 | * returns 0 if reinit was not executed | ||
| 176 | */ | ||
| 177 | ✗ | static int v4l2_handle_event(V4L2Context *ctx) | |
| 178 | { | ||
| 179 | ✗ | V4L2m2mContext *s = ctx_to_m2mctx(ctx); | |
| 180 | ✗ | struct v4l2_format cap_fmt = s->capture.format; | |
| 181 | ✗ | struct v4l2_event evt = { 0 }; | |
| 182 | int ret; | ||
| 183 | |||
| 184 | ✗ | ret = ioctl(s->fd, VIDIOC_DQEVENT, &evt); | |
| 185 | ✗ | if (ret < 0) { | |
| 186 | ✗ | av_log(logger(ctx), AV_LOG_ERROR, "%s VIDIOC_DQEVENT\n", ctx->name); | |
| 187 | ✗ | return 0; | |
| 188 | } | ||
| 189 | |||
| 190 | ✗ | if (evt.type == V4L2_EVENT_EOS) { | |
| 191 | ✗ | ctx->done = 1; | |
| 192 | ✗ | return 0; | |
| 193 | } | ||
| 194 | |||
| 195 | ✗ | if (evt.type != V4L2_EVENT_SOURCE_CHANGE) | |
| 196 | ✗ | return 0; | |
| 197 | |||
| 198 | ✗ | ret = ioctl(s->fd, VIDIOC_G_FMT, &cap_fmt); | |
| 199 | ✗ | if (ret) { | |
| 200 | ✗ | av_log(logger(ctx), AV_LOG_ERROR, "%s VIDIOC_G_FMT\n", s->capture.name); | |
| 201 | ✗ | return 0; | |
| 202 | } | ||
| 203 | |||
| 204 | ✗ | if (v4l2_resolution_changed(&s->capture, &cap_fmt)) { | |
| 205 | ✗ | s->capture.height = v4l2_get_height(&cap_fmt); | |
| 206 | ✗ | s->capture.width = v4l2_get_width(&cap_fmt); | |
| 207 | ✗ | s->capture.sample_aspect_ratio = v4l2_get_sar(&s->capture); | |
| 208 | } else { | ||
| 209 | ✗ | v4l2_start_decode(ctx); | |
| 210 | ✗ | return 0; | |
| 211 | } | ||
| 212 | |||
| 213 | ✗ | s->reinit = 1; | |
| 214 | |||
| 215 | ✗ | if (s->avctx) | |
| 216 | ✗ | ret = ff_set_dimensions(s->avctx, s->capture.width, s->capture.height); | |
| 217 | ✗ | if (ret < 0) | |
| 218 | ✗ | av_log(logger(ctx), AV_LOG_WARNING, "update avcodec height and width\n"); | |
| 219 | |||
| 220 | ✗ | ret = ff_v4l2_m2m_codec_reinit(s); | |
| 221 | ✗ | if (ret) { | |
| 222 | ✗ | av_log(logger(ctx), AV_LOG_ERROR, "v4l2_m2m_codec_reinit\n"); | |
| 223 | ✗ | return AVERROR(EINVAL); | |
| 224 | } | ||
| 225 | |||
| 226 | /* reinit executed */ | ||
| 227 | ✗ | return 1; | |
| 228 | } | ||
| 229 | |||
| 230 | ✗ | static int v4l2_stop_decode(V4L2Context *ctx) | |
| 231 | { | ||
| 232 | ✗ | struct v4l2_decoder_cmd cmd = { | |
| 233 | .cmd = V4L2_DEC_CMD_STOP, | ||
| 234 | .flags = 0, | ||
| 235 | }; | ||
| 236 | int ret; | ||
| 237 | |||
| 238 | ✗ | ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_DECODER_CMD, &cmd); | |
| 239 | ✗ | if (ret) { | |
| 240 | /* DECODER_CMD is optional */ | ||
| 241 | ✗ | if (errno == ENOTTY) | |
| 242 | ✗ | return ff_v4l2_context_set_status(ctx, VIDIOC_STREAMOFF); | |
| 243 | else | ||
| 244 | ✗ | return AVERROR(errno); | |
| 245 | } | ||
| 246 | |||
| 247 | ✗ | return 0; | |
| 248 | } | ||
| 249 | |||
| 250 | ✗ | static int v4l2_stop_encode(V4L2Context *ctx) | |
| 251 | { | ||
| 252 | ✗ | struct v4l2_encoder_cmd cmd = { | |
| 253 | .cmd = V4L2_ENC_CMD_STOP, | ||
| 254 | .flags = 0, | ||
| 255 | }; | ||
| 256 | int ret; | ||
| 257 | |||
| 258 | ✗ | ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_ENCODER_CMD, &cmd); | |
| 259 | ✗ | if (ret) { | |
| 260 | /* ENCODER_CMD is optional */ | ||
| 261 | ✗ | if (errno == ENOTTY) | |
| 262 | ✗ | return ff_v4l2_context_set_status(ctx, VIDIOC_STREAMOFF); | |
| 263 | else | ||
| 264 | ✗ | return AVERROR(errno); | |
| 265 | } | ||
| 266 | |||
| 267 | ✗ | return 0; | |
| 268 | } | ||
| 269 | |||
| 270 | ✗ | static V4L2Buffer* v4l2_dequeue_v4l2buf(V4L2Context *ctx, int timeout) | |
| 271 | { | ||
| 272 | struct v4l2_plane planes[VIDEO_MAX_PLANES]; | ||
| 273 | ✗ | struct v4l2_buffer buf = { 0 }; | |
| 274 | V4L2Buffer *avbuf; | ||
| 275 | ✗ | struct pollfd pfd = { | |
| 276 | .events = POLLIN | POLLRDNORM | POLLPRI | POLLOUT | POLLWRNORM, /* default blocking capture */ | ||
| 277 | ✗ | .fd = ctx_to_m2mctx(ctx)->fd, | |
| 278 | }; | ||
| 279 | int i, ret; | ||
| 280 | |||
| 281 | ✗ | if (!V4L2_TYPE_IS_OUTPUT(ctx->type) && ctx->buffers) { | |
| 282 | ✗ | for (i = 0; i < ctx->num_buffers; i++) { | |
| 283 | ✗ | if (ctx->buffers[i].status == V4L2BUF_IN_DRIVER) | |
| 284 | ✗ | break; | |
| 285 | } | ||
| 286 | ✗ | if (i == ctx->num_buffers) | |
| 287 | ✗ | av_log(logger(ctx), AV_LOG_WARNING, "All capture buffers returned to " | |
| 288 | "userspace. Increase num_capture_buffers " | ||
| 289 | "to prevent device deadlock or dropped " | ||
| 290 | "packets/frames.\n"); | ||
| 291 | } | ||
| 292 | |||
| 293 | /* if we are draining and there are no more capture buffers queued in the driver we are done */ | ||
| 294 | ✗ | if (!V4L2_TYPE_IS_OUTPUT(ctx->type) && ctx_to_m2mctx(ctx)->draining) { | |
| 295 | ✗ | for (i = 0; i < ctx->num_buffers; i++) { | |
| 296 | /* capture buffer initialization happens during decode hence | ||
| 297 | * detection happens at runtime | ||
| 298 | */ | ||
| 299 | ✗ | if (!ctx->buffers) | |
| 300 | ✗ | break; | |
| 301 | |||
| 302 | ✗ | if (ctx->buffers[i].status == V4L2BUF_IN_DRIVER) | |
| 303 | ✗ | goto start; | |
| 304 | } | ||
| 305 | ✗ | ctx->done = 1; | |
| 306 | ✗ | return NULL; | |
| 307 | } | ||
| 308 | |||
| 309 | ✗ | start: | |
| 310 | ✗ | if (V4L2_TYPE_IS_OUTPUT(ctx->type)) | |
| 311 | ✗ | pfd.events = POLLOUT | POLLWRNORM; | |
| 312 | else { | ||
| 313 | /* no need to listen to requests for more input while draining */ | ||
| 314 | ✗ | if (ctx_to_m2mctx(ctx)->draining) | |
| 315 | ✗ | pfd.events = POLLIN | POLLRDNORM | POLLPRI; | |
| 316 | } | ||
| 317 | |||
| 318 | for (;;) { | ||
| 319 | ✗ | ret = poll(&pfd, 1, timeout); | |
| 320 | ✗ | if (ret > 0) | |
| 321 | ✗ | break; | |
| 322 | ✗ | if (errno == EINTR) | |
| 323 | ✗ | continue; | |
| 324 | ✗ | return NULL; | |
| 325 | } | ||
| 326 | |||
| 327 | /* 0. handle errors */ | ||
| 328 | ✗ | if (pfd.revents & POLLERR) { | |
| 329 | /* if we are trying to get free buffers but none have been queued yet, | ||
| 330 | * or if no buffers have been allocated yet, no need to raise a warning | ||
| 331 | */ | ||
| 332 | ✗ | if (timeout == 0) { | |
| 333 | ✗ | if (!ctx->buffers) | |
| 334 | ✗ | return NULL; | |
| 335 | |||
| 336 | ✗ | for (i = 0; i < ctx->num_buffers; i++) { | |
| 337 | ✗ | if (ctx->buffers[i].status != V4L2BUF_AVAILABLE) | |
| 338 | ✗ | av_log(logger(ctx), AV_LOG_WARNING, "%s POLLERR\n", ctx->name); | |
| 339 | } | ||
| 340 | } | ||
| 341 | else | ||
| 342 | ✗ | av_log(logger(ctx), AV_LOG_WARNING, "%s POLLERR\n", ctx->name); | |
| 343 | |||
| 344 | ✗ | return NULL; | |
| 345 | } | ||
| 346 | |||
| 347 | /* 1. handle resolution changes */ | ||
| 348 | ✗ | if (pfd.revents & POLLPRI) { | |
| 349 | ✗ | ret = v4l2_handle_event(ctx); | |
| 350 | ✗ | if (ret < 0) { | |
| 351 | /* if re-init failed, abort */ | ||
| 352 | ✗ | ctx->done = 1; | |
| 353 | ✗ | return NULL; | |
| 354 | } | ||
| 355 | ✗ | if (ret) { | |
| 356 | /* if re-init was successful drop the buffer (if there was one) | ||
| 357 | * since we had to reconfigure capture (unmap all buffers) | ||
| 358 | */ | ||
| 359 | ✗ | return NULL; | |
| 360 | } | ||
| 361 | } | ||
| 362 | |||
| 363 | /* 2. dequeue the buffer */ | ||
| 364 | ✗ | if (pfd.revents & (POLLIN | POLLRDNORM | POLLOUT | POLLWRNORM)) { | |
| 365 | |||
| 366 | ✗ | if (!V4L2_TYPE_IS_OUTPUT(ctx->type)) { | |
| 367 | /* there is a capture buffer ready */ | ||
| 368 | ✗ | if (pfd.revents & (POLLIN | POLLRDNORM)) | |
| 369 | ✗ | goto dequeue; | |
| 370 | |||
| 371 | /* the driver is ready to accept more input; instead of waiting for the capture | ||
| 372 | * buffer to complete we return NULL so input can proceed (we are single threaded) | ||
| 373 | */ | ||
| 374 | ✗ | if (pfd.revents & (POLLOUT | POLLWRNORM)) | |
| 375 | ✗ | return NULL; | |
| 376 | } | ||
| 377 | |||
| 378 | ✗ | dequeue: | |
| 379 | ✗ | memset(&buf, 0, sizeof(buf)); | |
| 380 | ✗ | buf.memory = V4L2_MEMORY_MMAP; | |
| 381 | ✗ | buf.type = ctx->type; | |
| 382 | ✗ | if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) { | |
| 383 | ✗ | memset(planes, 0, sizeof(planes)); | |
| 384 | ✗ | buf.length = VIDEO_MAX_PLANES; | |
| 385 | ✗ | buf.m.planes = planes; | |
| 386 | } | ||
| 387 | |||
| 388 | ✗ | ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_DQBUF, &buf); | |
| 389 | ✗ | if (ret) { | |
| 390 | ✗ | if (errno != EAGAIN) { | |
| 391 | ✗ | ctx->done = 1; | |
| 392 | ✗ | if (errno != EPIPE) | |
| 393 | ✗ | av_log(logger(ctx), AV_LOG_DEBUG, "%s VIDIOC_DQBUF, errno (%s)\n", | |
| 394 | ✗ | ctx->name, av_err2str(AVERROR(errno))); | |
| 395 | } | ||
| 396 | ✗ | return NULL; | |
| 397 | } | ||
| 398 | |||
| 399 | ✗ | if (ctx_to_m2mctx(ctx)->draining && !V4L2_TYPE_IS_OUTPUT(ctx->type)) { | |
| 400 | ✗ | int bytesused = V4L2_TYPE_IS_MULTIPLANAR(buf.type) ? | |
| 401 | ✗ | buf.m.planes[0].bytesused : buf.bytesused; | |
| 402 | ✗ | if (bytesused == 0) { | |
| 403 | ✗ | ctx->done = 1; | |
| 404 | ✗ | return NULL; | |
| 405 | } | ||
| 406 | #ifdef V4L2_BUF_FLAG_LAST | ||
| 407 | ✗ | if (buf.flags & V4L2_BUF_FLAG_LAST) | |
| 408 | ✗ | ctx->done = 1; | |
| 409 | #endif | ||
| 410 | } | ||
| 411 | |||
| 412 | ✗ | avbuf = &ctx->buffers[buf.index]; | |
| 413 | ✗ | avbuf->status = V4L2BUF_AVAILABLE; | |
| 414 | ✗ | avbuf->buf = buf; | |
| 415 | ✗ | if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) { | |
| 416 | ✗ | memcpy(avbuf->planes, planes, sizeof(planes)); | |
| 417 | ✗ | avbuf->buf.m.planes = avbuf->planes; | |
| 418 | } | ||
| 419 | ✗ | return avbuf; | |
| 420 | } | ||
| 421 | |||
| 422 | ✗ | return NULL; | |
| 423 | } | ||
| 424 | |||
| 425 | ✗ | static V4L2Buffer* v4l2_getfree_v4l2buf(V4L2Context *ctx) | |
| 426 | { | ||
| 427 | ✗ | int timeout = 0; /* return when no more buffers to dequeue */ | |
| 428 | int i; | ||
| 429 | |||
| 430 | /* get back as many output buffers as possible */ | ||
| 431 | ✗ | if (V4L2_TYPE_IS_OUTPUT(ctx->type)) { | |
| 432 | do { | ||
| 433 | ✗ | } while (v4l2_dequeue_v4l2buf(ctx, timeout)); | |
| 434 | } | ||
| 435 | |||
| 436 | ✗ | for (i = 0; i < ctx->num_buffers; i++) { | |
| 437 | ✗ | if (ctx->buffers[i].status == V4L2BUF_AVAILABLE) | |
| 438 | ✗ | return &ctx->buffers[i]; | |
| 439 | } | ||
| 440 | |||
| 441 | ✗ | return NULL; | |
| 442 | } | ||
| 443 | |||
| 444 | ✗ | static int v4l2_release_buffers(V4L2Context* ctx) | |
| 445 | { | ||
| 446 | ✗ | struct v4l2_requestbuffers req = { | |
| 447 | .memory = V4L2_MEMORY_MMAP, | ||
| 448 | ✗ | .type = ctx->type, | |
| 449 | .count = 0, /* 0 -> unmaps buffers from the driver */ | ||
| 450 | }; | ||
| 451 | int i, j; | ||
| 452 | |||
| 453 | ✗ | for (i = 0; i < ctx->num_buffers; i++) { | |
| 454 | ✗ | V4L2Buffer *buffer = &ctx->buffers[i]; | |
| 455 | |||
| 456 | ✗ | for (j = 0; j < buffer->num_planes; j++) { | |
| 457 | ✗ | struct V4L2Plane_info *p = &buffer->plane_info[j]; | |
| 458 | ✗ | if (p->mm_addr && p->length) | |
| 459 | ✗ | if (munmap(p->mm_addr, p->length) < 0) | |
| 460 | ✗ | av_log(logger(ctx), AV_LOG_ERROR, "%s unmap plane (%s))\n", ctx->name, av_err2str(AVERROR(errno))); | |
| 461 | } | ||
| 462 | } | ||
| 463 | |||
| 464 | ✗ | return ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_REQBUFS, &req); | |
| 465 | } | ||
| 466 | |||
| 467 | ✗ | static inline int v4l2_try_raw_format(V4L2Context* ctx, enum AVPixelFormat pixfmt) | |
| 468 | { | ||
| 469 | ✗ | struct v4l2_format *fmt = &ctx->format; | |
| 470 | uint32_t v4l2_fmt; | ||
| 471 | int ret; | ||
| 472 | |||
| 473 | ✗ | v4l2_fmt = ff_v4l2_format_avfmt_to_v4l2(pixfmt); | |
| 474 | ✗ | if (!v4l2_fmt) | |
| 475 | ✗ | return AVERROR(EINVAL); | |
| 476 | |||
| 477 | ✗ | if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) | |
| 478 | ✗ | fmt->fmt.pix_mp.pixelformat = v4l2_fmt; | |
| 479 | else | ||
| 480 | ✗ | fmt->fmt.pix.pixelformat = v4l2_fmt; | |
| 481 | |||
| 482 | ✗ | fmt->type = ctx->type; | |
| 483 | |||
| 484 | ✗ | ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_TRY_FMT, fmt); | |
| 485 | ✗ | if (ret) | |
| 486 | ✗ | return AVERROR(EINVAL); | |
| 487 | |||
| 488 | ✗ | return 0; | |
| 489 | } | ||
| 490 | |||
| 491 | ✗ | static int v4l2_get_raw_format(V4L2Context* ctx, enum AVPixelFormat *p) | |
| 492 | { | ||
| 493 | ✗ | enum AVPixelFormat pixfmt = ctx->av_pix_fmt; | |
| 494 | struct v4l2_fmtdesc fdesc; | ||
| 495 | int ret; | ||
| 496 | |||
| 497 | ✗ | memset(&fdesc, 0, sizeof(fdesc)); | |
| 498 | ✗ | fdesc.type = ctx->type; | |
| 499 | |||
| 500 | ✗ | if (pixfmt != AV_PIX_FMT_NONE) { | |
| 501 | ✗ | ret = v4l2_try_raw_format(ctx, pixfmt); | |
| 502 | ✗ | if (!ret) | |
| 503 | ✗ | return 0; | |
| 504 | } | ||
| 505 | |||
| 506 | for (;;) { | ||
| 507 | ✗ | ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_ENUM_FMT, &fdesc); | |
| 508 | ✗ | if (ret) | |
| 509 | ✗ | return AVERROR(EINVAL); | |
| 510 | |||
| 511 | ✗ | pixfmt = ff_v4l2_format_v4l2_to_avfmt(fdesc.pixelformat, AV_CODEC_ID_RAWVIDEO); | |
| 512 | ✗ | ret = v4l2_try_raw_format(ctx, pixfmt); | |
| 513 | ✗ | if (ret){ | |
| 514 | ✗ | fdesc.index++; | |
| 515 | ✗ | continue; | |
| 516 | } | ||
| 517 | |||
| 518 | ✗ | *p = pixfmt; | |
| 519 | |||
| 520 | ✗ | return 0; | |
| 521 | } | ||
| 522 | |||
| 523 | return AVERROR(EINVAL); | ||
| 524 | } | ||
| 525 | |||
| 526 | ✗ | static int v4l2_get_coded_format(V4L2Context* ctx, uint32_t *p) | |
| 527 | { | ||
| 528 | struct v4l2_fmtdesc fdesc; | ||
| 529 | uint32_t v4l2_fmt; | ||
| 530 | int ret; | ||
| 531 | |||
| 532 | /* translate to a valid v4l2 format */ | ||
| 533 | ✗ | v4l2_fmt = ff_v4l2_format_avcodec_to_v4l2(ctx->av_codec_id); | |
| 534 | ✗ | if (!v4l2_fmt) | |
| 535 | ✗ | return AVERROR(EINVAL); | |
| 536 | |||
| 537 | /* check if the driver supports this format */ | ||
| 538 | ✗ | memset(&fdesc, 0, sizeof(fdesc)); | |
| 539 | ✗ | fdesc.type = ctx->type; | |
| 540 | |||
| 541 | for (;;) { | ||
| 542 | ✗ | ret = ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_ENUM_FMT, &fdesc); | |
| 543 | ✗ | if (ret) | |
| 544 | ✗ | return AVERROR(EINVAL); | |
| 545 | |||
| 546 | ✗ | if (fdesc.pixelformat == v4l2_fmt) | |
| 547 | ✗ | break; | |
| 548 | |||
| 549 | ✗ | fdesc.index++; | |
| 550 | } | ||
| 551 | |||
| 552 | ✗ | *p = v4l2_fmt; | |
| 553 | |||
| 554 | ✗ | return 0; | |
| 555 | } | ||
| 556 | |||
| 557 | /***************************************************************************** | ||
| 558 | * | ||
| 559 | * V4L2 Context Interface | ||
| 560 | * | ||
| 561 | *****************************************************************************/ | ||
| 562 | |||
| 563 | ✗ | int ff_v4l2_context_set_status(V4L2Context* ctx, uint32_t cmd) | |
| 564 | { | ||
| 565 | ✗ | int type = ctx->type; | |
| 566 | int ret; | ||
| 567 | |||
| 568 | ✗ | ret = ioctl(ctx_to_m2mctx(ctx)->fd, cmd, &type); | |
| 569 | ✗ | if (ret < 0) | |
| 570 | ✗ | return AVERROR(errno); | |
| 571 | |||
| 572 | ✗ | ctx->streamon = (cmd == VIDIOC_STREAMON); | |
| 573 | |||
| 574 | ✗ | return 0; | |
| 575 | } | ||
| 576 | |||
| 577 | ✗ | int ff_v4l2_context_enqueue_frame(V4L2Context* ctx, const AVFrame* frame) | |
| 578 | { | ||
| 579 | ✗ | V4L2m2mContext *s = ctx_to_m2mctx(ctx); | |
| 580 | V4L2Buffer* avbuf; | ||
| 581 | int ret; | ||
| 582 | |||
| 583 | ✗ | if (!frame) { | |
| 584 | ✗ | ret = v4l2_stop_encode(ctx); | |
| 585 | ✗ | if (ret) | |
| 586 | ✗ | av_log(logger(ctx), AV_LOG_ERROR, "%s stop_encode\n", ctx->name); | |
| 587 | ✗ | s->draining= 1; | |
| 588 | ✗ | return 0; | |
| 589 | } | ||
| 590 | |||
| 591 | ✗ | avbuf = v4l2_getfree_v4l2buf(ctx); | |
| 592 | ✗ | if (!avbuf) | |
| 593 | ✗ | return AVERROR(EAGAIN); | |
| 594 | |||
| 595 | ✗ | ret = ff_v4l2_buffer_avframe_to_buf(frame, avbuf); | |
| 596 | ✗ | if (ret) | |
| 597 | ✗ | return ret; | |
| 598 | |||
| 599 | ✗ | return ff_v4l2_buffer_enqueue(avbuf); | |
| 600 | } | ||
| 601 | |||
| 602 | ✗ | int ff_v4l2_context_enqueue_packet(V4L2Context* ctx, const AVPacket* pkt) | |
| 603 | { | ||
| 604 | ✗ | V4L2m2mContext *s = ctx_to_m2mctx(ctx); | |
| 605 | V4L2Buffer* avbuf; | ||
| 606 | int ret; | ||
| 607 | |||
| 608 | ✗ | if (!pkt->size) { | |
| 609 | ✗ | ret = v4l2_stop_decode(ctx); | |
| 610 | ✗ | if (ret) | |
| 611 | ✗ | av_log(logger(ctx), AV_LOG_ERROR, "%s stop_decode\n", ctx->name); | |
| 612 | ✗ | s->draining = 1; | |
| 613 | ✗ | return 0; | |
| 614 | } | ||
| 615 | |||
| 616 | ✗ | avbuf = v4l2_getfree_v4l2buf(ctx); | |
| 617 | ✗ | if (!avbuf) | |
| 618 | ✗ | return AVERROR(EAGAIN); | |
| 619 | |||
| 620 | ✗ | ret = ff_v4l2_buffer_avpkt_to_buf(pkt, avbuf); | |
| 621 | ✗ | if (ret) | |
| 622 | ✗ | return ret; | |
| 623 | |||
| 624 | ✗ | return ff_v4l2_buffer_enqueue(avbuf); | |
| 625 | } | ||
| 626 | |||
| 627 | ✗ | int ff_v4l2_context_dequeue_frame(V4L2Context* ctx, AVFrame* frame, int timeout) | |
| 628 | { | ||
| 629 | V4L2Buffer *avbuf; | ||
| 630 | |||
| 631 | /* | ||
| 632 | * timeout=-1 blocks until: | ||
| 633 | * 1. decoded frame available | ||
| 634 | * 2. an input buffer is ready to be dequeued | ||
| 635 | */ | ||
| 636 | ✗ | avbuf = v4l2_dequeue_v4l2buf(ctx, timeout); | |
| 637 | ✗ | if (!avbuf) { | |
| 638 | ✗ | if (ctx->done) | |
| 639 | ✗ | return AVERROR_EOF; | |
| 640 | |||
| 641 | ✗ | return AVERROR(EAGAIN); | |
| 642 | } | ||
| 643 | |||
| 644 | ✗ | return ff_v4l2_buffer_buf_to_avframe(frame, avbuf); | |
| 645 | } | ||
| 646 | |||
| 647 | ✗ | int ff_v4l2_context_dequeue_packet(V4L2Context* ctx, AVPacket* pkt) | |
| 648 | { | ||
| 649 | V4L2Buffer *avbuf; | ||
| 650 | |||
| 651 | /* | ||
| 652 | * blocks until: | ||
| 653 | * 1. encoded packet available | ||
| 654 | * 2. an input buffer ready to be dequeued | ||
| 655 | */ | ||
| 656 | ✗ | avbuf = v4l2_dequeue_v4l2buf(ctx, -1); | |
| 657 | ✗ | if (!avbuf) { | |
| 658 | ✗ | if (ctx->done) | |
| 659 | ✗ | return AVERROR_EOF; | |
| 660 | |||
| 661 | ✗ | return AVERROR(EAGAIN); | |
| 662 | } | ||
| 663 | |||
| 664 | ✗ | return ff_v4l2_buffer_buf_to_avpkt(pkt, avbuf); | |
| 665 | } | ||
| 666 | |||
| 667 | ✗ | int ff_v4l2_context_get_format(V4L2Context* ctx, int probe) | |
| 668 | { | ||
| 669 | ✗ | struct v4l2_format_update fmt = { 0 }; | |
| 670 | int ret; | ||
| 671 | |||
| 672 | ✗ | if (ctx->av_codec_id == AV_CODEC_ID_RAWVIDEO) { | |
| 673 | ✗ | ret = v4l2_get_raw_format(ctx, &fmt.av_fmt); | |
| 674 | ✗ | if (ret) | |
| 675 | ✗ | return ret; | |
| 676 | |||
| 677 | ✗ | fmt.update_avfmt = !probe; | |
| 678 | ✗ | v4l2_save_to_context(ctx, &fmt); | |
| 679 | |||
| 680 | /* format has been tried already */ | ||
| 681 | ✗ | return ret; | |
| 682 | } | ||
| 683 | |||
| 684 | ✗ | ret = v4l2_get_coded_format(ctx, &fmt.v4l2_fmt); | |
| 685 | ✗ | if (ret) | |
| 686 | ✗ | return ret; | |
| 687 | |||
| 688 | ✗ | fmt.update_v4l2 = 1; | |
| 689 | ✗ | v4l2_save_to_context(ctx, &fmt); | |
| 690 | |||
| 691 | ✗ | return ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_TRY_FMT, &ctx->format); | |
| 692 | } | ||
| 693 | |||
| 694 | ✗ | int ff_v4l2_context_set_format(V4L2Context* ctx) | |
| 695 | { | ||
| 696 | ✗ | return ioctl(ctx_to_m2mctx(ctx)->fd, VIDIOC_S_FMT, &ctx->format); | |
| 697 | } | ||
| 698 | |||
| 699 | ✗ | void ff_v4l2_context_release(V4L2Context* ctx) | |
| 700 | { | ||
| 701 | int ret; | ||
| 702 | |||
| 703 | ✗ | if (!ctx->buffers) | |
| 704 | ✗ | return; | |
| 705 | |||
| 706 | ✗ | ret = v4l2_release_buffers(ctx); | |
| 707 | ✗ | if (ret) | |
| 708 | ✗ | av_log(logger(ctx), AV_LOG_WARNING, "V4L2 failed to unmap the %s buffers\n", ctx->name); | |
| 709 | |||
| 710 | ✗ | av_freep(&ctx->buffers); | |
| 711 | } | ||
| 712 | |||
| 713 | ✗ | int ff_v4l2_context_init(V4L2Context* ctx) | |
| 714 | { | ||
| 715 | ✗ | V4L2m2mContext *s = ctx_to_m2mctx(ctx); | |
| 716 | struct v4l2_requestbuffers req; | ||
| 717 | int ret, i; | ||
| 718 | |||
| 719 | ✗ | if (!v4l2_type_supported(ctx)) { | |
| 720 | ✗ | av_log(logger(ctx), AV_LOG_ERROR, "type %i not supported\n", ctx->type); | |
| 721 | ✗ | return AVERROR_PATCHWELCOME; | |
| 722 | } | ||
| 723 | |||
| 724 | ✗ | ret = ioctl(s->fd, VIDIOC_G_FMT, &ctx->format); | |
| 725 | ✗ | if (ret) | |
| 726 | ✗ | av_log(logger(ctx), AV_LOG_ERROR, "%s VIDIOC_G_FMT failed\n", ctx->name); | |
| 727 | |||
| 728 | ✗ | memset(&req, 0, sizeof(req)); | |
| 729 | ✗ | req.count = ctx->num_buffers; | |
| 730 | ✗ | req.memory = V4L2_MEMORY_MMAP; | |
| 731 | ✗ | req.type = ctx->type; | |
| 732 | ✗ | ret = ioctl(s->fd, VIDIOC_REQBUFS, &req); | |
| 733 | ✗ | if (ret < 0) { | |
| 734 | ✗ | av_log(logger(ctx), AV_LOG_ERROR, "%s VIDIOC_REQBUFS failed: %s\n", ctx->name, strerror(errno)); | |
| 735 | ✗ | return AVERROR(errno); | |
| 736 | } | ||
| 737 | |||
| 738 | ✗ | ctx->num_buffers = req.count; | |
| 739 | ✗ | ctx->buffers = av_mallocz(ctx->num_buffers * sizeof(V4L2Buffer)); | |
| 740 | ✗ | if (!ctx->buffers) { | |
| 741 | ✗ | av_log(logger(ctx), AV_LOG_ERROR, "%s malloc enomem\n", ctx->name); | |
| 742 | ✗ | return AVERROR(ENOMEM); | |
| 743 | } | ||
| 744 | |||
| 745 | ✗ | for (i = 0; i < req.count; i++) { | |
| 746 | ✗ | ctx->buffers[i].context = ctx; | |
| 747 | ✗ | ret = ff_v4l2_buffer_initialize(&ctx->buffers[i], i); | |
| 748 | ✗ | if (ret < 0) { | |
| 749 | ✗ | av_log(logger(ctx), AV_LOG_ERROR, "%s buffer[%d] initialization (%s)\n", ctx->name, i, av_err2str(ret)); | |
| 750 | ✗ | goto error; | |
| 751 | } | ||
| 752 | } | ||
| 753 | |||
| 754 | ✗ | av_log(logger(ctx), AV_LOG_DEBUG, "%s: %s %02d buffers initialized: %04ux%04u, sizeimage %08u, bytesperline %08u\n", ctx->name, | |
| 755 | ✗ | V4L2_TYPE_IS_MULTIPLANAR(ctx->type) ? av_fourcc2str(ctx->format.fmt.pix_mp.pixelformat) : av_fourcc2str(ctx->format.fmt.pix.pixelformat), | |
| 756 | req.count, | ||
| 757 | v4l2_get_width(&ctx->format), | ||
| 758 | v4l2_get_height(&ctx->format), | ||
| 759 | ✗ | V4L2_TYPE_IS_MULTIPLANAR(ctx->type) ? ctx->format.fmt.pix_mp.plane_fmt[0].sizeimage : ctx->format.fmt.pix.sizeimage, | |
| 760 | ✗ | V4L2_TYPE_IS_MULTIPLANAR(ctx->type) ? ctx->format.fmt.pix_mp.plane_fmt[0].bytesperline : ctx->format.fmt.pix.bytesperline); | |
| 761 | |||
| 762 | ✗ | return 0; | |
| 763 | |||
| 764 | ✗ | error: | |
| 765 | ✗ | v4l2_release_buffers(ctx); | |
| 766 | |||
| 767 | ✗ | av_freep(&ctx->buffers); | |
| 768 | |||
| 769 | ✗ | return ret; | |
| 770 | } | ||
| 771 |