GCC Code Coverage Report

Directory: ../../../ffmpeg/
File:      src/libavcodec/v4l2_buffers.c
Date:      2020-08-14 10:39:37

            Exec   Total   Coverage
Lines:         0     296      0.0 %
Branches:      0     228      0.0 %

Source:
/*
 * V4L2 buffer helper functions.
 *
 * Copyright (C) 2017 Alexis Ballier <aballier@gentoo.org>
 * Copyright (C) 2017 Jorge Ramirez <jorge.ramirez-ortiz@linaro.org>
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */
#include <linux/videodev2.h>
#include <sys/ioctl.h>
#include <sys/mman.h>
#include <unistd.h>
#include <fcntl.h>
#include <poll.h>
#include "libavcodec/avcodec.h"
#include "libavcodec/internal.h"
#include "libavutil/pixdesc.h"
#include "v4l2_context.h"
#include "v4l2_buffers.h"
#include "v4l2_m2m.h"

#define USEC_PER_SEC 1000000
static AVRational v4l2_timebase = { 1, USEC_PER_SEC };
static inline V4L2m2mContext *buf_to_m2mctx(V4L2Buffer *buf)
{
    return V4L2_TYPE_IS_OUTPUT(buf->context->type) ?
        container_of(buf->context, V4L2m2mContext, output) :
        container_of(buf->context, V4L2m2mContext, capture);
}

static inline AVCodecContext *logger(V4L2Buffer *buf)
{
    return buf_to_m2mctx(buf)->avctx;
}
static inline AVRational v4l2_get_timebase(V4L2Buffer *avbuf)
{
    V4L2m2mContext *s = buf_to_m2mctx(avbuf);

    if (s->avctx->pkt_timebase.num)
        return s->avctx->pkt_timebase;
    return s->avctx->time_base;
}

static inline void v4l2_set_pts(V4L2Buffer *out, int64_t pts)
{
    int64_t v4l2_pts;

    if (pts == AV_NOPTS_VALUE)
        pts = 0;

    /* convert pts to v4l2 timebase */
    v4l2_pts = av_rescale_q(pts, v4l2_get_timebase(out), v4l2_timebase);
    out->buf.timestamp.tv_usec = v4l2_pts % USEC_PER_SEC;
    out->buf.timestamp.tv_sec = v4l2_pts / USEC_PER_SEC;
}

static inline int64_t v4l2_get_pts(V4L2Buffer *avbuf)
{
    int64_t v4l2_pts;

    /* convert pts back to encoder timebase */
    v4l2_pts = (int64_t)avbuf->buf.timestamp.tv_sec * USEC_PER_SEC +
                        avbuf->buf.timestamp.tv_usec;

    return av_rescale_q(v4l2_pts, v4l2_timebase, v4l2_get_timebase(avbuf));
}
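
/* Illustrative example (not part of the covered source): with an assumed
 * pkt_timebase of 1/25, a timestamp survives the round trip through the
 * driver's microsecond timeval:
 *   v4l2_set_pts(out, 1): av_rescale_q(1, (AVRational){1, 25}, v4l2_timebase)
 *                         = 40000  ->  tv_sec = 0, tv_usec = 40000
 *   v4l2_get_pts(avbuf):  0 * USEC_PER_SEC + 40000 = 40000
 *                         -> av_rescale_q(40000, v4l2_timebase, (AVRational){1, 25}) = 1
 * Note that AV_NOPTS_VALUE is mapped to 0 on the way in, so it cannot be
 * recovered on the way out. */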

static enum AVColorPrimaries v4l2_get_color_primaries(V4L2Buffer *buf)
{
    enum v4l2_ycbcr_encoding ycbcr;
    enum v4l2_colorspace cs;

    cs = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
        buf->context->format.fmt.pix_mp.colorspace :
        buf->context->format.fmt.pix.colorspace;

    ycbcr = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
        buf->context->format.fmt.pix_mp.ycbcr_enc :
        buf->context->format.fmt.pix.ycbcr_enc;

    switch (ycbcr) {
    case V4L2_YCBCR_ENC_XV709:
    case V4L2_YCBCR_ENC_709: return AVCOL_PRI_BT709;
    case V4L2_YCBCR_ENC_XV601:
    case V4L2_YCBCR_ENC_601: return AVCOL_PRI_BT470M;
    default:
        break;
    }

    switch (cs) {
    case V4L2_COLORSPACE_470_SYSTEM_BG: return AVCOL_PRI_BT470BG;
    case V4L2_COLORSPACE_SMPTE170M: return AVCOL_PRI_SMPTE170M;
    case V4L2_COLORSPACE_SMPTE240M: return AVCOL_PRI_SMPTE240M;
    case V4L2_COLORSPACE_BT2020: return AVCOL_PRI_BT2020;
    default:
        break;
    }

    return AVCOL_PRI_UNSPECIFIED;
}

static enum AVColorRange v4l2_get_color_range(V4L2Buffer *buf)
{
    enum v4l2_quantization qt;

    qt = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
        buf->context->format.fmt.pix_mp.quantization :
        buf->context->format.fmt.pix.quantization;

    switch (qt) {
    case V4L2_QUANTIZATION_LIM_RANGE: return AVCOL_RANGE_MPEG;
    case V4L2_QUANTIZATION_FULL_RANGE: return AVCOL_RANGE_JPEG;
    default:
        break;
    }

    return AVCOL_RANGE_UNSPECIFIED;
}

static enum AVColorSpace v4l2_get_color_space(V4L2Buffer *buf)
{
    enum v4l2_ycbcr_encoding ycbcr;
    enum v4l2_colorspace cs;

    cs = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
        buf->context->format.fmt.pix_mp.colorspace :
        buf->context->format.fmt.pix.colorspace;

    ycbcr = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
        buf->context->format.fmt.pix_mp.ycbcr_enc :
        buf->context->format.fmt.pix.ycbcr_enc;

    switch (cs) {
    case V4L2_COLORSPACE_SRGB: return AVCOL_SPC_RGB;
    case V4L2_COLORSPACE_REC709: return AVCOL_SPC_BT709;
    case V4L2_COLORSPACE_470_SYSTEM_M: return AVCOL_SPC_FCC;
    case V4L2_COLORSPACE_470_SYSTEM_BG: return AVCOL_SPC_BT470BG;
    case V4L2_COLORSPACE_SMPTE170M: return AVCOL_SPC_SMPTE170M;
    case V4L2_COLORSPACE_SMPTE240M: return AVCOL_SPC_SMPTE240M;
    case V4L2_COLORSPACE_BT2020:
        if (ycbcr == V4L2_YCBCR_ENC_BT2020_CONST_LUM)
            return AVCOL_SPC_BT2020_CL;
        else
            return AVCOL_SPC_BT2020_NCL;
    default:
        break;
    }

    return AVCOL_SPC_UNSPECIFIED;
}

static enum AVColorTransferCharacteristic v4l2_get_color_trc(V4L2Buffer *buf)
{
    enum v4l2_ycbcr_encoding ycbcr;
    enum v4l2_xfer_func xfer;
    enum v4l2_colorspace cs;

    cs = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
        buf->context->format.fmt.pix_mp.colorspace :
        buf->context->format.fmt.pix.colorspace;

    ycbcr = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
        buf->context->format.fmt.pix_mp.ycbcr_enc :
        buf->context->format.fmt.pix.ycbcr_enc;

    xfer = V4L2_TYPE_IS_MULTIPLANAR(buf->buf.type) ?
        buf->context->format.fmt.pix_mp.xfer_func :
        buf->context->format.fmt.pix.xfer_func;

    switch (xfer) {
    case V4L2_XFER_FUNC_709: return AVCOL_TRC_BT709;
    case V4L2_XFER_FUNC_SRGB: return AVCOL_TRC_IEC61966_2_1;
    default:
        break;
    }

    switch (cs) {
    case V4L2_COLORSPACE_470_SYSTEM_M: return AVCOL_TRC_GAMMA22;
    case V4L2_COLORSPACE_470_SYSTEM_BG: return AVCOL_TRC_GAMMA28;
    case V4L2_COLORSPACE_SMPTE170M: return AVCOL_TRC_SMPTE170M;
    case V4L2_COLORSPACE_SMPTE240M: return AVCOL_TRC_SMPTE240M;
    default:
        break;
    }

    switch (ycbcr) {
    case V4L2_YCBCR_ENC_XV709:
    case V4L2_YCBCR_ENC_XV601: return AVCOL_TRC_BT1361_ECG;
    default:
        break;
    }

    return AVCOL_TRC_UNSPECIFIED;
}

static void v4l2_free_buffer(void *opaque, uint8_t *unused)
{
    V4L2Buffer* avbuf = opaque;
    V4L2m2mContext *s = buf_to_m2mctx(avbuf);

    if (atomic_fetch_sub(&avbuf->context_refcount, 1) == 1) {
        atomic_fetch_sub_explicit(&s->refcount, 1, memory_order_acq_rel);

        if (s->reinit) {
            if (!atomic_load(&s->refcount))
                sem_post(&s->refsync);
        } else {
            if (s->draining && V4L2_TYPE_IS_OUTPUT(avbuf->context->type)) {
                /* no need to queue more buffers to the driver */
                avbuf->status = V4L2BUF_AVAILABLE;
            }
            else if (avbuf->context->streamon)
                ff_v4l2_buffer_enqueue(avbuf);
        }

        av_buffer_unref(&avbuf->context_ref);
    }
}
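
/* Note (not part of the covered source): v4l2_free_buffer() runs when the last
 * AVBufferRef created by v4l2_buf_to_bufref() below is released.  Once the last
 * per-plane reference is gone, the V4L2 buffer is re-queued to the driver if
 * the stream is still on, marked V4L2BUF_AVAILABLE while draining the output
 * queue, or used to signal refsync during a re-init once every buffer has come
 * back.  The context_ref taken in v4l2_buf_increase_ref() keeps the
 * V4L2m2mContext alive until this callback has run. */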

static int v4l2_buf_increase_ref(V4L2Buffer *in)
{
    V4L2m2mContext *s = buf_to_m2mctx(in);

    if (in->context_ref)
        atomic_fetch_add(&in->context_refcount, 1);
    else {
        in->context_ref = av_buffer_ref(s->self_ref);
        if (!in->context_ref)
            return AVERROR(ENOMEM);

        in->context_refcount = 1;
    }

    in->status = V4L2BUF_RET_USER;
    atomic_fetch_add_explicit(&s->refcount, 1, memory_order_relaxed);

    return 0;
}

static int v4l2_buf_to_bufref(V4L2Buffer *in, int plane, AVBufferRef **buf)
{
    int ret;

    if (plane >= in->num_planes)
        return AVERROR(EINVAL);

    /* even though most encoders return 0 in data_offset, encoding vp8 does require this value */
    *buf = av_buffer_create((char *)in->plane_info[plane].mm_addr + in->planes[plane].data_offset,
                            in->plane_info[plane].length, v4l2_free_buffer, in, 0);
    if (!*buf)
        return AVERROR(ENOMEM);

    ret = v4l2_buf_increase_ref(in);
    if (ret)
        av_buffer_unref(buf);

    return ret;
}

static int v4l2_bufref_to_buf(V4L2Buffer *out, int plane, const uint8_t* data, int size, int offset, AVBufferRef* bref)
{
    unsigned int bytesused, length;

    if (plane >= out->num_planes)
        return AVERROR(EINVAL);

    length = out->plane_info[plane].length;
    bytesused = FFMIN(size+offset, length);

    memcpy((uint8_t*)out->plane_info[plane].mm_addr+offset, data, FFMIN(size, length-offset));

    if (V4L2_TYPE_IS_MULTIPLANAR(out->buf.type)) {
        out->planes[plane].bytesused = bytesused;
        out->planes[plane].length = length;
    } else {
        out->buf.bytesused = bytesused;
        out->buf.length = length;
    }

    return 0;
}

static int v4l2_buffer_buf_to_swframe(AVFrame *frame, V4L2Buffer *avbuf)
{
    int i, ret;

    frame->format = avbuf->context->av_pix_fmt;

    for (i = 0; i < avbuf->num_planes; i++) {
        ret = v4l2_buf_to_bufref(avbuf, i, &frame->buf[i]);
        if (ret)
            return ret;

        frame->linesize[i] = avbuf->plane_info[i].bytesperline;
        frame->data[i] = frame->buf[i]->data;
    }

    /* fixup special cases */
    switch (avbuf->context->av_pix_fmt) {
    case AV_PIX_FMT_NV12:
    case AV_PIX_FMT_NV21:
        if (avbuf->num_planes > 1)
            break;
        frame->linesize[1] = avbuf->plane_info[0].bytesperline;
        frame->data[1] = frame->buf[0]->data + avbuf->plane_info[0].bytesperline * avbuf->context->format.fmt.pix_mp.height;
        break;

    case AV_PIX_FMT_YUV420P:
        if (avbuf->num_planes > 1)
            break;
        frame->linesize[1] = avbuf->plane_info[0].bytesperline >> 1;
        frame->linesize[2] = avbuf->plane_info[0].bytesperline >> 1;
        frame->data[1] = frame->buf[0]->data + avbuf->plane_info[0].bytesperline * avbuf->context->format.fmt.pix_mp.height;
        frame->data[2] = frame->data[1] + ((avbuf->plane_info[0].bytesperline * avbuf->context->format.fmt.pix_mp.height) >> 2);
        break;

    default:
        break;
    }

    return 0;
}
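
/* Illustrative example (not part of the covered source): for an assumed
 * 640x480 YUV420P frame delivered by the driver in a single V4L2 plane with
 * bytesperline = 640, the fixup above yields
 *   linesize[1] = linesize[2] = 320
 *   data[1] = data[0] + 640 * 480              (luma plane, 307200 bytes)
 *   data[2] = data[1] + ((640 * 480) >> 2)     (each chroma plane, 76800 bytes)
 * i.e. the three planes are carved out of the one contiguous mapped buffer. */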

static int v4l2_buffer_swframe_to_buf(const AVFrame *frame, V4L2Buffer *out)
{
    int i, ret;
    struct v4l2_format fmt = out->context->format;
    int pixel_format = V4L2_TYPE_IS_MULTIPLANAR(fmt.type) ?
                       fmt.fmt.pix_mp.pixelformat : fmt.fmt.pix.pixelformat;
    int height       = V4L2_TYPE_IS_MULTIPLANAR(fmt.type) ?
                       fmt.fmt.pix_mp.height : fmt.fmt.pix.height;
    int is_planar_format = 0;

    switch (pixel_format) {
    case V4L2_PIX_FMT_YUV420M:
    case V4L2_PIX_FMT_YVU420M:
#ifdef V4L2_PIX_FMT_YUV422M
    case V4L2_PIX_FMT_YUV422M:
#endif
#ifdef V4L2_PIX_FMT_YVU422M
    case V4L2_PIX_FMT_YVU422M:
#endif
#ifdef V4L2_PIX_FMT_YUV444M
    case V4L2_PIX_FMT_YUV444M:
#endif
#ifdef V4L2_PIX_FMT_YVU444M
    case V4L2_PIX_FMT_YVU444M:
#endif
    case V4L2_PIX_FMT_NV12M:
    case V4L2_PIX_FMT_NV21M:
    case V4L2_PIX_FMT_NV12MT_16X16:
    case V4L2_PIX_FMT_NV12MT:
    case V4L2_PIX_FMT_NV16M:
    case V4L2_PIX_FMT_NV61M:
        is_planar_format = 1;
    }

    if (!is_planar_format) {
        const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(frame->format);
        int planes_nb = 0;
        int offset = 0;

        for (i = 0; i < desc->nb_components; i++)
            planes_nb = FFMAX(planes_nb, desc->comp[i].plane + 1);

        for (i = 0; i < planes_nb; i++) {
            int size, h = height;
            if (i == 1 || i == 2) {
                h = AV_CEIL_RSHIFT(h, desc->log2_chroma_h);
            }
            size = frame->linesize[i] * h;
            ret = v4l2_bufref_to_buf(out, 0, frame->data[i], size, offset, frame->buf[i]);
            if (ret)
                return ret;
            offset += size;
        }
        return 0;
    }

    for (i = 0; i < out->num_planes; i++) {
        ret = v4l2_bufref_to_buf(out, i, frame->buf[i]->data, frame->buf[i]->size, 0, frame->buf[i]);
        if (ret)
            return ret;
    }

    return 0;
}

/******************************************************************************
 *
 *              V4L2Buffer interface
 *
 ******************************************************************************/

int ff_v4l2_buffer_avframe_to_buf(const AVFrame *frame, V4L2Buffer *out)
{
    v4l2_set_pts(out, frame->pts);

    return v4l2_buffer_swframe_to_buf(frame, out);
}

int ff_v4l2_buffer_buf_to_avframe(AVFrame *frame, V4L2Buffer *avbuf)
{
    int ret;

    av_frame_unref(frame);

    /* 1. get references to the actual data */
    ret = v4l2_buffer_buf_to_swframe(frame, avbuf);
    if (ret)
        return ret;

    /* 2. get frame information */
    frame->key_frame = !!(avbuf->buf.flags & V4L2_BUF_FLAG_KEYFRAME);
    frame->color_primaries = v4l2_get_color_primaries(avbuf);
    frame->colorspace = v4l2_get_color_space(avbuf);
    frame->color_range = v4l2_get_color_range(avbuf);
    frame->color_trc = v4l2_get_color_trc(avbuf);
    frame->pts = v4l2_get_pts(avbuf);
    frame->pkt_dts = AV_NOPTS_VALUE;

    /* these values are also updated during re-init in v4l2_process_driver_event */
    frame->height = avbuf->context->height;
    frame->width = avbuf->context->width;
    frame->sample_aspect_ratio = avbuf->context->sample_aspect_ratio;

    /* 3. report errors upstream */
    if (avbuf->buf.flags & V4L2_BUF_FLAG_ERROR) {
        av_log(logger(avbuf), AV_LOG_ERROR, "%s: driver decode error\n", avbuf->context->name);
        frame->decode_error_flags |= FF_DECODE_ERROR_INVALID_BITSTREAM;
    }

    return 0;
}

int ff_v4l2_buffer_buf_to_avpkt(AVPacket *pkt, V4L2Buffer *avbuf)
{
    int ret;

    av_packet_unref(pkt);
    ret = v4l2_buf_to_bufref(avbuf, 0, &pkt->buf);
    if (ret)
        return ret;

    pkt->size = V4L2_TYPE_IS_MULTIPLANAR(avbuf->buf.type) ? avbuf->buf.m.planes[0].bytesused : avbuf->buf.bytesused;
    pkt->data = pkt->buf->data;

    if (avbuf->buf.flags & V4L2_BUF_FLAG_KEYFRAME)
        pkt->flags |= AV_PKT_FLAG_KEY;

    if (avbuf->buf.flags & V4L2_BUF_FLAG_ERROR) {
        av_log(logger(avbuf), AV_LOG_ERROR, "%s driver encode error\n", avbuf->context->name);
        pkt->flags |= AV_PKT_FLAG_CORRUPT;
    }

    pkt->dts = pkt->pts = v4l2_get_pts(avbuf);

    return 0;
}

int ff_v4l2_buffer_avpkt_to_buf(const AVPacket *pkt, V4L2Buffer *out)
{
    int ret;

    ret = v4l2_bufref_to_buf(out, 0, pkt->data, pkt->size, 0, pkt->buf);
    if (ret)
        return ret;

    v4l2_set_pts(out, pkt->pts);

    if (pkt->flags & AV_PKT_FLAG_KEY)
        out->flags = V4L2_BUF_FLAG_KEYFRAME;

    return 0;
}

int ff_v4l2_buffer_initialize(V4L2Buffer* avbuf, int index)
{
    V4L2Context *ctx = avbuf->context;
    int ret, i;

    avbuf->buf.memory = V4L2_MEMORY_MMAP;
    avbuf->buf.type = ctx->type;
    avbuf->buf.index = index;

    if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) {
        avbuf->buf.length = VIDEO_MAX_PLANES;
        avbuf->buf.m.planes = avbuf->planes;
    }

    ret = ioctl(buf_to_m2mctx(avbuf)->fd, VIDIOC_QUERYBUF, &avbuf->buf);
    if (ret < 0)
        return AVERROR(errno);

    if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) {
        avbuf->num_planes = 0;
        /* in MP, the V4L2 API states that buf.length means num_planes */
        for (i = 0; i < avbuf->buf.length; i++) {
            if (avbuf->buf.m.planes[i].length)
                avbuf->num_planes++;
        }
    } else
        avbuf->num_planes = 1;

    for (i = 0; i < avbuf->num_planes; i++) {

        avbuf->plane_info[i].bytesperline = V4L2_TYPE_IS_MULTIPLANAR(ctx->type) ?
            ctx->format.fmt.pix_mp.plane_fmt[i].bytesperline :
            ctx->format.fmt.pix.bytesperline;

        if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) {
            avbuf->plane_info[i].length = avbuf->buf.m.planes[i].length;
            avbuf->plane_info[i].mm_addr = mmap(NULL, avbuf->buf.m.planes[i].length,
                                           PROT_READ | PROT_WRITE, MAP_SHARED,
                                           buf_to_m2mctx(avbuf)->fd, avbuf->buf.m.planes[i].m.mem_offset);
        } else {
            avbuf->plane_info[i].length = avbuf->buf.length;
            avbuf->plane_info[i].mm_addr = mmap(NULL, avbuf->buf.length,
                                          PROT_READ | PROT_WRITE, MAP_SHARED,
                                          buf_to_m2mctx(avbuf)->fd, avbuf->buf.m.offset);
        }

        if (avbuf->plane_info[i].mm_addr == MAP_FAILED)
            return AVERROR(ENOMEM);
    }

    avbuf->status = V4L2BUF_AVAILABLE;

    if (V4L2_TYPE_IS_OUTPUT(ctx->type))
        return 0;

    if (V4L2_TYPE_IS_MULTIPLANAR(ctx->type)) {
        avbuf->buf.m.planes = avbuf->planes;
        avbuf->buf.length   = avbuf->num_planes;

    } else {
        avbuf->buf.bytesused = avbuf->planes[0].bytesused;
        avbuf->buf.length    = avbuf->planes[0].length;
    }

    return ff_v4l2_buffer_enqueue(avbuf);
}

int ff_v4l2_buffer_enqueue(V4L2Buffer* avbuf)
{
    int ret;

    avbuf->buf.flags = avbuf->flags;

    ret = ioctl(buf_to_m2mctx(avbuf)->fd, VIDIOC_QBUF, &avbuf->buf);
    if (ret < 0)
        return AVERROR(errno);

    avbuf->status = V4L2BUF_IN_DRIVER;

    return 0;
}
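
/* Illustrative sketch (not part of the covered source): how the public helpers
 * above are typically combined on the decode path.  The V4L2Buffer values are
 * assumed to come from the surrounding V4L2Context code (buffer allocation and
 * dequeue logic), which lives outside this file:
 *
 *     ff_v4l2_buffer_initialize(out, i);          // QUERYBUF + mmap the planes
 *     ff_v4l2_buffer_avpkt_to_buf(pkt, out);      // copy bitstream and pts into it
 *     ff_v4l2_buffer_enqueue(out);                // VIDIOC_QBUF to the driver
 *     ...
 *     ff_v4l2_buffer_buf_to_avframe(frame, cap);  // wrap a dequeued capture buffer,
 *                                                 // planes referenced without copying
 */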