Coverage listing for FFmpeg's VDPAU hwcontext backend (libavutil/hwcontext_vdpau.c). In the original report the Line / Branch / Exec / Source columns record no branch data and mark every executable line as unexecuted (✗). The source is reproduced below.
/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "config.h"

#include <stdint.h>
#include <string.h>

#include <vdpau/vdpau.h>

#include "buffer.h"
#include "common.h"
#include "hwcontext.h"
#include "hwcontext_internal.h"
#include "hwcontext_vdpau.h"
#include "mem.h"
#include "pixfmt.h"
#include "pixdesc.h"

typedef struct VDPAUPixFmtMap {
    VdpYCbCrFormat vdpau_fmt;
    enum AVPixelFormat pix_fmt;
} VDPAUPixFmtMap;

static const VDPAUPixFmtMap pix_fmts_420[] = {
    { VDP_YCBCR_FORMAT_NV12, AV_PIX_FMT_NV12    },
    { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV420P },
#ifdef VDP_YCBCR_FORMAT_P016
    { VDP_YCBCR_FORMAT_P016, AV_PIX_FMT_P016    },
    { VDP_YCBCR_FORMAT_P010, AV_PIX_FMT_P010    },
#endif
    { 0,                     AV_PIX_FMT_NONE,   },
};

static const VDPAUPixFmtMap pix_fmts_422[] = {
    { VDP_YCBCR_FORMAT_NV12, AV_PIX_FMT_NV16    },
    { VDP_YCBCR_FORMAT_YV12, AV_PIX_FMT_YUV422P },
    { VDP_YCBCR_FORMAT_UYVY, AV_PIX_FMT_UYVY422 },
    { VDP_YCBCR_FORMAT_YUYV, AV_PIX_FMT_YUYV422 },
    { 0,                     AV_PIX_FMT_NONE,   },
};

static const VDPAUPixFmtMap pix_fmts_444[] = {
#ifdef VDP_YCBCR_FORMAT_Y_U_V_444
    { VDP_YCBCR_FORMAT_Y_U_V_444, AV_PIX_FMT_YUV444P },
#endif
#ifdef VDP_YCBCR_FORMAT_P016
    {VDP_YCBCR_FORMAT_Y_U_V_444_16, AV_PIX_FMT_YUV444P16},
#endif
    { 0,                          AV_PIX_FMT_NONE,    },
};

static const struct {
    VdpChromaType chroma_type;
    enum AVPixelFormat frames_sw_format;
    const VDPAUPixFmtMap *map;
} vdpau_pix_fmts[] = {
    { VDP_CHROMA_TYPE_420,    AV_PIX_FMT_YUV420P,   pix_fmts_420 },
    { VDP_CHROMA_TYPE_422,    AV_PIX_FMT_YUV422P,   pix_fmts_422 },
    { VDP_CHROMA_TYPE_444,    AV_PIX_FMT_YUV444P,   pix_fmts_444 },
#ifdef VDP_YCBCR_FORMAT_P016
    { VDP_CHROMA_TYPE_420_16, AV_PIX_FMT_YUV420P10, pix_fmts_420 },
    { VDP_CHROMA_TYPE_420_16, AV_PIX_FMT_YUV420P12, pix_fmts_420 },
    { VDP_CHROMA_TYPE_422_16, AV_PIX_FMT_YUV422P10, pix_fmts_422 },
    { VDP_CHROMA_TYPE_444_16, AV_PIX_FMT_YUV444P10, pix_fmts_444 },
    { VDP_CHROMA_TYPE_444_16, AV_PIX_FMT_YUV444P12, pix_fmts_444 },
#endif
};

typedef struct VDPAUDeviceContext {
    /**
     * The public AVVDPAUDeviceContext. See hwcontext_vdpau.h for it.
     */
    AVVDPAUDeviceContext p;

    VdpVideoSurfaceQueryGetPutBitsYCbCrCapabilities *get_transfer_caps;
    VdpVideoSurfaceGetBitsYCbCr *get_data;
    VdpVideoSurfacePutBitsYCbCr *put_data;
    VdpVideoSurfaceCreate *surf_create;
    VdpVideoSurfaceDestroy *surf_destroy;

    enum AVPixelFormat *pix_fmts[FF_ARRAY_ELEMS(vdpau_pix_fmts)];
    int nb_pix_fmts[FF_ARRAY_ELEMS(vdpau_pix_fmts)];
} VDPAUDeviceContext;

typedef struct VDPAUFramesContext {
    VdpVideoSurfaceGetBitsYCbCr *get_data;
    VdpVideoSurfacePutBitsYCbCr *put_data;
    VdpChromaType chroma_type;
    int chroma_idx;

    const enum AVPixelFormat *pix_fmts;
    int nb_pix_fmts;
} VDPAUFramesContext;

static int count_pixfmts(const VDPAUPixFmtMap *map)
{
    int count = 0;
    while (map->pix_fmt != AV_PIX_FMT_NONE) {
        map++;
        count++;
    }
    return count;
}

static int vdpau_init_pixmfts(AVHWDeviceContext *ctx)
{
    VDPAUDeviceContext *priv = ctx->hwctx;
    AVVDPAUDeviceContext *hwctx = &priv->p;
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(priv->pix_fmts); i++) {
        const VDPAUPixFmtMap *map = vdpau_pix_fmts[i].map;
        int nb_pix_fmts;

        nb_pix_fmts = count_pixfmts(map);
        priv->pix_fmts[i] = av_malloc_array(nb_pix_fmts + 1, sizeof(*priv->pix_fmts[i]));
        if (!priv->pix_fmts[i])
            return AVERROR(ENOMEM);

        nb_pix_fmts = 0;
        while (map->pix_fmt != AV_PIX_FMT_NONE) {
            VdpBool supported;
            VdpStatus err = priv->get_transfer_caps(hwctx->device, vdpau_pix_fmts[i].chroma_type,
                                                    map->vdpau_fmt, &supported);
            if (err == VDP_STATUS_OK && supported)
                priv->pix_fmts[i][nb_pix_fmts++] = map->pix_fmt;
            map++;
        }
        priv->pix_fmts[i][nb_pix_fmts++] = AV_PIX_FMT_NONE;
        priv->nb_pix_fmts[i]             = nb_pix_fmts;
    }

    return 0;
}

#define GET_CALLBACK(id, result)                                                \
do {                                                                            \
    void *tmp;                                                                  \
    err = hwctx->get_proc_address(hwctx->device, id, &tmp);                     \
    if (err != VDP_STATUS_OK) {                                                 \
        av_log(ctx, AV_LOG_ERROR, "Error getting the " #id " callback.\n");     \
        return AVERROR_UNKNOWN;                                                 \
    }                                                                           \
    result = tmp;                                                               \
} while (0)

static int vdpau_device_init(AVHWDeviceContext *ctx)
{
    VDPAUDeviceContext *priv = ctx->hwctx;
    AVVDPAUDeviceContext *hwctx = &priv->p;
    VdpStatus err;
    int ret;

    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_QUERY_GET_PUT_BITS_Y_CB_CR_CAPABILITIES,
                 priv->get_transfer_caps);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_GET_BITS_Y_CB_CR, priv->get_data);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_PUT_BITS_Y_CB_CR, priv->put_data);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_CREATE,           priv->surf_create);
    GET_CALLBACK(VDP_FUNC_ID_VIDEO_SURFACE_DESTROY,          priv->surf_destroy);

    ret = vdpau_init_pixmfts(ctx);
    if (ret < 0) {
        av_log(ctx, AV_LOG_ERROR, "Error querying the supported pixel formats\n");
        return ret;
    }

    return 0;
}

static void vdpau_device_uninit(AVHWDeviceContext *ctx)
{
    VDPAUDeviceContext *priv = ctx->hwctx;
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(priv->pix_fmts); i++)
        av_freep(&priv->pix_fmts[i]);
}

static int vdpau_frames_get_constraints(AVHWDeviceContext *ctx,
                                        const void *hwconfig,
                                        AVHWFramesConstraints *constraints)
{
    VDPAUDeviceContext *priv = ctx->hwctx;
    int nb_sw_formats = 0;
    int i;

    constraints->valid_sw_formats = av_malloc_array(FF_ARRAY_ELEMS(vdpau_pix_fmts) + 1,
                                                    sizeof(*constraints->valid_sw_formats));
    if (!constraints->valid_sw_formats)
        return AVERROR(ENOMEM);

    for (i = 0; i < FF_ARRAY_ELEMS(vdpau_pix_fmts); i++) {
        if (priv->nb_pix_fmts[i] > 1)
            constraints->valid_sw_formats[nb_sw_formats++] = vdpau_pix_fmts[i].frames_sw_format;
    }
    constraints->valid_sw_formats[nb_sw_formats] = AV_PIX_FMT_NONE;

    constraints->valid_hw_formats = av_malloc_array(2, sizeof(*constraints->valid_hw_formats));
    if (!constraints->valid_hw_formats)
        return AVERROR(ENOMEM);

    constraints->valid_hw_formats[0] = AV_PIX_FMT_VDPAU;
    constraints->valid_hw_formats[1] = AV_PIX_FMT_NONE;

    return 0;
}

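The constraints callback above backs the generic libavutil query API. As a point of reference, a minimal sketch of reaching it from user code might look like the following; the function name, the NULL hwconfig, and the bare printf reporting are illustrative assumptions, not part of this file.

#include <stdio.h>

#include <libavutil/hwcontext.h>
#include <libavutil/pixdesc.h>

/* Illustrative only: create a VDPAU device and print the sw formats that
 * vdpau_frames_get_constraints() reports. Device creation runs through
 * vdpau_device_create() (defined later in this file) and vdpau_device_init(). */
static int print_vdpau_constraints(const char *x11_display)
{
    AVBufferRef *device_ref = NULL;
    AVHWFramesConstraints *cst;
    int i, ret;

    ret = av_hwdevice_ctx_create(&device_ref, AV_HWDEVICE_TYPE_VDPAU,
                                 x11_display, NULL, 0);
    if (ret < 0)
        return ret;

    /* Dispatches to vdpau_frames_get_constraints() above. */
    cst = av_hwdevice_get_hwframe_constraints(device_ref, NULL);
    if (cst && cst->valid_sw_formats) {
        for (i = 0; cst->valid_sw_formats[i] != AV_PIX_FMT_NONE; i++)
            printf("supported sw format: %s\n",
                   av_get_pix_fmt_name(cst->valid_sw_formats[i]));
    }
    av_hwframe_constraints_free(&cst);

    av_buffer_unref(&device_ref);
    return 0;
}
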
static void vdpau_buffer_free(void *opaque, uint8_t *data)
{
    AVHWFramesContext *ctx          = opaque;
    VDPAUDeviceContext *device_priv = ctx->device_ctx->hwctx;
    VdpVideoSurface surf = (VdpVideoSurface)(uintptr_t)data;

    device_priv->surf_destroy(surf);
}

static AVBufferRef *vdpau_pool_alloc(void *opaque, size_t size)
{
    AVHWFramesContext *ctx             = opaque;
    VDPAUFramesContext *priv           = ctx->hwctx;
    VDPAUDeviceContext *device_priv    = ctx->device_ctx->hwctx;
    AVVDPAUDeviceContext *device_hwctx = &device_priv->p;

    AVBufferRef *ret;
    VdpVideoSurface surf;
    VdpStatus err;

    err = device_priv->surf_create(device_hwctx->device, priv->chroma_type,
                                   ctx->width, ctx->height, &surf);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "Error allocating a VDPAU video surface\n");
        return NULL;
    }

    ret = av_buffer_create((uint8_t*)(uintptr_t)surf, sizeof(surf),
                           vdpau_buffer_free, ctx, AV_BUFFER_FLAG_READONLY);
    if (!ret) {
        device_priv->surf_destroy(surf);
        return NULL;
    }

    return ret;
}

static int vdpau_frames_init(AVHWFramesContext *ctx)
{
    VDPAUDeviceContext *device_priv = ctx->device_ctx->hwctx;
    VDPAUFramesContext *priv        = ctx->hwctx;

    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(vdpau_pix_fmts); i++) {
        if (vdpau_pix_fmts[i].frames_sw_format == ctx->sw_format) {
            priv->chroma_type = vdpau_pix_fmts[i].chroma_type;
            priv->chroma_idx  = i;
            priv->pix_fmts    = device_priv->pix_fmts[i];
            priv->nb_pix_fmts = device_priv->nb_pix_fmts[i];
            break;
        }
    }
    if (priv->nb_pix_fmts < 2) {
        av_log(ctx, AV_LOG_ERROR, "Unsupported sw format: %s\n",
               av_get_pix_fmt_name(ctx->sw_format));
        return AVERROR(ENOSYS);
    }

    if (!ctx->pool) {
        ffhwframesctx(ctx)->pool_internal =
            av_buffer_pool_init2(sizeof(VdpVideoSurface), ctx,
                                 vdpau_pool_alloc, NULL);
        if (!ffhwframesctx(ctx)->pool_internal)
            return AVERROR(ENOMEM);
    }

    priv->get_data = device_priv->get_data;
    priv->put_data = device_priv->put_data;

    return 0;
}

static int vdpau_get_buffer(AVHWFramesContext *ctx, AVFrame *frame)
{
    frame->buf[0] = av_buffer_pool_get(ctx->pool);
    if (!frame->buf[0])
        return AVERROR(ENOMEM);

    frame->data[3] = frame->buf[0]->data;
    frame->format  = AV_PIX_FMT_VDPAU;
    frame->width   = ctx->width;
    frame->height  = ctx->height;

    return 0;
}

static int vdpau_transfer_get_formats(AVHWFramesContext *ctx,
                                      enum AVHWFrameTransferDirection dir,
                                      enum AVPixelFormat **formats)
{
    VDPAUFramesContext *priv = ctx->hwctx;

    enum AVPixelFormat *fmts;

    if (priv->nb_pix_fmts == 1) {
        av_log(ctx, AV_LOG_ERROR,
               "No target formats are supported for this chroma type\n");
        return AVERROR(ENOSYS);
    }

    fmts = av_malloc_array(priv->nb_pix_fmts, sizeof(*fmts));
    if (!fmts)
        return AVERROR(ENOMEM);

    memcpy(fmts, priv->pix_fmts, sizeof(*fmts) * (priv->nb_pix_fmts));
    *formats = fmts;

    return 0;
}

static int vdpau_transfer_data_from(AVHWFramesContext *ctx, AVFrame *dst,
                                    const AVFrame *src)
{
    VDPAUFramesContext *priv = ctx->hwctx;
    VdpVideoSurface surf = (VdpVideoSurface)(uintptr_t)src->data[3];

    void *data[3];
    uint32_t linesize[3];

    const VDPAUPixFmtMap *map;
    VdpYCbCrFormat vdpau_format;
    VdpStatus err;
    int i;

    for (i = 0; i< FF_ARRAY_ELEMS(data) && dst->data[i]; i++) {
        data[i] = dst->data[i];
        if (dst->linesize[i] < 0 || dst->linesize[i] > UINT32_MAX) {
            av_log(ctx, AV_LOG_ERROR,
                   "The linesize %d cannot be represented as uint32\n",
                   dst->linesize[i]);
            return AVERROR(ERANGE);
        }
        linesize[i] = dst->linesize[i];
    }

    map = vdpau_pix_fmts[priv->chroma_idx].map;
    for (i = 0; map[i].pix_fmt != AV_PIX_FMT_NONE; i++) {
        if (map[i].pix_fmt == dst->format) {
            vdpau_format = map[i].vdpau_fmt;
            break;
        }
    }
    if (map[i].pix_fmt == AV_PIX_FMT_NONE) {
        av_log(ctx, AV_LOG_ERROR,
               "Unsupported target pixel format: %s\n",
               av_get_pix_fmt_name(dst->format));
        return AVERROR(EINVAL);
    }

    if ((vdpau_format == VDP_YCBCR_FORMAT_YV12)
#ifdef VDP_YCBCR_FORMAT_Y_U_V_444
        || (vdpau_format == VDP_YCBCR_FORMAT_Y_U_V_444)
#endif
#ifdef VDP_YCBCR_FORMAT_P016
        || (vdpau_format == VDP_YCBCR_FORMAT_Y_U_V_444_16)
#endif
        )
        FFSWAP(void*, data[1], data[2]);

    err = priv->get_data(surf, vdpau_format, data, linesize);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "Error retrieving the data from a VDPAU surface\n");
        return AVERROR_UNKNOWN;
    }

    return 0;
}

static int vdpau_transfer_data_to(AVHWFramesContext *ctx, AVFrame *dst,
                                  const AVFrame *src)
{
    VDPAUFramesContext *priv = ctx->hwctx;
    VdpVideoSurface surf = (VdpVideoSurface)(uintptr_t)dst->data[3];

    const void *data[3];
    uint32_t linesize[3];

    const VDPAUPixFmtMap *map;
    VdpYCbCrFormat vdpau_format;
    VdpStatus err;
    int i;

    for (i = 0; i< FF_ARRAY_ELEMS(data) && src->data[i]; i++) {
        data[i] = src->data[i];
        if (src->linesize[i] < 0 || src->linesize[i] > UINT32_MAX) {
            av_log(ctx, AV_LOG_ERROR,
                   "The linesize %d cannot be represented as uint32\n",
                   src->linesize[i]);
            return AVERROR(ERANGE);
        }
        linesize[i] = src->linesize[i];
    }

    map = vdpau_pix_fmts[priv->chroma_idx].map;
    for (i = 0; map[i].pix_fmt != AV_PIX_FMT_NONE; i++) {
        if (map[i].pix_fmt == src->format) {
            vdpau_format = map[i].vdpau_fmt;
            break;
        }
    }
    if (map[i].pix_fmt == AV_PIX_FMT_NONE) {
        av_log(ctx, AV_LOG_ERROR,
               "Unsupported source pixel format: %s\n",
               av_get_pix_fmt_name(src->format));
        return AVERROR(EINVAL);
    }

    if ((vdpau_format == VDP_YCBCR_FORMAT_YV12)
#ifdef VDP_YCBCR_FORMAT_Y_U_V_444
        || (vdpau_format == VDP_YCBCR_FORMAT_Y_U_V_444)
#endif
        )
        FFSWAP(const void*, data[1], data[2]);

    err = priv->put_data(surf, vdpau_format, data, linesize);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "Error uploading the data to a VDPAU surface\n");
        return AVERROR_UNKNOWN;
    }

    return 0;
}

#if HAVE_VDPAU_X11
#include <vdpau/vdpau_x11.h>
#include <X11/Xlib.h>

typedef struct VDPAUDevicePriv {
    VdpDeviceDestroy *device_destroy;
    Display *dpy;
} VDPAUDevicePriv;

static void vdpau_device_free(AVHWDeviceContext *ctx)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;
    VDPAUDevicePriv *priv       = ctx->user_opaque;

    if (priv->device_destroy)
        priv->device_destroy(hwctx->device);
    if (priv->dpy)
        XCloseDisplay(priv->dpy);
    av_freep(&priv);
}

static int vdpau_device_create(AVHWDeviceContext *ctx, const char *device,
                               AVDictionary *opts, int flags)
{
    AVVDPAUDeviceContext *hwctx = ctx->hwctx;

    VDPAUDevicePriv *priv;
    VdpStatus err;
    VdpGetInformationString *get_information_string;
    const char *display, *vendor;

    priv = av_mallocz(sizeof(*priv));
    if (!priv)
        return AVERROR(ENOMEM);

    ctx->user_opaque = priv;
    ctx->free        = vdpau_device_free;

    priv->dpy = XOpenDisplay(device);
    if (!priv->dpy) {
        av_log(ctx, AV_LOG_ERROR, "Cannot open the X11 display %s.\n",
               XDisplayName(device));
        return AVERROR_UNKNOWN;
    }
    display = XDisplayString(priv->dpy);

    err = vdp_device_create_x11(priv->dpy, XDefaultScreen(priv->dpy),
                                &hwctx->device, &hwctx->get_proc_address);
    if (err != VDP_STATUS_OK) {
        av_log(ctx, AV_LOG_ERROR, "VDPAU device creation on X11 display %s failed.\n",
               display);
        return AVERROR_UNKNOWN;
    }

    GET_CALLBACK(VDP_FUNC_ID_GET_INFORMATION_STRING, get_information_string);
    GET_CALLBACK(VDP_FUNC_ID_DEVICE_DESTROY,         priv->device_destroy);

    get_information_string(&vendor);
    av_log(ctx, AV_LOG_VERBOSE, "Successfully created a VDPAU device (%s) on "
           "X11 display %s\n", vendor, display);

    return 0;
}
#endif

const HWContextType ff_hwcontext_type_vdpau = {
    .type                   = AV_HWDEVICE_TYPE_VDPAU,
    .name                   = "VDPAU",

    .device_hwctx_size      = sizeof(VDPAUDeviceContext),
    .frames_hwctx_size      = sizeof(VDPAUFramesContext),

#if HAVE_VDPAU_X11
    .device_create          = vdpau_device_create,
#endif
    .device_init            = vdpau_device_init,
    .device_uninit          = vdpau_device_uninit,
    .frames_get_constraints = vdpau_frames_get_constraints,
    .frames_init            = vdpau_frames_init,
    .frames_get_buffer      = vdpau_get_buffer,
    .transfer_get_formats   = vdpau_transfer_get_formats,
    .transfer_data_to       = vdpau_transfer_data_to,
    .transfer_data_from     = vdpau_transfer_data_from,

    .pix_fmts = (const enum AVPixelFormat[]){ AV_PIX_FMT_VDPAU, AV_PIX_FMT_NONE },
};
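Every executable line above is reported as unexecuted, so for orientation, here is a minimal sketch of the call sequence that would drive the surface pool and transfer paths in this file through the public hwcontext API. The helper name, the fixed YUV420P sw_format, and the abbreviated error handling are illustrative assumptions.

#include <libavutil/error.h>
#include <libavutil/frame.h>
#include <libavutil/hwcontext.h>
#include <libavutil/pixfmt.h>

/* Illustrative only: allocate a VDPAU-backed frame and download it to system
 * memory, which exercises vdpau_frames_init(), vdpau_pool_alloc(),
 * vdpau_get_buffer() and vdpau_transfer_data_from() above. */
static int vdpau_roundtrip(AVBufferRef *device_ref, int width, int height)
{
    AVBufferRef *frames_ref = NULL;
    AVHWFramesContext *frames_ctx;
    AVFrame *hw = NULL, *sw = NULL;
    int ret;

    frames_ref = av_hwframe_ctx_alloc(device_ref);
    if (!frames_ref)
        return AVERROR(ENOMEM);

    frames_ctx            = (AVHWFramesContext*)frames_ref->data;
    frames_ctx->format    = AV_PIX_FMT_VDPAU;   /* hw surface format */
    frames_ctx->sw_format = AV_PIX_FMT_YUV420P; /* must match a frames_sw_format entry */
    frames_ctx->width     = width;
    frames_ctx->height    = height;

    ret = av_hwframe_ctx_init(frames_ref);      /* -> vdpau_frames_init() */
    if (ret < 0)
        goto fail;

    hw = av_frame_alloc();
    sw = av_frame_alloc();
    if (!hw || !sw) {
        ret = AVERROR(ENOMEM);
        goto fail;
    }

    ret = av_hwframe_get_buffer(frames_ref, hw, 0); /* -> vdpau_get_buffer() */
    if (ret < 0)
        goto fail;

    /* sw->format is left unset, so the first format reported by
     * vdpau_transfer_get_formats() is used for the download. */
    ret = av_hwframe_transfer_data(sw, hw, 0);      /* -> vdpau_transfer_data_from() */

fail:
    av_frame_free(&hw);
    av_frame_free(&sw);
    av_buffer_unref(&frames_ref);
    return ret;
}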