/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <string.h>

#include "libavutil/avassert.h"
#include "libavutil/mem.h"
#include "libavutil/pixdesc.h"

#include "filters.h"
#include "formats.h"
#include "vaapi_vpp.h"
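
// Advertise AV_PIX_FMT_VAAPI as the only pixel format supported on both
// the input and output links.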
int ff_vaapi_vpp_query_formats(const AVFilterContext *avctx,
                               AVFilterFormatsConfig **cfg_in,
                               AVFilterFormatsConfig **cfg_out)
{
    static const enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_VAAPI, AV_PIX_FMT_NONE,
    };
    int err;

    err = ff_set_common_formats_from_list2(avctx, cfg_in, cfg_out, pix_fmts);
    if (err < 0)
        return err;

    return 0;
}
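
// Default pipeline uninit: destroy any filter parameter buffers, the VA
// context and config, and drop the device reference.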
void ff_vaapi_vpp_pipeline_uninit(AVFilterContext *avctx)
{
    VAAPIVPPContext *ctx = avctx->priv;
    int i;
    for (i = 0; i < ctx->nb_filter_buffers; i++) {
        if (ctx->filter_buffers[i] != VA_INVALID_ID) {
            vaDestroyBuffer(ctx->hwctx->display, ctx->filter_buffers[i]);
            ctx->filter_buffers[i] = VA_INVALID_ID;
        }
    }
    ctx->nb_filter_buffers = 0;

    if (ctx->va_context != VA_INVALID_ID) {
        vaDestroyContext(ctx->hwctx->display, ctx->va_context);
        ctx->va_context = VA_INVALID_ID;
    }

    if (ctx->va_config != VA_INVALID_ID) {
        vaDestroyConfig(ctx->hwctx->display, ctx->va_config);
        ctx->va_config = VA_INVALID_ID;
    }

    av_buffer_unref(&ctx->device_ref);
    ctx->hwctx = NULL;
}
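
// Take a reference to the incoming hardware frames context; the VAAPI
// device is derived from it when the output is configured.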
int ff_vaapi_vpp_config_input(AVFilterLink *inlink)
{
    FilterLink      *l     = ff_filter_link(inlink);
    AVFilterContext *avctx = inlink->dst;
    VAAPIVPPContext *ctx   = avctx->priv;

    if (ctx->pipeline_uninit)
        ctx->pipeline_uninit(avctx);

    if (!l->hw_frames_ctx) {
        av_log(avctx, AV_LOG_ERROR, "A hardware frames reference is "
               "required to associate the processing device.\n");
        return AVERROR(EINVAL);
    }

    ctx->input_frames_ref = av_buffer_ref(l->hw_frames_ctx);
    if (!ctx->input_frames_ref) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create input frames "
               "reference.\n");
        return AVERROR(ENOMEM);
    }
    ctx->input_frames = (AVHWFramesContext*)ctx->input_frames_ref->data;

    return 0;
}
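
// Create the output frames context and the VAAPI processing pipeline
// (config + context), validating the requested output format and size
// against the driver's constraints.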
int ff_vaapi_vpp_config_output(AVFilterLink *outlink)
{
    FilterLink      *outl   = ff_filter_link(outlink);
    AVFilterContext *avctx  = outlink->src;
    AVFilterLink    *inlink = avctx->inputs[0];
    FilterLink      *inl    = ff_filter_link(inlink);
    VAAPIVPPContext *ctx    = avctx->priv;
    AVVAAPIHWConfig *hwconfig = NULL;
    AVHWFramesConstraints *constraints = NULL;
    AVHWFramesContext *output_frames;
    AVVAAPIFramesContext *va_frames;
    VAStatus vas;
    int err, i;

    if (ctx->pipeline_uninit)
        ctx->pipeline_uninit(avctx);

    if (!ctx->output_width)
        ctx->output_width  = avctx->inputs[0]->w;
    if (!ctx->output_height)
        ctx->output_height = avctx->inputs[0]->h;

    outlink->w = ctx->output_width;
    outlink->h = ctx->output_height;

    if (ctx->passthrough) {
        if (inl->hw_frames_ctx)
            outl->hw_frames_ctx = av_buffer_ref(inl->hw_frames_ctx);
        av_log(ctx, AV_LOG_VERBOSE, "Using VAAPI filter passthrough mode.\n");

        return 0;
    }

    av_assert0(ctx->input_frames);
    ctx->device_ref = av_buffer_ref(ctx->input_frames->device_ref);
    if (!ctx->device_ref) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create device "
               "reference.\n");
        return AVERROR(ENOMEM);
    }
    ctx->hwctx = ((AVHWDeviceContext*)ctx->device_ref->data)->hwctx;

    av_assert0(ctx->va_config == VA_INVALID_ID);
    vas = vaCreateConfig(ctx->hwctx->display, VAProfileNone,
                         VAEntrypointVideoProc, NULL, 0, &ctx->va_config);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create processing pipeline "
               "config: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    hwconfig = av_hwdevice_hwconfig_alloc(ctx->device_ref);
    if (!hwconfig) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    hwconfig->config_id = ctx->va_config;

    constraints = av_hwdevice_get_hwframe_constraints(ctx->device_ref,
                                                      hwconfig);
    if (!constraints) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    if (ctx->output_format == AV_PIX_FMT_NONE)
        ctx->output_format = ctx->input_frames->sw_format;
    if (constraints->valid_sw_formats) {
        for (i = 0; constraints->valid_sw_formats[i] != AV_PIX_FMT_NONE; i++) {
            if (ctx->output_format == constraints->valid_sw_formats[i])
                break;
        }
        if (constraints->valid_sw_formats[i] == AV_PIX_FMT_NONE) {
            av_log(avctx, AV_LOG_ERROR, "Hardware does not support output "
                   "format %s.\n", av_get_pix_fmt_name(ctx->output_format));
            err = AVERROR(EINVAL);
            goto fail;
        }
    }

    if (ctx->output_width  < constraints->min_width  ||
        ctx->output_height < constraints->min_height ||
        ctx->output_width  > constraints->max_width  ||
        ctx->output_height > constraints->max_height) {
        av_log(avctx, AV_LOG_ERROR, "Hardware does not support scaling to "
               "size %dx%d (constraints: width %d-%d height %d-%d).\n",
               ctx->output_width, ctx->output_height,
               constraints->min_width,  constraints->max_width,
               constraints->min_height, constraints->max_height);
        err = AVERROR(EINVAL);
        goto fail;
    }

    outl->hw_frames_ctx = av_hwframe_ctx_alloc(ctx->device_ref);
    if (!outl->hw_frames_ctx) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create HW frame context "
               "for output.\n");
        err = AVERROR(ENOMEM);
        goto fail;
    }

    output_frames = (AVHWFramesContext*)outl->hw_frames_ctx->data;

    output_frames->format    = AV_PIX_FMT_VAAPI;
    output_frames->sw_format = ctx->output_format;
    output_frames->width     = ctx->output_width;
    output_frames->height    = ctx->output_height;

    if (CONFIG_VAAPI_1)
        output_frames->initial_pool_size = 0;
    else
        output_frames->initial_pool_size = 4;

    err = ff_filter_init_hw_frames(avctx, outlink, 10);
    if (err < 0)
        goto fail;

    err = av_hwframe_ctx_init(outl->hw_frames_ctx);
    if (err < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to initialise VAAPI frame "
               "context for output: %d\n", err);
        goto fail;
    }

    va_frames = output_frames->hwctx;

    av_assert0(ctx->va_context == VA_INVALID_ID);
    av_assert0(output_frames->initial_pool_size ||
               (va_frames->surface_ids == NULL && va_frames->nb_surfaces == 0));
    vas = vaCreateContext(ctx->hwctx->display, ctx->va_config,
                          ctx->output_width, ctx->output_height,
                          VA_PROGRESSIVE,
                          va_frames->surface_ids, va_frames->nb_surfaces,
                          &ctx->va_context);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create processing pipeline "
               "context: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    if (ctx->build_filter_params) {
        err = ctx->build_filter_params(avctx);
        if (err < 0)
            goto fail;
    }

    av_freep(&hwconfig);
    av_hwframe_constraints_free(&constraints);
    return 0;

fail:
    av_buffer_unref(&outl->hw_frames_ctx);
    av_freep(&hwconfig);
    av_hwframe_constraints_free(&constraints);
    return err;
}
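
// Bundle of FFmpeg colour metadata together with the corresponding VAAPI
// colour standard / chroma siting / range values.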
typedef struct VAAPIColourProperties {
    VAProcColorStandardType va_color_standard;

    enum AVColorPrimaries color_primaries;
    enum AVColorTransferCharacteristic color_trc;
    enum AVColorSpace colorspace;

    uint8_t va_chroma_sample_location;
    uint8_t va_color_range;

    enum AVColorRange color_range;
    enum AVChromaLocation chroma_sample_location;
} VAAPIColourProperties;
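
// Mapping from VAProcColorStandardType to FFmpeg colour metadata.  The
// three numeric columns are AVColorPrimaries, AVColorTransferCharacteristic
// and AVColorSpace codes respectively (e.g. 1 == BT.709 in each enum).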
static const VAAPIColourProperties vaapi_colour_standard_map[] = {
    { VAProcColorStandardBT601,       5,  6,  5 },
    { VAProcColorStandardBT601,       6,  6,  6 },
    { VAProcColorStandardBT709,       1,  1,  1 },
    { VAProcColorStandardBT470M,      4,  4,  4 },
    { VAProcColorStandardBT470BG,     5,  5,  5 },
    { VAProcColorStandardSMPTE170M,   6,  6,  6 },
    { VAProcColorStandardSMPTE240M,   7,  7,  7 },
    { VAProcColorStandardGenericFilm, 8,  1,  1 },
#if VA_CHECK_VERSION(1, 1, 0)
    { VAProcColorStandardSRGB,        1, 13,  0 },
    { VAProcColorStandardXVYCC601,    1, 11,  5 },
    { VAProcColorStandardXVYCC709,    1, 11,  1 },
    { VAProcColorStandardBT2020,      9, 14,  9 },
#endif
};
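
// Pick the VA colour standard which best matches the given FFmpeg colour
// properties from the list supported by the driver.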
static void vaapi_vpp_fill_colour_standard(VAAPIColourProperties *props,
                                           VAProcColorStandardType *vacs,
                                           int nb_vacs)
{
    const VAAPIColourProperties *t;
    int i, j, score, best_score, worst_score;
    VAProcColorStandardType best_standard;

#if VA_CHECK_VERSION(1, 3, 0)
    // If the driver supports explicit use of the standard values then just
    // use them and avoid doing any mapping.  (The driver may not support
    // some particular code point, but it still has enough information to
    // make a better fallback choice than we do in that case.)
    for (i = 0; i < nb_vacs; i++) {
        if (vacs[i] == VAProcColorStandardExplicit) {
            props->va_color_standard = VAProcColorStandardExplicit;
            return;
        }
    }
#endif

    // Give scores to the possible options and choose the lowest one.
    // An exact match will score zero and therefore always be chosen, as
    // will a partial match where all unmatched elements are explicitly
    // unspecified.  If no options match at all then just pass "none" to
    // the driver and let it make its own choice.
    best_standard = VAProcColorStandardNone;
    best_score = -1;
    worst_score = 4 * (props->colorspace != AVCOL_SPC_UNSPECIFIED &&
                       props->colorspace != AVCOL_SPC_RGB) +
                  2 * (props->color_trc != AVCOL_TRC_UNSPECIFIED) +
                      (props->color_primaries != AVCOL_PRI_UNSPECIFIED);

    if (worst_score == 0) {
        // No properties are specified, so we aren't going to be able to
        // make a useful choice.
        props->va_color_standard = VAProcColorStandardNone;
        return;
    }

    for (i = 0; i < nb_vacs; i++) {
        for (j = 0; j < FF_ARRAY_ELEMS(vaapi_colour_standard_map); j++) {
            t = &vaapi_colour_standard_map[j];
            if (t->va_color_standard != vacs[i])
                continue;

            score = 0;
            if (props->colorspace != AVCOL_SPC_UNSPECIFIED &&
                props->colorspace != AVCOL_SPC_RGB)
                score += 4 * (props->colorspace != t->colorspace);
            if (props->color_trc != AVCOL_TRC_UNSPECIFIED)
                score += 2 * (props->color_trc != t->color_trc);
            if (props->color_primaries != AVCOL_PRI_UNSPECIFIED)
                score += (props->color_primaries != t->color_primaries);

            // Only include choices which matched something.
            if (score < worst_score &&
                (best_score == -1 || score < best_score)) {
                best_score = score;
                best_standard = t->va_color_standard;
            }
        }
    }
    props->va_color_standard = best_standard;
}
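
// Translate an AVChromaLocation value into VA chroma siting flags.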
static void vaapi_vpp_fill_chroma_sample_location(VAAPIColourProperties *props)
{
#if VA_CHECK_VERSION(1, 1, 0)
    static const struct {
        enum AVChromaLocation av;
        uint8_t va;
    } csl_map[] = {
        { AVCHROMA_LOC_UNSPECIFIED, VA_CHROMA_SITING_UNKNOWN },
        { AVCHROMA_LOC_LEFT,        VA_CHROMA_SITING_VERTICAL_CENTER |
                                    VA_CHROMA_SITING_HORIZONTAL_LEFT },
        { AVCHROMA_LOC_CENTER,      VA_CHROMA_SITING_VERTICAL_CENTER |
                                    VA_CHROMA_SITING_HORIZONTAL_CENTER },
        { AVCHROMA_LOC_TOPLEFT,     VA_CHROMA_SITING_VERTICAL_TOP |
                                    VA_CHROMA_SITING_HORIZONTAL_LEFT },
        { AVCHROMA_LOC_TOP,         VA_CHROMA_SITING_VERTICAL_TOP |
                                    VA_CHROMA_SITING_HORIZONTAL_CENTER },
        { AVCHROMA_LOC_BOTTOMLEFT,  VA_CHROMA_SITING_VERTICAL_BOTTOM |
                                    VA_CHROMA_SITING_HORIZONTAL_LEFT },
        { AVCHROMA_LOC_BOTTOM,      VA_CHROMA_SITING_VERTICAL_BOTTOM |
                                    VA_CHROMA_SITING_HORIZONTAL_CENTER },
    };
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(csl_map); i++) {
        if (props->chroma_sample_location == csl_map[i].av) {
            props->va_chroma_sample_location = csl_map[i].va;
            return;
        }
    }
    props->va_chroma_sample_location = VA_CHROMA_SITING_UNKNOWN;
#else
    props->va_chroma_sample_location = 0;
#endif
}
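
// Translate an AVColorRange value into a VA source range value.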
static void vaapi_vpp_fill_colour_range(VAAPIColourProperties *props)
{
#if VA_CHECK_VERSION(1, 1, 0)
    switch (props->color_range) {
    case AVCOL_RANGE_MPEG:
        props->va_color_range = VA_SOURCE_RANGE_REDUCED;
        break;
    case AVCOL_RANGE_JPEG:
        props->va_color_range = VA_SOURCE_RANGE_FULL;
        break;
    case AVCOL_RANGE_UNSPECIFIED:
    default:
        props->va_color_range = VA_SOURCE_RANGE_UNKNOWN;
    }
#else
    props->va_color_range = 0;
#endif
}
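
// Fill all of the VA colour fields from the FFmpeg ones and log the
// resulting mapping.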
static void vaapi_vpp_fill_colour_properties(AVFilterContext *avctx,
                                             VAAPIColourProperties *props,
                                             VAProcColorStandardType *vacs,
                                             int nb_vacs)
{
    vaapi_vpp_fill_colour_standard(props, vacs, nb_vacs);
    vaapi_vpp_fill_chroma_sample_location(props);
    vaapi_vpp_fill_colour_range(props);

    av_log(avctx, AV_LOG_DEBUG, "Mapped colour properties %s %s/%s/%s %s "
           "to VA standard %d chroma siting %#x range %#x.\n",
           av_color_range_name(props->color_range),
           av_color_space_name(props->colorspace),
           av_color_primaries_name(props->color_primaries),
           av_color_transfer_name(props->color_trc),
           av_chroma_location_name(props->chroma_sample_location),
           props->va_color_standard,
           props->va_chroma_sample_location, props->va_color_range);
}
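
// Return nonzero if the underlying software format of the VAAPI frame is
// an RGB format.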
static int vaapi_vpp_frame_is_rgb(const AVFrame *frame)
{
    const AVHWFramesContext *hwfc;
    const AVPixFmtDescriptor *desc;
    av_assert0(frame->format == AV_PIX_FMT_VAAPI &&
               frame->hw_frames_ctx);
    hwfc = (const AVHWFramesContext*)frame->hw_frames_ctx->data;
    desc = av_pix_fmt_desc_get(hwfc->sw_format);
    av_assert0(desc);
    return !!(desc->flags & AV_PIX_FMT_FLAG_RGB);
}
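
// Set up the colour-related fields of the pipeline parameter buffer from
// the input and output frame properties, constrained by the colour
// standards the driver reports it supports.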
static int vaapi_vpp_colour_properties(AVFilterContext *avctx,
                                       VAProcPipelineParameterBuffer *params,
                                       const AVFrame *input_frame,
                                       AVFrame *output_frame)
{
    VAAPIVPPContext *ctx = avctx->priv;
    VAAPIColourProperties input_props, output_props;
    VAProcPipelineCaps caps;
    VAStatus vas;

    vas = vaQueryVideoProcPipelineCaps(ctx->hwctx->display, ctx->va_context,
                                       ctx->filter_buffers, ctx->nb_filter_buffers,
                                       &caps);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to query capabilities for "
               "colour standard support: %d (%s).\n", vas, vaErrorStr(vas));
        return AVERROR_EXTERNAL;
    }

    input_props = (VAAPIColourProperties) {
        .colorspace = vaapi_vpp_frame_is_rgb(input_frame)
            ? AVCOL_SPC_RGB : input_frame->colorspace,
        .color_primaries = input_frame->color_primaries,
        .color_trc = input_frame->color_trc,
        .color_range = input_frame->color_range,
        .chroma_sample_location = input_frame->chroma_location,
    };

    vaapi_vpp_fill_colour_properties(avctx, &input_props,
                                     caps.input_color_standards,
                                     caps.num_input_color_standards);

    output_props = (VAAPIColourProperties) {
        .colorspace = vaapi_vpp_frame_is_rgb(output_frame)
            ? AVCOL_SPC_RGB : output_frame->colorspace,
        .color_primaries = output_frame->color_primaries,
        .color_trc = output_frame->color_trc,
        .color_range = output_frame->color_range,
        .chroma_sample_location = output_frame->chroma_location,
    };
    vaapi_vpp_fill_colour_properties(avctx, &output_props,
                                     caps.output_color_standards,
                                     caps.num_output_color_standards);

    // If the properties weren't filled completely in the output frame and
    // we chose a fixed standard then fill the known values in here.
#if VA_CHECK_VERSION(1, 3, 0)
    if (output_props.va_color_standard != VAProcColorStandardExplicit)
#endif
    {
        const VAAPIColourProperties *output_standard = NULL;
        int i;

        for (i = 0; i < FF_ARRAY_ELEMS(vaapi_colour_standard_map); i++) {
            if (output_props.va_color_standard ==
                vaapi_colour_standard_map[i].va_color_standard) {
                output_standard = &vaapi_colour_standard_map[i];
                break;
            }
        }
        if (output_standard) {
            output_frame->colorspace = vaapi_vpp_frame_is_rgb(output_frame)
                ? AVCOL_SPC_RGB : output_standard->colorspace;
            output_frame->color_primaries = output_standard->color_primaries;
            output_frame->color_trc = output_standard->color_trc;
        }
    }

    params->surface_color_standard = input_props.va_color_standard;
    params->output_color_standard  = output_props.va_color_standard;

#if VA_CHECK_VERSION(1, 1, 0)
    params->input_color_properties = (VAProcColorProperties) {
        .chroma_sample_location   = input_props.va_chroma_sample_location,
        .color_range              = input_props.va_color_range,
#if VA_CHECK_VERSION(1, 3, 0)
        .colour_primaries         = input_props.color_primaries,
        .transfer_characteristics = input_props.color_trc,
        .matrix_coefficients      = input_props.colorspace,
#endif
    };
    params->output_color_properties = (VAProcColorProperties) {
        .chroma_sample_location   = output_props.va_chroma_sample_location,
        .color_range              = output_props.va_color_range,
#if VA_CHECK_VERSION(1, 3, 0)
        .colour_primaries         = output_props.color_primaries,
        .transfer_characteristics = output_props.color_trc,
        .matrix_coefficients      = output_props.colorspace,
#endif
    };
#endif

    return 0;
}
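
// Initialise a pipeline parameter buffer with defaults for a single-input
// filter: cropping region, background colour and colour properties.
// Filter-specific and reference data are left for the caller to fill.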
int ff_vaapi_vpp_init_params(AVFilterContext *avctx,
                             VAProcPipelineParameterBuffer *params,
                             const AVFrame *input_frame,
                             AVFrame *output_frame)
{
    VAAPIVPPContext *ctx = avctx->priv;
    int err;

    ctx->input_region = (VARectangle) {
        .x      = input_frame->crop_left,
        .y      = input_frame->crop_top,
        .width  = input_frame->width -
                 (input_frame->crop_left + input_frame->crop_right),
        .height = input_frame->height -
                 (input_frame->crop_top + input_frame->crop_bottom),
    };
    output_frame->crop_top    = 0;
    output_frame->crop_bottom = 0;
    output_frame->crop_left   = 0;
    output_frame->crop_right  = 0;

    *params = (VAProcPipelineParameterBuffer) {
        .surface                 = ff_vaapi_vpp_get_surface_id(input_frame),
        .surface_region          = &ctx->input_region,
        .output_region           = NULL,
        .output_background_color = VAAPI_VPP_BACKGROUND_BLACK,
        .pipeline_flags          = 0,
        .filter_flags            = VA_FRAME_PICTURE,

        // Filter and reference data filled by the filter itself.

#if VA_CHECK_VERSION(1, 1, 0)
        .rotation_state = VA_ROTATION_NONE,
        .mirror_state   = VA_MIRROR_NONE,
#endif
    };

    err = vaapi_vpp_colour_properties(avctx, params,
                                      input_frame, output_frame);
    if (err < 0)
        return err;

    av_log(avctx, AV_LOG_DEBUG, "Filter frame from surface %#x to %#x.\n",
           ff_vaapi_vpp_get_surface_id(input_frame),
           ff_vaapi_vpp_get_surface_id(output_frame));

    return 0;
}
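
// Create a VA parameter buffer of the given type and append it to the
// context's list of filter buffers.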
int ff_vaapi_vpp_make_param_buffers(AVFilterContext *avctx,
                                    int type,
                                    const void *data,
                                    size_t size,
                                    int count)
{
    VAStatus vas;
    VABufferID buffer;
    VAAPIVPPContext *ctx = avctx->priv;

    av_assert0(ctx->nb_filter_buffers + 1 <= VAProcFilterCount);

    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
                         type, size, count, (void*)data, &buffer);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create parameter "
               "buffer (type %d): %d (%s).\n",
               type, vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }

    ctx->filter_buffers[ctx->nb_filter_buffers++] = buffer;

    av_log(avctx, AV_LOG_DEBUG, "Param buffer (type %d, %zu bytes, count %d) "
           "is %#x.\n", type, size, count, buffer);
    return 0;
}
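
// Upload one pipeline parameter buffer and render it on the current
// picture.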
static int vaapi_vpp_render_single_pipeline_buffer(AVFilterContext *avctx,
                                                   VAProcPipelineParameterBuffer *params,
                                                   VABufferID *params_id)
{
    VAAPIVPPContext *ctx = avctx->priv;
    VAStatus vas;

    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
                         VAProcPipelineParameterBufferType,
                         sizeof(*params), 1, params, params_id);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create parameter buffer: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        *params_id = VA_INVALID_ID;

        return AVERROR(EIO);
    }
    av_log(avctx, AV_LOG_DEBUG, "Pipeline parameter buffer is %#x.\n", *params_id);

    vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context, params_id, 1);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to render parameter buffer: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }

    return 0;
}
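
// Run the processing pipeline on the output surface, applying each of the
// supplied pipeline parameter buffers in turn.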
int ff_vaapi_vpp_render_pictures(AVFilterContext *avctx,
                                 VAProcPipelineParameterBuffer *params_list,
                                 int count,
                                 AVFrame *output_frame)
{
    VAAPIVPPContext *ctx = avctx->priv;
    VABufferID *params_ids;
    VAStatus vas;
    int err;

    params_ids = (VABufferID *)av_malloc_array(count, sizeof(VABufferID));
    if (!params_ids)
        return AVERROR(ENOMEM);

    for (int i = 0; i < count; i++)
        params_ids[i] = VA_INVALID_ID;

    vas = vaBeginPicture(ctx->hwctx->display,
                         ctx->va_context, ff_vaapi_vpp_get_surface_id(output_frame));
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to attach new picture: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    for (int i = 0; i < count; i++) {
        err = vaapi_vpp_render_single_pipeline_buffer(avctx, &params_list[i], &params_ids[i]);
        if (err)
            goto fail_after_begin;
    }

    vas = vaEndPicture(ctx->hwctx->display, ctx->va_context);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to end picture processing: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail_after_render;
    }

    if (CONFIG_VAAPI_1 || ctx->hwctx->driver_quirks &
        AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS) {
        for (int i = 0; i < count && params_ids[i] != VA_INVALID_ID; i++) {
            vas = vaDestroyBuffer(ctx->hwctx->display, params_ids[i]);
            if (vas != VA_STATUS_SUCCESS) {
                av_log(avctx, AV_LOG_ERROR, "Failed to free parameter buffer: "
                       "%d (%s).\n", vas, vaErrorStr(vas));
                // And ignore.
            }
        }
    }

    av_freep(&params_ids);
    return 0;

    // We want to make sure that if vaBeginPicture has been called, we also
    // call vaRenderPicture and vaEndPicture.  These calls may well fail or
    // do something else nasty, but once we're in this failure case there
    // isn't much else we can do.
fail_after_begin:
    vaRenderPicture(ctx->hwctx->display, ctx->va_context, &params_ids[0], 1);
fail_after_render:
    vaEndPicture(ctx->hwctx->display, ctx->va_context);
fail:
    av_freep(&params_ids);
    return err;
}
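
// Convenience wrapper for the common single parameter buffer case.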
int ff_vaapi_vpp_render_picture(AVFilterContext *avctx,
                                VAProcPipelineParameterBuffer *params,
                                AVFrame *output_frame)
{
    return ff_vaapi_vpp_render_pictures(avctx, params, 1, output_frame);
}
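
// Initialise the common VPP context fields to safe "invalid" values.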
void ff_vaapi_vpp_ctx_init(AVFilterContext *avctx)
{
    int i;
    VAAPIVPPContext *ctx = avctx->priv;

    ctx->va_config  = VA_INVALID_ID;
    ctx->va_context = VA_INVALID_ID;
    ctx->valid_ids  = 1;

    for (i = 0; i < VAProcFilterCount; i++)
        ctx->filter_buffers[i] = VA_INVALID_ID;
    ctx->nb_filter_buffers = 0;
}
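
// Generic uninit: run the pipeline uninit callback if the IDs are valid
// and drop the frames and device references.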
void ff_vaapi_vpp_ctx_uninit(AVFilterContext *avctx)
{
    VAAPIVPPContext *ctx = avctx->priv;
    if (ctx->valid_ids && ctx->pipeline_uninit)
        ctx->pipeline_uninit(avctx);

    av_buffer_unref(&ctx->input_frames_ref);
    av_buffer_unref(&ctx->device_ref);
}