/*
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include <string.h>

#include "libavutil/avassert.h"
#include "libavutil/pixdesc.h"
#include "formats.h"
#include "internal.h"
#include "vaapi_vpp.h"

int ff_vaapi_vpp_query_formats(AVFilterContext *avctx)
{
    enum AVPixelFormat pix_fmts[] = {
        AV_PIX_FMT_VAAPI, AV_PIX_FMT_NONE,
    };
    int err;

    if ((err = ff_formats_ref(ff_make_format_list(pix_fmts),
                              &avctx->inputs[0]->outcfg.formats)) < 0)
        return err;
    if ((err = ff_formats_ref(ff_make_format_list(pix_fmts),
                              &avctx->outputs[0]->incfg.formats)) < 0)
        return err;

    return 0;
}
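
// Free all VA objects owned by this filter context: any filter parameter
// buffers, the processing context and config, and the device reference.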
void ff_vaapi_vpp_pipeline_uninit(AVFilterContext *avctx)
{
    VAAPIVPPContext *ctx = avctx->priv;
    int i;
    for (i = 0; i < ctx->nb_filter_buffers; i++) {
        if (ctx->filter_buffers[i] != VA_INVALID_ID) {
            vaDestroyBuffer(ctx->hwctx->display, ctx->filter_buffers[i]);
            ctx->filter_buffers[i] = VA_INVALID_ID;
        }
    }
    ctx->nb_filter_buffers = 0;

    if (ctx->va_context != VA_INVALID_ID) {
        vaDestroyContext(ctx->hwctx->display, ctx->va_context);
        ctx->va_context = VA_INVALID_ID;
    }

    if (ctx->va_config != VA_INVALID_ID) {
        vaDestroyConfig(ctx->hwctx->display, ctx->va_config);
        ctx->va_config = VA_INVALID_ID;
    }

    av_buffer_unref(&ctx->device_ref);
    ctx->hwctx = NULL;
}
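
// Take a reference to the input link's hardware frames context; the actual
// processing pipeline is created later, in ff_vaapi_vpp_config_output().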
int ff_vaapi_vpp_config_input(AVFilterLink *inlink)
{
    AVFilterContext *avctx = inlink->dst;
    VAAPIVPPContext *ctx   = avctx->priv;

    if (ctx->pipeline_uninit)
        ctx->pipeline_uninit(avctx);

    if (!inlink->hw_frames_ctx) {
        av_log(avctx, AV_LOG_ERROR, "A hardware frames reference is "
               "required to associate the processing device.\n");
        return AVERROR(EINVAL);
    }

    ctx->input_frames_ref = av_buffer_ref(inlink->hw_frames_ctx);
    if (!ctx->input_frames_ref) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create input frames "
               "reference.\n");
        return AVERROR(ENOMEM);
    }
    ctx->input_frames = (AVHWFramesContext*)ctx->input_frames_ref->data;

    return 0;
}
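
// Set up the processing pipeline on the output link: choose the output size
// and format, validate them against the device constraints, create the output
// frame pool, and create the VA config and context.  In passthrough mode the
// input frames context is simply propagated to the output instead.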
int ff_vaapi_vpp_config_output(AVFilterLink *outlink)
{
    AVFilterContext *avctx = outlink->src;
    AVFilterLink *inlink   = avctx->inputs[0];
    VAAPIVPPContext *ctx   = avctx->priv;
    AVVAAPIHWConfig *hwconfig = NULL;
    AVHWFramesConstraints *constraints = NULL;
    AVHWFramesContext *output_frames;
    AVVAAPIFramesContext *va_frames;
    VAStatus vas;
    int err, i;

    if (ctx->pipeline_uninit)
        ctx->pipeline_uninit(avctx);

    if (!ctx->output_width)
        ctx->output_width  = avctx->inputs[0]->w;
    if (!ctx->output_height)
        ctx->output_height = avctx->inputs[0]->h;

    outlink->w = ctx->output_width;
    outlink->h = ctx->output_height;

    if (ctx->passthrough) {
        if (inlink->hw_frames_ctx)
            outlink->hw_frames_ctx = av_buffer_ref(inlink->hw_frames_ctx);
        av_log(ctx, AV_LOG_VERBOSE, "Using VAAPI filter passthrough mode.\n");

        return 0;
    }

    av_assert0(ctx->input_frames);
    ctx->device_ref = av_buffer_ref(ctx->input_frames->device_ref);
    if (!ctx->device_ref) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create device "
               "reference.\n");
        return AVERROR(ENOMEM);
    }
    ctx->hwctx = ((AVHWDeviceContext*)ctx->device_ref->data)->hwctx;

    av_assert0(ctx->va_config == VA_INVALID_ID);
    vas = vaCreateConfig(ctx->hwctx->display, VAProfileNone,
                         VAEntrypointVideoProc, NULL, 0, &ctx->va_config);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create processing pipeline "
               "config: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    hwconfig = av_hwdevice_hwconfig_alloc(ctx->device_ref);
    if (!hwconfig) {
        err = AVERROR(ENOMEM);
        goto fail;
    }
    hwconfig->config_id = ctx->va_config;

    constraints = av_hwdevice_get_hwframe_constraints(ctx->device_ref,
                                                      hwconfig);
    if (!constraints) {
        err = AVERROR(ENOMEM);
        goto fail;
    }

    if (ctx->output_format == AV_PIX_FMT_NONE)
        ctx->output_format = ctx->input_frames->sw_format;
    if (constraints->valid_sw_formats) {
        for (i = 0; constraints->valid_sw_formats[i] != AV_PIX_FMT_NONE; i++) {
            if (ctx->output_format == constraints->valid_sw_formats[i])
                break;
        }
        if (constraints->valid_sw_formats[i] == AV_PIX_FMT_NONE) {
            av_log(avctx, AV_LOG_ERROR, "Hardware does not support output "
                   "format %s.\n", av_get_pix_fmt_name(ctx->output_format));
            err = AVERROR(EINVAL);
            goto fail;
        }
    }

    if (ctx->output_width  < constraints->min_width  ||
        ctx->output_height < constraints->min_height ||
        ctx->output_width  > constraints->max_width  ||
        ctx->output_height > constraints->max_height) {
        av_log(avctx, AV_LOG_ERROR, "Hardware does not support scaling to "
               "size %dx%d (constraints: width %d-%d height %d-%d).\n",
               ctx->output_width, ctx->output_height,
               constraints->min_width,  constraints->max_width,
               constraints->min_height, constraints->max_height);
        err = AVERROR(EINVAL);
        goto fail;
    }

    outlink->hw_frames_ctx = av_hwframe_ctx_alloc(ctx->device_ref);
    if (!outlink->hw_frames_ctx) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create HW frame context "
               "for output.\n");
        err = AVERROR(ENOMEM);
        goto fail;
    }

    output_frames = (AVHWFramesContext*)outlink->hw_frames_ctx->data;

    output_frames->format    = AV_PIX_FMT_VAAPI;
    output_frames->sw_format = ctx->output_format;
    output_frames->width     = ctx->output_width;
    output_frames->height    = ctx->output_height;

    output_frames->initial_pool_size = 4;

    err = ff_filter_init_hw_frames(avctx, outlink, 10);
    if (err < 0)
        goto fail;

    err = av_hwframe_ctx_init(outlink->hw_frames_ctx);
    if (err < 0) {
        av_log(avctx, AV_LOG_ERROR, "Failed to initialise VAAPI frame "
               "context for output: %d\n", err);
        goto fail;
    }

    va_frames = output_frames->hwctx;

    av_assert0(ctx->va_context == VA_INVALID_ID);
    vas = vaCreateContext(ctx->hwctx->display, ctx->va_config,
                          ctx->output_width, ctx->output_height,
                          VA_PROGRESSIVE,
                          va_frames->surface_ids, va_frames->nb_surfaces,
                          &ctx->va_context);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create processing pipeline "
               "context: %d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    if (ctx->build_filter_params) {
        err = ctx->build_filter_params(avctx);
        if (err < 0)
            goto fail;
    }

    av_freep(&hwconfig);
    av_hwframe_constraints_free(&constraints);
    return 0;

fail:
    av_buffer_unref(&outlink->hw_frames_ctx);
    av_freep(&hwconfig);
    av_hwframe_constraints_free(&constraints);
    return err;
}
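
// Colour metadata handling: the AV* colour properties of the input and output
// frames are mapped onto the nearest VAProcColorStandardType (and, with newer
// libva, onto explicit VAProcColorProperties) before being passed to the driver.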
typedef struct VAAPIColourProperties {
    VAProcColorStandardType va_color_standard;

    enum AVColorPrimaries color_primaries;
    enum AVColorTransferCharacteristic color_trc;
    enum AVColorSpace colorspace;

    uint8_t va_chroma_sample_location;
    uint8_t va_color_range;

    enum AVColorRange color_range;
    enum AVChromaLocation chroma_sample_location;
} VAAPIColourProperties;
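
// Each entry lists, in struct field order, the integer values of the
// AVColorPrimaries, AVColorTransferCharacteristic and AVColorSpace enums
// corresponding to the given VAProcColorStandardType.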
static const VAAPIColourProperties vaapi_colour_standard_map[] = {
    { VAProcColorStandardBT601,       5,  6,  5 },
    { VAProcColorStandardBT601,       6,  6,  6 },
    { VAProcColorStandardBT709,       1,  1,  1 },
    { VAProcColorStandardBT470M,      4,  4,  4 },
    { VAProcColorStandardBT470BG,     5,  5,  5 },
    { VAProcColorStandardSMPTE170M,   6,  6,  6 },
    { VAProcColorStandardSMPTE240M,   7,  7,  7 },
    { VAProcColorStandardGenericFilm, 8,  1,  1 },
#if VA_CHECK_VERSION(1, 1, 0)
    { VAProcColorStandardSRGB,        1, 13,  0 },
    { VAProcColorStandardXVYCC601,    1, 11,  5 },
    { VAProcColorStandardXVYCC709,    1, 11,  1 },
    { VAProcColorStandardBT2020,      9, 14,  9 },
#endif
};

static void vaapi_vpp_fill_colour_standard(VAAPIColourProperties *props,
                                           VAProcColorStandardType *vacs,
                                           int nb_vacs)
{
    const VAAPIColourProperties *t;
    int i, j, score, best_score, worst_score;
    VAProcColorStandardType best_standard;

#if VA_CHECK_VERSION(1, 3, 0)
    // If the driver supports explicit use of the standard values then just
    // use them and avoid doing any mapping. (The driver may not support
    // some particular code point, but it still has enough information to
    // make a better fallback choice than we do in that case.)
    for (i = 0; i < nb_vacs; i++) {
        if (vacs[i] == VAProcColorStandardExplicit) {
            props->va_color_standard = VAProcColorStandardExplicit;
            return;
        }
    }
#endif

    // Give scores to the possible options and choose the lowest one.
    // An exact match will score zero and therefore always be chosen, as
    // will a partial match where all unmatched elements are explicitly
    // unspecified. If no options match at all then just pass "none" to
    // the driver and let it make its own choice.
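    // (For example, an input tagged as BT.709 in all three fields has
    // worst_score 4 + 2 + 1 = 7, while the BT709 entry in the table above
    // matches every field and scores 0, so it is always selected.)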
    best_standard = VAProcColorStandardNone;
    best_score    = -1;
    worst_score = 4 * (props->colorspace != AVCOL_SPC_UNSPECIFIED &&
                       props->colorspace != AVCOL_SPC_RGB) +
                  2 * (props->color_trc != AVCOL_TRC_UNSPECIFIED) +
                      (props->color_primaries != AVCOL_PRI_UNSPECIFIED);

    if (worst_score == 0) {
        // No properties are specified, so we aren't going to be able to
        // make a useful choice.
        props->va_color_standard = VAProcColorStandardNone;
        return;
    }

    for (i = 0; i < nb_vacs; i++) {
        for (j = 0; j < FF_ARRAY_ELEMS(vaapi_colour_standard_map); j++) {
            t = &vaapi_colour_standard_map[j];
            if (t->va_color_standard != vacs[i])
                continue;

            score = 0;
            if (props->colorspace != AVCOL_SPC_UNSPECIFIED &&
                props->colorspace != AVCOL_SPC_RGB)
                score += 4 * (props->colorspace != t->colorspace);
            if (props->color_trc != AVCOL_TRC_UNSPECIFIED)
                score += 2 * (props->color_trc != t->color_trc);
            if (props->color_primaries != AVCOL_PRI_UNSPECIFIED)
                score += (props->color_primaries != t->color_primaries);

            // Only include choices which matched something.
            if (score < worst_score &&
                (best_score == -1 || score < best_score)) {
                best_score    = score;
                best_standard = t->va_color_standard;
            }
        }
    }
    props->va_color_standard = best_standard;
}
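
// Map AVChromaLocation onto the corresponding VA_CHROMA_SITING_* flags.
// Chroma siting is only expressible with libva >= 1.1.0; otherwise zero
// is used.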
static void vaapi_vpp_fill_chroma_sample_location(VAAPIColourProperties *props)
{
#if VA_CHECK_VERSION(1, 1, 0)
    static const struct {
        enum AVChromaLocation av;
        uint8_t va;
    } csl_map[] = {
        { AVCHROMA_LOC_UNSPECIFIED, VA_CHROMA_SITING_UNKNOWN },
        { AVCHROMA_LOC_LEFT,        VA_CHROMA_SITING_VERTICAL_CENTER |
                                    VA_CHROMA_SITING_HORIZONTAL_LEFT },
        { AVCHROMA_LOC_CENTER,      VA_CHROMA_SITING_VERTICAL_CENTER |
                                    VA_CHROMA_SITING_HORIZONTAL_CENTER },
        { AVCHROMA_LOC_TOPLEFT,     VA_CHROMA_SITING_VERTICAL_TOP |
                                    VA_CHROMA_SITING_HORIZONTAL_LEFT },
        { AVCHROMA_LOC_TOP,         VA_CHROMA_SITING_VERTICAL_TOP |
                                    VA_CHROMA_SITING_HORIZONTAL_CENTER },
        { AVCHROMA_LOC_BOTTOMLEFT,  VA_CHROMA_SITING_VERTICAL_BOTTOM |
                                    VA_CHROMA_SITING_HORIZONTAL_LEFT },
        { AVCHROMA_LOC_BOTTOM,      VA_CHROMA_SITING_VERTICAL_BOTTOM |
                                    VA_CHROMA_SITING_HORIZONTAL_CENTER },
    };
    int i;

    for (i = 0; i < FF_ARRAY_ELEMS(csl_map); i++) {
        if (props->chroma_sample_location == csl_map[i].av) {
            props->va_chroma_sample_location = csl_map[i].va;
            return;
        }
    }
    props->va_chroma_sample_location = VA_CHROMA_SITING_UNKNOWN;
#else
    props->va_chroma_sample_location = 0;
#endif
}
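
// Map AVColorRange onto the VA_SOURCE_RANGE_* values (libva >= 1.1.0 only).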
static void vaapi_vpp_fill_colour_range(VAAPIColourProperties *props)
{
#if VA_CHECK_VERSION(1, 1, 0)
    switch (props->color_range) {
    case AVCOL_RANGE_MPEG:
        props->va_color_range = VA_SOURCE_RANGE_REDUCED;
        break;
    case AVCOL_RANGE_JPEG:
        props->va_color_range = VA_SOURCE_RANGE_FULL;
        break;
    case AVCOL_RANGE_UNSPECIFIED:
    default:
        props->va_color_range = VA_SOURCE_RANGE_UNKNOWN;
    }
#else
    props->va_color_range = 0;
#endif
}

static void vaapi_vpp_fill_colour_properties(AVFilterContext *avctx,
                                             VAAPIColourProperties *props,
                                             VAProcColorStandardType *vacs,
                                             int nb_vacs)
{
    vaapi_vpp_fill_colour_standard(props, vacs, nb_vacs);
    vaapi_vpp_fill_chroma_sample_location(props);
    vaapi_vpp_fill_colour_range(props);

    av_log(avctx, AV_LOG_DEBUG, "Mapped colour properties %s %s/%s/%s %s "
           "to VA standard %d chroma siting %#x range %#x.\n",
           av_color_range_name(props->color_range),
           av_color_space_name(props->colorspace),
           av_color_primaries_name(props->color_primaries),
           av_color_transfer_name(props->color_trc),
           av_chroma_location_name(props->chroma_sample_location),
           props->va_color_standard,
           props->va_chroma_sample_location, props->va_color_range);
}

static int vaapi_vpp_frame_is_rgb(const AVFrame *frame)
{
    const AVHWFramesContext *hwfc;
    const AVPixFmtDescriptor *desc;
    av_assert0(frame->format == AV_PIX_FMT_VAAPI &&
               frame->hw_frames_ctx);
    hwfc = (const AVHWFramesContext*)frame->hw_frames_ctx->data;
    desc = av_pix_fmt_desc_get(hwfc->sw_format);
    av_assert0(desc);
    return !!(desc->flags & AV_PIX_FMT_FLAG_RGB);
}
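
// Query the colour standards supported by the current pipeline, map the
// input and output frame colour metadata onto them, and fill the colour
// fields of the pipeline parameter buffer accordingly.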
static int vaapi_vpp_colour_properties(AVFilterContext *avctx,
                                       VAProcPipelineParameterBuffer *params,
                                       const AVFrame *input_frame,
                                       AVFrame *output_frame)
{
    VAAPIVPPContext *ctx = avctx->priv;
    VAAPIColourProperties input_props, output_props;
    VAProcPipelineCaps caps;
    VAStatus vas;

    vas = vaQueryVideoProcPipelineCaps(ctx->hwctx->display, ctx->va_context,
                                       ctx->filter_buffers, ctx->nb_filter_buffers,
                                       &caps);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to query capabilities for "
               "colour standard support: %d (%s).\n", vas, vaErrorStr(vas));
        return AVERROR_EXTERNAL;
    }

    input_props = (VAAPIColourProperties) {
        .colorspace = vaapi_vpp_frame_is_rgb(input_frame)
            ? AVCOL_SPC_RGB : input_frame->colorspace,
        .color_primaries        = input_frame->color_primaries,
        .color_trc              = input_frame->color_trc,
        .color_range            = input_frame->color_range,
        .chroma_sample_location = input_frame->chroma_location,
    };

    vaapi_vpp_fill_colour_properties(avctx, &input_props,
                                     caps.input_color_standards,
                                     caps.num_input_color_standards);

    output_props = (VAAPIColourProperties) {
        .colorspace = vaapi_vpp_frame_is_rgb(output_frame)
            ? AVCOL_SPC_RGB : output_frame->colorspace,
        .color_primaries        = output_frame->color_primaries,
        .color_trc              = output_frame->color_trc,
        .color_range            = output_frame->color_range,
        .chroma_sample_location = output_frame->chroma_location,
    };
    vaapi_vpp_fill_colour_properties(avctx, &output_props,
                                     caps.output_color_standards,
                                     caps.num_output_color_standards);

    // If the properties weren't filled completely in the output frame and
    // we chose a fixed standard then fill the known values in here.
#if VA_CHECK_VERSION(1, 3, 0)
    if (output_props.va_color_standard != VAProcColorStandardExplicit)
#endif
    {
        const VAAPIColourProperties *output_standard = NULL;
        int i;

        for (i = 0; i < FF_ARRAY_ELEMS(vaapi_colour_standard_map); i++) {
            if (output_props.va_color_standard ==
                vaapi_colour_standard_map[i].va_color_standard) {
                output_standard = &vaapi_colour_standard_map[i];
                break;
            }
        }
        if (output_standard) {
            output_frame->colorspace = vaapi_vpp_frame_is_rgb(output_frame)
                ? AVCOL_SPC_RGB : output_standard->colorspace;
            output_frame->color_primaries = output_standard->color_primaries;
            output_frame->color_trc = output_standard->color_trc;
        }
    }

    params->surface_color_standard = input_props.va_color_standard;
    params->output_color_standard  = output_props.va_color_standard;

#if VA_CHECK_VERSION(1, 1, 0)
    params->input_color_properties = (VAProcColorProperties) {
        .chroma_sample_location   = input_props.va_chroma_sample_location,
        .color_range              = input_props.va_color_range,
#if VA_CHECK_VERSION(1, 3, 0)
        .colour_primaries         = input_props.color_primaries,
        .transfer_characteristics = input_props.color_trc,
        .matrix_coefficients      = input_props.colorspace,
#endif
    };
    params->output_color_properties = (VAProcColorProperties) {
        .chroma_sample_location   = output_props.va_chroma_sample_location,
        .color_range              = output_props.va_color_range,
#if VA_CHECK_VERSION(1, 3, 0)
        .colour_primaries         = output_props.color_primaries,
        .transfer_characteristics = output_props.color_trc,
        .matrix_coefficients      = output_props.colorspace,
#endif
    };
#endif

    return 0;
}
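
// Fill the common fields of a pipeline parameter buffer: the input surface
// and its cropping region, background colour, rotation/mirror state and the
// colour properties derived above.  Filter and reference data are added by
// the individual filter afterwards.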
int ff_vaapi_vpp_init_params(AVFilterContext *avctx,
                             VAProcPipelineParameterBuffer *params,
                             const AVFrame *input_frame,
                             AVFrame *output_frame)
{
    VAAPIVPPContext *ctx = avctx->priv;
    int err;

    ctx->input_region = (VARectangle) {
        .x      = input_frame->crop_left,
        .y      = input_frame->crop_top,
        .width  = input_frame->width -
                 (input_frame->crop_left + input_frame->crop_right),
        .height = input_frame->height -
                 (input_frame->crop_top + input_frame->crop_bottom),
    };
    output_frame->crop_top    = 0;
    output_frame->crop_bottom = 0;
    output_frame->crop_left   = 0;
    output_frame->crop_right  = 0;

    *params = (VAProcPipelineParameterBuffer) {
        .surface                 = ff_vaapi_vpp_get_surface_id(input_frame),
        .surface_region          = &ctx->input_region,
        .output_region           = NULL,
        .output_background_color = VAAPI_VPP_BACKGROUND_BLACK,
        .pipeline_flags          = 0,
        .filter_flags            = VA_FRAME_PICTURE,

        // Filter and reference data filled by the filter itself.

#if VA_CHECK_VERSION(1, 1, 0)
        .rotation_state = VA_ROTATION_NONE,
        .mirror_state   = VA_MIRROR_NONE,
#endif
    };

    err = vaapi_vpp_colour_properties(avctx, params,
                                      input_frame, output_frame);
    if (err < 0)
        return err;

    av_log(avctx, AV_LOG_DEBUG, "Filter frame from surface %#x to %#x.\n",
           ff_vaapi_vpp_get_surface_id(input_frame),
           ff_vaapi_vpp_get_surface_id(output_frame));

    return 0;
}
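
// Create a VA parameter buffer of the given type and record it in the
// context's filter_buffers array; the buffers are destroyed again by
// ff_vaapi_vpp_pipeline_uninit().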
int ff_vaapi_vpp_make_param_buffers(AVFilterContext *avctx,
                                    int type,
                                    const void *data,
                                    size_t size,
                                    int count)
{
    VAStatus vas;
    VABufferID buffer;
    VAAPIVPPContext *ctx = avctx->priv;

    av_assert0(ctx->nb_filter_buffers + 1 <= VAProcFilterCount);

    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
                         type, size, count, (void*)data, &buffer);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create parameter "
               "buffer (type %d): %d (%s).\n",
               type, vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }

    ctx->filter_buffers[ctx->nb_filter_buffers++] = buffer;

    av_log(avctx, AV_LOG_DEBUG, "Param buffer (type %d, %zu bytes, count %d) "
           "is %#x.\n", type, size, count, buffer);
    return 0;
}
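
// Upload one pipeline parameter buffer and queue it with vaRenderPicture()
// on the picture currently attached by vaBeginPicture().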
static int vaapi_vpp_render_single_pipeline_buffer(AVFilterContext *avctx,
                                                   VAProcPipelineParameterBuffer *params,
                                                   VABufferID *params_id)
{
    VAAPIVPPContext *ctx = avctx->priv;
    VAStatus vas;

    vas = vaCreateBuffer(ctx->hwctx->display, ctx->va_context,
                         VAProcPipelineParameterBufferType,
                         sizeof(*params), 1, params, params_id);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to create parameter buffer: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        *params_id = VA_INVALID_ID;

        return AVERROR(EIO);
    }
    av_log(avctx, AV_LOG_DEBUG, "Pipeline parameter buffer is %#x.\n", *params_id);

    vas = vaRenderPicture(ctx->hwctx->display, ctx->va_context, params_id, 1);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to render parameter buffer: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        return AVERROR(EIO);
    }

    return 0;
}
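
// Run the processing pipeline for one output frame: attach the output surface
// with vaBeginPicture(), render one parameter buffer per entry of params_list
// (cout entries in total), then submit the job with vaEndPicture().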
int ff_vaapi_vpp_render_pictures(AVFilterContext *avctx,
                                 VAProcPipelineParameterBuffer *params_list,
                                 int cout,
                                 AVFrame *output_frame)
{
    VAAPIVPPContext *ctx = avctx->priv;
    VABufferID *params_ids;
    VAStatus vas;
    int err;

    params_ids = (VABufferID *)av_malloc_array(cout, sizeof(VABufferID));
    if (!params_ids)
        return AVERROR(ENOMEM);

    for (int i = 0; i < cout; i++)
        params_ids[i] = VA_INVALID_ID;

    vas = vaBeginPicture(ctx->hwctx->display,
                         ctx->va_context, ff_vaapi_vpp_get_surface_id(output_frame));
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to attach new picture: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail;
    }

    for (int i = 0; i < cout; i++) {
        err = vaapi_vpp_render_single_pipeline_buffer(avctx, &params_list[i], &params_ids[i]);
        if (err)
            goto fail_after_begin;
    }

    vas = vaEndPicture(ctx->hwctx->display, ctx->va_context);
    if (vas != VA_STATUS_SUCCESS) {
        av_log(avctx, AV_LOG_ERROR, "Failed to start picture processing: "
               "%d (%s).\n", vas, vaErrorStr(vas));
        err = AVERROR(EIO);
        goto fail_after_render;
    }

    if (CONFIG_VAAPI_1 || ctx->hwctx->driver_quirks &
        AV_VAAPI_DRIVER_QUIRK_RENDER_PARAM_BUFFERS) {
        for (int i = 0; i < cout && params_ids[i] != VA_INVALID_ID; i++) {
            vas = vaDestroyBuffer(ctx->hwctx->display, params_ids[i]);
            if (vas != VA_STATUS_SUCCESS) {
                av_log(avctx, AV_LOG_ERROR, "Failed to free parameter buffer: "
                       "%d (%s).\n", vas, vaErrorStr(vas));
                // And ignore.
            }
        }
    }

    av_freep(&params_ids);
    return 0;

    // We want to make sure that if vaBeginPicture has been called, we also
    // call vaRenderPicture and vaEndPicture.  These calls may well fail or
    // do something else nasty, but once we're in this failure case there
    // isn't much else we can do.
fail_after_begin:
    vaRenderPicture(ctx->hwctx->display, ctx->va_context, &params_ids[0], 1);
fail_after_render:
    vaEndPicture(ctx->hwctx->display, ctx->va_context);
fail:
    av_freep(&params_ids);
    return err;
}

int ff_vaapi_vpp_render_picture(AVFilterContext *avctx,
                                VAProcPipelineParameterBuffer *params,
                                AVFrame *output_frame)
{
    return ff_vaapi_vpp_render_pictures(avctx, params, 1, output_frame);
}
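
// Initialise the shared VAAPIVPPContext fields to invalid IDs so that a later
// uninit is safe even if the pipeline was never fully configured.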
void ff_vaapi_vpp_ctx_init(AVFilterContext *avctx)
{
    int i;
    VAAPIVPPContext *ctx = avctx->priv;

    ctx->va_config  = VA_INVALID_ID;
    ctx->va_context = VA_INVALID_ID;
    ctx->valid_ids  = 1;

    for (i = 0; i < VAProcFilterCount; i++)
        ctx->filter_buffers[i] = VA_INVALID_ID;
    ctx->nb_filter_buffers = 0;
}
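
// Tear down the shared context: run the per-filter pipeline uninit callback
// if one is set, then drop the input frames and device references.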
void ff_vaapi_vpp_ctx_uninit(AVFilterContext *avctx)
{
    VAAPIVPPContext *ctx = avctx->priv;
    if (ctx->valid_ids && ctx->pipeline_uninit)
        ctx->pipeline_uninit(avctx);

    av_buffer_unref(&ctx->input_frames_ref);
    av_buffer_unref(&ctx->device_ref);
}