/*
 * Video Decode and Presentation API for UNIX (VDPAU) is used for
 * HW decode acceleration for MPEG-1/2, MPEG-4 ASP, H.264 and VC-1.
 *
 * Copyright (c) 2008 NVIDIA
 *
 * This file is part of FFmpeg.
 *
 * FFmpeg is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * FFmpeg is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with FFmpeg; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 */

#include "config_components.h"

#include "libavutil/mem.h"
#include "avcodec.h"
#include "decode.h"
#include "hwaccel_internal.h"
#include "internal.h"
#include "vdpau.h"
#include "vdpau_internal.h"

/**
 * @addtogroup VDPAU_Decoding
 *
 * @{
 */

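/* Map a VdpStatus error code onto the closest libavutil AVERROR value. */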
static int vdpau_error(VdpStatus status)
{
    switch (status) {
    case VDP_STATUS_OK:
        return 0;
    case VDP_STATUS_NO_IMPLEMENTATION:
        return AVERROR(ENOSYS);
    case VDP_STATUS_DISPLAY_PREEMPTED:
        return AVERROR(EIO);
    case VDP_STATUS_INVALID_HANDLE:
        return AVERROR(EBADF);
    case VDP_STATUS_INVALID_POINTER:
        return AVERROR(EFAULT);
    case VDP_STATUS_RESOURCES:
        return AVERROR(ENOBUFS);
    case VDP_STATUS_HANDLE_DEVICE_MISMATCH:
        return AVERROR(EXDEV);
    case VDP_STATUS_ERROR:
        return AVERROR(EIO);
    default:
        return AVERROR(EINVAL);
    }
}

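/**
 * Compute the VdpChromaType and the aligned surface dimensions that a
 * VDPAU video surface needs for the codec's current sw_pix_fmt and coded
 * size. Any of the output pointers may be NULL.
 */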
int av_vdpau_get_surface_parameters(AVCodecContext *avctx,
                                    VdpChromaType *type,
                                    uint32_t *width, uint32_t *height)
{
    VdpChromaType t;
    uint32_t w = avctx->coded_width;
    uint32_t h = avctx->coded_height;

    /* See <vdpau/vdpau.h> for per-type alignment constraints. */
    switch (avctx->sw_pix_fmt) {
    case AV_PIX_FMT_YUV420P:
    case AV_PIX_FMT_YUVJ420P:
    case AV_PIX_FMT_YUV420P10:
    case AV_PIX_FMT_YUV420P12:
        t = VDP_CHROMA_TYPE_420;
        w = (w + 1) & ~1;
        h = (h + 3) & ~3;
        break;
    case AV_PIX_FMT_YUV422P:
    case AV_PIX_FMT_YUVJ422P:
        t = VDP_CHROMA_TYPE_422;
        w = (w + 1) & ~1;
        h = (h + 1) & ~1;
        break;
    case AV_PIX_FMT_YUV444P:
    case AV_PIX_FMT_YUVJ444P:
    case AV_PIX_FMT_YUV444P10:
    case AV_PIX_FMT_YUV444P12:
        t = VDP_CHROMA_TYPE_444;
        h = (h + 1) & ~1;
        break;
    default:
        return AVERROR(ENOSYS);
    }

    if (type)
        *type = t;
    if (width)
        *width = w;
    if (height)
        *height = h;
    return 0;
}

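/* Fill an AVHWFramesContext with the pixel format and aligned dimensions
 * required for VDPAU surfaces of the current stream. */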
int ff_vdpau_common_frame_params(AVCodecContext *avctx,
                                 AVBufferRef *hw_frames_ctx)
{
    AVHWFramesContext *hw_frames = (AVHWFramesContext*)hw_frames_ctx->data;
    VdpChromaType type;
    uint32_t width;
    uint32_t height;

    if (av_vdpau_get_surface_parameters(avctx, &type, &width, &height))
        return AVERROR(EINVAL);

    hw_frames->format    = AV_PIX_FMT_VDPAU;
    hw_frames->sw_format = avctx->sw_pix_fmt;
    hw_frames->width     = width;
    hw_frames->height    = height;

    return 0;
}

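/**
 * Per-stream hwaccel init: locate the VDPAU device and entry points,
 * verify that the driver can decode the requested profile, level and
 * surface size, and create the VdpDecoder used for this stream.
 */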
av_cold int ff_vdpau_common_init(AVCodecContext *avctx,
                                 VdpDecoderProfile profile, int level)
{
    VDPAUHWContext *hwctx = avctx->hwaccel_context;
    VDPAUContext *vdctx = avctx->internal->hwaccel_priv_data;
    VdpVideoSurfaceQueryCapabilities *surface_query_caps;
    VdpDecoderQueryCapabilities *decoder_query_caps;
    VdpDecoderCreate *create;
    VdpGetInformationString *info;
    const char *info_string;
    void *func;
    VdpStatus status;
    VdpBool supported;
    uint32_t max_level, max_mb, max_width, max_height;
    VdpChromaType type;
    uint32_t width;
    uint32_t height;
    int ret;

    vdctx->width  = UINT32_MAX;
    vdctx->height = UINT32_MAX;

    if (av_vdpau_get_surface_parameters(avctx, &type, &width, &height))
        return AVERROR(ENOSYS);

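    /* Two ways to reach the VDPAU device: a VDPAUHWContext installed by the
     * caller through av_vdpau_bind_context() (legacy API), or the generic
     * hw_frames_ctx / hw_device_ctx infrastructure. */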
    if (hwctx) {
        hwctx->reset = 0;

        if (hwctx->context.decoder != VDP_INVALID_HANDLE) {
            vdctx->decoder = hwctx->context.decoder;
            vdctx->render  = hwctx->context.render;
            vdctx->device  = VDP_INVALID_HANDLE;
            return 0; /* Decoder created by user */
        }

        vdctx->device           = hwctx->device;
        vdctx->get_proc_address = hwctx->get_proc_address;

        if (hwctx->flags & AV_HWACCEL_FLAG_IGNORE_LEVEL)
            level = 0;

        if (!(hwctx->flags & AV_HWACCEL_FLAG_ALLOW_HIGH_DEPTH) &&
            type != VDP_CHROMA_TYPE_420)
            return AVERROR(ENOSYS);
    } else {
        AVHWFramesContext *frames_ctx;
        AVVDPAUDeviceContext *dev_ctx;

        ret = ff_decode_get_hw_frames_ctx(avctx, AV_HWDEVICE_TYPE_VDPAU);
        if (ret < 0)
            return ret;

        frames_ctx = (AVHWFramesContext*)avctx->hw_frames_ctx->data;
        dev_ctx    = frames_ctx->device_ctx->hwctx;

        vdctx->device           = dev_ctx->device;
        vdctx->get_proc_address = dev_ctx->get_proc_address;

        if (avctx->hwaccel_flags & AV_HWACCEL_FLAG_IGNORE_LEVEL)
            level = 0;
    }

    if (level < 0)
        return AVERROR(ENOTSUP);

    status = vdctx->get_proc_address(vdctx->device,
                                     VDP_FUNC_ID_GET_INFORMATION_STRING,
                                     &func);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    else
        info = func;

    status = info(&info_string);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
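    /* HEVC decoding with NVIDIA VDPAU drivers older than release 410 is
     * known to be broken, so refuse it unless the caller explicitly set
     * AV_HWACCEL_FLAG_ALLOW_PROFILE_MISMATCH. */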
    if (avctx->codec_id == AV_CODEC_ID_HEVC && strncmp(info_string, "NVIDIA ", 7) == 0 &&
        !(avctx->hwaccel_flags & AV_HWACCEL_FLAG_ALLOW_PROFILE_MISMATCH)) {
        int driver_version = 0;
        sscanf(info_string, "NVIDIA VDPAU Driver Shared Library %d", &driver_version);
        if (driver_version < 410) {
            av_log(avctx, AV_LOG_VERBOSE, "HEVC with NVIDIA VDPAU drivers is buggy, skipping.\n");
            return AVERROR(ENOTSUP);
        }
    }

    status = vdctx->get_proc_address(vdctx->device,
                                     VDP_FUNC_ID_VIDEO_SURFACE_QUERY_CAPABILITIES,
                                     &func);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    else
        surface_query_caps = func;

    status = surface_query_caps(vdctx->device, type, &supported,
                                &max_width, &max_height);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    if (supported != VDP_TRUE ||
        max_width < width || max_height < height)
        return AVERROR(ENOTSUP);

    status = vdctx->get_proc_address(vdctx->device,
                                     VDP_FUNC_ID_DECODER_QUERY_CAPABILITIES,
                                     &func);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    else
        decoder_query_caps = func;

    status = decoder_query_caps(vdctx->device, profile, &supported, &max_level,
                                &max_mb, &max_width, &max_height);
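    /* Constrained Baseline is a subset of Main; if the driver does not
     * report the dedicated profile, retry the capability query with
     * H.264 Main instead. */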
#ifdef VDP_DECODER_PROFILE_H264_CONSTRAINED_BASELINE
    if ((status != VDP_STATUS_OK || supported != VDP_TRUE) && profile == VDP_DECODER_PROFILE_H264_CONSTRAINED_BASELINE) {
        profile = VDP_DECODER_PROFILE_H264_MAIN;
        status = decoder_query_caps(vdctx->device, profile, &supported,
                                    &max_level, &max_mb,
                                    &max_width, &max_height);
    }
#endif
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);

    if (supported != VDP_TRUE || max_level < level ||
        max_width < width || max_height < height)
        return AVERROR(ENOTSUP);

    status = vdctx->get_proc_address(vdctx->device, VDP_FUNC_ID_DECODER_CREATE,
                                     &func);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    else
        create = func;

    status = vdctx->get_proc_address(vdctx->device, VDP_FUNC_ID_DECODER_RENDER,
                                     &func);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    else
        vdctx->render = func;

    status = create(vdctx->device, profile, width, height, avctx->refs,
                    &vdctx->decoder);
    if (status == VDP_STATUS_OK) {
        vdctx->width  = avctx->coded_width;
        vdctx->height = avctx->coded_height;
    }

    return vdpau_error(status);
}

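/* Destroy the VdpDecoder created in ff_vdpau_common_init(); a no-op when the
 * decoder is owned by the caller or when init never completed. */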
av_cold int ff_vdpau_common_uninit(AVCodecContext *avctx)
{
    VDPAUContext *vdctx = avctx->internal->hwaccel_priv_data;
    VdpDecoderDestroy *destroy;
    void *func;
    VdpStatus status;

    if (vdctx->device == VDP_INVALID_HANDLE)
        return 0; /* Decoder created and destroyed by user */
    if (vdctx->width == UINT32_MAX && vdctx->height == UINT32_MAX)
        return 0;

    status = vdctx->get_proc_address(vdctx->device,
                                     VDP_FUNC_ID_DECODER_DESTROY, &func);
    if (status != VDP_STATUS_OK)
        return vdpau_error(status);
    else
        destroy = func;

    status = destroy(vdctx->decoder);
    return vdpau_error(status);
}

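/* Recreate the decoder if the coded dimensions changed or the caller
 * requested a reset through av_vdpau_bind_context(). */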
static int ff_vdpau_common_reinit(AVCodecContext *avctx)
{
    VDPAUHWContext *hwctx = avctx->hwaccel_context;
    VDPAUContext *vdctx = avctx->internal->hwaccel_priv_data;

    if (vdctx->device == VDP_INVALID_HANDLE)
        return 0; /* Decoder created by user */
    if (avctx->coded_width == vdctx->width &&
        avctx->coded_height == vdctx->height && (!hwctx || !hwctx->reset))
        return 0;

    FF_HW_SIMPLE_CALL(avctx, uninit);
    return FF_HW_SIMPLE_CALL(avctx, init);
}

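/* Reset the per-picture bitstream buffer list; the buffer and size arguments
 * are unused here, as slice data is appended later via ff_vdpau_add_buffer(). */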
int ff_vdpau_common_start_frame(struct vdpau_picture_context *pic_ctx,
                                av_unused const uint8_t *buffer,
                                av_unused uint32_t size)
{
    pic_ctx->bitstream_buffers_allocated = 0;
    pic_ctx->bitstream_buffers_used      = 0;
    pic_ctx->bitstream_buffers           = NULL;
    return 0;
}

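/* Submit the accumulated picture info and bitstream buffers to the VDPAU
 * decoder, recreating the decoder first if the stream geometry changed. */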
int ff_vdpau_common_end_frame(AVCodecContext *avctx, AVFrame *frame,
                              struct vdpau_picture_context *pic_ctx)
{
    VDPAUContext *vdctx = avctx->internal->hwaccel_priv_data;
    AVVDPAUContext *hwctx = avctx->hwaccel_context;
    VdpVideoSurface surf = ff_vdpau_get_surface_id(frame);
    VdpStatus status;
    int val;

    val = ff_vdpau_common_reinit(avctx);
    if (val < 0)
        return val;

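    /* A caller-provided render2 callback (with no plain render callback set)
     * takes precedence over calling VdpDecoderRender directly. */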
    if (hwctx && !hwctx->render && hwctx->render2) {
        status = hwctx->render2(avctx, frame, (void *)&pic_ctx->info,
                                pic_ctx->bitstream_buffers_used, pic_ctx->bitstream_buffers);
    } else
        status = vdctx->render(vdctx->decoder, surf, &pic_ctx->info,
                               pic_ctx->bitstream_buffers_used,
                               pic_ctx->bitstream_buffers);

    av_freep(&pic_ctx->bitstream_buffers);

    return vdpau_error(status);
}

#if CONFIG_MPEG1_VDPAU_HWACCEL || \
    CONFIG_MPEG2_VDPAU_HWACCEL || CONFIG_MPEG4_VDPAU_HWACCEL || \
    CONFIG_VC1_VDPAU_HWACCEL || CONFIG_WMV3_VDPAU_HWACCEL
#include "mpegvideodec.h"

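/* Shared end_frame callback for the MPEG-1/2/4 and VC-1/WMV3 hwaccels:
 * render the current picture and signal draw_horiz_band. */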
int ff_vdpau_mpeg_end_frame(AVCodecContext *avctx)
{
    MpegEncContext *s = avctx->priv_data;
    MPVPicture *pic = s->cur_pic.ptr;
    struct vdpau_picture_context *pic_ctx = pic->hwaccel_picture_private;
    int val;

    val = ff_vdpau_common_end_frame(avctx, pic->f, pic_ctx);
    if (val < 0)
        return val;

    ff_mpeg_draw_horiz_band(s, 0, s->avctx->height);
    return 0;
}
#endif

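/* Append one slice of bitstream data to the picture's VdpBitstreamBuffer
 * array, growing the array as needed. The data itself is not copied. */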
int ff_vdpau_add_buffer(struct vdpau_picture_context *pic_ctx,
                        const uint8_t *buf, uint32_t size)
{
    VdpBitstreamBuffer *buffers = pic_ctx->bitstream_buffers;

    buffers = av_fast_realloc(buffers, &pic_ctx->bitstream_buffers_allocated,
                              (pic_ctx->bitstream_buffers_used + 1) * sizeof(*buffers));
    if (!buffers)
        return AVERROR(ENOMEM);

    pic_ctx->bitstream_buffers = buffers;
    buffers                   += pic_ctx->bitstream_buffers_used++;

    buffers->struct_version  = VDP_BITSTREAM_BUFFER_VERSION;
    buffers->bitstream       = buf;
    buffers->bitstream_bytes = size;
    return 0;
}

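/**
 * Attach a caller-owned VDPAU device to the codec context so the VDPAU
 * hwaccels can use it. The hwaccel_context is (re)allocated and reset.
 *
 * A minimal usage sketch, assuming an X11 display is already open and with
 * error handling omitted:
 *
 *     VdpDevice device;
 *     VdpGetProcAddress *get_proc;
 *     vdp_device_create_x11(display, screen, &device, &get_proc);
 *     av_vdpau_bind_context(avctx, device, get_proc,
 *                           AV_HWACCEL_FLAG_IGNORE_LEVEL);
 *     // ...then open the decoder with avcodec_open2() as usual.
 */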
int av_vdpau_bind_context(AVCodecContext *avctx, VdpDevice device,
                          VdpGetProcAddress *get_proc, unsigned flags)
{
    VDPAUHWContext *hwctx;

    if (flags & ~(AV_HWACCEL_FLAG_IGNORE_LEVEL|AV_HWACCEL_FLAG_ALLOW_HIGH_DEPTH))
        return AVERROR(EINVAL);

    if (av_reallocp(&avctx->hwaccel_context, sizeof(*hwctx)))
        return AVERROR(ENOMEM);

    hwctx = avctx->hwaccel_context;

    memset(hwctx, 0, sizeof(*hwctx));
    hwctx->context.decoder  = VDP_INVALID_HANDLE;
    hwctx->device           = device;
    hwctx->get_proc_address = get_proc;
    hwctx->flags            = flags;
    hwctx->reset            = 1;
    return 0;
}

/* @}*/