Line | Branch | Exec | Source |
---|---|---|---|
1 | /* | ||
2 | * Copyright (c) 2013-2015 Paul B Mahol | ||
3 | * | ||
4 | * This file is part of FFmpeg. | ||
5 | * | ||
6 | * FFmpeg is free software; you can redistribute it and/or | ||
7 | * modify it under the terms of the GNU Lesser General Public | ||
8 | * License as published by the Free Software Foundation; either | ||
9 | * version 2.1 of the License, or (at your option) any later version. | ||
10 | * | ||
11 | * FFmpeg is distributed in the hope that it will be useful, | ||
12 | * but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
13 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
14 | * Lesser General Public License for more details. | ||
15 | * | ||
16 | * You should have received a copy of the GNU Lesser General Public | ||
17 | * License along with FFmpeg; if not, write to the Free Software | ||
18 | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA | ||
19 | */ | ||
20 | |||
21 | /** | ||
22 | * @file | ||
23 | * fade audio filter | ||
24 | */ | ||
25 | |||
26 | #include "config_components.h" | ||
27 | |||
28 | #include "libavutil/avassert.h" | ||
29 | #include "libavutil/avstring.h" | ||
30 | #include "libavutil/opt.h" | ||
31 | #include "audio.h" | ||
32 | #include "avfilter.h" | ||
33 | #include "filters.h" | ||
34 | |||
35 | typedef struct AudioFadeContext { | ||
36 | const AVClass *class; | ||
37 | int nb_inputs; | ||
38 | int type; | ||
39 | int curve, curve2; | ||
40 | int64_t nb_samples; | ||
41 | int64_t start_sample; | ||
42 | int64_t duration; | ||
43 | int64_t start_time; | ||
44 | double silence; | ||
45 | double unity; | ||
46 | int overlap; | ||
47 | int64_t pts; | ||
48 | int xfade_idx; | ||
49 | |||
50 | void (*fade_samples)(uint8_t **dst, uint8_t * const *src, | ||
51 | int nb_samples, int channels, int direction, | ||
52 | int64_t start, int64_t range, int curve, | ||
53 | double silence, double unity); | ||
54 | void (*scale_samples)(uint8_t **dst, uint8_t * const *src, | ||
55 | int nb_samples, int channels, double unity); | ||
56 | void (*crossfade_samples)(uint8_t **dst, uint8_t * const *cf0, | ||
57 | uint8_t * const *cf1, | ||
58 | int nb_samples, int channels, | ||
59 | int curve0, int curve1); | ||
60 | } AudioFadeContext; | ||
61 | |||
62 | enum CurveType { NONE = -1, TRI, QSIN, ESIN, HSIN, LOG, IPAR, QUA, CUB, SQU, CBR, PAR, EXP, IQSIN, IHSIN, DESE, DESI, LOSI, SINC, ISINC, QUAT, QUATR, QSIN2, HSIN2, NB_CURVES }; | ||
63 | |||
64 | #define OFFSET(x) offsetof(AudioFadeContext, x) | ||
65 | #define FLAGS AV_OPT_FLAG_AUDIO_PARAM|AV_OPT_FLAG_FILTERING_PARAM | ||
66 | #define TFLAGS AV_OPT_FLAG_AUDIO_PARAM|AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_RUNTIME_PARAM | ||
67 | |||
68 | static const enum AVSampleFormat sample_fmts[] = { | ||
69 | AV_SAMPLE_FMT_S16, AV_SAMPLE_FMT_S16P, | ||
70 | AV_SAMPLE_FMT_S32, AV_SAMPLE_FMT_S32P, | ||
71 | AV_SAMPLE_FMT_FLT, AV_SAMPLE_FMT_FLTP, | ||
72 | AV_SAMPLE_FMT_DBL, AV_SAMPLE_FMT_DBLP, | ||
73 | AV_SAMPLE_FMT_NONE | ||
74 | }; | ||
75 | |||
76 | 913952 | static double fade_gain(int curve, int64_t index, int64_t range, double silence, double unity) | |
77 | { | ||
78 | #define CUBE(a) ((a)*(a)*(a)) | ||
79 | double gain; | ||
80 | |||
81 | 913952 | gain = av_clipd(1.0 * index / range, 0, 1.0); | |
82 | |||
83 | 6/24 | 913952 | switch (curve) { |
84 | 90112 | case QSIN: | |
85 | 90112 | gain = sin(gain * M_PI / 2.0); | |
86 | 90112 | break; | |
87 | 90112 | case IQSIN: | |
88 | /* 0.6... = 2 / M_PI */ | ||
89 | 90112 | gain = 0.6366197723675814 * asin(gain); | |
90 | 90112 | break; | |
91 | 90112 | case ESIN: | |
92 | 90112 | gain = 1.0 - cos(M_PI / 4.0 * (CUBE(2.0*gain - 1) + 1)); | |
93 | 90112 | break; | |
94 | 90112 | case HSIN: | |
95 | 90112 | gain = (1.0 - cos(gain * M_PI)) / 2.0; | |
96 | 90112 | break; | |
97 | ✗ | case IHSIN: | |
98 | /* 0.3... = 1 / M_PI */ | ||
99 | ✗ | gain = 0.3183098861837907 * acos(1 - 2 * gain); | |
100 | ✗ | break; | |
101 | 266512 | case EXP: | |
102 | /* -11.5... = 5*ln(0.1) */ | ||
103 | 266512 | gain = exp(-11.512925464970227 * (1 - gain)); | |
104 | 266512 | break; | |
105 | 286992 | case LOG: | |
106 | 286992 | gain = av_clipd(1 + 0.2 * log10(gain), 0, 1.0); | |
107 | 286992 | break; | |
108 | ✗ | case PAR: | |
109 | ✗ | gain = 1 - sqrt(1 - gain); | |
110 | ✗ | break; | |
111 | ✗ | case IPAR: | |
112 | ✗ | gain = (1 - (1 - gain) * (1 - gain)); | |
113 | ✗ | break; | |
114 | ✗ | case QUA: | |
115 | ✗ | gain *= gain; | |
116 | ✗ | break; | |
117 | ✗ | case CUB: | |
118 | ✗ | gain = CUBE(gain); | |
119 | ✗ | break; | |
120 | ✗ | case SQU: | |
121 | ✗ | gain = sqrt(gain); | |
122 | ✗ | break; | |
123 | ✗ | case CBR: | |
124 | ✗ | gain = cbrt(gain); | |
125 | ✗ | break; | |
126 | ✗ | case DESE: | |
127 | ✗ | gain = gain <= 0.5 ? cbrt(2 * gain) / 2: 1 - cbrt(2 * (1 - gain)) / 2; | |
128 | ✗ | break; | |
129 | ✗ | case DESI: | |
130 | ✗ | gain = gain <= 0.5 ? CUBE(2 * gain) / 2: 1 - CUBE(2 * (1 - gain)) / 2; | |
131 | ✗ | break; | |
132 | ✗ | case LOSI: { | |
133 | ✗ | const double a = 1. / (1. - 0.787) - 1; | |
134 | ✗ | double A = 1. / (1.0 + exp(0 -((gain-0.5) * a * 2.0))); | |
135 | ✗ | double B = 1. / (1.0 + exp(a)); | |
136 | ✗ | double C = 1. / (1.0 + exp(0-a)); | |
137 | ✗ | gain = (A - B) / (C - B); | |
138 | } | ||
139 | ✗ | break; | |
140 | ✗ | case SINC: | |
141 | ✗ | gain = gain >= 1.0 ? 1.0 : sin(M_PI * (1.0 - gain)) / (M_PI * (1.0 - gain)); | |
142 | ✗ | break; | |
143 | ✗ | case ISINC: | |
144 | ✗ | gain = gain <= 0.0 ? 0.0 : 1.0 - sin(M_PI * gain) / (M_PI * gain); | |
145 | ✗ | break; | |
146 | ✗ | case QUAT: | |
147 | ✗ | gain = gain * gain * gain * gain; | |
148 | ✗ | break; | |
149 | ✗ | case QUATR: | |
150 | ✗ | gain = pow(gain, 0.25); | |
151 | ✗ | break; | |
152 | ✗ | case QSIN2: | |
153 | ✗ | gain = sin(gain * M_PI / 2.0) * sin(gain * M_PI / 2.0); | |
154 | ✗ | break; | |
155 | ✗ | case HSIN2: | |
156 | ✗ | gain = pow((1.0 - cos(gain * M_PI)) / 2.0, 2.0); | |
157 | ✗ | break; | |
158 | ✗ | case NONE: | |
159 | ✗ | gain = 1.0; | |
160 | ✗ | break; | |
161 | } | ||
162 | |||
163 | 913952 | return silence + (unity - silence) * gain; | |
164 | } | ||
165 | |||
166 | #define FADE_PLANAR(name, type) \ | ||
167 | static void fade_samples_## name ##p(uint8_t **dst, uint8_t * const *src, \ | ||
168 | int nb_samples, int channels, int dir, \ | ||
169 | int64_t start, int64_t range,int curve,\ | ||
170 | double silence, double unity) \ | ||
171 | { \ | ||
172 | int i, c; \ | ||
173 | \ | ||
174 | for (i = 0; i < nb_samples; i++) { \ | ||
175 | double gain = fade_gain(curve, start + i * dir,range,silence,unity);\ | ||
176 | for (c = 0; c < channels; c++) { \ | ||
177 | type *d = (type *)dst[c]; \ | ||
178 | const type *s = (type *)src[c]; \ | ||
179 | \ | ||
180 | d[i] = s[i] * gain; \ | ||
181 | } \ | ||
182 | } \ | ||
183 | } | ||
184 | |||
185 | #define FADE(name, type) \ | ||
186 | static void fade_samples_## name (uint8_t **dst, uint8_t * const *src, \ | ||
187 | int nb_samples, int channels, int dir, \ | ||
188 | int64_t start, int64_t range, int curve, \ | ||
189 | double silence, double unity) \ | ||
190 | { \ | ||
191 | type *d = (type *)dst[0]; \ | ||
192 | const type *s = (type *)src[0]; \ | ||
193 | int i, c, k = 0; \ | ||
194 | \ | ||
195 | for (i = 0; i < nb_samples; i++) { \ | ||
196 | double gain = fade_gain(curve, start + i * dir,range,silence,unity);\ | ||
197 | for (c = 0; c < channels; c++, k++) \ | ||
198 | d[k] = s[k] * gain; \ | ||
199 | } \ | ||
200 | } | ||
201 | |||
202 | ✗ | FADE_PLANAR(dbl, double) | |
203 | ✗ | FADE_PLANAR(flt, float) | |
204 | ✗ | FADE_PLANAR(s16, int16_t) | |
205 | ✗ | FADE_PLANAR(s32, int32_t) | |
206 | |||
207 | ✗ | FADE(dbl, double) | |
208 | ✗ | FADE(flt, float) | |
209 | 4/4 | 1683593 | FADE(s16, int16_t) |
210 | ✗ | FADE(s32, int32_t) | |
211 | |||
212 | #define SCALE_PLANAR(name, type) \ | ||
213 | static void scale_samples_## name ##p(uint8_t **dst, uint8_t * const *src, \ | ||
214 | int nb_samples, int channels, \ | ||
215 | double gain) \ | ||
216 | { \ | ||
217 | int i, c; \ | ||
218 | \ | ||
219 | for (i = 0; i < nb_samples; i++) { \ | ||
220 | for (c = 0; c < channels; c++) { \ | ||
221 | type *d = (type *)dst[c]; \ | ||
222 | const type *s = (type *)src[c]; \ | ||
223 | \ | ||
224 | d[i] = s[i] * gain; \ | ||
225 | } \ | ||
226 | } \ | ||
227 | } | ||
228 | |||
229 | #define SCALE(name, type) \ | ||
230 | static void scale_samples_## name (uint8_t **dst, uint8_t * const *src, \ | ||
231 | int nb_samples, int channels, double gain)\ | ||
232 | { \ | ||
233 | type *d = (type *)dst[0]; \ | ||
234 | const type *s = (type *)src[0]; \ | ||
235 | int i, c, k = 0; \ | ||
236 | \ | ||
237 | for (i = 0; i < nb_samples; i++) { \ | ||
238 | for (c = 0; c < channels; c++, k++) \ | ||
239 | d[k] = s[k] * gain; \ | ||
240 | } \ | ||
241 | } | ||
242 | |||
243 | ✗ | SCALE_PLANAR(dbl, double) | |
244 | ✗ | SCALE_PLANAR(flt, float) | |
245 | ✗ | SCALE_PLANAR(s16, int16_t) | |
246 | ✗ | SCALE_PLANAR(s32, int32_t) | |
247 | |||
248 | ✗ | SCALE(dbl, double) | |
249 | ✗ | SCALE(flt, float) | |
250 | ✗ | SCALE(s16, int16_t) | |
251 | ✗ | SCALE(s32, int32_t) | |
252 | |||
253 | 7 | static int config_output(AVFilterLink *outlink) | |
254 | { | ||
255 | 7 | AVFilterContext *ctx = outlink->src; | |
256 | 7 | AudioFadeContext *s = ctx->priv; | |
257 | |||
258 | 1/9 | 7 | switch (outlink->format) { |
259 | ✗ | case AV_SAMPLE_FMT_DBL: s->fade_samples = fade_samples_dbl; | |
260 | ✗ | s->scale_samples = scale_samples_dbl; | |
261 | ✗ | break; | |
262 | ✗ | case AV_SAMPLE_FMT_DBLP: s->fade_samples = fade_samples_dblp; | |
263 | ✗ | s->scale_samples = scale_samples_dblp; | |
264 | ✗ | break; | |
265 | ✗ | case AV_SAMPLE_FMT_FLT: s->fade_samples = fade_samples_flt; | |
266 | ✗ | s->scale_samples = scale_samples_flt; | |
267 | ✗ | break; | |
268 | ✗ | case AV_SAMPLE_FMT_FLTP: s->fade_samples = fade_samples_fltp; | |
269 | ✗ | s->scale_samples = scale_samples_fltp; | |
270 | ✗ | break; | |
271 | 7 | case AV_SAMPLE_FMT_S16: s->fade_samples = fade_samples_s16; | |
272 | 7 | s->scale_samples = scale_samples_s16; | |
273 | 7 | break; | |
274 | ✗ | case AV_SAMPLE_FMT_S16P: s->fade_samples = fade_samples_s16p; | |
275 | ✗ | s->scale_samples = scale_samples_s16p; | |
276 | ✗ | break; | |
277 | ✗ | case AV_SAMPLE_FMT_S32: s->fade_samples = fade_samples_s32; | |
278 | ✗ | s->scale_samples = scale_samples_s32; | |
279 | ✗ | break; | |
280 | ✗ | case AV_SAMPLE_FMT_S32P: s->fade_samples = fade_samples_s32p; | |
281 | ✗ | s->scale_samples = scale_samples_s32p; | |
282 | ✗ | break; | |
283 | } | ||
284 | |||
285 | 1/2 | 7 | if (s->duration) |
286 | 7 | s->nb_samples = av_rescale(s->duration, outlink->sample_rate, AV_TIME_BASE); | |
287 | 7 | s->duration = 0; | |
288 | 1/2 | 7 | if (s->start_time) |
289 | ✗ | s->start_sample = av_rescale(s->start_time, outlink->sample_rate, AV_TIME_BASE); | |
290 | 7 | s->start_time = 0; | |
291 | |||
292 | 7 | return 0; | |
293 | } | ||
294 | |||
295 | #if CONFIG_AFADE_FILTER | ||
296 | |||
297 | static const AVOption afade_options[] = { | ||
298 | { "type", "set the fade direction", OFFSET(type), AV_OPT_TYPE_INT, {.i64 = 0 }, 0, 1, TFLAGS, .unit = "type" }, | ||
299 | { "t", "set the fade direction", OFFSET(type), AV_OPT_TYPE_INT, {.i64 = 0 }, 0, 1, TFLAGS, .unit = "type" }, | ||
300 | { "in", "fade-in", 0, AV_OPT_TYPE_CONST, {.i64 = 0 }, 0, 0, TFLAGS, .unit = "type" }, | ||
301 | { "out", "fade-out", 0, AV_OPT_TYPE_CONST, {.i64 = 1 }, 0, 0, TFLAGS, .unit = "type" }, | ||
302 | { "start_sample", "set number of first sample to start fading", OFFSET(start_sample), AV_OPT_TYPE_INT64, {.i64 = 0 }, 0, INT64_MAX, TFLAGS }, | ||
303 | { "ss", "set number of first sample to start fading", OFFSET(start_sample), AV_OPT_TYPE_INT64, {.i64 = 0 }, 0, INT64_MAX, TFLAGS }, | ||
304 | { "nb_samples", "set number of samples for fade duration", OFFSET(nb_samples), AV_OPT_TYPE_INT64, {.i64 = 44100}, 1, INT64_MAX, TFLAGS }, | ||
305 | { "ns", "set number of samples for fade duration", OFFSET(nb_samples), AV_OPT_TYPE_INT64, {.i64 = 44100}, 1, INT64_MAX, TFLAGS }, | ||
306 | { "start_time", "set time to start fading", OFFSET(start_time), AV_OPT_TYPE_DURATION, {.i64 = 0 }, 0, INT64_MAX, TFLAGS }, | ||
307 | { "st", "set time to start fading", OFFSET(start_time), AV_OPT_TYPE_DURATION, {.i64 = 0 }, 0, INT64_MAX, TFLAGS }, | ||
308 | { "duration", "set fade duration", OFFSET(duration), AV_OPT_TYPE_DURATION, {.i64 = 0 }, 0, INT64_MAX, TFLAGS }, | ||
309 | { "d", "set fade duration", OFFSET(duration), AV_OPT_TYPE_DURATION, {.i64 = 0 }, 0, INT64_MAX, TFLAGS }, | ||
310 | { "curve", "set fade curve type", OFFSET(curve), AV_OPT_TYPE_INT, {.i64 = TRI }, NONE, NB_CURVES - 1, TFLAGS, .unit = "curve" }, | ||
311 | { "c", "set fade curve type", OFFSET(curve), AV_OPT_TYPE_INT, {.i64 = TRI }, NONE, NB_CURVES - 1, TFLAGS, .unit = "curve" }, | ||
312 | { "nofade", "no fade; keep audio as-is", 0, AV_OPT_TYPE_CONST, {.i64 = NONE }, 0, 0, TFLAGS, .unit = "curve" }, | ||
313 | { "tri", "linear slope", 0, AV_OPT_TYPE_CONST, {.i64 = TRI }, 0, 0, TFLAGS, .unit = "curve" }, | ||
314 | { "qsin", "quarter of sine wave", 0, AV_OPT_TYPE_CONST, {.i64 = QSIN }, 0, 0, TFLAGS, .unit = "curve" }, | ||
315 | { "esin", "exponential sine wave", 0, AV_OPT_TYPE_CONST, {.i64 = ESIN }, 0, 0, TFLAGS, .unit = "curve" }, | ||
316 | { "hsin", "half of sine wave", 0, AV_OPT_TYPE_CONST, {.i64 = HSIN }, 0, 0, TFLAGS, .unit = "curve" }, | ||
317 | { "log", "logarithmic", 0, AV_OPT_TYPE_CONST, {.i64 = LOG }, 0, 0, TFLAGS, .unit = "curve" }, | ||
318 | { "ipar", "inverted parabola", 0, AV_OPT_TYPE_CONST, {.i64 = IPAR }, 0, 0, TFLAGS, .unit = "curve" }, | ||
319 | { "qua", "quadratic", 0, AV_OPT_TYPE_CONST, {.i64 = QUA }, 0, 0, TFLAGS, .unit = "curve" }, | ||
320 | { "cub", "cubic", 0, AV_OPT_TYPE_CONST, {.i64 = CUB }, 0, 0, TFLAGS, .unit = "curve" }, | ||
321 | { "squ", "square root", 0, AV_OPT_TYPE_CONST, {.i64 = SQU }, 0, 0, TFLAGS, .unit = "curve" }, | ||
322 | { "cbr", "cubic root", 0, AV_OPT_TYPE_CONST, {.i64 = CBR }, 0, 0, TFLAGS, .unit = "curve" }, | ||
323 | { "par", "parabola", 0, AV_OPT_TYPE_CONST, {.i64 = PAR }, 0, 0, TFLAGS, .unit = "curve" }, | ||
324 | { "exp", "exponential", 0, AV_OPT_TYPE_CONST, {.i64 = EXP }, 0, 0, TFLAGS, .unit = "curve" }, | ||
325 | { "iqsin", "inverted quarter of sine wave", 0, AV_OPT_TYPE_CONST, {.i64 = IQSIN}, 0, 0, TFLAGS, .unit = "curve" }, | ||
326 | { "ihsin", "inverted half of sine wave", 0, AV_OPT_TYPE_CONST, {.i64 = IHSIN}, 0, 0, TFLAGS, .unit = "curve" }, | ||
327 | { "dese", "double-exponential seat", 0, AV_OPT_TYPE_CONST, {.i64 = DESE }, 0, 0, TFLAGS, .unit = "curve" }, | ||
328 | { "desi", "double-exponential sigmoid", 0, AV_OPT_TYPE_CONST, {.i64 = DESI }, 0, 0, TFLAGS, .unit = "curve" }, | ||
329 | { "losi", "logistic sigmoid", 0, AV_OPT_TYPE_CONST, {.i64 = LOSI }, 0, 0, TFLAGS, .unit = "curve" }, | ||
330 | { "sinc", "sine cardinal function", 0, AV_OPT_TYPE_CONST, {.i64 = SINC }, 0, 0, TFLAGS, .unit = "curve" }, | ||
331 | { "isinc", "inverted sine cardinal function", 0, AV_OPT_TYPE_CONST, {.i64 = ISINC}, 0, 0, TFLAGS, .unit = "curve" }, | ||
332 | { "quat", "quartic", 0, AV_OPT_TYPE_CONST, {.i64 = QUAT }, 0, 0, TFLAGS, .unit = "curve" }, | ||
333 | { "quatr", "quartic root", 0, AV_OPT_TYPE_CONST, {.i64 = QUATR}, 0, 0, TFLAGS, .unit = "curve" }, | ||
334 | { "qsin2", "squared quarter of sine wave", 0, AV_OPT_TYPE_CONST, {.i64 = QSIN2}, 0, 0, TFLAGS, .unit = "curve" }, | ||
335 | { "hsin2", "squared half of sine wave", 0, AV_OPT_TYPE_CONST, {.i64 = HSIN2}, 0, 0, TFLAGS, .unit = "curve" }, | ||
336 | { "silence", "set the silence gain", OFFSET(silence), AV_OPT_TYPE_DOUBLE, {.dbl = 0 }, 0, 1, TFLAGS }, | ||
337 | { "unity", "set the unity gain", OFFSET(unity), AV_OPT_TYPE_DOUBLE, {.dbl = 1 }, 0, 1, TFLAGS }, | ||
338 | { NULL } | ||
339 | }; | ||
340 | |||
341 | AVFILTER_DEFINE_CLASS(afade); | ||
342 | |||
343 | 12 | static av_cold int init(AVFilterContext *ctx) | |
344 | { | ||
345 | 12 | AudioFadeContext *s = ctx->priv; | |
346 | |||
347 | 1/2 | 12 | if (INT64_MAX - s->nb_samples < s->start_sample) |
348 | ✗ | return AVERROR(EINVAL); | |
349 | |||
350 | 12 | return 0; | |
351 | } | ||
352 | |||
353 | 390 | static int filter_frame(AVFilterLink *inlink, AVFrame *buf) | |
354 | { | ||
355 | 390 | AudioFadeContext *s = inlink->dst->priv; | |
356 | 390 | AVFilterLink *outlink = inlink->dst->outputs[0]; | |
357 | 390 | int nb_samples = buf->nb_samples; | |
358 | AVFrame *out_buf; | ||
359 | 390 | int64_t cur_sample = av_rescale_q(buf->pts, inlink->time_base, (AVRational){1, inlink->sample_rate}); | |
360 | |||
361 | 1/2 | 390 | if (s->unity == 1.0 && |
362 | 3/4 | 390 | ((!s->type && (s->start_sample + s->nb_samples < cur_sample)) || |
363 | 1/4 | 137 | ( s->type && (cur_sample + nb_samples < s->start_sample)))) |
364 | 253 | return ff_filter_frame(outlink, buf); | |
365 | |||
366 | 1/2 | 137 | if (av_frame_is_writable(buf)) { |
367 | 137 | out_buf = buf; | |
368 | } else { | ||
369 | ✗ | out_buf = ff_get_audio_buffer(outlink, nb_samples); | |
370 | ✗ | if (!out_buf) | |
371 | ✗ | return AVERROR(ENOMEM); | |
372 | ✗ | av_frame_copy_props(out_buf, buf); | |
373 | } | ||
374 | |||
375 | 2/4 | 137 | if ((!s->type && (cur_sample + nb_samples < s->start_sample)) || |
376 | 1/4 | 137 | ( s->type && (s->start_sample + s->nb_samples < cur_sample))) { |
377 | ✗ | if (s->silence == 0.) { | |
378 | ✗ | av_samples_set_silence(out_buf->extended_data, 0, nb_samples, | |
379 | ✗ | out_buf->ch_layout.nb_channels, out_buf->format); | |
380 | } else { | ||
381 | ✗ | s->scale_samples(out_buf->extended_data, buf->extended_data, | |
382 | ✗ | nb_samples, buf->ch_layout.nb_channels, | |
383 | s->silence); | ||
384 | } | ||
385 | 1/4 | 137 | } else if (( s->type && (cur_sample + nb_samples < s->start_sample)) || |
386 | 2/4 | 137 | (!s->type && (s->start_sample + s->nb_samples < cur_sample))) { |
387 | ✗ | s->scale_samples(out_buf->extended_data, buf->extended_data, | |
388 | ✗ | nb_samples, buf->ch_layout.nb_channels, | |
389 | s->unity); | ||
390 | } else { | ||
391 | int64_t start; | ||
392 | |||
393 | 1/2 | 137 | if (!s->type) |
394 | 137 | start = cur_sample - s->start_sample; | |
395 | else | ||
396 | ✗ | start = s->start_sample + s->nb_samples - cur_sample; | |
397 | |||
398 | 137 | s->fade_samples(out_buf->extended_data, buf->extended_data, | |
399 | 137 | nb_samples, buf->ch_layout.nb_channels, | |
400 | 1/2 | 137 | s->type ? -1 : 1, start, |
401 | s->nb_samples, s->curve, s->silence, s->unity); | ||
402 | } | ||
403 | |||
404 | 1/2 | 137 | if (buf != out_buf) |
405 | ✗ | av_frame_free(&buf); | |
406 | |||
407 | 137 | return ff_filter_frame(outlink, out_buf); | |
408 | } | ||
409 | |||
410 | ✗ | static int process_command(AVFilterContext *ctx, const char *cmd, const char *args, | |
411 | char *res, int res_len, int flags) | ||
412 | { | ||
413 | int ret; | ||
414 | |||
415 | ✗ | ret = ff_filter_process_command(ctx, cmd, args, res, res_len, flags); | |
416 | ✗ | if (ret < 0) | |
417 | ✗ | return ret; | |
418 | |||
419 | ✗ | return config_output(ctx->outputs[0]); | |
420 | } | ||
421 | |||
422 | static const AVFilterPad avfilter_af_afade_inputs[] = { | ||
423 | { | ||
424 | .name = "default", | ||
425 | .type = AVMEDIA_TYPE_AUDIO, | ||
426 | .filter_frame = filter_frame, | ||
427 | }, | ||
428 | }; | ||
429 | |||
430 | static const AVFilterPad avfilter_af_afade_outputs[] = { | ||
431 | { | ||
432 | .name = "default", | ||
433 | .type = AVMEDIA_TYPE_AUDIO, | ||
434 | .config_props = config_output, | ||
435 | }, | ||
436 | }; | ||
437 | |||
438 | const FFFilter ff_af_afade = { | ||
439 | .p.name = "afade", | ||
440 | .p.description = NULL_IF_CONFIG_SMALL("Fade in/out input audio."), | ||
441 | .p.priv_class = &afade_class, | ||
442 | .p.flags = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC, | ||
443 | .priv_size = sizeof(AudioFadeContext), | ||
444 | .init = init, | ||
445 | FILTER_INPUTS(avfilter_af_afade_inputs), | ||
446 | FILTER_OUTPUTS(avfilter_af_afade_outputs), | ||
447 | FILTER_SAMPLEFMTS_ARRAY(sample_fmts), | ||
448 | .process_command = process_command, | ||
449 | }; | ||
450 | |||
451 | #endif /* CONFIG_AFADE_FILTER */ | ||
452 | |||
453 | #if CONFIG_ACROSSFADE_FILTER | ||
454 | |||
455 | static const AVOption acrossfade_options[] = { | ||
456 | { "inputs", "set number of input files to cross fade", OFFSET(nb_inputs), AV_OPT_TYPE_INT, {.i64 = 2}, 1, INT32_MAX, FLAGS }, | ||
457 | { "n", "set number of input files to cross fade", OFFSET(nb_inputs), AV_OPT_TYPE_INT, {.i64 = 2}, 1, INT32_MAX, FLAGS }, | ||
458 | { "nb_samples", "set number of samples for cross fade duration", OFFSET(nb_samples), AV_OPT_TYPE_INT64, {.i64 = 44100}, 1, INT32_MAX/10, FLAGS }, | ||
459 | { "ns", "set number of samples for cross fade duration", OFFSET(nb_samples), AV_OPT_TYPE_INT64, {.i64 = 44100}, 1, INT32_MAX/10, FLAGS }, | ||
460 | { "duration", "set cross fade duration", OFFSET(duration), AV_OPT_TYPE_DURATION, {.i64 = 0 }, 0, 60000000, FLAGS }, | ||
461 | { "d", "set cross fade duration", OFFSET(duration), AV_OPT_TYPE_DURATION, {.i64 = 0 }, 0, 60000000, FLAGS }, | ||
462 | { "overlap", "overlap 1st stream end with 2nd stream start", OFFSET(overlap), AV_OPT_TYPE_BOOL, {.i64 = 1 }, 0, 1, FLAGS }, | ||
463 | { "o", "overlap 1st stream end with 2nd stream start", OFFSET(overlap), AV_OPT_TYPE_BOOL, {.i64 = 1 }, 0, 1, FLAGS }, | ||
464 | { "curve1", "set fade curve type for 1st stream", OFFSET(curve), AV_OPT_TYPE_INT, {.i64 = TRI }, NONE, NB_CURVES - 1, FLAGS, .unit = "curve" }, | ||
465 | { "c1", "set fade curve type for 1st stream", OFFSET(curve), AV_OPT_TYPE_INT, {.i64 = TRI }, NONE, NB_CURVES - 1, FLAGS, .unit = "curve" }, | ||
466 | { "nofade", "no fade; keep audio as-is", 0, AV_OPT_TYPE_CONST, {.i64 = NONE }, 0, 0, FLAGS, .unit = "curve" }, | ||
467 | { "tri", "linear slope", 0, AV_OPT_TYPE_CONST, {.i64 = TRI }, 0, 0, FLAGS, .unit = "curve" }, | ||
468 | { "qsin", "quarter of sine wave", 0, AV_OPT_TYPE_CONST, {.i64 = QSIN }, 0, 0, FLAGS, .unit = "curve" }, | ||
469 | { "esin", "exponential sine wave", 0, AV_OPT_TYPE_CONST, {.i64 = ESIN }, 0, 0, FLAGS, .unit = "curve" }, | ||
470 | { "hsin", "half of sine wave", 0, AV_OPT_TYPE_CONST, {.i64 = HSIN }, 0, 0, FLAGS, .unit = "curve" }, | ||
471 | { "log", "logarithmic", 0, AV_OPT_TYPE_CONST, {.i64 = LOG }, 0, 0, FLAGS, .unit = "curve" }, | ||
472 | { "ipar", "inverted parabola", 0, AV_OPT_TYPE_CONST, {.i64 = IPAR }, 0, 0, FLAGS, .unit = "curve" }, | ||
473 | { "qua", "quadratic", 0, AV_OPT_TYPE_CONST, {.i64 = QUA }, 0, 0, FLAGS, .unit = "curve" }, | ||
474 | { "cub", "cubic", 0, AV_OPT_TYPE_CONST, {.i64 = CUB }, 0, 0, FLAGS, .unit = "curve" }, | ||
475 | { "squ", "square root", 0, AV_OPT_TYPE_CONST, {.i64 = SQU }, 0, 0, FLAGS, .unit = "curve" }, | ||
476 | { "cbr", "cubic root", 0, AV_OPT_TYPE_CONST, {.i64 = CBR }, 0, 0, FLAGS, .unit = "curve" }, | ||
477 | { "par", "parabola", 0, AV_OPT_TYPE_CONST, {.i64 = PAR }, 0, 0, FLAGS, .unit = "curve" }, | ||
478 | { "exp", "exponential", 0, AV_OPT_TYPE_CONST, {.i64 = EXP }, 0, 0, FLAGS, .unit = "curve" }, | ||
479 | { "iqsin", "inverted quarter of sine wave", 0, AV_OPT_TYPE_CONST, {.i64 = IQSIN}, 0, 0, FLAGS, .unit = "curve" }, | ||
480 | { "ihsin", "inverted half of sine wave", 0, AV_OPT_TYPE_CONST, {.i64 = IHSIN}, 0, 0, FLAGS, .unit = "curve" }, | ||
481 | { "dese", "double-exponential seat", 0, AV_OPT_TYPE_CONST, {.i64 = DESE }, 0, 0, FLAGS, .unit = "curve" }, | ||
482 | { "desi", "double-exponential sigmoid", 0, AV_OPT_TYPE_CONST, {.i64 = DESI }, 0, 0, FLAGS, .unit = "curve" }, | ||
483 | { "losi", "logistic sigmoid", 0, AV_OPT_TYPE_CONST, {.i64 = LOSI }, 0, 0, FLAGS, .unit = "curve" }, | ||
484 | { "sinc", "sine cardinal function", 0, AV_OPT_TYPE_CONST, {.i64 = SINC }, 0, 0, FLAGS, .unit = "curve" }, | ||
485 | { "isinc", "inverted sine cardinal function", 0, AV_OPT_TYPE_CONST, {.i64 = ISINC}, 0, 0, FLAGS, .unit = "curve" }, | ||
486 | { "quat", "quartic", 0, AV_OPT_TYPE_CONST, {.i64 = QUAT }, 0, 0, FLAGS, .unit = "curve" }, | ||
487 | { "quatr", "quartic root", 0, AV_OPT_TYPE_CONST, {.i64 = QUATR}, 0, 0, FLAGS, .unit = "curve" }, | ||
488 | { "qsin2", "squared quarter of sine wave", 0, AV_OPT_TYPE_CONST, {.i64 = QSIN2}, 0, 0, FLAGS, .unit = "curve" }, | ||
489 | { "hsin2", "squared half of sine wave", 0, AV_OPT_TYPE_CONST, {.i64 = HSIN2}, 0, 0, FLAGS, .unit = "curve" }, | ||
490 | { "curve2", "set fade curve type for 2nd stream", OFFSET(curve2), AV_OPT_TYPE_INT, {.i64 = TRI }, NONE, NB_CURVES - 1, FLAGS, .unit = "curve" }, | ||
491 | { "c2", "set fade curve type for 2nd stream", OFFSET(curve2), AV_OPT_TYPE_INT, {.i64 = TRI }, NONE, NB_CURVES - 1, FLAGS, .unit = "curve" }, | ||
492 | { NULL } | ||
493 | }; | ||
494 | |||
495 | AVFILTER_DEFINE_CLASS(acrossfade); | ||
496 | |||
497 | #define CROSSFADE_PLANAR(name, type) \ | ||
498 | static void crossfade_samples_## name ##p(uint8_t **dst, uint8_t * const *cf0, \ | ||
499 | uint8_t * const *cf1, \ | ||
500 | int nb_samples, int channels, \ | ||
501 | int curve0, int curve1) \ | ||
502 | { \ | ||
503 | int i, c; \ | ||
504 | \ | ||
505 | for (i = 0; i < nb_samples; i++) { \ | ||
506 | double gain0 = fade_gain(curve0, nb_samples - 1 - i, nb_samples,0.,1.);\ | ||
507 | double gain1 = fade_gain(curve1, i, nb_samples, 0., 1.); \ | ||
508 | for (c = 0; c < channels; c++) { \ | ||
509 | type *d = (type *)dst[c]; \ | ||
510 | const type *s0 = (type *)cf0[c]; \ | ||
511 | const type *s1 = (type *)cf1[c]; \ | ||
512 | \ | ||
513 | d[i] = s0[i] * gain0 + s1[i] * gain1; \ | ||
514 | } \ | ||
515 | } \ | ||
516 | } | ||
517 | |||
518 | #define CROSSFADE(name, type) \ | ||
519 | static void crossfade_samples_## name (uint8_t **dst, uint8_t * const *cf0, \ | ||
520 | uint8_t * const *cf1, \ | ||
521 | int nb_samples, int channels, \ | ||
522 | int curve0, int curve1) \ | ||
523 | { \ | ||
524 | type *d = (type *)dst[0]; \ | ||
525 | const type *s0 = (type *)cf0[0]; \ | ||
526 | const type *s1 = (type *)cf1[0]; \ | ||
527 | int i, c, k = 0; \ | ||
528 | \ | ||
529 | for (i = 0; i < nb_samples; i++) { \ | ||
530 | double gain0 = fade_gain(curve0, nb_samples - 1-i,nb_samples,0.,1.);\ | ||
531 | double gain1 = fade_gain(curve1, i, nb_samples, 0., 1.); \ | ||
532 | for (c = 0; c < channels; c++, k++) \ | ||
533 | d[k] = s0[k] * gain0 + s1[k] * gain1; \ | ||
534 | } \ | ||
535 | } | ||
536 | |||
537 | ✗ | CROSSFADE_PLANAR(dbl, double) | |
538 | ✗ | CROSSFADE_PLANAR(flt, float) | |
539 | ✗ | CROSSFADE_PLANAR(s16, int16_t) | |
540 | ✗ | CROSSFADE_PLANAR(s32, int32_t) | |
541 | |||
542 | ✗ | CROSSFADE(dbl, double) | |
543 | ✗ | CROSSFADE(flt, float) | |
544 | 4/4 | 529202 | CROSSFADE(s16, int16_t) |
545 | ✗ | CROSSFADE(s32, int32_t) | |
546 | |||
547 | 104 | static int pass_frame(AVFilterLink *inlink, AVFilterLink *outlink, int64_t *pts) | |
548 | { | ||
549 | AVFrame *in; | ||
550 | 104 | int ret = ff_inlink_consume_frame(inlink, &in); | |
551 | 1/2 | 104 | if (ret < 0) |
552 | ✗ | return ret; | |
553 | av_assert1(ret); | ||
554 | 104 | in->pts = *pts; | |
555 | 104 | *pts += av_rescale_q(in->nb_samples, | |
556 | 104 | (AVRational){ 1, outlink->sample_rate }, outlink->time_base); | |
557 | 104 | return ff_filter_frame(outlink, in); | |
558 | } | ||
559 | |||
560 | 2 | static int pass_samples(AVFilterLink *inlink, AVFilterLink *outlink, unsigned nb_samples, int64_t *pts) | |
561 | { | ||
562 | AVFrame *in; | ||
563 | 2 | int ret = ff_inlink_consume_samples(inlink, nb_samples, nb_samples, &in); | |
564 | 1/2 | 2 | if (ret < 0) |
565 | ✗ | return ret; | |
566 | av_assert1(ret); | ||
567 | 2 | in->pts = *pts; | |
568 | 2 | *pts += av_rescale_q(in->nb_samples, | |
569 | 2 | (AVRational){ 1, outlink->sample_rate }, outlink->time_base); | |
570 | 2 | return ff_filter_frame(outlink, in); | |
571 | } | ||
572 | |||
573 | 2 | static int pass_crossfade(AVFilterContext *ctx, const int idx0, const int idx1) | |
574 | { | ||
575 | 2 | AudioFadeContext *s = ctx->priv; | |
576 | 2 | AVFilterLink *outlink = ctx->outputs[0]; | |
577 | 2 | AVFrame *out, *cf[2] = { NULL }; | |
578 | int ret; | ||
579 | |||
580 | 2 | AVFilterLink *in0 = ctx->inputs[idx0]; | |
581 | 2 | AVFilterLink *in1 = ctx->inputs[idx1]; | |
582 | 2 | int queued_samples0 = ff_inlink_queued_samples(in0); | |
583 | 2 | int queued_samples1 = ff_inlink_queued_samples(in1); | |
584 | |||
585 | /* Limit to the relevant region */ | ||
586 | av_assert1(queued_samples0 <= s->nb_samples); | ||
587 | 1/4 | 2 | if (ff_outlink_get_status(in1) && idx1 < s->nb_inputs - 1) |
588 | ✗ | queued_samples1 /= 2; /* reserve second half for next fade-out */ | |
589 | 2 | queued_samples1 = FFMIN(queued_samples1, s->nb_samples); | |
590 | |||
591 | 1/2 | 2 | if (s->overlap) { |
592 | 2 | int nb_samples = FFMIN(queued_samples0, queued_samples1); | |
593 | 1/2 | 2 | if (nb_samples < s->nb_samples) { |
594 | ✗ | av_log(ctx, AV_LOG_WARNING, "Input %d duration (%d samples) " | |
595 | "is shorter than crossfade duration (%"PRId64" samples), " | ||
596 | "crossfade will be shorter by %"PRId64" samples.\n", | ||
597 | queued_samples0 <= queued_samples1 ? idx0 : idx1, | ||
598 | ✗ | nb_samples, s->nb_samples, s->nb_samples - nb_samples); | |
599 | |||
600 | ✗ | if (queued_samples0 > nb_samples) { | |
601 | ✗ | ret = pass_samples(in0, outlink, queued_samples0 - nb_samples, &s->pts); | |
602 | ✗ | if (ret < 0) | |
603 | ✗ | return ret; | |
604 | } | ||
605 | |||
606 | ✗ | if (!nb_samples) | |
607 | ✗ | return 0; /* either input was completely empty */ | |
608 | } | ||
609 | |||
610 | av_assert1(nb_samples > 0); | ||
611 | 2 | out = ff_get_audio_buffer(outlink, nb_samples); | |
612 | 1/2 | 2 | if (!out) |
613 | ✗ | return AVERROR(ENOMEM); | |
614 | |||
615 | 2 | ret = ff_inlink_consume_samples(in0, nb_samples, nb_samples, &cf[0]); | |
616 | 1/2 | 2 | if (ret < 0) { |
617 | ✗ | av_frame_free(&out); | |
618 | ✗ | return ret; | |
619 | } | ||
620 | |||
621 | 2 | ret = ff_inlink_consume_samples(in1, nb_samples, nb_samples, &cf[1]); | |
622 | 1/2 | 2 | if (ret < 0) { |
623 | ✗ | av_frame_free(&cf[0]); | |
624 | ✗ | av_frame_free(&out); | |
625 | ✗ | return ret; | |
626 | } | ||
627 | |||
628 | 2 | s->crossfade_samples(out->extended_data, cf[0]->extended_data, | |
629 | 2 | cf[1]->extended_data, nb_samples, | |
630 | 2 | out->ch_layout.nb_channels, s->curve, s->curve2); | |
631 | 2 | out->pts = s->pts; | |
632 | 2 | s->pts += av_rescale_q(nb_samples, | |
633 | 2 | (AVRational){ 1, outlink->sample_rate }, outlink->time_base); | |
634 | 2 | av_frame_free(&cf[0]); | |
635 | 2 | av_frame_free(&cf[1]); | |
636 | 2 | return ff_filter_frame(outlink, out); | |
637 | } else { | ||
638 | ✗ | if (queued_samples0 < s->nb_samples) { | |
639 | ✗ | av_log(ctx, AV_LOG_WARNING, "Input %d duration (%d samples) " | |
640 | "is shorter than crossfade duration (%"PRId64" samples), " | ||
641 | "fade-out will be shorter by %"PRId64" samples.\n", | ||
642 | idx0, queued_samples0, s->nb_samples, | ||
643 | ✗ | s->nb_samples - queued_samples0); | |
644 | ✗ | if (!queued_samples0) | |
645 | ✗ | goto fade_in; | |
646 | } | ||
647 | |||
648 | ✗ | out = ff_get_audio_buffer(outlink, queued_samples0); | |
649 | ✗ | if (!out) | |
650 | ✗ | return AVERROR(ENOMEM); | |
651 | |||
652 | ✗ | ret = ff_inlink_consume_samples(in0, queued_samples0, queued_samples0, &cf[0]); | |
653 | ✗ | if (ret < 0) { | |
654 | ✗ | av_frame_free(&out); | |
655 | ✗ | return ret; | |
656 | } | ||
657 | |||
658 | ✗ | s->fade_samples(out->extended_data, cf[0]->extended_data, cf[0]->nb_samples, | |
659 | ✗ | outlink->ch_layout.nb_channels, -1, cf[0]->nb_samples - 1, cf[0]->nb_samples, s->curve, 0., 1.); | |
660 | ✗ | out->pts = s->pts; | |
661 | ✗ | s->pts += av_rescale_q(cf[0]->nb_samples, | |
662 | ✗ | (AVRational){ 1, outlink->sample_rate }, outlink->time_base); | |
663 | ✗ | av_frame_free(&cf[0]); | |
664 | ✗ | ret = ff_filter_frame(outlink, out); | |
665 | ✗ | if (ret < 0) | |
666 | ✗ | return ret; | |
667 | |||
668 | ✗ | fade_in: | |
669 | ✗ | if (queued_samples1 < s->nb_samples) { | |
670 | ✗ | av_log(ctx, AV_LOG_WARNING, "Input %d duration (%d samples) " | |
671 | "is shorter than crossfade duration (%"PRId64" samples), " | ||
672 | "fade-in will be shorter by %"PRId64" samples.\n", | ||
673 | idx1, ff_inlink_queued_samples(in1), s->nb_samples, | ||
674 | ✗ | s->nb_samples - queued_samples1); | |
675 | ✗ | if (!queued_samples1) | |
676 | ✗ | return 0; | |
677 | } | ||
678 | |||
679 | ✗ | out = ff_get_audio_buffer(outlink, queued_samples1); | |
680 | ✗ | if (!out) | |
681 | ✗ | return AVERROR(ENOMEM); | |
682 | |||
683 | ✗ | ret = ff_inlink_consume_samples(in1, queued_samples1, queued_samples1, &cf[1]); | |
684 | ✗ | if (ret < 0) { | |
685 | ✗ | av_frame_free(&out); | |
686 | ✗ | return ret; | |
687 | } | ||
688 | |||
689 | ✗ | s->fade_samples(out->extended_data, cf[1]->extended_data, cf[1]->nb_samples, | |
690 | ✗ | outlink->ch_layout.nb_channels, 1, 0, cf[1]->nb_samples, s->curve2, 0., 1.); | |
691 | ✗ | out->pts = s->pts; | |
692 | ✗ | s->pts += av_rescale_q(cf[1]->nb_samples, | |
693 | ✗ | (AVRational){ 1, outlink->sample_rate }, outlink->time_base); | |
694 | ✗ | av_frame_free(&cf[1]); | |
695 | ✗ | return ff_filter_frame(outlink, out); | |
696 | } | ||
697 | } | ||
698 | |||
699 | 382 | static int activate(AVFilterContext *ctx) | |
700 | { | ||
701 | 382 | AudioFadeContext *s = ctx->priv; | |
702 | 382 | const int idx0 = s->xfade_idx; | |
703 | 382 | const int idx1 = s->xfade_idx + 1; | |
704 | 382 | AVFilterLink *outlink = ctx->outputs[0]; | |
705 | 382 | AVFilterLink *in0 = ctx->inputs[idx0]; | |
706 | |||
707 | 4/4 | 385 | FF_FILTER_FORWARD_STATUS_BACK_ALL(outlink, ctx); |
708 | |||
709 | 2/2 | 381 | if (idx0 == s->nb_inputs - 1) { |
710 | /* Last active input, read until EOF */ | ||
711 | 2/2 | 5 | if (ff_inlink_queued_frames(in0)) |
712 | 2 | return pass_frame(in0, outlink, &s->pts); | |
713 | 2/2 | 3 | FF_FILTER_FORWARD_STATUS(in0, outlink); |
714 | 1/2 | 2 | FF_FILTER_FORWARD_WANTED(outlink, in0); |
715 | ✗ | return FFERROR_NOT_READY; | |
716 | } | ||
717 | |||
718 | 376 | AVFilterLink *in1 = ctx->inputs[idx1]; | |
719 | 376 | int queued_samples0 = ff_inlink_queued_samples(in0); | |
720 | 2/2 | 376 | if (queued_samples0 > s->nb_samples) { |
721 | 210 | AVFrame *frame = ff_inlink_peek_frame(in0, 0); | |
722 | 2/2 | 210 | if (queued_samples0 - s->nb_samples >= frame->nb_samples) |
723 | 102 | return pass_frame(in0, outlink, &s->pts); | |
724 | } | ||
725 | |||
726 | /* Continue reading until EOF */ | ||
727 | 2/2 | 274 | if (ff_outlink_get_status(in0)) { |
728 | 2/2 | 122 | if (queued_samples0 > s->nb_samples) |
729 | 2 | return pass_samples(in0, outlink, queued_samples0 - s->nb_samples, &s->pts); | |
730 | } else { | ||
731 | 2/2 | 152 | FF_FILTER_FORWARD_WANTED(outlink, in0); |
732 | 1 | return FFERROR_NOT_READY; | |
733 | } | ||
734 | |||
735 | /* At this point, in0 has reached EOF with no more samples remaining | ||
736 | * except those that we want to crossfade */ | ||
737 | 1/2 | 120 | av_assert0(queued_samples0 <= s->nb_samples); |
738 | 120 | int queued_samples1 = ff_inlink_queued_samples(in1); | |
739 | |||
740 | /* If this clip is sandwiched between two other clips, buffer at least | ||
741 | * twice the total crossfade duration to ensure that we won't reach EOF | ||
742 | * during the second fade (in which case we would shorten the fade) */ | ||
743 | 120 | int needed_samples = s->nb_samples; | |
744 | 2/2 | 120 | if (idx1 < s->nb_inputs - 1) |
745 | 82 | needed_samples *= 2; | |
746 | |||
747 | 3/4 | 120 | if (queued_samples1 >= needed_samples || ff_outlink_get_status(in1)) { |
748 | /* The first filter may EOF before delivering any samples, in which | ||
749 | * case it's possible for pass_crossfade() to be a no-op. Just ensure | ||
750 | * the activate() function runs again after incrementing the index to | ||
751 | * ensure we correctly move on to the next input in that case. */ | ||
752 | 2 | s->xfade_idx++; | |
753 | 2 | ff_filter_set_ready(ctx, 10); | |
754 | 2 | return pass_crossfade(ctx, idx0, idx1); | |
755 | } else { | ||
756 | 1/2 | 118 | FF_FILTER_FORWARD_WANTED(outlink, in1); |
757 | ✗ | return FFERROR_NOT_READY; | |
758 | } | ||
759 | } | ||
760 | |||
761 | 2 | static av_cold int acrossfade_init(AVFilterContext *ctx) | |
762 | { | ||
763 | 2 | AudioFadeContext *s = ctx->priv; | |
764 | int ret; | ||
765 | |||
766 | 2/2 | 8 | for (int i = 0; i < s->nb_inputs; i++) { |
767 | 12 | AVFilterPad pad = { | |
768 | 6 | .name = av_asprintf("crossfade%d", i), | |
769 | .type = AVMEDIA_TYPE_AUDIO, | ||
770 | }; | ||
771 | 1/2 | 6 | if (!pad.name) |
772 | ✗ | return AVERROR(ENOMEM); | |
773 | |||
774 | 6 | ret = ff_append_inpad_free_name(ctx, &pad); | |
775 | 1/2 | 6 | if (ret < 0) |
776 | ✗ | return ret; | |
777 | } | ||
778 | |||
779 | 2 | return 0; | |
780 | } | ||
781 | |||
782 | 1 | static int acrossfade_config_output(AVFilterLink *outlink) | |
783 | { | ||
784 | 1 | AVFilterContext *ctx = outlink->src; | |
785 | 1 | AudioFadeContext *s = ctx->priv; | |
786 | |||
787 | 1 | outlink->time_base = ctx->inputs[0]->time_base; | |
788 | |||
789 | 1/9 | 1 | switch (outlink->format) { |
790 | ✗ | case AV_SAMPLE_FMT_DBL: s->crossfade_samples = crossfade_samples_dbl; break; | |
791 | ✗ | case AV_SAMPLE_FMT_DBLP: s->crossfade_samples = crossfade_samples_dblp; break; | |
792 | ✗ | case AV_SAMPLE_FMT_FLT: s->crossfade_samples = crossfade_samples_flt; break; | |
793 | ✗ | case AV_SAMPLE_FMT_FLTP: s->crossfade_samples = crossfade_samples_fltp; break; | |
794 | 1 | case AV_SAMPLE_FMT_S16: s->crossfade_samples = crossfade_samples_s16; break; | |
795 | ✗ | case AV_SAMPLE_FMT_S16P: s->crossfade_samples = crossfade_samples_s16p; break; | |
796 | ✗ | case AV_SAMPLE_FMT_S32: s->crossfade_samples = crossfade_samples_s32; break; | |
797 | ✗ | case AV_SAMPLE_FMT_S32P: s->crossfade_samples = crossfade_samples_s32p; break; | |
798 | } | ||
799 | |||
800 | 1 | config_output(outlink); | |
801 | |||
802 | 1 | return 0; | |
803 | } | ||
804 | |||
805 | static const AVFilterPad avfilter_af_acrossfade_outputs[] = { | ||
806 | { | ||
807 | .name = "default", | ||
808 | .type = AVMEDIA_TYPE_AUDIO, | ||
809 | .config_props = acrossfade_config_output, | ||
810 | }, | ||
811 | }; | ||
812 | |||
813 | const FFFilter ff_af_acrossfade = { | ||
814 | .p.name = "acrossfade", | ||
815 | .p.description = NULL_IF_CONFIG_SMALL("Cross fade two input audio streams."), | ||
816 | .p.priv_class = &acrossfade_class, | ||
817 | .p.flags = AVFILTER_FLAG_DYNAMIC_INPUTS, | ||
818 | .priv_size = sizeof(AudioFadeContext), | ||
819 | .init = acrossfade_init, | ||
820 | .activate = activate, | ||
821 | FILTER_OUTPUTS(avfilter_af_acrossfade_outputs), | ||
822 | FILTER_SAMPLEFMTS_ARRAY(sample_fmts), | ||
823 | }; | ||
824 | |||
825 | #endif /* CONFIG_ACROSSFADE_FILTER */ | ||
826 |
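
The gain mapping implemented by fade_gain() above (clamp index/range into [0,1], shape the result with the selected curve, then rescale it onto [silence, unity]) can be hard to follow inside the coverage table, so the following minimal standalone sketch reproduces it for the tri, qsin and exp curves. The demo_gain() helper, the curve-name strings and the sample points are illustrative assumptions for this sketch only; they are not part of libavfilter.

```c
#include <math.h>
#include <stdio.h>
#include <string.h>

#ifndef M_PI
#define M_PI 3.14159265358979323846
#endif

/* Minimal sketch of the mapping performed by fade_gain():
 * clamp index/range to [0,1], shape it with a curve, then
 * rescale the result onto [silence, unity].  demo_gain() and
 * the curve-name strings are illustrative only. */
static double demo_gain(const char *curve, double index, double range,
                        double silence, double unity)
{
    double g = index / range;
    g = g < 0.0 ? 0.0 : (g > 1.0 ? 1.0 : g);       /* av_clipd(g, 0, 1) */

    if (!strcmp(curve, "qsin"))
        g = sin(g * M_PI / 2.0);                   /* quarter of sine wave */
    else if (!strcmp(curve, "exp"))
        g = exp(-11.512925464970227 * (1.0 - g));  /* -11.5... = 5*ln(0.1) */
    /* "tri" is the default linear slope: g is used unchanged */

    return silence + (unity - silence) * g;
}

int main(void)
{
    /* gain at 25%, 50%, 75% and 100% of a fade-in from silence=0 to unity=1 */
    for (int i = 1; i <= 4; i++)
        printf("i=%d  tri=%.3f  qsin=%.3f  exp=%.3f\n", i,
               demo_gain("tri",  i, 4, 0.0, 1.0),
               demo_gain("qsin", i, 4, 0.0, 1.0),
               demo_gain("exp",  i, 4, 0.0, 1.0));
    return 0;
}
```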