| Line | Branch | Exec | Source |
|---|---|---|---|
| 1 | /* | ||
| 2 | * Copyright (C) 2010 Georg Martius <georg.martius@web.de> | ||
| 3 | * Copyright (C) 2010 Daniel G. Taylor <dan@programmer-art.org> | ||
| 4 | * | ||
| 5 | * This file is part of FFmpeg. | ||
| 6 | * | ||
| 7 | * FFmpeg is free software; you can redistribute it and/or | ||
| 8 | * modify it under the terms of the GNU Lesser General Public | ||
| 9 | * License as published by the Free Software Foundation; either | ||
| 10 | * version 2.1 of the License, or (at your option) any later version. | ||
| 11 | * | ||
| 12 | * FFmpeg is distributed in the hope that it will be useful, | ||
| 13 | * but WITHOUT ANY WARRANTY; without even the implied warranty of | ||
| 14 | * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | ||
| 15 | * Lesser General Public License for more details. | ||
| 16 | * | ||
| 17 | * You should have received a copy of the GNU Lesser General Public | ||
| 18 | * License along with FFmpeg; if not, write to the Free Software | ||
| 19 | * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA | ||
| 20 | */ | ||
| 21 | |||
| 22 | /** | ||
| 23 | * @file | ||
| 24 | * fast deshake / depan video filter | ||
| 25 | * | ||
| 26 | * SAD block-matching motion compensation to fix small changes in | ||
| 27 | * horizontal and/or vertical shift. This filter helps remove camera shake | ||
| 28 | * from hand-holding a camera, bumping a tripod, moving on a vehicle, etc. | ||
| 29 | * | ||
| 30 | * Algorithm: | ||
| 31 | * - For each frame with one previous reference frame | ||
| 32 | * - For each block in the frame | ||
| 33 | * - If contrast > threshold then find likely motion vector | ||
| 34 | * - For all found motion vectors | ||
| 35 | * - Find most common, store as global motion vector | ||
| 36 | * - Find most likely rotation angle | ||
| 37 | * - Transform image along global motion | ||
| 38 | * | ||
| 39 | * TODO: | ||
| 40 | * - Fill frame edges based on previous/next reference frames | ||
| 41 | * - Fill frame edges by stretching image near the edges? | ||
| 42 | * - Can this be done quickly and look decent? | ||
| 43 | * | ||
| 44 | * Dark Shikari links to http://wiki.videolan.org/SoC_x264_2010#GPU_Motion_Estimation_2 | ||
| 45 | * for an algorithm similar to what could be used here to get the gmv | ||
| 46 | * It requires only a couple diamond searches + fast downscaling | ||
| 47 | * | ||
| 48 | * Special thanks to Jason Kotenko for his help with the algorithm and my | ||
| 49 | * inability to see simple errors in C code. | ||
| 50 | */ | ||
| 51 | |||
| 52 | #include "avfilter.h" | ||
| 53 | #include "filters.h" | ||
| 54 | #include "transform.h" | ||
| 55 | #include "video.h" | ||
| 56 | #include "libavutil/common.h" | ||
| 57 | #include "libavutil/file_open.h" | ||
| 58 | #include "libavutil/mem.h" | ||
| 59 | #include "libavutil/opt.h" | ||
| 60 | #include "libavutil/pixdesc.h" | ||
| 61 | #include "libavutil/pixelutils.h" | ||
| 62 | #include "libavutil/qsort.h" | ||
| 63 | |||
| 64 | |||
| 65 | enum SearchMethod { | ||
| 66 | EXHAUSTIVE, ///< Search all possible positions | ||
| 67 | SMART_EXHAUSTIVE, ///< Search most possible positions (faster) | ||
| 68 | SEARCH_COUNT | ||
| 69 | }; | ||
| 70 | |||
| 71 | typedef struct IntMotionVector { | ||
| 72 | int x; ///< Horizontal shift | ||
| 73 | int y; ///< Vertical shift | ||
| 74 | } IntMotionVector; | ||
| 75 | |||
| 76 | typedef struct MotionVector { | ||
| 77 | double x; ///< Horizontal shift | ||
| 78 | double y; ///< Vertical shift | ||
| 79 | } MotionVector; | ||
| 80 | |||
| 81 | typedef struct Transform { | ||
| 82 | MotionVector vec; ///< Motion vector | ||
| 83 | double angle; ///< Angle of rotation | ||
| 84 | double zoom; ///< Zoom percentage | ||
| 85 | } Transform; | ||
| 86 | |||
| 87 | #define MAX_R 64 | ||
| 88 | |||
| 89 | typedef struct DeshakeContext { | ||
| 90 | const AVClass *class; | ||
| 91 | int counts[2*MAX_R+1][2*MAX_R+1]; ///< Scratch buffer for motion search | ||
| 92 | double *angles; ///< Scratch buffer for block angles | ||
| 93 | unsigned angles_size; | ||
| 94 | AVFrame *ref; ///< Previous frame | ||
| 95 | int rx; ///< Maximum horizontal shift | ||
| 96 | int ry; ///< Maximum vertical shift | ||
| 97 | int edge; ///< Edge fill method | ||
| 98 | int blocksize; ///< Size of blocks to compare | ||
| 99 | int contrast; ///< Contrast threshold | ||
| 100 | int search; ///< Motion search method | ||
| 101 | av_pixelutils_sad_fn sad; ///< Sum of the absolute difference function | ||
| 102 | Transform last; ///< Transform from last frame | ||
| 103 | int refcount; ///< Number of reference frames (defines averaging window) | ||
| 104 | FILE *fp; | ||
| 105 | Transform avg; | ||
| 106 | int cw; ///< Crop motion search to this box | ||
| 107 | int ch; | ||
| 108 | int cx; | ||
| 109 | int cy; | ||
| 110 | char *filename; ///< Motion search detailed log filename | ||
| 111 | int opencl; | ||
| 112 | int (* transform)(AVFilterContext *ctx, int width, int height, int cw, int ch, | ||
| 113 | const float *matrix_y, const float *matrix_uv, enum InterpolateMethod interpolate, | ||
| 114 | enum FillMethod fill, AVFrame *in, AVFrame *out); | ||
| 115 | } DeshakeContext; | ||
| 116 | |||
| 117 | #define OFFSET(x) offsetof(DeshakeContext, x) | ||
| 118 | #define FLAGS AV_OPT_FLAG_VIDEO_PARAM|AV_OPT_FLAG_FILTERING_PARAM | ||
| 119 | |||
| 120 | static const AVOption deshake_options[] = { | ||
| 121 | { "x", "set x for the rectangular search area", OFFSET(cx), AV_OPT_TYPE_INT, {.i64=-1}, -1, INT_MAX, .flags = FLAGS }, | ||
| 122 | { "y", "set y for the rectangular search area", OFFSET(cy), AV_OPT_TYPE_INT, {.i64=-1}, -1, INT_MAX, .flags = FLAGS }, | ||
| 123 | { "w", "set width for the rectangular search area", OFFSET(cw), AV_OPT_TYPE_INT, {.i64=-1}, -1, INT_MAX, .flags = FLAGS }, | ||
| 124 | { "h", "set height for the rectangular search area", OFFSET(ch), AV_OPT_TYPE_INT, {.i64=-1}, -1, INT_MAX, .flags = FLAGS }, | ||
| 125 | { "rx", "set x for the rectangular search area", OFFSET(rx), AV_OPT_TYPE_INT, {.i64=16}, 0, MAX_R, .flags = FLAGS }, | ||
| 126 | { "ry", "set y for the rectangular search area", OFFSET(ry), AV_OPT_TYPE_INT, {.i64=16}, 0, MAX_R, .flags = FLAGS }, | ||
| 127 | { "edge", "set edge mode", OFFSET(edge), AV_OPT_TYPE_INT, {.i64=FILL_MIRROR}, FILL_BLANK, FILL_COUNT-1, FLAGS, .unit = "edge"}, | ||
| 128 | { "blank", "fill zeroes at blank locations", 0, AV_OPT_TYPE_CONST, {.i64=FILL_BLANK}, INT_MIN, INT_MAX, FLAGS, .unit = "edge" }, | ||
| 129 | { "original", "original image at blank locations", 0, AV_OPT_TYPE_CONST, {.i64=FILL_ORIGINAL}, INT_MIN, INT_MAX, FLAGS, .unit = "edge" }, | ||
| 130 | { "clamp", "extruded edge value at blank locations", 0, AV_OPT_TYPE_CONST, {.i64=FILL_CLAMP}, INT_MIN, INT_MAX, FLAGS, .unit = "edge" }, | ||
| 131 | { "mirror", "mirrored edge at blank locations", 0, AV_OPT_TYPE_CONST, {.i64=FILL_MIRROR}, INT_MIN, INT_MAX, FLAGS, .unit = "edge" }, | ||
| 132 | { "blocksize", "set motion search blocksize", OFFSET(blocksize), AV_OPT_TYPE_INT, {.i64=8}, 4, 128, .flags = FLAGS }, | ||
| 133 | { "contrast", "set contrast threshold for blocks", OFFSET(contrast), AV_OPT_TYPE_INT, {.i64=125}, 1, 255, .flags = FLAGS }, | ||
| 134 | { "search", "set search strategy", OFFSET(search), AV_OPT_TYPE_INT, {.i64=EXHAUSTIVE}, EXHAUSTIVE, SEARCH_COUNT-1, FLAGS, .unit = "smode" }, | ||
| 135 | { "exhaustive", "exhaustive search", 0, AV_OPT_TYPE_CONST, {.i64=EXHAUSTIVE}, INT_MIN, INT_MAX, FLAGS, .unit = "smode" }, | ||
| 136 | { "less", "less exhaustive search", 0, AV_OPT_TYPE_CONST, {.i64=SMART_EXHAUSTIVE}, INT_MIN, INT_MAX, FLAGS, .unit = "smode" }, | ||
| 137 | { "filename", "set motion search detailed log file name", OFFSET(filename), AV_OPT_TYPE_STRING, {.str=NULL}, .flags = FLAGS }, | ||
| 138 | { "opencl", "ignored", OFFSET(opencl), AV_OPT_TYPE_BOOL, {.i64=0}, 0, 1, .flags = FLAGS }, | ||
| 139 | { NULL } | ||
| 140 | }; | ||
| 141 | |||
| 142 | AVFILTER_DEFINE_CLASS(deshake); | ||
| 143 | |||
| 144 | ✗ | static int cmp(const void *a, const void *b) | |
| 145 | { | ||
| 146 | ✗ | return FFDIFFSIGN(*(const double *)a, *(const double *)b); | |
| 147 | } | ||
| 148 | |||
| 149 | /** | ||
| 150 | * Cleaned mean (cuts off 20% of values to remove outliers and then averages) | ||
| 151 | */ | ||
| 152 | ✗ | static double clean_mean(double *values, int count) | |
| 153 | { | ||
| 154 | ✗ | double mean = 0; | |
| 155 | ✗ | int cut = count / 5; | |
| 156 | int x; | ||
| 157 | |||
| 158 | ✗ | AV_QSORT(values, count, double, cmp); | |
| 159 | |||
| 160 | ✗ | for (x = cut; x < count - cut; x++) { | |
| 161 | ✗ | mean += values[x]; | |
| 162 | } | ||
| 163 | |||
| 164 | ✗ | return mean / (count - cut * 2); | |
| 165 | } | ||
| 166 | |||
| 167 | /** | ||
| 168 | * Find the most likely shift in motion between two frames for a given | ||
| 169 | * macroblock. Test each block against several shifts given by the rx | ||
| 170 | * and ry attributes. Searches using a simple matrix of those shifts and | ||
| 171 | * chooses the most likely shift by the smallest difference in blocks. | ||
| 172 | */ | ||
| 173 | ✗ | static void find_block_motion(DeshakeContext *deshake, uint8_t *src1, | |
| 174 | uint8_t *src2, int cx, int cy, int stride, | ||
| 175 | IntMotionVector *mv) | ||
| 176 | { | ||
| 177 | int x, y; | ||
| 178 | int diff; | ||
| 179 | ✗ | int smallest = INT_MAX; | |
| 180 | int tmp, tmp2; | ||
| 181 | |||
| 182 | #define CMP(i, j) deshake->sad(src1 + cy * stride + cx, stride,\ | ||
| 183 | src2 + (j) * stride + (i), stride) | ||
| 184 | |||
| 185 | ✗ | if (deshake->search == EXHAUSTIVE) { | |
| 186 | // Compare every possible position - this is sloooow! | ||
| 187 | ✗ | for (y = -deshake->ry; y <= deshake->ry; y++) { | |
| 188 | ✗ | for (x = -deshake->rx; x <= deshake->rx; x++) { | |
| 189 | ✗ | diff = CMP(cx - x, cy - y); | |
| 190 | ✗ | if (diff < smallest) { | |
| 191 | ✗ | smallest = diff; | |
| 192 | ✗ | mv->x = x; | |
| 193 | ✗ | mv->y = y; | |
| 194 | } | ||
| 195 | } | ||
| 196 | } | ||
| 197 | ✗ | } else if (deshake->search == SMART_EXHAUSTIVE) { | |
| 198 | // Compare every other possible position and find the best match | ||
| 199 | ✗ | for (y = -deshake->ry + 1; y < deshake->ry; y += 2) { | |
| 200 | ✗ | for (x = -deshake->rx + 1; x < deshake->rx; x += 2) { | |
| 201 | ✗ | diff = CMP(cx - x, cy - y); | |
| 202 | ✗ | if (diff < smallest) { | |
| 203 | ✗ | smallest = diff; | |
| 204 | ✗ | mv->x = x; | |
| 205 | ✗ | mv->y = y; | |
| 206 | } | ||
| 207 | } | ||
| 208 | } | ||
| 209 | |||
| 210 | // Hone in on the specific best match around the match we found above | ||
| 211 | ✗ | tmp = mv->x; | |
| 212 | ✗ | tmp2 = mv->y; | |
| 213 | |||
| 214 | ✗ | for (y = tmp2 - 1; y <= tmp2 + 1; y++) { | |
| 215 | ✗ | for (x = tmp - 1; x <= tmp + 1; x++) { | |
| 216 | ✗ | if (x == tmp && y == tmp2) | |
| 217 | ✗ | continue; | |
| 218 | |||
| 219 | ✗ | diff = CMP(cx - x, cy - y); | |
| 220 | ✗ | if (diff < smallest) { | |
| 221 | ✗ | smallest = diff; | |
| 222 | ✗ | mv->x = x; | |
| 223 | ✗ | mv->y = y; | |
| 224 | } | ||
| 225 | } | ||
| 226 | } | ||
| 227 | } | ||
| 228 | |||
| 229 | ✗ | if (smallest > 512) { | |
| 230 | ✗ | mv->x = -1; | |
| 231 | ✗ | mv->y = -1; | |
| 232 | } | ||
| 233 | //av_log(NULL, AV_LOG_ERROR, "%d\n", smallest); | ||
| 234 | //av_log(NULL, AV_LOG_ERROR, "Final: (%d, %d) = %d x %d\n", cx, cy, mv->x, mv->y); | ||
| 235 | ✗ | } | |
| 236 | |||
| 237 | /** | ||
| 238 | * Find the contrast of a given block. When searching for global motion we | ||
| 239 | * really only care about the high contrast blocks, so using this method we | ||
| 240 | * can actually skip blocks we don't care much about. | ||
| 241 | */ | ||
| 242 | ✗ | static int block_contrast(uint8_t *src, int x, int y, int stride, int blocksize) | |
| 243 | { | ||
| 244 | ✗ | int highest = 0; | |
| 245 | ✗ | int lowest = 255; | |
| 246 | int i, j, pos; | ||
| 247 | |||
| 248 | ✗ | for (i = 0; i <= blocksize * 2; i++) { | |
| 249 | // We use a width of 16 here to match the sad function | ||
| 250 | ✗ | for (j = 0; j <= 15; j++) { | |
| 251 | ✗ | pos = (y + i) * stride + (x + j); | |
| 252 | ✗ | if (src[pos] < lowest) | |
| 253 | ✗ | lowest = src[pos]; | |
| 254 | ✗ | else if (src[pos] > highest) { | |
| 255 | ✗ | highest = src[pos]; | |
| 256 | } | ||
| 257 | } | ||
| 258 | } | ||
| 259 | |||
| 260 | ✗ | return highest - lowest; | |
| 261 | } | ||
| 262 | |||
| 263 | /** | ||
| 264 | * Find the rotation for a given block. | ||
| 265 | */ | ||
| 266 | ✗ | static double block_angle(int x, int y, int cx, int cy, IntMotionVector *shift) | |
| 267 | { | ||
| 268 | double a1, a2, diff; | ||
| 269 | |||
| 270 | ✗ | a1 = atan2(y - cy, x - cx); | |
| 271 | ✗ | a2 = atan2(y - cy + shift->y, x - cx + shift->x); | |
| 272 | |||
| 273 | ✗ | diff = a2 - a1; | |
| 274 | |||
| 275 | ✗ | return (diff > M_PI) ? diff - 2 * M_PI : | |
| 276 | ✗ | (diff < -M_PI) ? diff + 2 * M_PI : | |
| 277 | diff; | ||
| 278 | } | ||
| 279 | |||
| 280 | /** | ||
| 281 | * Find the estimated global motion for a scene given the most likely shift | ||
| 282 | * for each block in the frame. The global motion is estimated to be the | ||
| 283 | * same as the motion from most blocks in the frame, so if most blocks | ||
| 284 | * move one pixel to the right and two pixels down, this would yield a | ||
| 285 | * motion vector (1, -2). | ||
| 286 | */ | ||
| 287 | ✗ | static void find_motion(DeshakeContext *deshake, uint8_t *src1, uint8_t *src2, | |
| 288 | int width, int height, int stride, Transform *t) | ||
| 289 | { | ||
| 290 | int x, y; | ||
| 291 | ✗ | IntMotionVector mv = {0, 0}; | |
| 292 | ✗ | int count_max_value = 0; | |
| 293 | int contrast; | ||
| 294 | |||
| 295 | int pos; | ||
| 296 | ✗ | int center_x = 0, center_y = 0; | |
| 297 | double p_x, p_y; | ||
| 298 | |||
| 299 | ✗ | av_fast_malloc(&deshake->angles, &deshake->angles_size, width * height / (16 * deshake->blocksize) * sizeof(*deshake->angles)); | |
| 300 | |||
| 301 | // Reset counts to zero | ||
| 302 | ✗ | for (x = 0; x < deshake->rx * 2 + 1; x++) { | |
| 303 | ✗ | for (y = 0; y < deshake->ry * 2 + 1; y++) { | |
| 304 | ✗ | deshake->counts[x][y] = 0; | |
| 305 | } | ||
| 306 | } | ||
| 307 | |||
| 308 | ✗ | pos = 0; | |
| 309 | // Find motion for every block and store the motion vector in the counts | ||
| 310 | ✗ | for (y = deshake->ry; y < height - deshake->ry - (deshake->blocksize * 2); y += deshake->blocksize * 2) { | |
| 311 | // We use a width of 16 here to match the sad function | ||
| 312 | ✗ | for (x = deshake->rx; x < width - deshake->rx - 16; x += 16) { | |
| 313 | // If the contrast is too low, just skip this block as it probably | ||
| 314 | // won't be very useful to us. | ||
| 315 | ✗ | contrast = block_contrast(src2, x, y, stride, deshake->blocksize); | |
| 316 | ✗ | if (contrast > deshake->contrast) { | |
| 317 | //av_log(NULL, AV_LOG_ERROR, "%d\n", contrast); | ||
| 318 | ✗ | find_block_motion(deshake, src1, src2, x, y, stride, &mv); | |
| 319 | ✗ | if (mv.x != -1 && mv.y != -1) { | |
| 320 | ✗ | deshake->counts[mv.x + deshake->rx][mv.y + deshake->ry] += 1; | |
| 321 | ✗ | if (x > deshake->rx && y > deshake->ry) | |
| 322 | ✗ | deshake->angles[pos++] = block_angle(x, y, 0, 0, &mv); | |
| 323 | |||
| 324 | ✗ | center_x += mv.x; | |
| 325 | ✗ | center_y += mv.y; | |
| 326 | } | ||
| 327 | } | ||
| 328 | } | ||
| 329 | } | ||
| 330 | |||
| 331 | ✗ | if (pos) { | |
| 332 | ✗ | center_x /= pos; | |
| 333 | ✗ | center_y /= pos; | |
| 334 | ✗ | t->angle = clean_mean(deshake->angles, pos); | |
| 335 | ✗ | if (t->angle < 0.001) | |
| 336 | ✗ | t->angle = 0; | |
| 337 | } else { | ||
| 338 | ✗ | t->angle = 0; | |
| 339 | } | ||
| 340 | |||
| 341 | // Find the most common motion vector in the frame and use it as the gmv | ||
| 342 | ✗ | for (y = deshake->ry * 2; y >= 0; y--) { | |
| 343 | ✗ | for (x = 0; x < deshake->rx * 2 + 1; x++) { | |
| 344 | //av_log(NULL, AV_LOG_ERROR, "%5d ", deshake->counts[x][y]); | ||
| 345 | ✗ | if (deshake->counts[x][y] > count_max_value) { | |
| 346 | ✗ | t->vec.x = x - deshake->rx; | |
| 347 | ✗ | t->vec.y = y - deshake->ry; | |
| 348 | ✗ | count_max_value = deshake->counts[x][y]; | |
| 349 | } | ||
| 350 | } | ||
| 351 | //av_log(NULL, AV_LOG_ERROR, "\n"); | ||
| 352 | } | ||
| 353 | |||
| 354 | ✗ | p_x = (center_x - width / 2.0); | |
| 355 | ✗ | p_y = (center_y - height / 2.0); | |
| 356 | ✗ | t->vec.x += (cos(t->angle)-1)*p_x - sin(t->angle)*p_y; | |
| 357 | ✗ | t->vec.y += sin(t->angle)*p_x + (cos(t->angle)-1)*p_y; | |
| 358 | |||
| 359 | // Clamp max shift & rotation? | ||
| 360 | ✗ | t->vec.x = av_clipf(t->vec.x, -deshake->rx * 2, deshake->rx * 2); | |
| 361 | ✗ | t->vec.y = av_clipf(t->vec.y, -deshake->ry * 2, deshake->ry * 2); | |
| 362 | ✗ | t->angle = av_clipf(t->angle, -0.1, 0.1); | |
| 363 | |||
| 364 | //av_log(NULL, AV_LOG_ERROR, "%d x %d\n", avg->x, avg->y); | ||
| 365 | ✗ | } | |
| 366 | |||
| 367 | ✗ | static int deshake_transform_c(AVFilterContext *ctx, | |
| 368 | int width, int height, int cw, int ch, | ||
| 369 | const float *matrix_y, const float *matrix_uv, | ||
| 370 | enum InterpolateMethod interpolate, | ||
| 371 | enum FillMethod fill, AVFrame *in, AVFrame *out) | ||
| 372 | { | ||
| 373 | ✗ | int i = 0, ret = 0; | |
| 374 | const float *matrixs[3]; | ||
| 375 | int plane_w[3], plane_h[3]; | ||
| 376 | ✗ | matrixs[0] = matrix_y; | |
| 377 | ✗ | matrixs[1] = matrixs[2] = matrix_uv; | |
| 378 | ✗ | plane_w[0] = width; | |
| 379 | ✗ | plane_w[1] = plane_w[2] = cw; | |
| 380 | ✗ | plane_h[0] = height; | |
| 381 | ✗ | plane_h[1] = plane_h[2] = ch; | |
| 382 | |||
| 383 | ✗ | for (i = 0; i < 3; i++) { | |
| 384 | // Transform the luma and chroma planes | ||
| 385 | ✗ | ret = ff_affine_transform(in->data[i], out->data[i], in->linesize[i], | |
| 386 | out->linesize[i], plane_w[i], plane_h[i], | ||
| 387 | matrixs[i], interpolate, fill); | ||
| 388 | ✗ | if (ret < 0) | |
| 389 | ✗ | return ret; | |
| 390 | } | ||
| 391 | ✗ | return ret; | |
| 392 | } | ||
| 393 | |||
| 394 | ✗ | static av_cold int init(AVFilterContext *ctx) | |
| 395 | { | ||
| 396 | ✗ | DeshakeContext *deshake = ctx->priv; | |
| 397 | |||
| 398 | ✗ | deshake->refcount = 20; // XXX: add to options? | |
| 399 | ✗ | deshake->blocksize /= 2; | |
| 400 | ✗ | deshake->blocksize = av_clip(deshake->blocksize, 4, 128); | |
| 401 | |||
| 402 | ✗ | if (deshake->rx % 16) { | |
| 403 | ✗ | av_log(ctx, AV_LOG_ERROR, "rx must be a multiple of 16\n"); | |
| 404 | ✗ | return AVERROR_PATCHWELCOME; | |
| 405 | } | ||
| 406 | |||
| 407 | ✗ | if (deshake->filename) | |
| 408 | ✗ | deshake->fp = avpriv_fopen_utf8(deshake->filename, "w"); | |
| 409 | ✗ | if (deshake->fp) | |
| 410 | ✗ | fwrite("Ori x, Avg x, Fin x, Ori y, Avg y, Fin y, Ori angle, Avg angle, Fin angle, Ori zoom, Avg zoom, Fin zoom\n", 1, 104, deshake->fp); | |
| 411 | |||
| 412 | // Quadword align left edge of box for MMX code, adjust width if necessary | ||
| 413 | // to keep right margin | ||
| 414 | ✗ | if (deshake->cx > 0) { | |
| 415 | ✗ | deshake->cw += deshake->cx - (deshake->cx & ~15); | |
| 416 | ✗ | deshake->cx &= ~15; | |
| 417 | } | ||
| 418 | ✗ | deshake->transform = deshake_transform_c; | |
| 419 | |||
| 420 | ✗ | av_log(ctx, AV_LOG_VERBOSE, "cx: %d, cy: %d, cw: %d, ch: %d, rx: %d, ry: %d, edge: %d blocksize: %d contrast: %d search: %d\n", | |
| 421 | deshake->cx, deshake->cy, deshake->cw, deshake->ch, | ||
| 422 | ✗ | deshake->rx, deshake->ry, deshake->edge, deshake->blocksize * 2, deshake->contrast, deshake->search); | |
| 423 | |||
| 424 | ✗ | return 0; | |
| 425 | } | ||
| 426 | |||
| 427 | static const enum AVPixelFormat pix_fmts[] = { | ||
| 428 | AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUV410P, | ||
| 429 | AV_PIX_FMT_YUV411P, AV_PIX_FMT_YUV440P, AV_PIX_FMT_YUVJ420P, AV_PIX_FMT_YUVJ422P, | ||
| 430 | AV_PIX_FMT_YUVJ444P, AV_PIX_FMT_YUVJ440P, AV_PIX_FMT_NONE | ||
| 431 | }; | ||
| 432 | |||
| 433 | ✗ | static int config_props(AVFilterLink *link) | |
| 434 | { | ||
| 435 | ✗ | DeshakeContext *deshake = link->dst->priv; | |
| 436 | |||
| 437 | ✗ | deshake->ref = NULL; | |
| 438 | ✗ | deshake->last.vec.x = 0; | |
| 439 | ✗ | deshake->last.vec.y = 0; | |
| 440 | ✗ | deshake->last.angle = 0; | |
| 441 | ✗ | deshake->last.zoom = 0; | |
| 442 | |||
| 443 | ✗ | return 0; | |
| 444 | } | ||
| 445 | |||
| 446 | ✗ | static av_cold void uninit(AVFilterContext *ctx) | |
| 447 | { | ||
| 448 | ✗ | DeshakeContext *deshake = ctx->priv; | |
| 449 | ✗ | av_frame_free(&deshake->ref); | |
| 450 | ✗ | av_freep(&deshake->angles); | |
| 451 | ✗ | deshake->angles_size = 0; | |
| 452 | ✗ | if (deshake->fp) | |
| 453 | ✗ | fclose(deshake->fp); | |
| 454 | ✗ | } | |
| 455 | |||
| 456 | ✗ | static int filter_frame(AVFilterLink *link, AVFrame *in) | |
| 457 | { | ||
| 458 | ✗ | DeshakeContext *deshake = link->dst->priv; | |
| 459 | ✗ | AVFilterLink *outlink = link->dst->outputs[0]; | |
| 460 | AVFrame *out; | ||
| 461 | ✗ | Transform t = {{0},0}, orig = {{0},0}; | |
| 462 | float matrix_y[9], matrix_uv[9]; | ||
| 463 | ✗ | float alpha = 2.0 / deshake->refcount; | |
| 464 | char tmp[256]; | ||
| 465 | ✗ | int ret = 0; | |
| 466 | ✗ | const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(link->format); | |
| 467 | ✗ | const int chroma_width = AV_CEIL_RSHIFT(link->w, desc->log2_chroma_w); | |
| 468 | ✗ | const int chroma_height = AV_CEIL_RSHIFT(link->h, desc->log2_chroma_h); | |
| 469 | int aligned; | ||
| 470 | float transform_zoom; | ||
| 471 | |||
| 472 | ✗ | out = ff_get_video_buffer(outlink, outlink->w, outlink->h); | |
| 473 | ✗ | if (!out) { | |
| 474 | ✗ | av_frame_free(&in); | |
| 475 | ✗ | return AVERROR(ENOMEM); | |
| 476 | } | ||
| 477 | ✗ | av_frame_copy_props(out, in); | |
| 478 | |||
| 479 | ✗ | aligned = !((intptr_t)in->data[0] & 15 | in->linesize[0] & 15); | |
| 480 | ✗ | deshake->sad = av_pixelutils_get_sad_fn(4, 4, aligned, deshake); // 16x16, 2nd source unaligned | |
| 481 | ✗ | if (!deshake->sad) { | |
| 482 | ✗ | ret = AVERROR(EINVAL); | |
| 483 | ✗ | goto fail; | |
| 484 | } | ||
| 485 | |||
| 486 | ✗ | if (deshake->cx < 0 || deshake->cy < 0 || deshake->cw < 0 || deshake->ch < 0) { | |
| 487 | // Find the most likely global motion for the current frame | ||
| 488 | ✗ | find_motion(deshake, (deshake->ref == NULL) ? in->data[0] : deshake->ref->data[0], in->data[0], link->w, link->h, in->linesize[0], &t); | |
| 489 | } else { | ||
| 490 | ✗ | uint8_t *src1 = (deshake->ref == NULL) ? in->data[0] : deshake->ref->data[0]; | |
| 491 | ✗ | uint8_t *src2 = in->data[0]; | |
| 492 | |||
| 493 | ✗ | deshake->cx = FFMIN(deshake->cx, link->w); | |
| 494 | ✗ | deshake->cy = FFMIN(deshake->cy, link->h); | |
| 495 | |||
| 496 | ✗ | if ((unsigned)deshake->cx + (unsigned)deshake->cw > link->w) deshake->cw = link->w - deshake->cx; | |
| 497 | ✗ | if ((unsigned)deshake->cy + (unsigned)deshake->ch > link->h) deshake->ch = link->h - deshake->cy; | |
| 498 | |||
| 499 | // Quadword align right margin | ||
| 500 | ✗ | deshake->cw &= ~15; | |
| 501 | |||
| 502 | ✗ | src1 += deshake->cy * in->linesize[0] + deshake->cx; | |
| 503 | ✗ | src2 += deshake->cy * in->linesize[0] + deshake->cx; | |
| 504 | |||
| 505 | ✗ | find_motion(deshake, src1, src2, deshake->cw, deshake->ch, in->linesize[0], &t); | |
| 506 | } | ||
| 507 | |||
| 508 | |||
| 509 | // Copy transform so we can output it later to compare to the smoothed value | ||
| 510 | ✗ | orig.vec.x = t.vec.x; | |
| 511 | ✗ | orig.vec.y = t.vec.y; | |
| 512 | ✗ | orig.angle = t.angle; | |
| 513 | ✗ | orig.zoom = t.zoom; | |
| 514 | |||
| 515 | // Generate a one-sided moving exponential average | ||
| 516 | ✗ | deshake->avg.vec.x = alpha * t.vec.x + (1.0 - alpha) * deshake->avg.vec.x; | |
| 517 | ✗ | deshake->avg.vec.y = alpha * t.vec.y + (1.0 - alpha) * deshake->avg.vec.y; | |
| 518 | ✗ | deshake->avg.angle = alpha * t.angle + (1.0 - alpha) * deshake->avg.angle; | |
| 519 | ✗ | deshake->avg.zoom = alpha * t.zoom + (1.0 - alpha) * deshake->avg.zoom; | |
| 520 | |||
| 521 | // Remove the average from the current motion to detect the motion that | ||
| 522 | // is not on purpose, just as jitter from bumping the camera | ||
| 523 | ✗ | t.vec.x -= deshake->avg.vec.x; | |
| 524 | ✗ | t.vec.y -= deshake->avg.vec.y; | |
| 525 | ✗ | t.angle -= deshake->avg.angle; | |
| 526 | ✗ | t.zoom -= deshake->avg.zoom; | |
| 527 | |||
| 528 | // Invert the motion to undo it | ||
| 529 | ✗ | t.vec.x *= -1; | |
| 530 | ✗ | t.vec.y *= -1; | |
| 531 | ✗ | t.angle *= -1; | |
| 532 | |||
| 533 | // Write statistics to file | ||
| 534 | ✗ | if (deshake->fp) { | |
| 535 | ✗ | snprintf(tmp, 256, "%f, %f, %f, %f, %f, %f, %f, %f, %f, %f, %f, %f\n", orig.vec.x, deshake->avg.vec.x, t.vec.x, orig.vec.y, deshake->avg.vec.y, t.vec.y, orig.angle, deshake->avg.angle, t.angle, orig.zoom, deshake->avg.zoom, t.zoom); | |
| 536 | ✗ | fwrite(tmp, 1, strlen(tmp), deshake->fp); | |
| 537 | } | ||
| 538 | |||
| 539 | // Turn relative current frame motion into absolute by adding it to the | ||
| 540 | // last absolute motion | ||
| 541 | ✗ | t.vec.x += deshake->last.vec.x; | |
| 542 | ✗ | t.vec.y += deshake->last.vec.y; | |
| 543 | ✗ | t.angle += deshake->last.angle; | |
| 544 | ✗ | t.zoom += deshake->last.zoom; | |
| 545 | |||
| 546 | // Shrink motion by 10% to keep things centered in the camera frame | ||
| 547 | ✗ | t.vec.x *= 0.9; | |
| 548 | ✗ | t.vec.y *= 0.9; | |
| 549 | ✗ | t.angle *= 0.9; | |
| 550 | |||
| 551 | // Store the last absolute motion information | ||
| 552 | ✗ | deshake->last.vec.x = t.vec.x; | |
| 553 | ✗ | deshake->last.vec.y = t.vec.y; | |
| 554 | ✗ | deshake->last.angle = t.angle; | |
| 555 | ✗ | deshake->last.zoom = t.zoom; | |
| 556 | |||
| 557 | ✗ | transform_zoom = 1.0 + t.zoom / 100.0; | |
| 558 | |||
| 559 | // Generate a luma transformation matrix | ||
| 560 | ✗ | ff_get_matrix(t.vec.x, t.vec.y, t.angle, transform_zoom, transform_zoom, matrix_y); | |
| 561 | // Generate a chroma transformation matrix | ||
| 562 | ✗ | ff_get_matrix(t.vec.x / (link->w / chroma_width), t.vec.y / (link->h / chroma_height), t.angle, transform_zoom, transform_zoom, matrix_uv); | |
| 563 | // Transform the luma and chroma planes | ||
| 564 | ✗ | ret = deshake->transform(link->dst, link->w, link->h, chroma_width, chroma_height, | |
| 565 | ✗ | matrix_y, matrix_uv, INTERPOLATE_BILINEAR, deshake->edge, in, out); | |
| 566 | |||
| 567 | // Cleanup the old reference frame | ||
| 568 | ✗ | av_frame_free(&deshake->ref); | |
| 569 | |||
| 570 | ✗ | if (ret < 0) | |
| 571 | ✗ | goto fail; | |
| 572 | |||
| 573 | // Store the current frame as the reference frame for calculating the | ||
| 574 | // motion of the next frame | ||
| 575 | ✗ | deshake->ref = in; | |
| 576 | |||
| 577 | ✗ | return ff_filter_frame(outlink, out); | |
| 578 | ✗ | fail: | |
| 579 | ✗ | av_frame_free(&out); | |
| 580 | ✗ | return ret; | |
| 581 | } | ||
| 582 | |||
| 583 | static const AVFilterPad deshake_inputs[] = { | ||
| 584 | { | ||
| 585 | .name = "default", | ||
| 586 | .type = AVMEDIA_TYPE_VIDEO, | ||
| 587 | .filter_frame = filter_frame, | ||
| 588 | .config_props = config_props, | ||
| 589 | }, | ||
| 590 | }; | ||
| 591 | |||
| 592 | const FFFilter ff_vf_deshake = { | ||
| 593 | .p.name = "deshake", | ||
| 594 | .p.description = NULL_IF_CONFIG_SMALL("Stabilize shaky video."), | ||
| 595 | .p.priv_class = &deshake_class, | ||
| 596 | .priv_size = sizeof(DeshakeContext), | ||
| 597 | .init = init, | ||
| 598 | .uninit = uninit, | ||
| 599 | FILTER_INPUTS(deshake_inputs), | ||
| 600 | FILTER_OUTPUTS(ff_video_default_filterpad), | ||
| 601 | FILTER_PIXFMTS_ARRAY(pix_fmts), | ||
| 602 | }; | ||
| 603 |||
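
For reference, the core routines in the listing above can be restated as small standalone sketches. `clean_mean()` computes a trimmed mean: sort the per-block angles, drop the lowest and highest 20% of samples as outliers, and average the rest. A minimal sketch, with plain `qsort()` standing in for `AV_QSORT` and the helper names `cmp_double`/`clean_mean_sketch` used only for illustration:

```c
#include <stdlib.h>

/* Comparison callback for qsort(), mirroring the cmp() used with AV_QSORT. */
static int cmp_double(const void *a, const void *b)
{
    double da = *(const double *)a, db = *(const double *)b;
    return (da > db) - (da < db);
}

/* Trimmed ("cleaned") mean: sort, cut 20% of the samples at each end,
 * average the remainder.  With count < 5 nothing is cut; the caller is
 * expected to pass count > 0, as the filter does. */
static double clean_mean_sketch(double *values, int count)
{
    int cut = count / 5;
    double sum = 0.0;

    qsort(values, count, sizeof(*values), cmp_double);
    for (int i = cut; i < count - cut; i++)
        sum += values[i];
    return sum / (count - 2 * cut);
}
```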
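`find_block_motion()` scores candidate shifts with a 16x16 SAD and keeps the lowest-cost one; matches whose best SAD still exceeds 512 are discarded as unreliable. Below is a scalar sketch of the EXHAUSTIVE branch, assuming `(cx, cy)` lies at least `rx`/`ry` pixels inside both frames; the SMART_EXHAUSTIVE mode differs only in sampling every other position first and then refining around the winner. `sad_16x16` and `block_motion_exhaustive` are illustrative names, and the real filter uses an `av_pixelutils` SAD that may be SIMD-accelerated:

```c
#include <stdint.h>
#include <stdlib.h>
#include <limits.h>

/* Plain scalar 16x16 sum of absolute differences. */
static int sad_16x16(const uint8_t *a, int a_stride,
                     const uint8_t *b, int b_stride)
{
    int sum = 0;
    for (int y = 0; y < 16; y++)
        for (int x = 0; x < 16; x++)
            sum += abs(a[y * a_stride + x] - b[y * b_stride + x]);
    return sum;
}

/* Try every (x, y) shift within +/-rx, +/-ry: compare the reference block
 * at (cx, cy) against the current-frame block at (cx - x, cy - y) and keep
 * the shift with the smallest SAD, as the EXHAUSTIVE branch does. */
static void block_motion_exhaustive(const uint8_t *ref, const uint8_t *cur,
                                    int cx, int cy, int stride,
                                    int rx, int ry, int *mvx, int *mvy)
{
    int smallest = INT_MAX;

    for (int y = -ry; y <= ry; y++) {
        for (int x = -rx; x <= rx; x++) {
            int diff = sad_16x16(ref + cy * stride + cx, stride,
                                 cur + (cy - y) * stride + (cx - x), stride);
            if (diff < smallest) {
                smallest = diff;
                *mvx = x;
                *mvy = y;
            }
        }
    }
    if (smallest > 512) {   /* reject weak matches, as the filter does */
        *mvx = -1;
        *mvy = -1;
    }
}
```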
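`block_angle()` measures the rotation a block's motion vector implies about the rotation centre: the angle of the block position before and after the shift, with the difference wrapped back into (-pi, pi]. A sketch under an illustrative name:

```c
#include <math.h>

#ifndef M_PI
#define M_PI 3.14159265358979323846
#endif

/* Angle of the block centre relative to (cx, cy) before and after applying
 * its motion vector; the wrapped difference is that block's rotation vote. */
static double block_rotation(int x, int y, int cx, int cy,
                             int shift_x, int shift_y)
{
    double a1   = atan2(y - cy, x - cx);
    double a2   = atan2(y - cy + shift_y, x - cx + shift_x);
    double diff = a2 - a1;

    if (diff > M_PI)
        diff -= 2 * M_PI;
    else if (diff < -M_PI)
        diff += 2 * M_PI;
    return diff;
}
```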
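`find_motion()` votes each accepted block motion vector into the `counts[][]` scratch array and takes the most frequent bin as the global motion vector; the reverse iteration order in the filter only affects tie-breaking. A sketch, with `pick_gmv` as an illustrative name:

```c
#define MAX_R 64

/* counts[x][y] holds how many blocks voted for shift (x - rx, y - ry);
 * the mode of this histogram is used as the global motion vector. */
static void pick_gmv(int counts[2 * MAX_R + 1][2 * MAX_R + 1],
                     int rx, int ry, double *gmv_x, double *gmv_y)
{
    int best = 0;

    for (int y = 0; y < 2 * ry + 1; y++) {
        for (int x = 0; x < 2 * rx + 1; x++) {
            if (counts[x][y] > best) {
                best   = counts[x][y];
                *gmv_x = x - rx;
                *gmv_y = y - ry;
            }
        }
    }
}
```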
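In `filter_frame()`, intentional camera movement is modelled as a one-sided exponential moving average with `alpha = 2 / refcount` (refcount is fixed at 20 in `init()`). Subtracting the average from the measured motion leaves the jitter, which is then negated so the transform undoes it; zoom is smoothed but not negated. A sketch with illustrative names:

```c
/* Per-frame motion estimate: shift, rotation and zoom. */
typedef struct Motion { double x, y, angle, zoom; } Motion;

static void smooth_and_invert(Motion *avg, Motion *t, int refcount)
{
    double alpha = 2.0 / refcount;

    /* update the one-sided exponential moving average */
    avg->x     = alpha * t->x     + (1.0 - alpha) * avg->x;
    avg->y     = alpha * t->y     + (1.0 - alpha) * avg->y;
    avg->angle = alpha * t->angle + (1.0 - alpha) * avg->angle;
    avg->zoom  = alpha * t->zoom  + (1.0 - alpha) * avg->zoom;

    /* keep only the unintended part and flip its sign to compensate */
    t->x     = -(t->x     - avg->x);
    t->y     = -(t->y     - avg->y);
    t->angle = -(t->angle - avg->angle);
    t->zoom  =   t->zoom  - avg->zoom;   /* zoom is not inverted in the filter */
}
```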
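Finally, the compensating shift, angle and zoom are turned into per-plane affine matrices by `ff_get_matrix()` and applied by `ff_affine_transform()`. The sketch below builds a standard row-major similarity transform to show the mapping involved; the exact element layout and scaling convention used by `ff_get_matrix()` are not reproduced here and should be treated as an assumption:

```c
#include <math.h>

/* Standard 3x3 similarity transform (zoom z, rotation a, translation tx/ty),
 * stored row-major:
 *
 *   x' = z*cos(a)*x - z*sin(a)*y + tx
 *   y' = z*sin(a)*x + z*cos(a)*y + ty
 *
 * The chroma planes reuse the same angle and zoom, while tx and ty are
 * divided by the plane's horizontal/vertical subsampling factor. */
static void similarity_matrix(float tx, float ty, float angle, float zoom,
                              float m[9])
{
    m[0] =  zoom * cosf(angle); m[1] = -zoom * sinf(angle); m[2] = tx;
    m[3] =  zoom * sinf(angle); m[4] =  zoom * cosf(angle); m[5] = ty;
    m[6] =  0.0f;               m[7] =  0.0f;               m[8] = 1.0f;
}
```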