LCOV - code coverage report
Current view: top level - libavfilter - vf_colorbalance.c (source / functions) Hit Total Coverage
Test: coverage.info Lines: 0 186 0.0 %
Date: 2018-05-20 11:54:08 Functions: 0 7 0.0 %

          Line data    Source code
       1             : /*
       2             :  * Copyright (c) 2013 Paul B Mahol
       3             :  *
       4             :  * This file is part of FFmpeg.
       5             :  *
       6             :  * FFmpeg is free software; you can redistribute it and/or
       7             :  * modify it under the terms of the GNU Lesser General Public
       8             :  * License as published by the Free Software Foundation; either
       9             :  * version 2.1 of the License, or (at your option) any later version.
      10             :  *
      11             :  * FFmpeg is distributed in the hope that it will be useful,
      12             :  * but WITHOUT ANY WARRANTY; without even the implied warranty of
      13             :  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
      14             :  * Lesser General Public License for more details.
      15             :  *
      16             :  * You should have received a copy of the GNU Lesser General Public
      17             :  * License along with FFmpeg; if not, write to the Free Software
      18             :  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
      19             :  */
      20             : 
      21             : #include "libavutil/opt.h"
      22             : #include "libavutil/pixdesc.h"
      23             : #include "avfilter.h"
      24             : #include "drawutils.h"
      25             : #include "formats.h"
      26             : #include "internal.h"
      27             : #include "video.h"
      28             : 
/* Channel indices into lut[] and rgba_map[]. */
#define R 0
#define G 1
#define B 2
#define A 3

/* Per-frame job arguments handed to the slice-threaded LUT workers. */
typedef struct ThreadData {
    AVFrame *in, *out;
} ThreadData;

/* Balance strengths for one color axis, one value per tonal region.
 * Each field is in [-1, 1] (see colorbalance_options). */
typedef struct Range {
    double shadows;
    double midtones;
    double highlights;
} Range;

typedef struct ColorBalanceContext {
    const AVClass *class;
    Range cyan_red;        /* negative leans cyan, positive leans red */
    Range magenta_green;   /* negative leans magenta, positive leans green */
    Range yellow_blue;     /* negative leans yellow, positive leans blue */

    /* Per-channel lookup tables, sized for up to 16-bit components;
     * filled in config_output(). */
    uint16_t lut[3][65536];

    uint8_t rgba_map[4];   /* component order of the packed pixel format */
    int step;              /* bytes per packed pixel (av_get_padded_bits_per_pixel >> 3) */

    /* Slice worker matching the negotiated format (8/16 bit, packed/planar). */
    int (*apply_lut)(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs);
} ColorBalanceContext;
      57             : 
#define OFFSET(x) offsetof(ColorBalanceContext, x)
#define FLAGS AV_OPT_FLAG_FILTERING_PARAM|AV_OPT_FLAG_VIDEO_PARAM
/* One shadow/midtone/highlight strength per color axis, each in [-1, 1],
 * defaulting to 0 (no adjustment). */
static const AVOption colorbalance_options[] = {
    { "rs", "set red shadows",      OFFSET(cyan_red.shadows),         AV_OPT_TYPE_DOUBLE, {.dbl=0}, -1, 1, FLAGS },
    { "gs", "set green shadows",    OFFSET(magenta_green.shadows),    AV_OPT_TYPE_DOUBLE, {.dbl=0}, -1, 1, FLAGS },
    { "bs", "set blue shadows",     OFFSET(yellow_blue.shadows),      AV_OPT_TYPE_DOUBLE, {.dbl=0}, -1, 1, FLAGS },
    { "rm", "set red midtones",     OFFSET(cyan_red.midtones),        AV_OPT_TYPE_DOUBLE, {.dbl=0}, -1, 1, FLAGS },
    { "gm", "set green midtones",   OFFSET(magenta_green.midtones),   AV_OPT_TYPE_DOUBLE, {.dbl=0}, -1, 1, FLAGS },
    { "bm", "set blue midtones",    OFFSET(yellow_blue.midtones),     AV_OPT_TYPE_DOUBLE, {.dbl=0}, -1, 1, FLAGS },
    { "rh", "set red highlights",   OFFSET(cyan_red.highlights),      AV_OPT_TYPE_DOUBLE, {.dbl=0}, -1, 1, FLAGS },
    { "gh", "set green highlights", OFFSET(magenta_green.highlights), AV_OPT_TYPE_DOUBLE, {.dbl=0}, -1, 1, FLAGS },
    { "bh", "set blue highlights",  OFFSET(yellow_blue.highlights),   AV_OPT_TYPE_DOUBLE, {.dbl=0}, -1, 1, FLAGS },
    { NULL }
};
      72             : 
      73             : AVFILTER_DEFINE_CLASS(colorbalance);
      74             : 
      75           0 : static int query_formats(AVFilterContext *ctx)
      76             : {
      77             :     static const enum AVPixelFormat pix_fmts[] = {
      78             :         AV_PIX_FMT_RGB24, AV_PIX_FMT_BGR24,
      79             :         AV_PIX_FMT_RGBA,  AV_PIX_FMT_BGRA,
      80             :         AV_PIX_FMT_ABGR,  AV_PIX_FMT_ARGB,
      81             :         AV_PIX_FMT_0BGR,  AV_PIX_FMT_0RGB,
      82             :         AV_PIX_FMT_RGB0,  AV_PIX_FMT_BGR0,
      83             :         AV_PIX_FMT_RGB48,  AV_PIX_FMT_BGR48,
      84             :         AV_PIX_FMT_RGBA64, AV_PIX_FMT_BGRA64,
      85             :         AV_PIX_FMT_GBRP,   AV_PIX_FMT_GBRAP,
      86             :         AV_PIX_FMT_GBRP9,
      87             :         AV_PIX_FMT_GBRP10, AV_PIX_FMT_GBRAP10,
      88             :         AV_PIX_FMT_GBRP12, AV_PIX_FMT_GBRAP12,
      89             :         AV_PIX_FMT_GBRP14,
      90             :         AV_PIX_FMT_GBRP16, AV_PIX_FMT_GBRAP16,
      91             :         AV_PIX_FMT_NONE
      92             :     };
      93           0 :     AVFilterFormats *fmts_list = ff_make_format_list(pix_fmts);
      94           0 :     if (!fmts_list)
      95           0 :         return AVERROR(ENOMEM);
      96           0 :     return ff_set_common_formats(ctx, fmts_list);
      97             : }
      98             : 
      99           0 : static int apply_lut8_p(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
     100             : {
     101           0 :     ColorBalanceContext *s = ctx->priv;
     102           0 :     ThreadData *td = arg;
     103           0 :     AVFrame *in = td->in;
     104           0 :     AVFrame *out = td->out;
     105           0 :     const int slice_start = (out->height * jobnr) / nb_jobs;
     106           0 :     const int slice_end = (out->height * (jobnr+1)) / nb_jobs;
     107           0 :     const uint8_t *srcg = in->data[0] + slice_start * in->linesize[0];
     108           0 :     const uint8_t *srcb = in->data[1] + slice_start * in->linesize[1];
     109           0 :     const uint8_t *srcr = in->data[2] + slice_start * in->linesize[2];
     110           0 :     const uint8_t *srca = in->data[3] + slice_start * in->linesize[3];
     111           0 :     uint8_t *dstg = out->data[0] + slice_start * out->linesize[0];
     112           0 :     uint8_t *dstb = out->data[1] + slice_start * out->linesize[1];
     113           0 :     uint8_t *dstr = out->data[2] + slice_start * out->linesize[2];
     114           0 :     uint8_t *dsta = out->data[3] + slice_start * out->linesize[3];
     115             :     int i, j;
     116             : 
     117           0 :     for (i = slice_start; i < slice_end; i++) {
     118           0 :         for (j = 0; j < out->width; j++) {
     119           0 :             dstg[j] = s->lut[G][srcg[j]];
     120           0 :             dstb[j] = s->lut[B][srcb[j]];
     121           0 :             dstr[j] = s->lut[R][srcr[j]];
     122           0 :             if (in != out && out->linesize[3])
     123           0 :                 dsta[j] = srca[j];
     124             :         }
     125             : 
     126           0 :         srcg += in->linesize[0];
     127           0 :         srcb += in->linesize[1];
     128           0 :         srcr += in->linesize[2];
     129           0 :         srca += in->linesize[3];
     130           0 :         dstg += out->linesize[0];
     131           0 :         dstb += out->linesize[1];
     132           0 :         dstr += out->linesize[2];
     133           0 :         dsta += out->linesize[3];
     134             :     }
     135             : 
     136           0 :     return 0;
     137             : }
     138             : 
     139           0 : static int apply_lut16_p(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
     140             : {
     141           0 :     ColorBalanceContext *s = ctx->priv;
     142           0 :     ThreadData *td = arg;
     143           0 :     AVFrame *in = td->in;
     144           0 :     AVFrame *out = td->out;
     145           0 :     const int slice_start = (out->height * jobnr) / nb_jobs;
     146           0 :     const int slice_end = (out->height * (jobnr+1)) / nb_jobs;
     147           0 :     const uint16_t *srcg = (const uint16_t *)in->data[0] + slice_start * in->linesize[0] / 2;
     148           0 :     const uint16_t *srcb = (const uint16_t *)in->data[1] + slice_start * in->linesize[1] / 2;
     149           0 :     const uint16_t *srcr = (const uint16_t *)in->data[2] + slice_start * in->linesize[2] / 2;
     150           0 :     const uint16_t *srca = (const uint16_t *)in->data[3] + slice_start * in->linesize[3] / 2;
     151           0 :     uint16_t *dstg = (uint16_t *)out->data[0] + slice_start * out->linesize[0] / 2;
     152           0 :     uint16_t *dstb = (uint16_t *)out->data[1] + slice_start * out->linesize[1] / 2;
     153           0 :     uint16_t *dstr = (uint16_t *)out->data[2] + slice_start * out->linesize[2] / 2;
     154           0 :     uint16_t *dsta = (uint16_t *)out->data[3] + slice_start * out->linesize[3] / 2;
     155             :     int i, j;
     156             : 
     157           0 :     for (i = slice_start; i < slice_end; i++) {
     158           0 :         for (j = 0; j < out->width; j++) {
     159           0 :             dstg[j] = s->lut[G][srcg[j]];
     160           0 :             dstb[j] = s->lut[B][srcb[j]];
     161           0 :             dstr[j] = s->lut[R][srcr[j]];
     162           0 :             if (in != out && out->linesize[3])
     163           0 :                 dsta[j] = srca[j];
     164             :         }
     165             : 
     166           0 :         srcg += in->linesize[0] / 2;
     167           0 :         srcb += in->linesize[1] / 2;
     168           0 :         srcr += in->linesize[2] / 2;
     169           0 :         srca += in->linesize[3] / 2;
     170           0 :         dstg += out->linesize[0] / 2;
     171           0 :         dstb += out->linesize[1] / 2;
     172           0 :         dstr += out->linesize[2] / 2;
     173           0 :         dsta += out->linesize[3] / 2;
     174             :     }
     175             : 
     176           0 :     return 0;
     177             : }
     178             : 
     179           0 : static int apply_lut8(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
     180             : {
     181           0 :     ColorBalanceContext *s = ctx->priv;
     182           0 :     ThreadData *td = arg;
     183           0 :     AVFrame *in = td->in;
     184           0 :     AVFrame *out = td->out;
     185           0 :     AVFilterLink *outlink = ctx->outputs[0];
     186           0 :     const int slice_start = (out->height * jobnr) / nb_jobs;
     187           0 :     const int slice_end = (out->height * (jobnr+1)) / nb_jobs;
     188           0 :     const uint8_t *srcrow = in->data[0] + slice_start * in->linesize[0];
     189           0 :     const uint8_t roffset = s->rgba_map[R];
     190           0 :     const uint8_t goffset = s->rgba_map[G];
     191           0 :     const uint8_t boffset = s->rgba_map[B];
     192           0 :     const uint8_t aoffset = s->rgba_map[A];
     193           0 :     const int step = s->step;
     194             :     uint8_t *dstrow;
     195             :     int i, j;
     196             : 
     197           0 :     dstrow = out->data[0] + slice_start * out->linesize[0];
     198           0 :     for (i = slice_start; i < slice_end; i++) {
     199           0 :         const uint8_t *src = srcrow;
     200           0 :         uint8_t *dst = dstrow;
     201             : 
     202           0 :         for (j = 0; j < outlink->w * step; j += step) {
     203           0 :             dst[j + roffset] = s->lut[R][src[j + roffset]];
     204           0 :             dst[j + goffset] = s->lut[G][src[j + goffset]];
     205           0 :             dst[j + boffset] = s->lut[B][src[j + boffset]];
     206           0 :             if (in != out && step == 4)
     207           0 :                 dst[j + aoffset] = src[j + aoffset];
     208             :         }
     209             : 
     210           0 :         srcrow += in->linesize[0];
     211           0 :         dstrow += out->linesize[0];
     212             :     }
     213             : 
     214           0 :     return 0;
     215             : }
     216             : 
     217           0 : static int apply_lut16(AVFilterContext *ctx, void *arg, int jobnr, int nb_jobs)
     218             : {
     219           0 :     ColorBalanceContext *s = ctx->priv;
     220           0 :     ThreadData *td = arg;
     221           0 :     AVFrame *in = td->in;
     222           0 :     AVFrame *out = td->out;
     223           0 :     AVFilterLink *outlink = ctx->outputs[0];
     224           0 :     const int slice_start = (out->height * jobnr) / nb_jobs;
     225           0 :     const int slice_end = (out->height * (jobnr+1)) / nb_jobs;
     226           0 :     const uint16_t *srcrow = (const uint16_t *)in->data[0] + slice_start * in->linesize[0] / 2;
     227           0 :     const uint8_t roffset = s->rgba_map[R];
     228           0 :     const uint8_t goffset = s->rgba_map[G];
     229           0 :     const uint8_t boffset = s->rgba_map[B];
     230           0 :     const uint8_t aoffset = s->rgba_map[A];
     231           0 :     const int step = s->step / 2;
     232             :     uint16_t *dstrow;
     233             :     int i, j;
     234             : 
     235           0 :     dstrow = (uint16_t *)out->data[0] + slice_start * out->linesize[0] / 2;
     236           0 :     for (i = slice_start; i < slice_end; i++) {
     237           0 :         const uint16_t *src = srcrow;
     238           0 :         uint16_t *dst = dstrow;
     239             : 
     240           0 :         for (j = 0; j < outlink->w * step; j += step) {
     241           0 :             dst[j + roffset] = s->lut[R][src[j + roffset]];
     242           0 :             dst[j + goffset] = s->lut[G][src[j + goffset]];
     243           0 :             dst[j + boffset] = s->lut[B][src[j + boffset]];
     244           0 :             if (in != out && step == 4)
     245           0 :                 dst[j + aoffset] = src[j + aoffset];
     246             :         }
     247             : 
     248           0 :         srcrow += in->linesize[0] / 2;
     249           0 :         dstrow += out->linesize[0] / 2;
     250             :     }
     251             : 
     252           0 :     return 0;
     253             : }
     254             : 
/**
 * Build the per-channel lookup tables from the shadow/midtone/highlight
 * balance settings and select the LUT-application routine matching the
 * negotiated pixel format (8- vs >8-bit, packed vs planar).
 */
static int config_output(AVFilterLink *outlink)
{
    AVFilterContext *ctx = outlink->src;
    ColorBalanceContext *s = ctx->priv;
    const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(outlink->format);
    const int depth = desc->comp[0].depth;
    const int max = 1 << depth;  /* number of code values per component */
    const int planar = av_pix_fmt_count_planes(outlink->format) > 1;
    double *shadows, *midtones, *highlights, *buffer;
    int i, r, g, b;

    if (max == 256 && planar) {
        s->apply_lut = apply_lut8_p;
    } else if (planar) {
        s->apply_lut = apply_lut16_p;
    } else if (max == 256) {
        s->apply_lut = apply_lut8;
    } else {
        s->apply_lut = apply_lut16;
    }

    /* One scratch array of "max" weights per tonal region. */
    buffer = av_malloc(max * 3 * sizeof(*buffer));
    if (!buffer)
        return AVERROR(ENOMEM);

    shadows    = buffer + max * 0;
    midtones   = buffer + max * 1;
    highlights = buffer + max * 2;

    /* Weighting curves over the value range: shadows ramp down from the
     * low end, midtones form a window around the middle, and highlights
     * are the shadow curve mirrored onto the high end. The 0.333/0.7
     * factors set the curve pivots and peak amplitude. */
    for (i = 0; i < max; i++) {
        const double L = 0.333 * (max - 1);
        const double M = 0.7 * (max - 1);
        const double H = 1 * (max - 1);
        double low = av_clipd((i - L) / (-max * 0.25) + 0.5, 0, 1) * M;
        double mid = av_clipd((i - L) / ( max * 0.25) + 0.5, 0, 1) *
                     av_clipd((i + L - H) / (-max * 0.25) + 0.5, 0, 1) * M;

        shadows[i] = low;
        midtones[i] = mid;
        highlights[max - i - 1] = low;  /* mirrored shadow weight */
    }

    /* Apply the three per-region adjustments to the identity ramp,
     * clipping to the component depth after every step. */
    for (i = 0; i < max; i++) {
        r = g = b = i;

        r = av_clip_uintp2(r + s->cyan_red.shadows         * shadows[r],    depth);
        r = av_clip_uintp2(r + s->cyan_red.midtones        * midtones[r],   depth);
        r = av_clip_uintp2(r + s->cyan_red.highlights      * highlights[r], depth);

        g = av_clip_uintp2(g + s->magenta_green.shadows    * shadows[g],    depth);
        g = av_clip_uintp2(g + s->magenta_green.midtones   * midtones[g],   depth);
        g = av_clip_uintp2(g + s->magenta_green.highlights * highlights[g], depth);

        b = av_clip_uintp2(b + s->yellow_blue.shadows      * shadows[b],    depth);
        b = av_clip_uintp2(b + s->yellow_blue.midtones     * midtones[b],   depth);
        b = av_clip_uintp2(b + s->yellow_blue.highlights   * highlights[b], depth);

        s->lut[R][i] = r;
        s->lut[G][i] = g;
        s->lut[B][i] = b;
    }

    av_free(buffer);

    /* Only meaningful for packed formats; harmless for planar ones. */
    ff_fill_rgba_map(s->rgba_map, outlink->format);
    s->step = av_get_padded_bits_per_pixel(desc) >> 3;

    return 0;
}
     324             : 
     325           0 : static int filter_frame(AVFilterLink *inlink, AVFrame *in)
     326             : {
     327           0 :     AVFilterContext *ctx = inlink->dst;
     328           0 :     ColorBalanceContext *s = ctx->priv;
     329           0 :     AVFilterLink *outlink = ctx->outputs[0];
     330             :     ThreadData td;
     331             :     AVFrame *out;
     332             : 
     333           0 :     if (av_frame_is_writable(in)) {
     334           0 :         out = in;
     335             :     } else {
     336           0 :         out = ff_get_video_buffer(outlink, outlink->w, outlink->h);
     337           0 :         if (!out) {
     338           0 :             av_frame_free(&in);
     339           0 :             return AVERROR(ENOMEM);
     340             :         }
     341           0 :         av_frame_copy_props(out, in);
     342             :     }
     343             : 
     344           0 :     td.in = in;
     345           0 :     td.out = out;
     346           0 :     ctx->internal->execute(ctx, s->apply_lut, &td, NULL, FFMIN(outlink->h, ff_filter_get_nb_threads(ctx)));
     347             : 
     348           0 :     if (in != out)
     349           0 :         av_frame_free(&in);
     350           0 :     return ff_filter_frame(outlink, out);
     351             : }
     352             : 
/* Single video input; per-frame processing via filter_frame(). */
static const AVFilterPad colorbalance_inputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .filter_frame = filter_frame,
    },
    { NULL }
};

/* Single video output; LUTs are (re)built in config_output(). */
static const AVFilterPad colorbalance_outputs[] = {
    {
        .name         = "default",
        .type         = AVMEDIA_TYPE_VIDEO,
        .config_props = config_output,
    },
    { NULL }
};

/* Filter registration: supports timeline expressions and slice threading. */
AVFilter ff_vf_colorbalance = {
    .name          = "colorbalance",
    .description   = NULL_IF_CONFIG_SMALL("Adjust the color balance."),
    .priv_size     = sizeof(ColorBalanceContext),
    .priv_class    = &colorbalance_class,
    .query_formats = query_formats,
    .inputs        = colorbalance_inputs,
    .outputs       = colorbalance_outputs,
    .flags         = AVFILTER_FLAG_SUPPORT_TIMELINE_GENERIC | AVFILTER_FLAG_SLICE_THREADS,
};

Generated by: LCOV version 1.13