LCOV - code coverage report
Current view: top level - libavcodec - huffyuvenc.c (source / functions)
Test: coverage.info          Date: 2017-12-15 11:05:35
                 Hit    Total    Coverage
Lines:           341      561      60.8 %
Functions:        13       14      92.9 %

          Line data    Source code
       1             : /*
       2             :  * Copyright (c) 2002-2014 Michael Niedermayer <michaelni@gmx.at>
       3             :  *
       4             :  * see http://www.pcisys.net/~melanson/codecs/huffyuv.txt for a description of
       5             :  * the algorithm used
       6             :  *
       7             :  * This file is part of FFmpeg.
       8             :  *
       9             :  * FFmpeg is free software; you can redistribute it and/or
      10             :  * modify it under the terms of the GNU Lesser General Public
      11             :  * License as published by the Free Software Foundation; either
      12             :  * version 2.1 of the License, or (at your option) any later version.
      13             :  *
      14             :  * FFmpeg is distributed in the hope that it will be useful,
      15             :  * but WITHOUT ANY WARRANTY; without even the implied warranty of
      16             :  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
      17             :  * Lesser General Public License for more details.
      18             :  *
      19             :  * You should have received a copy of the GNU Lesser General Public
      20             :  * License along with FFmpeg; if not, write to the Free Software
      21             :  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
      22             :  *
      23             :  * yuva, gray, 4:4:4, 4:1:1, 4:1:0 and >8 bit per sample support sponsored by NOA
      24             :  */
      25             : 
      26             : /**
      27             :  * @file
      28             :  * huffyuv encoder
      29             :  */
      30             : 
      31             : #include "avcodec.h"
      32             : #include "huffyuv.h"
      33             : #include "huffman.h"
      34             : #include "huffyuvencdsp.h"
      35             : #include "internal.h"
      36             : #include "lossless_videoencdsp.h"
      37             : #include "put_bits.h"
      38             : #include "libavutil/opt.h"
      39             : #include "libavutil/pixdesc.h"
      40             : 
      41      134100 : static inline void diff_bytes(HYuvContext *s, uint8_t *dst,
      42             :                               const uint8_t *src0, const uint8_t *src1, int w)
      43             : {
      44      134100 :     if (s->bps <= 8) {
      45           0 :         s->llvidencdsp.diff_bytes(dst, src0, src1, w);
      46             :     } else {
      47      134100 :         s->hencdsp.diff_int16((uint16_t *)dst, (const uint16_t *)src0, (const uint16_t *)src1, s->n - 1, w);
      48             :     }
      49      134100 : }
      50             : 
      51      718400 : static inline int sub_left_prediction(HYuvContext *s, uint8_t *dst,
      52             :                                       const uint8_t *src, int w, int left)
      53             : {
      54             :     int i;
      55      718400 :     int min_width = FFMIN(w, 32);
      56             : 
      57      718400 :     if (s->bps <= 8) {
      58    11777100 :         for (i = 0; i < min_width; i++) { /* scalar loop before dsp call */
      59    11417900 :             const int temp = src[i];
      60    11417900 :             dst[i] = temp - left;
      61    11417900 :             left   = temp;
      62             :         }
      63      359200 :         if (w < 32)
      64        5100 :             return left;
      65      354100 :         s->llvidencdsp.diff_bytes(dst + 32, src + 32, src + 31, w - 32);
      66      354100 :         return src[w-1];
      67             :     } else {
      68      359200 :         const uint16_t *src16 = (const uint16_t *)src;
      69      359200 :         uint16_t       *dst16 = (      uint16_t *)dst;
      70    11777100 :         for (i = 0; i < min_width; i++) { /* scalar loop before dsp call */
      71    11417900 :             const int temp = src16[i];
      72    11417900 :             dst16[i] = temp - left;
      73    11417900 :             left   = temp;
      74             :         }
      75      359200 :         if (w < 32)
      76        5100 :             return left;
      77      354100 :         s->hencdsp.diff_int16(dst16 + 32, src16 + 32, src16 + 31, s->n - 1, w - 32);
      78      354100 :         return src16[w-1];
      79             :     }
      80             : }
      81             : 
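sub_left_prediction() stores each sample as the difference from its left neighbour, handling the first 32 samples in scalar code before handing the rest of the row to the SIMD diff routine. A minimal standalone sketch of the same left-prediction differencing for an 8-bit row, with hypothetical names and no DSP fast path:

#include <stdint.h>

/* Illustrative only: plain left-prediction differencing of one 8-bit row.
 * The residuals wrap modulo 256, which is exactly what the decoder undoes. */
static int left_predict_row(uint8_t *dst, const uint8_t *src, int w, int left)
{
    for (int i = 0; i < w; i++) {
        const int cur = src[i];
        dst[i] = cur - left;      /* modulo-256 residual */
        left   = cur;
    }
    return left;                  /* carried into the next call, as above */
}
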
      82       44900 : static inline void sub_left_prediction_bgr32(HYuvContext *s, uint8_t *dst,
      83             :                                              const uint8_t *src, int w,
      84             :                                              int *red, int *green, int *blue,
      85             :                                              int *alpha)
      86             : {
      87             :     int i;
      88             :     int r, g, b, a;
      89       44900 :     int min_width = FFMIN(w, 8);
      90       44900 :     r = *red;
      91       44900 :     g = *green;
      92       44900 :     b = *blue;
      93       44900 :     a = *alpha;
      94             : 
      95      404100 :     for (i = 0; i < min_width; i++) {
      96      359200 :         const int rt = src[i * 4 + R];
      97      359200 :         const int gt = src[i * 4 + G];
      98      359200 :         const int bt = src[i * 4 + B];
      99      359200 :         const int at = src[i * 4 + A];
     100      359200 :         dst[i * 4 + R] = rt - r;
     101      359200 :         dst[i * 4 + G] = gt - g;
     102      359200 :         dst[i * 4 + B] = bt - b;
     103      359200 :         dst[i * 4 + A] = at - a;
     104      359200 :         r = rt;
     105      359200 :         g = gt;
     106      359200 :         b = bt;
     107      359200 :         a = at;
     108             :     }
     109             : 
     110       44900 :     s->llvidencdsp.diff_bytes(dst + 32, src + 32, src + 32 - 4, w * 4 - 32);
     111             : 
     112       44900 :     *red   = src[(w - 1) * 4 + R];
     113       44900 :     *green = src[(w - 1) * 4 + G];
     114       44900 :     *blue  = src[(w - 1) * 4 + B];
     115       44900 :     *alpha = src[(w - 1) * 4 + A];
     116       44900 : }
     117             : 
     118       44900 : static inline void sub_left_prediction_rgb24(HYuvContext *s, uint8_t *dst,
     119             :                                              uint8_t *src, int w,
     120             :                                              int *red, int *green, int *blue)
     121             : {
     122             :     int i;
     123             :     int r, g, b;
     124       44900 :     r = *red;
     125       44900 :     g = *green;
     126       44900 :     b = *blue;
     127      763300 :     for (i = 0; i < FFMIN(w, 16); i++) {
     128      718400 :         const int rt = src[i * 3 + 0];
     129      718400 :         const int gt = src[i * 3 + 1];
     130      718400 :         const int bt = src[i * 3 + 2];
     131      718400 :         dst[i * 3 + 0] = rt - r;
     132      718400 :         dst[i * 3 + 1] = gt - g;
     133      718400 :         dst[i * 3 + 2] = bt - b;
     134      718400 :         r = rt;
     135      718400 :         g = gt;
     136      718400 :         b = bt;
     137             :     }
     138             : 
     139       44900 :     s->llvidencdsp.diff_bytes(dst + 48, src + 48, src + 48 - 3, w * 3 - 48);
     140             : 
     141       44900 :     *red   = src[(w - 1) * 3 + 0];
     142       44900 :     *green = src[(w - 1) * 3 + 1];
     143       44900 :     *blue  = src[(w - 1) * 3 + 2];
     144       44900 : }
     145             : 
     146           0 : static void sub_median_prediction(HYuvContext *s, uint8_t *dst, const uint8_t *src1, const uint8_t *src2, int w, int *left, int *left_top)
     147             : {
     148           0 :     if (s->bps <= 8) {
     149           0 :         s->llvidencdsp.sub_median_pred(dst, src1, src2, w , left, left_top);
     150             :     } else {
     151           0 :         s->hencdsp.sub_hfyu_median_pred_int16((uint16_t *)dst, (const uint16_t *)src1, (const uint16_t *)src2, s->n - 1, w , left, left_top);
     152             :     }
     153           0 : }
     154             : 
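The median predictor is hidden entirely behind the sub_median_pred / sub_hfyu_median_pred_int16 DSP calls above: each sample is predicted as the median of its left neighbour, its top neighbour, and left + top - top_left, and only the residual is coded. A hedged scalar sketch of one such row (hypothetical names, 8-bit case only):

#include <stdint.h>

static int median3(int a, int b, int c)
{
    if (a > b) { int t = a; a = b; b = t; }   /* order so that a <= b */
    return c < a ? a : (c > b ? b : c);       /* clamp c into [a, b] = median */
}

/* Illustrative equivalent of one sub_median_pred() call: top is the previous
 * row, cur the current one; left/left_top carry state between rows. */
static void sub_median_row(uint8_t *dst, const uint8_t *top, const uint8_t *cur,
                           int w, int *left, int *left_top)
{
    int l = *left, lt = *left_top;
    for (int i = 0; i < w; i++) {
        const int pred = median3(l, top[i], (l + top[i] - lt) & 0xFF);
        lt = top[i];
        l  = cur[i];
        dst[i] = l - pred;                    /* modulo-256 residual */
    }
    *left     = l;
    *left_top = lt;
}
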
     155          96 : static int store_table(HYuvContext *s, const uint8_t *len, uint8_t *buf)
     156             : {
     157             :     int i;
     158          96 :     int index = 0;
     159          96 :     int n = s->vlc_n;
     160             : 
     161        4068 :     for (i = 0; i < n;) {
     162        3876 :         int val = len[i];
     163        3876 :         int repeat = 0;
     164             : 
     165      277284 :         for (; i < n && len[i] == val && repeat < 255; i++)
     166      273408 :             repeat++;
     167             : 
     168        3876 :         av_assert0(val < 32 && val >0 && repeat < 256 && repeat>0);
     169        3876 :         if (repeat > 7) {
     170        2244 :             buf[index++] = val;
     171        2244 :             buf[index++] = repeat;
     172             :         } else {
     173        1632 :             buf[index++] = val | (repeat << 5);
     174             :         }
     175             :     }
     176             : 
     177          96 :     return index;
     178             : }
     179             : 
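store_table() run-length codes the table of Huffman code lengths: a run of up to 7 equal lengths is packed into one byte as val | (repeat << 5), while longer runs take two bytes, val followed by the repeat count; since val is always 1..31, a zero in the top three bits unambiguously marks the two-byte form. A sketch of the matching expansion a reader of this table would perform (hypothetical name, not FFmpeg's actual decoder):

#include <stdint.h>

/* Illustrative only: expand the run-length coded length table written by
 * store_table() into n code lengths. Returns 0 on success. */
static int expand_len_table(uint8_t *len, int n, const uint8_t *buf, int size)
{
    int i = 0, index = 0;
    while (i < n && index < size) {
        int val    = buf[index] & 31;    /* code length, 1..31 */
        int repeat = buf[index++] >> 5;  /* 1..7, or 0 = long run follows */
        if (repeat == 0) {
            if (index >= size)
                return -1;
            repeat = buf[index++];       /* runs of 8..255 use a second byte */
        }
        while (repeat-- && i < n)
            len[i++] = val;
    }
    return i == n ? 0 : -1;              /* the table must be filled exactly */
}
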
     180          32 : static int store_huffman_tables(HYuvContext *s, uint8_t *buf)
     181             : {
     182             :     int i, ret;
     183          32 :     int size = 0;
     184          32 :     int count = 3;
     185             : 
     186          32 :     if (s->version > 2)
     187          16 :         count = 1 + s->alpha + 2*s->chroma;
     188             : 
     189         128 :     for (i = 0; i < count; i++) {
     190          96 :         if ((ret = ff_huff_gen_len_table(s->len[i], s->stats[i], s->vlc_n, 0)) < 0)
     191           0 :             return ret;
     192             : 
     193          96 :         if (ff_huffyuv_generate_bits_table(s->bits[i], s->len[i], s->vlc_n) < 0) {
     194           0 :             return -1;
     195             :         }
     196             : 
     197          96 :         size += store_table(s, s->len[i], buf + size);
     198             :     }
     199          32 :     return size;
     200             : }
     201             : 
     202          32 : static av_cold int encode_init(AVCodecContext *avctx)
     203             : {
     204          32 :     HYuvContext *s = avctx->priv_data;
     205             :     int i, j;
     206             :     int ret;
     207          32 :     const AVPixFmtDescriptor *desc = av_pix_fmt_desc_get(avctx->pix_fmt);
     208             : 
     209          32 :     ff_huffyuv_common_init(avctx);
     210          32 :     ff_huffyuvencdsp_init(&s->hencdsp, avctx);
     211          32 :     ff_llvidencdsp_init(&s->llvidencdsp);
     212             : 
     213          32 :     avctx->extradata = av_mallocz(3*MAX_N + 4);
     214          32 :     if (s->flags&AV_CODEC_FLAG_PASS1) {
     215             : #define STATS_OUT_SIZE 21*MAX_N*3 + 4
     216           0 :         avctx->stats_out = av_mallocz(STATS_OUT_SIZE); // 21*256*3(%llu ) + 3(\n) + 1(0) = 16132
     217           0 :         if (!avctx->stats_out)
     218           0 :             return AVERROR(ENOMEM);
     219             :     }
     220          32 :     s->version = 2;
     221             : 
     222          32 :     if (!avctx->extradata)
     223           0 :         return AVERROR(ENOMEM);
     224             : 
     225             : #if FF_API_CODED_FRAME
     226             : FF_DISABLE_DEPRECATION_WARNINGS
     227          32 :     avctx->coded_frame->pict_type = AV_PICTURE_TYPE_I;
     228          32 :     avctx->coded_frame->key_frame = 1;
     229             : FF_ENABLE_DEPRECATION_WARNINGS
     230             : #endif
     231             : #if FF_API_PRIVATE_OPT
     232             : FF_DISABLE_DEPRECATION_WARNINGS
     233          32 :     if (avctx->context_model == 1)
     234           0 :         s->context = avctx->context_model;
     235             : FF_ENABLE_DEPRECATION_WARNINGS
     236             : #endif
     237             : 
     238          32 :     s->bps = desc->comp[0].depth;
     239          32 :     s->yuv = !(desc->flags & AV_PIX_FMT_FLAG_RGB) && desc->nb_components >= 2;
     240          32 :     s->chroma = desc->nb_components > 2;
     241          32 :     s->alpha = !!(desc->flags & AV_PIX_FMT_FLAG_ALPHA);
     242          32 :     av_pix_fmt_get_chroma_sub_sample(avctx->pix_fmt,
     243             :                                      &s->chroma_h_shift,
     244             :                                      &s->chroma_v_shift);
     245             : 
     246          32 :     switch (avctx->pix_fmt) {
     247           8 :     case AV_PIX_FMT_YUV420P:
     248             :     case AV_PIX_FMT_YUV422P:
     249           8 :         if (s->width & 1) {
     250           0 :             av_log(avctx, AV_LOG_ERROR, "Width must be even for this colorspace.\n");
     251           0 :             return AVERROR(EINVAL);
     252             :         }
     253           8 :         s->bitstream_bpp = avctx->pix_fmt == AV_PIX_FMT_YUV420P ? 12 : 16;
     254           8 :         break;
     255          16 :     case AV_PIX_FMT_YUV444P:
     256             :     case AV_PIX_FMT_YUV410P:
     257             :     case AV_PIX_FMT_YUV411P:
     258             :     case AV_PIX_FMT_YUV440P:
     259             :     case AV_PIX_FMT_GBRP:
     260             :     case AV_PIX_FMT_GBRP9:
     261             :     case AV_PIX_FMT_GBRP10:
     262             :     case AV_PIX_FMT_GBRP12:
     263             :     case AV_PIX_FMT_GBRP14:
     264             :     case AV_PIX_FMT_GBRP16:
     265             :     case AV_PIX_FMT_GRAY8:
     266             :     case AV_PIX_FMT_GRAY16:
     267             :     case AV_PIX_FMT_YUVA444P:
     268             :     case AV_PIX_FMT_YUVA420P:
     269             :     case AV_PIX_FMT_YUVA422P:
     270             :     case AV_PIX_FMT_GBRAP:
     271             :     case AV_PIX_FMT_GRAY8A:
     272             :     case AV_PIX_FMT_YUV420P9:
     273             :     case AV_PIX_FMT_YUV420P10:
     274             :     case AV_PIX_FMT_YUV420P12:
     275             :     case AV_PIX_FMT_YUV420P14:
     276             :     case AV_PIX_FMT_YUV420P16:
     277             :     case AV_PIX_FMT_YUV422P9:
     278             :     case AV_PIX_FMT_YUV422P10:
     279             :     case AV_PIX_FMT_YUV422P12:
     280             :     case AV_PIX_FMT_YUV422P14:
     281             :     case AV_PIX_FMT_YUV422P16:
     282             :     case AV_PIX_FMT_YUV444P9:
     283             :     case AV_PIX_FMT_YUV444P10:
     284             :     case AV_PIX_FMT_YUV444P12:
     285             :     case AV_PIX_FMT_YUV444P14:
     286             :     case AV_PIX_FMT_YUV444P16:
     287             :     case AV_PIX_FMT_YUVA420P9:
     288             :     case AV_PIX_FMT_YUVA420P10:
     289             :     case AV_PIX_FMT_YUVA420P16:
     290             :     case AV_PIX_FMT_YUVA422P9:
     291             :     case AV_PIX_FMT_YUVA422P10:
     292             :     case AV_PIX_FMT_YUVA422P16:
     293             :     case AV_PIX_FMT_YUVA444P9:
     294             :     case AV_PIX_FMT_YUVA444P10:
     295             :     case AV_PIX_FMT_YUVA444P16:
     296          16 :         s->version = 3;
     297          16 :         break;
     298           4 :     case AV_PIX_FMT_RGB32:
     299           4 :         s->bitstream_bpp = 32;
     300           4 :         break;
     301           4 :     case AV_PIX_FMT_RGB24:
     302           4 :         s->bitstream_bpp = 24;
     303           4 :         break;
     304           0 :     default:
     305           0 :         av_log(avctx, AV_LOG_ERROR, "format not supported\n");
     306           0 :         return AVERROR(EINVAL);
     307             :     }
     308          32 :     s->n = 1<<s->bps;
     309          32 :     s->vlc_n = FFMIN(s->n, MAX_VLC_N);
     310             : 
     311          32 :     avctx->bits_per_coded_sample = s->bitstream_bpp;
     312          32 :     s->decorrelate = s->bitstream_bpp >= 24 && !s->yuv && !(desc->flags & AV_PIX_FMT_FLAG_PLANAR);
     313             : #if FF_API_PRIVATE_OPT
     314             : FF_DISABLE_DEPRECATION_WARNINGS
     315          32 :     if (avctx->prediction_method)
     316           0 :         s->predictor = avctx->prediction_method;
     317             : FF_ENABLE_DEPRECATION_WARNINGS
     318             : #endif
     319          32 :     s->interlaced = avctx->flags & AV_CODEC_FLAG_INTERLACED_ME ? 1 : 0;
     320          32 :     if (s->context) {
     321           0 :         if (s->flags & (AV_CODEC_FLAG_PASS1 | AV_CODEC_FLAG_PASS2)) {
     322           0 :             av_log(avctx, AV_LOG_ERROR,
     323             :                    "context=1 is not compatible with "
     324             :                    "2 pass huffyuv encoding\n");
     325           0 :             return AVERROR(EINVAL);
     326             :         }
     327             :     }
     328             : 
     329          32 :     if (avctx->codec->id == AV_CODEC_ID_HUFFYUV) {
     330          12 :         if (avctx->pix_fmt == AV_PIX_FMT_YUV420P) {
     331           0 :             av_log(avctx, AV_LOG_ERROR,
     332             :                    "Error: YV12 is not supported by huffyuv; use "
     333             :                    "vcodec=ffvhuff or format=422p\n");
     334           0 :             return AVERROR(EINVAL);
     335             :         }
     336             : #if FF_API_PRIVATE_OPT
     337          12 :         if (s->context) {
     338           0 :             av_log(avctx, AV_LOG_ERROR,
     339             :                    "Error: per-frame huffman tables are not supported "
     340             :                    "by huffyuv; use vcodec=ffvhuff\n");
     341           0 :             return AVERROR(EINVAL);
     342             :         }
     343          12 :         if (s->version > 2) {
     344           0 :             av_log(avctx, AV_LOG_ERROR,
     345             :                    "Error: ver>2 is not supported "
     346             :                    "by huffyuv; use vcodec=ffvhuff\n");
     347           0 :             return AVERROR(EINVAL);
     348             :         }
     349             : #endif
     350          12 :         if (s->interlaced != ( s->height > 288 ))
     351           0 :             av_log(avctx, AV_LOG_INFO,
     352             :                    "using huffyuv 2.2.0 or newer interlacing flag\n");
     353             :     }
     354             : 
     355          32 :     if (s->version > 3 && avctx->strict_std_compliance > FF_COMPLIANCE_EXPERIMENTAL) {
     356           0 :         av_log(avctx, AV_LOG_ERROR, "Ver > 3 is under development, files encoded with it may not be decodable with future versions!!!\n"
     357             :                "Use vstrict=-2 / -strict -2 to use it anyway.\n");
     358           0 :         return AVERROR(EINVAL);
     359             :     }
     360             : 
     361          32 :     if (s->bitstream_bpp >= 24 && s->predictor == MEDIAN && s->version <= 2) {
     362           0 :         av_log(avctx, AV_LOG_ERROR,
     363             :                "Error: RGB is incompatible with median predictor\n");
     364           0 :         return AVERROR(EINVAL);
     365             :     }
     366             : 
     367          32 :     ((uint8_t*)avctx->extradata)[0] = s->predictor | (s->decorrelate << 6);
     368          32 :     ((uint8_t*)avctx->extradata)[2] = s->interlaced ? 0x10 : 0x20;
     369          32 :     if (s->context)
     370           0 :         ((uint8_t*)avctx->extradata)[2] |= 0x40;
     371          32 :     if (s->version < 3) {
     372          16 :         ((uint8_t*)avctx->extradata)[1] = s->bitstream_bpp;
     373          16 :         ((uint8_t*)avctx->extradata)[3] = 0;
     374             :     } else {
     375          16 :         ((uint8_t*)avctx->extradata)[1] = ((s->bps-1)<<4) | s->chroma_h_shift | (s->chroma_v_shift<<2);
     376          16 :         if (s->chroma)
     377          16 :             ((uint8_t*)avctx->extradata)[2] |= s->yuv ? 1 : 2;
     378          16 :         if (s->alpha)
     379           0 :             ((uint8_t*)avctx->extradata)[2] |= 4;
     380          16 :         ((uint8_t*)avctx->extradata)[3] = 1;
     381             :     }
     382          32 :     s->avctx->extradata_size = 4;
     383             : 
     384          32 :     if (avctx->stats_in) {
     385           0 :         char *p = avctx->stats_in;
     386             : 
     387           0 :         for (i = 0; i < 4; i++)
     388           0 :             for (j = 0; j < s->vlc_n; j++)
     389           0 :                 s->stats[i][j] = 1;
     390             : 
     391             :         for (;;) {
     392           0 :             for (i = 0; i < 4; i++) {
     393             :                 char *next;
     394             : 
     395           0 :                 for (j = 0; j < s->vlc_n; j++) {
     396           0 :                     s->stats[i][j] += strtol(p, &next, 0);
     397           0 :                     if (next == p) return -1;
     398           0 :                     p = next;
     399             :                 }
     400             :             }
     401           0 :             if (p[0] == 0 || p[1] == 0 || p[2] == 0) break;
     402             :         }
     403             :     } else {
     404         160 :         for (i = 0; i < 4; i++)
     405      364672 :             for (j = 0; j < s->vlc_n; j++) {
     406      364544 :                 int d = FFMIN(j, s->vlc_n - j);
     407             : 
     408      364544 :                 s->stats[i][j] = 100000000 / (d*d + 1);
     409             :             }
     410             :     }
     411             : 
     412          32 :     ret = store_huffman_tables(s, s->avctx->extradata + s->avctx->extradata_size);
     413          32 :     if (ret < 0)
     414           0 :         return ret;
     415          32 :     s->avctx->extradata_size += ret;
     416             : 
     417          32 :     if (s->context) {
     418           0 :         for (i = 0; i < 4; i++) {
     419           0 :             int pels = s->width * s->height / (i ? 40 : 10);
     420           0 :             for (j = 0; j < s->vlc_n; j++) {
     421           0 :                 int d = FFMIN(j, s->vlc_n - j);
     422           0 :                 s->stats[i][j] = pels/(d*d + 1);
     423             :             }
     424             :         }
     425             :     } else {
     426         160 :         for (i = 0; i < 4; i++)
     427      364672 :             for (j = 0; j < s->vlc_n; j++)
     428      364544 :                 s->stats[i][j]= 0;
     429             :     }
     430             : 
     431          32 :     if (ff_huffyuv_alloc_temp(s)) {
     432           0 :         ff_huffyuv_common_end(s);
     433           0 :         return AVERROR(ENOMEM);
     434             :     }
     435             : 
     436          32 :     s->picture_number=0;
     437             : 
     438          32 :     return 0;
     439             : }
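
encode_init() packs the stream parameters into a 4-byte extradata header before appending the Huffman tables: byte 0 holds the predictor plus the decorrelate flag in bit 6, byte 1 is the bitstream bits per pixel (version 2) or ((bps-1)<<4) | chroma_h_shift | (chroma_v_shift<<2) (version 3), byte 2 carries the interlace/context/chroma/alpha flags, and byte 3 marks the version-3 layout. A hedged sketch of reading those fields back, with field meanings taken from the assignments above (names made up for illustration; this is not FFmpeg's decoder):

#include <stdint.h>

struct hyuv_header {
    int predictor;   /* byte 0, low bits: LEFT / PLANE / MEDIAN               */
    int decorrelate; /* byte 0, bit 6                                         */
    int interlaced;  /* byte 2: 0x10 = interlaced, 0x20 = progressive         */
    int context;     /* byte 2, bit 0x40: per-frame Huffman tables            */
    int version3;    /* byte 3 != 0: version-3 meaning of bytes 1 and 2       */
};

static struct hyuv_header parse_hyuv_header(const uint8_t h[4])
{
    struct hyuv_header r;
    r.predictor   =  h[0] & 0x3f;
    r.decorrelate = (h[0] >> 6) & 1;
    r.interlaced  = (h[2] & 0x10) != 0;
    r.context     = (h[2] & 0x40) != 0;
    r.version3    =  h[3] != 0;
    return r;
}
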
     440       67350 : static int encode_422_bitstream(HYuvContext *s, int offset, int count)
     441             : {
     442             :     int i;
     443       67350 :     const uint8_t *y = s->temp[0] + offset;
     444       67350 :     const uint8_t *u = s->temp[1] + offset / 2;
     445       67350 :     const uint8_t *v = s->temp[2] + offset / 2;
     446             : 
     447       67350 :     if (s->pb.buf_end - s->pb.buf - (put_bits_count(&s->pb) >> 3) < 2 * 4 * count) {
     448           0 :         av_log(s->avctx, AV_LOG_ERROR, "encoded frame too large\n");
     449           0 :         return -1;
     450             :     }
     451             : 
     452             : #define LOAD4\
     453             :             int y0 = y[2 * i];\
     454             :             int y1 = y[2 * i + 1];\
     455             :             int u0 = u[i];\
     456             :             int v0 = v[i];
     457             : 
     458       67350 :     count /= 2;
     459             : 
     460       67350 :     if (s->flags & AV_CODEC_FLAG_PASS1) {
     461           0 :         for(i = 0; i < count; i++) {
     462           0 :             LOAD4;
     463           0 :             s->stats[0][y0]++;
     464           0 :             s->stats[1][u0]++;
     465           0 :             s->stats[0][y1]++;
     466           0 :             s->stats[2][v0]++;
     467             :         }
     468             :     }
     469       67350 :     if (s->avctx->flags2 & AV_CODEC_FLAG2_NO_OUTPUT)
     470           0 :         return 0;
     471       67350 :     if (s->context) {
     472           0 :         for (i = 0; i < count; i++) {
     473           0 :             LOAD4;
     474           0 :             s->stats[0][y0]++;
     475           0 :             put_bits(&s->pb, s->len[0][y0], s->bits[0][y0]);
     476           0 :             s->stats[1][u0]++;
     477           0 :             put_bits(&s->pb, s->len[1][u0], s->bits[1][u0]);
     478           0 :             s->stats[0][y1]++;
     479           0 :             put_bits(&s->pb, s->len[0][y1], s->bits[0][y1]);
     480           0 :             s->stats[2][v0]++;
     481           0 :             put_bits(&s->pb, s->len[2][v0], s->bits[2][v0]);
     482             :         }
     483             :     } else {
     484    11515100 :         for(i = 0; i < count; i++) {
     485    11447750 :             LOAD4;
     486    11447750 :             put_bits(&s->pb, s->len[0][y0], s->bits[0][y0]);
     487    11447750 :             put_bits(&s->pb, s->len[1][u0], s->bits[1][u0]);
     488    11447750 :             put_bits(&s->pb, s->len[0][y1], s->bits[0][y1]);
     489    11447750 :             put_bits(&s->pb, s->len[2][v0], s->bits[2][v0]);
     490             :         }
     491             :     }
     492       67350 :     return 0;
     493             : }
     494             : 
     495      493900 : static int encode_plane_bitstream(HYuvContext *s, int width, int plane)
     496             : {
     497      493900 :     int i, count = width/2;
     498             : 
     499      493900 :     if (s->pb.buf_end - s->pb.buf - (put_bits_count(&s->pb) >> 3) < count * s->bps / 2) {
     500           0 :         av_log(s->avctx, AV_LOG_ERROR, "encoded frame too large\n");
     501           0 :         return -1;
     502             :     }
     503             : 
     504             : #define LOADEND\
     505             :             int y0 = s->temp[0][width-1];
     506             : #define LOADEND_14\
     507             :             int y0 = s->temp16[0][width-1] & mask;
     508             : #define LOADEND_16\
     509             :             int y0 = s->temp16[0][width-1];
     510             : #define STATEND\
     511             :             s->stats[plane][y0]++;
     512             : #define STATEND_16\
     513             :             s->stats[plane][y0>>2]++;
     514             : #define WRITEEND\
     515             :             put_bits(&s->pb, s->len[plane][y0], s->bits[plane][y0]);
     516             : #define WRITEEND_16\
     517             :             put_bits(&s->pb, s->len[plane][y0>>2], s->bits[plane][y0>>2]);\
     518             :             put_bits(&s->pb, 2, y0&3);
     519             : 
     520             : #define LOAD2\
     521             :             int y0 = s->temp[0][2 * i];\
     522             :             int y1 = s->temp[0][2 * i + 1];
     523             : #define LOAD2_14\
     524             :             int y0 = s->temp16[0][2 * i] & mask;\
     525             :             int y1 = s->temp16[0][2 * i + 1] & mask;
     526             : #define LOAD2_16\
     527             :             int y0 = s->temp16[0][2 * i];\
     528             :             int y1 = s->temp16[0][2 * i + 1];
     529             : #define STAT2\
     530             :             s->stats[plane][y0]++;\
     531             :             s->stats[plane][y1]++;
     532             : #define STAT2_16\
     533             :             s->stats[plane][y0>>2]++;\
     534             :             s->stats[plane][y1>>2]++;
     535             : #define WRITE2\
     536             :             put_bits(&s->pb, s->len[plane][y0], s->bits[plane][y0]);\
     537             :             put_bits(&s->pb, s->len[plane][y1], s->bits[plane][y1]);
     538             : #define WRITE2_16\
     539             :             put_bits(&s->pb, s->len[plane][y0>>2], s->bits[plane][y0>>2]);\
     540             :             put_bits(&s->pb, 2, y0&3);\
     541             :             put_bits(&s->pb, s->len[plane][y1>>2], s->bits[plane][y1>>2]);\
     542             :             put_bits(&s->pb, 2, y1&3);
     543             : 
     544      493900 :     if (s->bps <= 8) {
     545      134700 :     if (s->flags & AV_CODEC_FLAG_PASS1) {
     546           0 :         for (i = 0; i < count; i++) {
     547           0 :             LOAD2;
     548           0 :             STAT2;
     549             :         }
     550           0 :         if (width&1) {
     551           0 :             LOADEND;
     552           0 :             STATEND;
     553             :         }
     554             :     }
     555      134700 :     if (s->avctx->flags2 & AV_CODEC_FLAG2_NO_OUTPUT)
     556           0 :         return 0;
     557             : 
     558      134700 :     if (s->context) {
     559           0 :         for (i = 0; i < count; i++) {
     560           0 :             LOAD2;
     561           0 :             STAT2;
     562           0 :             WRITE2;
     563             :         }
     564           0 :         if (width&1) {
     565           0 :             LOADEND;
     566           0 :             STATEND;
     567           0 :             WRITEEND;
     568             :         }
     569             :     } else {
     570    23031000 :         for (i = 0; i < count; i++) {
     571    22896300 :             LOAD2;
     572    22896300 :             WRITE2;
     573             :         }
     574      134700 :         if (width&1) {
     575           0 :             LOADEND;
     576           0 :             WRITEEND;
     577             :         }
     578             :     }
     579      359200 :     } else if (s->bps <= 14) {
     580      224500 :         int mask = s->n - 1;
     581      224500 :         if (s->flags & AV_CODEC_FLAG_PASS1) {
     582           0 :             for (i = 0; i < count; i++) {
     583           0 :                 LOAD2_14;
     584           0 :                 STAT2;
     585             :             }
     586           0 :             if (width&1) {
     587           0 :                 LOADEND_14;
     588           0 :                 STATEND;
     589             :             }
     590             :         }
     591      224500 :         if (s->avctx->flags2 & AV_CODEC_FLAG2_NO_OUTPUT)
     592           0 :             return 0;
     593             : 
     594      224500 :         if (s->context) {
     595           0 :             for (i = 0; i < count; i++) {
     596           0 :                 LOAD2_14;
     597           0 :                 STAT2;
     598           0 :                 WRITE2;
     599             :             }
     600           0 :             if (width&1) {
     601           0 :                 LOADEND_14;
     602           0 :                 STATEND;
     603           0 :                 WRITEEND;
     604             :             }
     605             :         } else {
     606    26934300 :             for (i = 0; i < count; i++) {
     607    26709800 :                 LOAD2_14;
     608    26709800 :                 WRITE2;
     609             :             }
     610      224500 :             if (width&1) {
     611        5100 :                 LOADEND_14;
     612        5100 :                 WRITEEND;
     613             :             }
     614             :         }
     615             :     } else {
     616      134700 :         if (s->flags & AV_CODEC_FLAG_PASS1) {
     617           0 :             for (i = 0; i < count; i++) {
     618           0 :                 LOAD2_16;
     619           0 :                 STAT2_16;
     620             :             }
     621           0 :             if (width&1) {
     622           0 :                 LOADEND_16;
     623           0 :                 STATEND_16;
     624             :             }
     625             :         }
     626      134700 :         if (s->avctx->flags2 & AV_CODEC_FLAG2_NO_OUTPUT)
     627           0 :             return 0;
     628             : 
     629      134700 :         if (s->context) {
     630           0 :             for (i = 0; i < count; i++) {
     631           0 :                 LOAD2_16;
     632           0 :                 STAT2_16;
     633           0 :                 WRITE2_16;
     634             :             }
     635           0 :             if (width&1) {
     636           0 :                 LOADEND_16;
     637           0 :                 STATEND_16;
     638           0 :                 WRITEEND_16;
     639             :             }
     640             :         } else {
     641    23031000 :             for (i = 0; i < count; i++) {
     642    22896300 :                 LOAD2_16;
     643    22896300 :                 WRITE2_16;
     644             :             }
     645      134700 :             if (width&1) {
     646           0 :                 LOADEND_16;
     647           0 :                 WRITEEND_16;
     648             :             }
     649             :         }
     650             :     }
     651             : #undef LOAD2
     652             : #undef STAT2
     653             : #undef WRITE2
     654      493900 :     return 0;
     655             : }
     656             : 
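For depths above 14 bits a full-size VLC table would need 2^16 entries, so the WRITE2_16 path above Huffman-codes only the top bits of each sample (y0 >> 2, keeping the alphabet within MAX_VLC_N) and then appends the two least-significant bits verbatim. A hedged one-symbol sketch of that split, reusing the PutBitContext/put_bits API this file already includes (the helper name is made up):

/* Illustrative only: emit one >14-bit sample as Huffman code + 2 raw bits,
 * mirroring the WRITE2_16 macro above; len/bits are one row of the VLC table. */
static void put_sym_16(PutBitContext *pb, const uint8_t *len,
                       const uint32_t *bits, int v)
{
    put_bits(pb, len[v >> 2], bits[v >> 2]); /* Huffman code for the high bits */
    put_bits(pb, 2, v & 3);                  /* two low bits stored raw */
}
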
     657       22450 : static int encode_gray_bitstream(HYuvContext *s, int count)
     658             : {
     659             :     int i;
     660             : 
     661       22450 :     if (s->pb.buf_end - s->pb.buf - (put_bits_count(&s->pb) >> 3) < 4 * count) {
     662           0 :         av_log(s->avctx, AV_LOG_ERROR, "encoded frame too large\n");
     663           0 :         return -1;
     664             :     }
     665             : 
     666             : #define LOAD2\
     667             :             int y0 = s->temp[0][2 * i];\
     668             :             int y1 = s->temp[0][2 * i + 1];
     669             : #define STAT2\
     670             :             s->stats[0][y0]++;\
     671             :             s->stats[0][y1]++;
     672             : #define WRITE2\
     673             :             put_bits(&s->pb, s->len[0][y0], s->bits[0][y0]);\
     674             :             put_bits(&s->pb, s->len[0][y1], s->bits[0][y1]);
     675             : 
     676       22450 :     count /= 2;
     677             : 
     678       22450 :     if (s->flags & AV_CODEC_FLAG_PASS1) {
     679           0 :         for (i = 0; i < count; i++) {
     680           0 :             LOAD2;
     681           0 :             STAT2;
     682             :         }
     683             :     }
     684       22450 :     if (s->avctx->flags2 & AV_CODEC_FLAG2_NO_OUTPUT)
     685           0 :         return 0;
     686             : 
     687       22450 :     if (s->context) {
     688           0 :         for (i = 0; i < count; i++) {
     689           0 :             LOAD2;
     690           0 :             STAT2;
     691           0 :             WRITE2;
     692             :         }
     693             :     } else {
     694     3838500 :         for (i = 0; i < count; i++) {
     695     3816050 :             LOAD2;
     696     3816050 :             WRITE2;
     697             :         }
     698             :     }
     699       22450 :     return 0;
     700             : }
     701             : 
     702       89800 : static inline int encode_bgra_bitstream(HYuvContext *s, int count, int planes)
     703             : {
     704             :     int i;
     705             : 
     706      179600 :     if (s->pb.buf_end - s->pb.buf - (put_bits_count(&s->pb) >> 3) <
     707       89800 :         4 * planes * count) {
     708           0 :         av_log(s->avctx, AV_LOG_ERROR, "encoded frame too large\n");
     709           0 :         return -1;
     710             :     }
     711             : 
     712             : #define LOAD_GBRA                                                       \
     713             :     int g = s->temp[0][planes == 3 ? 3 * i + 1 : 4 * i + G];            \
     714             :     int b =(s->temp[0][planes == 3 ? 3 * i + 2 : 4 * i + B] - g) & 0xFF;\
     715             :     int r =(s->temp[0][planes == 3 ? 3 * i + 0 : 4 * i + R] - g) & 0xFF;\
     716             :     int a = s->temp[0][planes * i + A];
     717             : 
     718             : #define STAT_BGRA                                                       \
     719             :     s->stats[0][b]++;                                                   \
     720             :     s->stats[1][g]++;                                                   \
     721             :     s->stats[2][r]++;                                                   \
     722             :     if (planes == 4)                                                    \
     723             :         s->stats[2][a]++;
     724             : 
     725             : #define WRITE_GBRA                                                      \
     726             :     put_bits(&s->pb, s->len[1][g], s->bits[1][g]);                      \
     727             :     put_bits(&s->pb, s->len[0][b], s->bits[0][b]);                      \
     728             :     put_bits(&s->pb, s->len[2][r], s->bits[2][r]);                      \
     729             :     if (planes == 4)                                                    \
     730             :         put_bits(&s->pb, s->len[2][a], s->bits[2][a]);
     731             : 
     732       89800 :     if ((s->flags & AV_CODEC_FLAG_PASS1) &&
     733           0 :         (s->avctx->flags2 & AV_CODEC_FLAG2_NO_OUTPUT)) {
     734           0 :         for (i = 0; i < count; i++) {
     735           0 :             LOAD_GBRA;
     736           0 :             STAT_BGRA;
     737             :         }
     738       89800 :     } else if (s->context || (s->flags & AV_CODEC_FLAG_PASS1)) {
     739           0 :         for (i = 0; i < count; i++) {
     740           0 :             LOAD_GBRA;
     741           0 :             STAT_BGRA;
     742           0 :             WRITE_GBRA;
     743             :         }
     744             :     } else {
     745    30617800 :         for (i = 0; i < count; i++) {
     746    30528000 :             LOAD_GBRA;
     747    30528000 :             WRITE_GBRA;
     748             :         }
     749             :     }
     750       89800 :     return 0;
     751             : }
     752             : 
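LOAD_GBRA above applies the classic HuffYUV RGB decorrelation: green is coded as-is, while red and blue are coded as their modulo-256 difference from green, which concentrates the residuals near zero for typical content. A hedged sketch of the forward transform and its inverse on one pixel (names made up for illustration):

#include <stdint.h>

/* Illustrative only: green-based decorrelation as done by LOAD_GBRA, and its
 * inverse. Byte arithmetic wraps modulo 256, so the round trip is lossless. */
static void decorrelate_rgb(uint8_t *r, uint8_t *g, uint8_t *b)
{
    *b = (uint8_t)(*b - *g);   /* green itself is left untouched */
    *r = (uint8_t)(*r - *g);
}

static void recorrelate_rgb(uint8_t *r, uint8_t *g, uint8_t *b)
{
    *b = (uint8_t)(*b + *g);
    *r = (uint8_t)(*r + *g);
}
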
     753        1600 : static int encode_frame(AVCodecContext *avctx, AVPacket *pkt,
     754             :                         const AVFrame *pict, int *got_packet)
     755             : {
     756        1600 :     HYuvContext *s = avctx->priv_data;
     757        1600 :     const int width = s->width;
     758        1600 :     const int width2 = s->width>>1;
     759        1600 :     const int height = s->height;
     760        1600 :     const int fake_ystride = s->interlaced ? pict->linesize[0]*2  : pict->linesize[0];
     761        1600 :     const int fake_ustride = s->interlaced ? pict->linesize[1]*2  : pict->linesize[1];
     762        1600 :     const int fake_vstride = s->interlaced ? pict->linesize[2]*2  : pict->linesize[2];
     763        1600 :     const AVFrame * const p = pict;
     764        1600 :     int i, j, size = 0, ret;
     765             : 
     766        1600 :     if ((ret = ff_alloc_packet2(avctx, pkt, width * height * 3 * 4 + AV_INPUT_BUFFER_MIN_SIZE, 0)) < 0)
     767           0 :         return ret;
     768             : 
     769        1600 :     if (s->context) {
     770           0 :         size = store_huffman_tables(s, pkt->data);
     771           0 :         if (size < 0)
     772           0 :             return size;
     773             : 
     774           0 :         for (i = 0; i < 4; i++)
     775           0 :             for (j = 0; j < s->vlc_n; j++)
     776           0 :                 s->stats[i][j] >>= 1;
     777             :     }
     778             : 
     779        1600 :     init_put_bits(&s->pb, pkt->data + size, pkt->size - size);
     780             : 
     781        3000 :     if (avctx->pix_fmt == AV_PIX_FMT_YUV422P ||
     782        1800 :         avctx->pix_fmt == AV_PIX_FMT_YUV420P) {
     783             :         int lefty, leftu, leftv, y, cy;
     784             : 
     785         400 :         put_bits(&s->pb, 8, leftv = p->data[2][0]);
     786         400 :         put_bits(&s->pb, 8, lefty = p->data[0][1]);
     787         400 :         put_bits(&s->pb, 8, leftu = p->data[1][0]);
     788         400 :         put_bits(&s->pb, 8,         p->data[0][0]);
     789             : 
     790         400 :         lefty = sub_left_prediction(s, s->temp[0], p->data[0], width , 0);
     791         400 :         leftu = sub_left_prediction(s, s->temp[1], p->data[1], width2, 0);
     792         400 :         leftv = sub_left_prediction(s, s->temp[2], p->data[2], width2, 0);
     793             : 
     794         400 :         encode_422_bitstream(s, 2, width-2);
     795             : 
     796         400 :         if (s->predictor==MEDIAN) {
     797             :             int lefttopy, lefttopu, lefttopv;
     798           0 :             cy = y = 1;
     799           0 :             if (s->interlaced) {
     800           0 :                 lefty = sub_left_prediction(s, s->temp[0], p->data[0] + p->linesize[0], width , lefty);
     801           0 :                 leftu = sub_left_prediction(s, s->temp[1], p->data[1] + p->linesize[1], width2, leftu);
     802           0 :                 leftv = sub_left_prediction(s, s->temp[2], p->data[2] + p->linesize[2], width2, leftv);
     803             : 
     804           0 :                 encode_422_bitstream(s, 0, width);
     805           0 :                 y++; cy++;
     806             :             }
     807             : 
     808           0 :             lefty = sub_left_prediction(s, s->temp[0], p->data[0] + fake_ystride, 4, lefty);
     809           0 :             leftu = sub_left_prediction(s, s->temp[1], p->data[1] + fake_ustride, 2, leftu);
     810           0 :             leftv = sub_left_prediction(s, s->temp[2], p->data[2] + fake_vstride, 2, leftv);
     811             : 
     812           0 :             encode_422_bitstream(s, 0, 4);
     813             : 
     814           0 :             lefttopy = p->data[0][3];
     815           0 :             lefttopu = p->data[1][1];
     816           0 :             lefttopv = p->data[2][1];
     817           0 :             s->llvidencdsp.sub_median_pred(s->temp[0], p->data[0] + 4, p->data[0] + fake_ystride + 4, width  - 4, &lefty, &lefttopy);
     818           0 :             s->llvidencdsp.sub_median_pred(s->temp[1], p->data[1] + 2, p->data[1] + fake_ustride + 2, width2 - 2, &leftu, &lefttopu);
     819           0 :             s->llvidencdsp.sub_median_pred(s->temp[2], p->data[2] + 2, p->data[2] + fake_vstride + 2, width2 - 2, &leftv, &lefttopv);
     820           0 :             encode_422_bitstream(s, 0, width - 4);
     821           0 :             y++; cy++;
     822             : 
     823           0 :             for (; y < height; y++,cy++) {
     824             :                 uint8_t *ydst, *udst, *vdst;
     825             : 
     826           0 :                 if (s->bitstream_bpp == 12) {
     827           0 :                     while (2 * cy > y) {
     828           0 :                         ydst = p->data[0] + p->linesize[0] * y;
     829           0 :                         s->llvidencdsp.sub_median_pred(s->temp[0], ydst - fake_ystride, ydst, width, &lefty, &lefttopy);
     830           0 :                         encode_gray_bitstream(s, width);
     831           0 :                         y++;
     832             :                     }
     833           0 :                     if (y >= height) break;
     834             :                 }
     835           0 :                 ydst = p->data[0] + p->linesize[0] * y;
     836           0 :                 udst = p->data[1] + p->linesize[1] * cy;
     837           0 :                 vdst = p->data[2] + p->linesize[2] * cy;
     838             : 
     839           0 :                 s->llvidencdsp.sub_median_pred(s->temp[0], ydst - fake_ystride, ydst, width,  &lefty, &lefttopy);
     840           0 :                 s->llvidencdsp.sub_median_pred(s->temp[1], udst - fake_ustride, udst, width2, &leftu, &lefttopu);
     841           0 :                 s->llvidencdsp.sub_median_pred(s->temp[2], vdst - fake_vstride, vdst, width2, &leftv, &lefttopv);
     842             : 
     843           0 :                 encode_422_bitstream(s, 0, width);
     844             :             }
     845             :         } else {
     846       67350 :             for (cy = y = 1; y < height; y++, cy++) {
     847             :                 uint8_t *ydst, *udst, *vdst;
     848             : 
     849             :                 /* encode a luma only line & y++ */
     850       67150 :                 if (s->bitstream_bpp == 12) {
     851       22450 :                     ydst = p->data[0] + p->linesize[0] * y;
     852             : 
     853       22450 :                     if (s->predictor == PLANE && s->interlaced < y) {
     854           0 :                         s->llvidencdsp.diff_bytes(s->temp[1], ydst, ydst - fake_ystride, width);
     855             : 
     856           0 :                         lefty = sub_left_prediction(s, s->temp[0], s->temp[1], width , lefty);
     857             :                     } else {
     858       22450 :                         lefty = sub_left_prediction(s, s->temp[0], ydst, width , lefty);
     859             :                     }
     860       22450 :                     encode_gray_bitstream(s, width);
     861       22450 :                     y++;
     862       22450 :                     if (y >= height) break;
     863             :                 }
     864             : 
     865       66950 :                 ydst = p->data[0] + p->linesize[0] * y;
     866       66950 :                 udst = p->data[1] + p->linesize[1] * cy;
     867       66950 :                 vdst = p->data[2] + p->linesize[2] * cy;
     868             : 
     869       66950 :                 if (s->predictor == PLANE && s->interlaced < cy) {
     870           0 :                     s->llvidencdsp.diff_bytes(s->temp[1],          ydst, ydst - fake_ystride, width);
     871           0 :                     s->llvidencdsp.diff_bytes(s->temp[2],          udst, udst - fake_ustride, width2);
     872           0 :                     s->llvidencdsp.diff_bytes(s->temp[2] + width2, vdst, vdst - fake_vstride, width2);
     873             : 
     874           0 :                     lefty = sub_left_prediction(s, s->temp[0], s->temp[1], width , lefty);
     875           0 :                     leftu = sub_left_prediction(s, s->temp[1], s->temp[2], width2, leftu);
     876           0 :                     leftv = sub_left_prediction(s, s->temp[2], s->temp[2] + width2, width2, leftv);
     877             :                 } else {
     878       66950 :                     lefty = sub_left_prediction(s, s->temp[0], ydst, width , lefty);
     879       66950 :                     leftu = sub_left_prediction(s, s->temp[1], udst, width2, leftu);
     880       66950 :                     leftv = sub_left_prediction(s, s->temp[2], vdst, width2, leftv);
     881             :                 }
     882             : 
     883       66950 :                 encode_422_bitstream(s, 0, width);
     884             :             }
     885             :         }
     886        1200 :     } else if(avctx->pix_fmt == AV_PIX_FMT_RGB32) {
     887         200 :         uint8_t *data = p->data[0] + (height - 1) * p->linesize[0];
     888         200 :         const int stride = -p->linesize[0];
     889         200 :         const int fake_stride = -fake_ystride;
     890             :         int y;
     891             :         int leftr, leftg, leftb, lefta;
     892             : 
     893         200 :         put_bits(&s->pb, 8, lefta = data[A]);
     894         200 :         put_bits(&s->pb, 8, leftr = data[R]);
     895         200 :         put_bits(&s->pb, 8, leftg = data[G]);
     896         200 :         put_bits(&s->pb, 8, leftb = data[B]);
     897             : 
     898         200 :         sub_left_prediction_bgr32(s, s->temp[0], data + 4, width - 1,
     899             :                                   &leftr, &leftg, &leftb, &lefta);
     900         200 :         encode_bgra_bitstream(s, width - 1, 4);
     901             : 
     902       44900 :         for (y = 1; y < s->height; y++) {
     903       44700 :             uint8_t *dst = data + y*stride;
     904       44700 :             if (s->predictor == PLANE && s->interlaced < y) {
     905           0 :                 s->llvidencdsp.diff_bytes(s->temp[1], dst, dst - fake_stride, width * 4);
     906           0 :                 sub_left_prediction_bgr32(s, s->temp[0], s->temp[1], width,
     907             :                                           &leftr, &leftg, &leftb, &lefta);
     908             :             } else {
     909       44700 :                 sub_left_prediction_bgr32(s, s->temp[0], dst, width,
     910             :                                           &leftr, &leftg, &leftb, &lefta);
     911             :             }
     912       44700 :             encode_bgra_bitstream(s, width, 4);
     913             :         }
     914        1000 :     } else if (avctx->pix_fmt == AV_PIX_FMT_RGB24) {
     915         200 :         uint8_t *data = p->data[0] + (height - 1) * p->linesize[0];
     916         200 :         const int stride = -p->linesize[0];
     917         200 :         const int fake_stride = -fake_ystride;
     918             :         int y;
     919             :         int leftr, leftg, leftb;
     920             : 
     921         200 :         put_bits(&s->pb, 8, leftr = data[0]);
     922         200 :         put_bits(&s->pb, 8, leftg = data[1]);
     923         200 :         put_bits(&s->pb, 8, leftb = data[2]);
     924         200 :         put_bits(&s->pb, 8, 0);
     925             : 
     926         200 :         sub_left_prediction_rgb24(s, s->temp[0], data + 3, width - 1,
     927             :                                   &leftr, &leftg, &leftb);
     928         200 :         encode_bgra_bitstream(s, width-1, 3);
     929             : 
     930       44900 :         for (y = 1; y < s->height; y++) {
     931       44700 :             uint8_t *dst = data + y * stride;
     932       44700 :             if (s->predictor == PLANE && s->interlaced < y) {
     933           0 :                 s->llvidencdsp.diff_bytes(s->temp[1], dst, dst - fake_stride,
     934           0 :                                       width * 3);
     935           0 :                 sub_left_prediction_rgb24(s, s->temp[0], s->temp[1], width,
     936             :                                           &leftr, &leftg, &leftb);
     937             :             } else {
     938       44700 :                 sub_left_prediction_rgb24(s, s->temp[0], dst, width,
     939             :                                           &leftr, &leftg, &leftb);
     940             :             }
     941       44700 :             encode_bgra_bitstream(s, width, 3);
     942             :         }
     943         800 :     } else if (s->version > 2) {
     944             :         int plane;
     945        3200 :         for (plane = 0; plane < 1 + 2*s->chroma + s->alpha; plane++) {
     946             :             int left, y;
     947        2400 :             int w = width;
     948        2400 :             int h = height;
     949        2400 :             int fake_stride = fake_ystride;
     950             : 
     951        2400 :             if (s->chroma && (plane == 1 || plane == 2)) {
     952        1600 :                 w >>= s->chroma_h_shift;
     953        1600 :                 h >>= s->chroma_v_shift;
     954        1600 :                 fake_stride = plane == 1 ? fake_ustride : fake_vstride;
     955             :             }
     956             : 
     957        2400 :             left = sub_left_prediction(s, s->temp[0], p->data[plane], w , 0);
     958             : 
     959        2400 :             encode_plane_bitstream(s, w, plane);
     960             : 
     961        2400 :             if (s->predictor==MEDIAN) {
     962             :                 int lefttop;
     963           0 :                 y = 1;
     964           0 :                 if (s->interlaced) {
     965           0 :                     left = sub_left_prediction(s, s->temp[0], p->data[plane] + p->linesize[plane], w , left);
     966             : 
     967           0 :                     encode_plane_bitstream(s, w, plane);
     968           0 :                     y++;
     969             :                 }
     970             : 
     971           0 :                 lefttop = p->data[plane][0];
     972             : 
     973           0 :                 for (; y < h; y++) {
     974           0 :                     uint8_t *dst = p->data[plane] + p->linesize[plane] * y;
     975             : 
     976           0 :                     sub_median_prediction(s, s->temp[0], dst - fake_stride, dst, w , &left, &lefttop);
     977             : 
     978           0 :                     encode_plane_bitstream(s, w, plane);
     979             :                 }
     980             :             } else {
     981      493900 :                 for (y = 1; y < h; y++) {
     982      491500 :                     uint8_t *dst = p->data[plane] + p->linesize[plane] * y;
     983             : 
     984      491500 :                     if (s->predictor == PLANE && s->interlaced < y) {
     985      134100 :                         diff_bytes(s, s->temp[1], dst, dst - fake_stride, w);
     986             : 
     987      134100 :                         left = sub_left_prediction(s, s->temp[0], s->temp[1], w , left);
     988             :                     } else {
     989      357400 :                         left = sub_left_prediction(s, s->temp[0], dst, w , left);
     990             :                     }
     991             : 
     992      491500 :                     encode_plane_bitstream(s, w, plane);
     993             :                 }
     994             :             }
     995             :         }
     996             :     } else {
     997           0 :         av_log(avctx, AV_LOG_ERROR, "Format not supported!\n");
     998             :     }
     999        1600 :     emms_c();
    1000             : 
    1001        1600 :     size += (put_bits_count(&s->pb) + 31) / 8;
    1002        1600 :     put_bits(&s->pb, 16, 0);
    1003        1600 :     put_bits(&s->pb, 15, 0);
    1004        1600 :     size /= 4;
    1005             : 
    1006        1600 :     if ((s->flags & AV_CODEC_FLAG_PASS1) && (s->picture_number & 31) == 0) {
    1007             :         int j;
    1008           0 :         char *p = avctx->stats_out;
    1009           0 :         char *end = p + STATS_OUT_SIZE;
    1010           0 :         for (i = 0; i < 4; i++) {
    1011           0 :             for (j = 0; j < s->vlc_n; j++) {
    1012           0 :                 snprintf(p, end-p, "%"PRIu64" ", s->stats[i][j]);
    1013           0 :                 p += strlen(p);
    1014           0 :                 s->stats[i][j]= 0;
    1015             :             }
    1016           0 :             snprintf(p, end-p, "\n");
    1017           0 :             p++;
    1018           0 :             if (end <= p)
    1019           0 :                 return AVERROR(ENOMEM);
    1020             :         }
    1021        1600 :     } else if (avctx->stats_out)
    1022           0 :         avctx->stats_out[0] = '\0';
    1023        1600 :     if (!(s->avctx->flags2 & AV_CODEC_FLAG2_NO_OUTPUT)) {
    1024        1600 :         flush_put_bits(&s->pb);
    1025        1600 :         s->bdsp.bswap_buf((uint32_t *) pkt->data, (uint32_t *) pkt->data, size);
    1026             :     }
    1027             : 
    1028        1600 :     s->picture_number++;
    1029             : 
    1030        1600 :     pkt->size   = size * 4;
    1031        1600 :     pkt->flags |= AV_PKT_FLAG_KEY;
    1032        1600 :     *got_packet = 1;
    1033             : 
    1034        1600 :     return 0;
    1035             : }
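Note: the version > 2 path above reduces each plane row to prediction residuals before entropy coding. LEFT prediction subtracts the previous sample in the row, carrying the last value across rows; PLANE prediction first subtracts the row above via diff_bytes() and then left-predicts that difference; MEDIAN prediction subtracts the median of the left, top and left+top-topleft neighbours. A minimal scalar sketch of the LEFT and PLANE residuals for 8-bit samples follows; it is illustrative only and stands in for the sub_left_prediction()/diff_bytes() calls, which dispatch to SIMD routines in the encoder itself.

    #include <stdint.h>

    /* Illustrative helper (not part of huffyuvenc.c): left-predict one row,
     * returning the carried-over "left" value for the next row. */
    static int left_residuals_8bit(uint8_t *dst, const uint8_t *src, int w, int left)
    {
        for (int i = 0; i < w; i++) {
            const int cur = src[i];
            dst[i] = cur - left;   /* differences wrap modulo 256 */
            left   = cur;
        }
        return left;
    }

    /* Illustrative helper: PLANE prediction = vertical difference against the
     * row above, followed by a left pass over that difference. */
    static int plane_residuals_8bit(uint8_t *dst, uint8_t *tmp,
                                    const uint8_t *row, const uint8_t *row_above,
                                    int w, int left)
    {
        for (int i = 0; i < w; i++)
            tmp[i] = row[i] - row_above[i];
        return left_residuals_8bit(dst, tmp, w, left);
    }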
    1036             : 
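Note on the AV_CODEC_FLAG_PASS1 branch above: every 32nd frame the encoder appends its symbol statistics to avctx->stats_out as plain text, four lines of s->vlc_n space-separated decimal counters, and resets the counters afterwards. Feeding that text back through avctx->stats_in on a second pass lets the encoder derive Huffman tables matched to the material. A hedged sketch of consuming one such line (the helper is illustrative, not the parser used by the encoder):

    #include <stdint.h>
    #include <stdlib.h>

    /* Illustrative only: read n space-separated counters from one stats line. */
    static int parse_stats_line(const char *p, uint64_t *counts, int n)
    {
        for (int i = 0; i < n; i++) {
            char *end;
            counts[i] = strtoull(p, &end, 10);
            if (end == p)
                return -1;   /* malformed or truncated line */
            p = end;
        }
        return 0;
    }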
    1037          32 : static av_cold int encode_end(AVCodecContext *avctx)
    1038             : {
    1039          32 :     HYuvContext *s = avctx->priv_data;
    1040             : 
    1041          32 :     ff_huffyuv_common_end(s);
    1042             : 
    1043          32 :     av_freep(&avctx->extradata);
    1044          32 :     av_freep(&avctx->stats_out);
    1045             : 
    1046          32 :     return 0;
    1047             : }
    1048             : 
    1049             : #define OFFSET(x) offsetof(HYuvContext, x)
    1050             : #define VE AV_OPT_FLAG_VIDEO_PARAM | AV_OPT_FLAG_ENCODING_PARAM
    1051             : 
    1052             : #define COMMON_OPTIONS \
    1053             :     { "non_deterministic", "Allow multithreading for e.g. context=1 at the expense of determinism", \
    1054             :       OFFSET(non_determ), AV_OPT_TYPE_BOOL, { .i64 = 1 }, \
    1055             :       0, 1, VE }, \
    1056             :     { "pred", "Prediction method", OFFSET(predictor), AV_OPT_TYPE_INT, { .i64 = LEFT }, LEFT, MEDIAN, VE, "pred" }, \
    1057             :         { "left",   NULL, 0, AV_OPT_TYPE_CONST, { .i64 = LEFT },   INT_MIN, INT_MAX, VE, "pred" }, \
    1058             :         { "plane",  NULL, 0, AV_OPT_TYPE_CONST, { .i64 = PLANE },  INT_MIN, INT_MAX, VE, "pred" }, \
    1059             :         { "median", NULL, 0, AV_OPT_TYPE_CONST, { .i64 = MEDIAN }, INT_MIN, INT_MAX, VE, "pred" }, \
    1060             : 
    1061             : static const AVOption normal_options[] = {
    1062             :     COMMON_OPTIONS
    1063             :     { NULL },
    1064             : };
    1065             : 
    1066             : static const AVOption ff_options[] = {
    1067             :     COMMON_OPTIONS
    1068             :     { "context", "Set per-frame huffman tables", OFFSET(context), AV_OPT_TYPE_INT, { .i64 = 0 }, 0, 1, VE },
    1069             :     { NULL },
    1070             : };
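Because these AVOption tables are attached through each encoder's priv_class, the options are reachable both from the ffmpeg command line (e.g. -c:v ffvhuff -pred median -context 1) and through the generic AVOption/AVDictionary API. Note that "context" is only defined in ff_options, so per-frame Huffman tables are available on the ffvhuff encoder but not on plain huffyuv. A programmatic sketch is given after the codec definitions below.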
    1071             : 
    1072             : static const AVClass normal_class = {
    1073             :     .class_name = "huffyuv",
    1074             :     .item_name  = av_default_item_name,
    1075             :     .option     = normal_options,
    1076             :     .version    = LIBAVUTIL_VERSION_INT,
    1077             : };
    1078             : 
    1079             : static const AVClass ff_class = {
    1080             :     .class_name = "ffvhuff",
    1081             :     .item_name  = av_default_item_name,
    1082             :     .option     = ff_options,
    1083             :     .version    = LIBAVUTIL_VERSION_INT,
    1084             : };
    1085             : 
    1086             : AVCodec ff_huffyuv_encoder = {
    1087             :     .name           = "huffyuv",
    1088             :     .long_name      = NULL_IF_CONFIG_SMALL("Huffyuv / HuffYUV"),
    1089             :     .type           = AVMEDIA_TYPE_VIDEO,
    1090             :     .id             = AV_CODEC_ID_HUFFYUV,
    1091             :     .priv_data_size = sizeof(HYuvContext),
    1092             :     .init           = encode_init,
    1093             :     .encode2        = encode_frame,
    1094             :     .close          = encode_end,
    1095             :     .capabilities   = AV_CODEC_CAP_FRAME_THREADS | AV_CODEC_CAP_INTRA_ONLY,
    1096             :     .priv_class     = &normal_class,
    1097             :     .pix_fmts       = (const enum AVPixelFormat[]){
    1098             :         AV_PIX_FMT_YUV422P, AV_PIX_FMT_RGB24,
    1099             :         AV_PIX_FMT_RGB32, AV_PIX_FMT_NONE
    1100             :     },
    1101             :     .caps_internal  = FF_CODEC_CAP_INIT_THREADSAFE |
    1102             :                       FF_CODEC_CAP_INIT_CLEANUP,
    1103             : };
    1104             : 
    1105             : #if CONFIG_FFVHUFF_ENCODER
    1106             : AVCodec ff_ffvhuff_encoder = {
    1107             :     .name           = "ffvhuff",
    1108             :     .long_name      = NULL_IF_CONFIG_SMALL("Huffyuv FFmpeg variant"),
    1109             :     .type           = AVMEDIA_TYPE_VIDEO,
    1110             :     .id             = AV_CODEC_ID_FFVHUFF,
    1111             :     .priv_data_size = sizeof(HYuvContext),
    1112             :     .init           = encode_init,
    1113             :     .encode2        = encode_frame,
    1114             :     .close          = encode_end,
    1115             :     .capabilities   = AV_CODEC_CAP_FRAME_THREADS | AV_CODEC_CAP_INTRA_ONLY,
    1116             :     .priv_class     = &ff_class,
    1117             :     .pix_fmts       = (const enum AVPixelFormat[]){
    1118             :         AV_PIX_FMT_YUV420P, AV_PIX_FMT_YUV422P, AV_PIX_FMT_YUV444P, AV_PIX_FMT_YUV411P,
    1119             :         AV_PIX_FMT_YUV410P, AV_PIX_FMT_YUV440P,
    1120             :         AV_PIX_FMT_GBRP,
    1121             :         AV_PIX_FMT_GBRP9, AV_PIX_FMT_GBRP10, AV_PIX_FMT_GBRP12, AV_PIX_FMT_GBRP14,
    1122             :         AV_PIX_FMT_GRAY8, AV_PIX_FMT_GRAY16,
    1123             :         AV_PIX_FMT_YUVA420P, AV_PIX_FMT_YUVA422P, AV_PIX_FMT_YUVA444P,
    1124             :         AV_PIX_FMT_GBRAP,
    1125             :         AV_PIX_FMT_GRAY8A,
    1126             :         AV_PIX_FMT_YUV420P9, AV_PIX_FMT_YUV420P10, AV_PIX_FMT_YUV420P12, AV_PIX_FMT_YUV420P14, AV_PIX_FMT_YUV420P16,
    1127             :         AV_PIX_FMT_YUV422P9, AV_PIX_FMT_YUV422P10, AV_PIX_FMT_YUV422P12, AV_PIX_FMT_YUV422P14, AV_PIX_FMT_YUV422P16,
    1128             :         AV_PIX_FMT_YUV444P9, AV_PIX_FMT_YUV444P10, AV_PIX_FMT_YUV444P12, AV_PIX_FMT_YUV444P14, AV_PIX_FMT_YUV444P16,
    1129             :         AV_PIX_FMT_YUVA420P9, AV_PIX_FMT_YUVA420P10, AV_PIX_FMT_YUVA420P16,
    1130             :         AV_PIX_FMT_YUVA422P9, AV_PIX_FMT_YUVA422P10, AV_PIX_FMT_YUVA422P16,
    1131             :         AV_PIX_FMT_YUVA444P9, AV_PIX_FMT_YUVA444P10, AV_PIX_FMT_YUVA444P16,
    1132             :         AV_PIX_FMT_RGB24,
    1133             :         AV_PIX_FMT_RGB32, AV_PIX_FMT_NONE
    1134             :     },
    1135             :     .caps_internal  = FF_CODEC_CAP_INIT_THREADSAFE |
    1136             :                       FF_CODEC_CAP_INIT_CLEANUP,
    1137             : };
    1138             : #endif
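For completeness, a hedged sketch of opening the FFmpeg-variant encoder programmatically and passing the private options defined above through an options dictionary; the helper name and the chosen parameters are illustrative, not part of huffyuvenc.c:

    #include <libavcodec/avcodec.h>
    #include <libavutil/dict.h>

    /* Minimal sketch (error handling mostly omitted): open the ffvhuff encoder
     * with median prediction and per-frame Huffman tables. */
    static AVCodecContext *open_ffvhuff(int width, int height)
    {
        const AVCodec *codec = avcodec_find_encoder_by_name("ffvhuff");
        AVCodecContext *ctx;
        AVDictionary *opts = NULL;

        if (!codec)
            return NULL;

        ctx = avcodec_alloc_context3(codec);
        if (!ctx)
            return NULL;

        ctx->width     = width;
        ctx->height    = height;
        ctx->pix_fmt   = AV_PIX_FMT_YUV422P;      /* one of the pix_fmts listed above */
        ctx->time_base = (AVRational){ 1, 25 };

        av_dict_set(&opts, "pred", "median", 0);  /* private options from ff_options */
        av_dict_set(&opts, "context", "1", 0);

        if (avcodec_open2(ctx, codec, &opts) < 0) {
            avcodec_free_context(&ctx);
            ctx = NULL;
        }
        av_dict_free(&opts);
        return ctx;
    }

Passing the options to avcodec_open2() is the usual way such private options reach the encoder's private context; entries the encoder does not recognize are left in the dictionary for the caller to inspect.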

Generated by: LCOV version 1.13