FFmpeg  3.4.9
utvideodec.c
1 /*
2  * Ut Video decoder
3  * Copyright (c) 2011 Konstantin Shishkov
4  *
5  * This file is part of FFmpeg.
6  *
7  * FFmpeg is free software; you can redistribute it and/or
8  * modify it under the terms of the GNU Lesser General Public
9  * License as published by the Free Software Foundation; either
10  * version 2.1 of the License, or (at your option) any later version.
11  *
12  * FFmpeg is distributed in the hope that it will be useful,
13  * but WITHOUT ANY WARRANTY; without even the implied warranty of
14  * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
15  * Lesser General Public License for more details.
16  *
17  * You should have received a copy of the GNU Lesser General Public
18  * License along with FFmpeg; if not, write to the Free Software
19  * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
20  */
21 
22 /**
23  * @file
24  * Ut Video decoder
25  */
26 
27 #include <inttypes.h>
28 #include <stdlib.h>
29 
30 #define UNCHECKED_BITSTREAM_READER 1
31 
32 #include "libavutil/intreadwrite.h"
33 #include "libavutil/pixdesc.h"
34 #include "avcodec.h"
35 #include "bswapdsp.h"
36 #include "bytestream.h"
37 #include "get_bits.h"
38 #include "internal.h"
39 #include "thread.h"
40 #include "utvideo.h"
41 
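/*
 * Descriptive note: build_huff10() builds a canonical Huffman decoding table
 * for 10-bit symbols from the 1024 code lengths in the plane header. Entries
 * are sorted by ascending code length; a zero-length first entry means the
 * whole plane uses a single symbol, which is returned through *fsym instead
 * of a table. Unused symbols carry length 255 and are skipped. Codes are
 * assigned from the longest code upwards, accumulated MSB-justified in a
 * 32-bit word, and handed to ff_init_vlc_sparse().
 */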
42 static int build_huff10(const uint8_t *src, VLC *vlc, int *fsym)
43 {
44  int i;
45  HuffEntry he[1024];
46  int last;
47  uint32_t codes[1024];
48  uint8_t bits[1024];
49  uint16_t syms[1024];
50  uint32_t code;
51 
52  *fsym = -1;
53  for (i = 0; i < 1024; i++) {
54  he[i].sym = i;
55  he[i].len = *src++;
56  }
57  qsort(he, 1024, sizeof(*he), ff_ut10_huff_cmp_len);
58 
59  if (!he[0].len) {
60  *fsym = he[0].sym;
61  return 0;
62  }
63 
64  last = 1023;
65  while (he[last].len == 255 && last)
66  last--;
67 
68  if (he[last].len > 32) {
69  return -1;
70  }
71 
72  code = 1;
73  for (i = last; i >= 0; i--) {
74  codes[i] = code >> (32 - he[i].len);
75  bits[i] = he[i].len;
76  syms[i] = he[i].sym;
77  code += 0x80000000u >> (he[i].len - 1);
78  }
79 #define VLC_BITS 11
80  return ff_init_vlc_sparse(vlc, VLC_BITS, last + 1,
81  bits, sizeof(*bits), sizeof(*bits),
82  codes, sizeof(*codes), sizeof(*codes),
83  syms, sizeof(*syms), sizeof(*syms), 0);
84 }
85 
86 static int build_huff(const uint8_t *src, VLC *vlc, int *fsym)
87 {
88  int i;
89  HuffEntry he[256];
90  int last;
91  uint32_t codes[256];
92  uint8_t bits[256];
93  uint8_t syms[256];
94  uint32_t code;
95 
96  *fsym = -1;
97  for (i = 0; i < 256; i++) {
98  he[i].sym = i;
99  he[i].len = *src++;
100  }
101  qsort(he, 256, sizeof(*he), ff_ut_huff_cmp_len);
102 
103  if (!he[0].len) {
104  *fsym = he[0].sym;
105  return 0;
106  }
107 
108  last = 255;
109  while (he[last].len == 255 && last)
110  last--;
111 
112  if (he[last].len > 32)
113  return -1;
114 
115  code = 1;
116  for (i = last; i >= 0; i--) {
117  codes[i] = code >> (32 - he[i].len);
118  bits[i] = he[i].len;
119  syms[i] = he[i].sym;
120  code += 0x80000000u >> (he[i].len - 1);
121  }
122 
123  return ff_init_vlc_sparse(vlc, VLC_BITS, last + 1,
124  bits, sizeof(*bits), sizeof(*bits),
125  codes, sizeof(*codes), sizeof(*codes),
126  syms, sizeof(*syms), sizeof(*syms), 0);
127 }
128 
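/*
 * Descriptive note: decode_plane10() decodes one 10-bit plane. 'src' starts
 * with c->slices little-endian 32-bit slice end offsets followed by the
 * compressed slice data; 'huff' points to the 1024 code lengths for this
 * plane. Each slice is byte-swapped into c->slice_bits before bit-reading.
 * With use_pred set, samples are left-predicted with a running value that
 * starts at 0x200 and wraps to 10 bits.
 */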
129 static int decode_plane10(UtvideoContext *c, int plane_no,
130  uint16_t *dst, int step, ptrdiff_t stride,
131  int width, int height,
132  const uint8_t *src, const uint8_t *huff,
133  int use_pred)
134 {
135  int i, j, slice, pix, ret;
136  int sstart, send;
137  VLC vlc;
138  GetBitContext gb;
139  int prev, fsym;
140 
141  if ((ret = build_huff10(huff, &vlc, &fsym)) < 0) {
142  av_log(c->avctx, AV_LOG_ERROR, "Cannot build Huffman codes\n");
143  return ret;
144  }
145  if (fsym >= 0) { // build_huff10 reported a symbol to fill slices with
146  send = 0;
147  for (slice = 0; slice < c->slices; slice++) {
148  uint16_t *dest;
149 
150  sstart = send;
151  send = (height * (slice + 1) / c->slices);
152  dest = dst + sstart * stride;
153 
154  prev = 0x200;
155  for (j = sstart; j < send; j++) {
156  for (i = 0; i < width * step; i += step) {
157  pix = fsym;
158  if (use_pred) {
159  prev += pix;
160  prev &= 0x3FF;
161  pix = prev;
162  }
163  dest[i] = pix;
164  }
165  dest += stride;
166  }
167  }
168  return 0;
169  }
170 
171  send = 0;
172  for (slice = 0; slice < c->slices; slice++) {
173  uint16_t *dest;
174  int slice_data_start, slice_data_end, slice_size;
175 
176  sstart = send;
177  send = (height * (slice + 1) / c->slices);
178  dest = dst + sstart * stride;
179 
180  // slice offset and size validation was done earlier
181  slice_data_start = slice ? AV_RL32(src + slice * 4 - 4) : 0;
182  slice_data_end = AV_RL32(src + slice * 4);
183  slice_size = slice_data_end - slice_data_start;
184 
185  if (!slice_size) {
186  av_log(c->avctx, AV_LOG_ERROR, "Plane has more than one symbol "
187  "yet a slice has a length of zero.\n");
188  goto fail;
189  }
190 
191  memset(c->slice_bits + slice_size, 0, AV_INPUT_BUFFER_PADDING_SIZE);
192  c->bdsp.bswap_buf((uint32_t *) c->slice_bits,
193  (uint32_t *)(src + slice_data_start + c->slices * 4),
194  (slice_data_end - slice_data_start + 3) >> 2);
195  init_get_bits(&gb, c->slice_bits, slice_size * 8);
196 
197  prev = 0x200;
198  for (j = sstart; j < send; j++) {
199  int ws = width * step;
200  for (i = 0; i < ws; i += step) {
201  pix = get_vlc2(&gb, vlc.table, VLC_BITS, 3);
202  if (pix < 0) {
203  av_log(c->avctx, AV_LOG_ERROR, "Decoding error\n");
204  goto fail;
205  }
206  if (use_pred) {
207  prev += pix;
208  prev &= 0x3FF;
209  pix = prev;
210  }
211  dest[i] = pix;
212  }
213  dest += stride;
214  if (get_bits_left(&gb) < 0) {
215  av_log(c->avctx, AV_LOG_ERROR,
216  "Slice decoding ran out of bits\n");
217  goto fail;
218  }
219  }
220  if (get_bits_left(&gb) > 32)
221  av_log(c->avctx, AV_LOG_WARNING,
222  "%d bits left after decoding slice\n", get_bits_left(&gb));
223  }
224 
225  ff_free_vlc(&vlc);
226 
227  return 0;
228 fail:
229  ff_free_vlc(&vlc);
230  return AVERROR_INVALIDDATA;
231 }
232 
233 static int decode_plane(UtvideoContext *c, int plane_no,
234  uint8_t *dst, int step, ptrdiff_t stride,
235  int width, int height,
236  const uint8_t *src, int use_pred)
237 {
238  int i, j, slice, pix;
239  int sstart, send;
240  VLC vlc;
241  GetBitContext gb;
242  int prev, fsym;
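 /* Descriptive note: cmask rounds slice boundary rows down so slices stay
  * aligned: to even rows for progressive 4:2:0 luma or any interlaced plane,
  * and to multiples of four for interlaced 4:2:0 luma. */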
243  const int cmask = c->interlaced ? ~(1 + 2 * (!plane_no && c->avctx->pix_fmt == AV_PIX_FMT_YUV420P)) : ~(!plane_no && c->avctx->pix_fmt == AV_PIX_FMT_YUV420P);
244 
245  if (build_huff(src, &vlc, &fsym)) {
246  av_log(c->avctx, AV_LOG_ERROR, "Cannot build Huffman codes\n");
247  return AVERROR_INVALIDDATA;
248  }
249  if (fsym >= 0) { // build_huff reported a symbol to fill slices with
250  send = 0;
251  for (slice = 0; slice < c->slices; slice++) {
252  uint8_t *dest;
253 
254  sstart = send;
255  send = (height * (slice + 1) / c->slices) & cmask;
256  dest = dst + sstart * stride;
257 
258  prev = 0x80;
259  for (j = sstart; j < send; j++) {
260  for (i = 0; i < width * step; i += step) {
261  pix = fsym;
262  if (use_pred) {
263  prev += (unsigned)pix;
264  pix = prev;
265  }
266  dest[i] = pix;
267  }
268  dest += stride;
269  }
270  }
271  return 0;
272  }
273 
274  src += 256;
275 
276  send = 0;
277  for (slice = 0; slice < c->slices; slice++) {
278  uint8_t *dest;
279  int slice_data_start, slice_data_end, slice_size;
280 
281  sstart = send;
282  send = (height * (slice + 1) / c->slices) & cmask;
283  dest = dst + sstart * stride;
284 
285  // slice offset and size validation was done earlier
286  slice_data_start = slice ? AV_RL32(src + slice * 4 - 4) : 0;
287  slice_data_end = AV_RL32(src + slice * 4);
288  slice_size = slice_data_end - slice_data_start;
289 
290  if (!slice_size) {
291  av_log(c->avctx, AV_LOG_ERROR, "Plane has more than one symbol "
292  "yet a slice has a length of zero.\n");
293  goto fail;
294  }
295 
296  memset(c->slice_bits + slice_size, 0, AV_INPUT_BUFFER_PADDING_SIZE);
297  c->bdsp.bswap_buf((uint32_t *) c->slice_bits,
298  (uint32_t *)(src + slice_data_start + c->slices * 4),
299  (slice_data_end - slice_data_start + 3) >> 2);
300  init_get_bits(&gb, c->slice_bits, slice_size * 8);
301 
302  prev = 0x80;
303  for (j = sstart; j < send; j++) {
304  int ws = width * step;
305  for (i = 0; i < ws; i += step) {
306  pix = get_vlc2(&gb, vlc.table, VLC_BITS, 3);
307  if (pix < 0) {
308  av_log(c->avctx, AV_LOG_ERROR, "Decoding error\n");
309  goto fail;
310  }
311  if (use_pred) {
312  prev += pix;
313  pix = prev;
314  }
315  dest[i] = pix;
316  }
317  if (get_bits_left(&gb) < 0) {
318  av_log(c->avctx, AV_LOG_ERROR,
319  "Slice decoding ran out of bits\n");
320  goto fail;
321  }
322  dest += stride;
323  }
324  if (get_bits_left(&gb) > 32)
325  av_log(c->avctx, AV_LOG_WARNING,
326  "%d bits left after decoding slice\n", get_bits_left(&gb));
327  }
328 
329  ff_free_vlc(&vlc);
330 
331  return 0;
332 fail:
333  ff_free_vlc(&vlc);
334  return AVERROR_INVALIDDATA;
335 }
336 
337 #undef A
338 #undef B
339 #undef C
340 
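/*
 * Descriptive note: restore_median_planar() undoes median prediction for a
 * planar 8-bit plane, slice by slice. Row 0 of a slice is left-predicted
 * (with a 0x80 bias on the first sample), row 1 predicts its first sample
 * from the top neighbour and the rest from the median of left, top and
 * left + top - topleft, and all remaining rows use llviddsp.add_median_pred().
 */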
341 static void restore_median_planar(UtvideoContext *c, uint8_t *src, ptrdiff_t stride,
342  int width, int height, int slices, int rmode)
343 {
344  int i, j, slice;
345  int A, B, C;
346  uint8_t *bsrc;
347  int slice_start, slice_height;
348  const int cmask = ~rmode;
349 
350  for (slice = 0; slice < slices; slice++) {
351  slice_start = ((slice * height) / slices) & cmask;
352  slice_height = ((((slice + 1) * height) / slices) & cmask) -
353  slice_start;
354 
355  if (!slice_height)
356  continue;
357  bsrc = src + slice_start * stride;
358 
359  // first line - left neighbour prediction
360  bsrc[0] += 0x80;
361  c->llviddsp.add_left_pred(bsrc, bsrc, width, 0);
362  bsrc += stride;
363  if (slice_height <= 1)
364  continue;
365  // second line - first element has top prediction, the rest uses median
366  C = bsrc[-stride];
367  bsrc[0] += C;
368  A = bsrc[0];
369  for (i = 1; i < width; i++) {
370  B = bsrc[i - stride];
371  bsrc[i] += mid_pred(A, B, (uint8_t)(A + B - C));
372  C = B;
373  A = bsrc[i];
374  }
375  bsrc += stride;
376  // the rest of lines use continuous median prediction
377  for (j = 2; j < slice_height; j++) {
378  c->llviddsp.add_median_pred(bsrc, bsrc - stride,
379  bsrc, width, &A, &B);
380  bsrc += stride;
381  }
382  }
383 }
384 
385 /* UtVideo interlaced mode treats every two lines as a single one,
386  * so the restoring function has to take care of possible padding between
387  * the two parts of the same "line".
388  */
389 static void restore_median_planar_il(UtvideoContext *c, uint8_t *src, ptrdiff_t stride,
390  int width, int height, int slices, int rmode)
391 {
392  int i, j, slice;
393  int A, B, C;
394  uint8_t *bsrc;
395  int slice_start, slice_height;
396  const int cmask = ~(rmode ? 3 : 1);
397  const ptrdiff_t stride2 = stride << 1;
398 
399  for (slice = 0; slice < slices; slice++) {
400  slice_start = ((slice * height) / slices) & cmask;
401  slice_height = ((((slice + 1) * height) / slices) & cmask) -
402  slice_start;
403  slice_height >>= 1;
404  if (!slice_height)
405  continue;
406 
407  bsrc = src + slice_start * stride;
408 
409  // first line - left neighbour prediction
410  bsrc[0] += 0x80;
411  A = c->llviddsp.add_left_pred(bsrc, bsrc, width, 0);
412  c->llviddsp.add_left_pred(bsrc + stride, bsrc + stride, width, A);
413  bsrc += stride2;
414  if (slice_height <= 1)
415  continue;
416  // second line - first element has top prediction, the rest uses median
417  C = bsrc[-stride2];
418  bsrc[0] += C;
419  A = bsrc[0];
420  for (i = 1; i < width; i++) {
421  B = bsrc[i - stride2];
422  bsrc[i] += mid_pred(A, B, (uint8_t)(A + B - C));
423  C = B;
424  A = bsrc[i];
425  }
426  c->llviddsp.add_median_pred(bsrc + stride, bsrc - stride,
427  bsrc + stride, width, &A, &B);
428  bsrc += stride2;
429  // the rest of lines use continuous median prediction
430  for (j = 2; j < slice_height; j++) {
431  c->llviddsp.add_median_pred(bsrc, bsrc - stride2,
432  bsrc, width, &A, &B);
433  c->llviddsp.add_median_pred(bsrc + stride, bsrc - stride,
434  bsrc + stride, width, &A, &B);
435  bsrc += stride2;
436  }
437  }
438 }
439 
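/*
 * Descriptive note: restore_gradient_planar() undoes gradient prediction for
 * a planar 8-bit plane. After the left-predicted first row of each slice,
 * every sample adds the predictor top + left - topleft (A - B + C in the
 * code below), with the first sample of a row using only its top neighbour.
 */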
440 static void restore_gradient_planar(UtvideoContext *c, uint8_t *src, ptrdiff_t stride,
441  int width, int height, int slices, int rmode)
442 {
443  int i, j, slice;
444  int A, B, C;
445  uint8_t *bsrc;
446  int slice_start, slice_height;
447  const int cmask = ~rmode;
448 
449  for (slice = 0; slice < slices; slice++) {
450  slice_start = ((slice * height) / slices) & cmask;
451  slice_height = ((((slice + 1) * height) / slices) & cmask) -
452  slice_start;
453 
454  if (!slice_height)
455  continue;
456  bsrc = src + slice_start * stride;
457 
458  // first line - left neighbour prediction
459  bsrc[0] += 0x80;
460  c->llviddsp.add_left_pred(bsrc, bsrc, width, 0);
461  bsrc += stride;
462  if (slice_height <= 1)
463  continue;
464  for (j = 1; j < slice_height; j++) {
465  // second line - first element has top prediction, the rest uses gradient
466  bsrc[0] = (bsrc[0] + bsrc[-stride]) & 0xFF;
467  for (i = 1; i < width; i++) {
468  A = bsrc[i - stride];
469  B = bsrc[i - (stride + 1)];
470  C = bsrc[i - 1];
471  bsrc[i] = (A - B + C + bsrc[i]) & 0xFF;
472  }
473  bsrc += stride;
474  }
475  }
476 }
477 
478 static void restore_gradient_planar_il(UtvideoContext *c, uint8_t *src, ptrdiff_t stride,
479  int width, int height, int slices, int rmode)
480 {
481  int i, j, slice;
482  int A, B, C;
483  uint8_t *bsrc;
484  int slice_start, slice_height;
485  const int cmask = ~(rmode ? 3 : 1);
486  const ptrdiff_t stride2 = stride << 1;
487 
488  for (slice = 0; slice < slices; slice++) {
489  slice_start = ((slice * height) / slices) & cmask;
490  slice_height = ((((slice + 1) * height) / slices) & cmask) -
491  slice_start;
492  slice_height >>= 1;
493  if (!slice_height)
494  continue;
495 
496  bsrc = src + slice_start * stride;
497 
498  // first line - left neighbour prediction
499  bsrc[0] += 0x80;
500  A = c->llviddsp.add_left_pred(bsrc, bsrc, width, 0);
501  c->llviddsp.add_left_pred(bsrc + stride, bsrc + stride, width, A);
502  bsrc += stride2;
503  if (slice_height <= 1)
504  continue;
505  for (j = 1; j < slice_height; j++) {
506  // second line - first element has top prediction, the rest uses gradient
507  bsrc[0] = (bsrc[0] + bsrc[-stride2]) & 0xFF;
508  for (i = 1; i < width; i++) {
509  A = bsrc[i - stride2];
510  B = bsrc[i - (stride2 + 1)];
511  C = bsrc[i - 1];
512  bsrc[i] = (A - B + C + bsrc[i]) & 0xFF;
513  }
514  A = bsrc[-stride];
515  B = bsrc[-(1 + stride + stride - width)];
516  C = bsrc[width - 1];
517  bsrc[stride] = (A - B + C + bsrc[stride]) & 0xFF;
518  for (i = 1; i < width; i++) {
519  A = bsrc[i - stride];
520  B = bsrc[i - (1 + stride)];
521  C = bsrc[i - 1 + stride];
522  bsrc[i + stride] = (A - B + C + bsrc[i + stride]) & 0xFF;
523  }
524  bsrc += stride2;
525  }
526  }
527 }
528 
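/*
 * Descriptive note on the frame layout parsed below: the original format
 * stores, per plane, a 256-byte table of Huffman code lengths, then one
 * little-endian 32-bit end offset per slice, then the slice data, with a
 * 32-bit frame information word after the last plane. The "pro" (10-bit)
 * variant puts the frame information word first and appends a 1024-byte
 * length table after each plane's slice data instead.
 */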
529 static int decode_frame(AVCodecContext *avctx, void *data, int *got_frame,
530  AVPacket *avpkt)
531 {
532  const uint8_t *buf = avpkt->data;
533  int buf_size = avpkt->size;
534  UtvideoContext *c = avctx->priv_data;
535  int i, j;
536  const uint8_t *plane_start[5];
537  int plane_size, max_slice_size = 0, slice_start, slice_end, slice_size;
538  int ret;
539  GetByteContext gb;
540  ThreadFrame frame = { .f = data };
541 
542  if ((ret = ff_thread_get_buffer(avctx, &frame, 0)) < 0)
543  return ret;
544 
545  /* parse plane structure to get frame flags and validate slice offsets */
546  bytestream2_init(&gb, buf, buf_size);
547  if (c->pro) {
548  if (bytestream2_get_bytes_left(&gb) < c->frame_info_size) {
549  av_log(avctx, AV_LOG_ERROR, "Not enough data for frame information\n");
550  return AVERROR_INVALIDDATA;
551  }
552  c->frame_info = bytestream2_get_le32u(&gb);
553  c->slices = ((c->frame_info >> 16) & 0xff) + 1;
554  for (i = 0; i < c->planes; i++) {
555  plane_start[i] = gb.buffer;
556  if (bytestream2_get_bytes_left(&gb) < 1024 + 4 * c->slices) {
557  av_log(avctx, AV_LOG_ERROR, "Insufficient data for a plane\n");
558  return AVERROR_INVALIDDATA;
559  }
560  slice_start = 0;
561  slice_end = 0;
562  for (j = 0; j < c->slices; j++) {
563  slice_end = bytestream2_get_le32u(&gb);
564  if (slice_end < 0 || slice_end < slice_start ||
565  bytestream2_get_bytes_left(&gb) < slice_end + 1024LL) {
566  av_log(avctx, AV_LOG_ERROR, "Incorrect slice size\n");
567  return AVERROR_INVALIDDATA;
568  }
569  slice_size = slice_end - slice_start;
570  slice_start = slice_end;
571  max_slice_size = FFMAX(max_slice_size, slice_size);
572  }
573  plane_size = slice_end;
574  bytestream2_skipu(&gb, plane_size);
575  bytestream2_skipu(&gb, 1024);
576  }
577  plane_start[c->planes] = gb.buffer;
578  } else {
579  for (i = 0; i < c->planes; i++) {
580  plane_start[i] = gb.buffer;
581  if (bytestream2_get_bytes_left(&gb) < 256 + 4 * c->slices) {
582  av_log(avctx, AV_LOG_ERROR, "Insufficient data for a plane\n");
583  return AVERROR_INVALIDDATA;
584  }
585  bytestream2_skipu(&gb, 256);
586  slice_start = 0;
587  slice_end = 0;
588  for (j = 0; j < c->slices; j++) {
589  slice_end = bytestream2_get_le32u(&gb);
590  if (slice_end < 0 || slice_end < slice_start ||
591  bytestream2_get_bytes_left(&gb) < slice_end) {
592  av_log(avctx, AV_LOG_ERROR, "Incorrect slice size\n");
593  return AVERROR_INVALIDDATA;
594  }
595  slice_size = slice_end - slice_start;
596  slice_start = slice_end;
597  max_slice_size = FFMAX(max_slice_size, slice_size);
598  }
599  plane_size = slice_end;
600  bytestream2_skipu(&gb, plane_size);
601  }
602  plane_start[c->planes] = gb.buffer;
603  if (bytestream2_get_bytes_left(&gb) < c->frame_info_size) {
604  av_log(avctx, AV_LOG_ERROR, "Not enough data for frame information\n");
605  return AVERROR_INVALIDDATA;
606  }
607  c->frame_info = bytestream2_get_le32u(&gb);
608  }
609  av_log(avctx, AV_LOG_DEBUG, "frame information flags %"PRIX32"\n",
610  c->frame_info);
611 
612  c->frame_pred = (c->frame_info >> 8) & 3;
613 
614  max_slice_size += 4*avctx->width;
615 
616  av_fast_malloc(&c->slice_bits, &c->slice_bits_size,
617  max_slice_size + AV_INPUT_BUFFER_PADDING_SIZE);
618 
619  if (!c->slice_bits) {
620  av_log(avctx, AV_LOG_ERROR, "Cannot allocate temporary buffer\n");
621  return AVERROR(ENOMEM);
622  }
623 
624  switch (c->avctx->pix_fmt) {
625  case AV_PIX_FMT_GBRP:
626  case AV_PIX_FMT_GBRAP:
627  for (i = 0; i < c->planes; i++) {
628  ret = decode_plane(c, i, frame.f->data[i], 1,
629  frame.f->linesize[i], avctx->width,
630  avctx->height, plane_start[i],
631  c->frame_pred == PRED_LEFT);
632  if (ret)
633  return ret;
634  if (c->frame_pred == PRED_MEDIAN) {
635  if (!c->interlaced) {
636  restore_median_planar(c, frame.f->data[i],
637  frame.f->linesize[i], avctx->width,
638  avctx->height, c->slices, 0);
639  } else {
640  restore_median_planar_il(c, frame.f->data[i],
641  frame.f->linesize[i],
642  avctx->width, avctx->height, c->slices,
643  0);
644  }
645  } else if (c->frame_pred == PRED_GRADIENT) {
646  if (!c->interlaced) {
647  restore_gradient_planar(c, frame.f->data[i],
648  frame.f->linesize[i], avctx->width,
649  avctx->height, c->slices, 0);
650  } else {
651  restore_gradient_planar_il(c, frame.f->data[i],
652  frame.f->linesize[i],
653  avctx->width, avctx->height, c->slices,
654  0);
655  }
656  }
657  }
658  c->utdsp.restore_rgb_planes(frame.f->data[2], frame.f->data[0], frame.f->data[1],
659  frame.f->linesize[2], frame.f->linesize[0], frame.f->linesize[1],
660  avctx->width, avctx->height);
661  break;
662  case AV_PIX_FMT_GBRAP10:
663  case AV_PIX_FMT_GBRP10:
664  for (i = 0; i < c->planes; i++) {
665  ret = decode_plane10(c, i, (uint16_t *)frame.f->data[i], 1,
666  frame.f->linesize[i] / 2, avctx->width,
667  avctx->height, plane_start[i],
668  plane_start[i + 1] - 1024,
669  c->frame_pred == PRED_LEFT);
670  if (ret)
671  return ret;
672  }
673  c->utdsp.restore_rgb_planes10((uint16_t *)frame.f->data[2], (uint16_t *)frame.f->data[0], (uint16_t *)frame.f->data[1],
674  frame.f->linesize[2] / 2, frame.f->linesize[0] / 2, frame.f->linesize[1] / 2,
675  avctx->width, avctx->height);
676  break;
677  case AV_PIX_FMT_YUV420P:
678  for (i = 0; i < 3; i++) {
679  ret = decode_plane(c, i, frame.f->data[i], 1, frame.f->linesize[i],
680  avctx->width >> !!i, avctx->height >> !!i,
681  plane_start[i], c->frame_pred == PRED_LEFT);
682  if (ret)
683  return ret;
684  if (c->frame_pred == PRED_MEDIAN) {
685  if (!c->interlaced) {
686  restore_median_planar(c, frame.f->data[i], frame.f->linesize[i],
687  avctx->width >> !!i, avctx->height >> !!i,
688  c->slices, !i);
689  } else {
690  restore_median_planar_il(c, frame.f->data[i], frame.f->linesize[i],
691  avctx->width >> !!i,
692  avctx->height >> !!i,
693  c->slices, !i);
694  }
695  } else if (c->frame_pred == PRED_GRADIENT) {
696  if (!c->interlaced) {
697  restore_gradient_planar(c, frame.f->data[i], frame.f->linesize[i],
698  avctx->width >> !!i, avctx->height >> !!i,
699  c->slices, !i);
700  } else {
701  restore_gradient_planar_il(c, frame.f->data[i], frame.f->linesize[i],
702  avctx->width >> !!i,
703  avctx->height >> !!i,
704  c->slices, !i);
705  }
706  }
707  }
708  break;
709  case AV_PIX_FMT_YUV422P:
710  for (i = 0; i < 3; i++) {
711  ret = decode_plane(c, i, frame.f->data[i], 1, frame.f->linesize[i],
712  avctx->width >> !!i, avctx->height,
713  plane_start[i], c->frame_pred == PRED_LEFT);
714  if (ret)
715  return ret;
716  if (c->frame_pred == PRED_MEDIAN) {
717  if (!c->interlaced) {
718  restore_median_planar(c, frame.f->data[i], frame.f->linesize[i],
719  avctx->width >> !!i, avctx->height,
720  c->slices, 0);
721  } else {
722  restore_median_planar_il(c, frame.f->data[i], frame.f->linesize[i],
723  avctx->width >> !!i, avctx->height,
724  c->slices, 0);
725  }
726  } else if (c->frame_pred == PRED_GRADIENT) {
727  if (!c->interlaced) {
728  restore_gradient_planar(c, frame.f->data[i], frame.f->linesize[i],
729  avctx->width >> !!i, avctx->height,
730  c->slices, 0);
731  } else {
732  restore_gradient_planar_il(c, frame.f->data[i], frame.f->linesize[i],
733  avctx->width >> !!i, avctx->height,
734  c->slices, 0);
735  }
736  }
737  }
738  break;
739  case AV_PIX_FMT_YUV444P:
740  for (i = 0; i < 3; i++) {
741  ret = decode_plane(c, i, frame.f->data[i], 1, frame.f->linesize[i],
742  avctx->width, avctx->height,
743  plane_start[i], c->frame_pred == PRED_LEFT);
744  if (ret)
745  return ret;
746  if (c->frame_pred == PRED_MEDIAN) {
747  if (!c->interlaced) {
748  restore_median_planar(c, frame.f->data[i], frame.f->linesize[i],
749  avctx->width, avctx->height,
750  c->slices, 0);
751  } else {
752  restore_median_planar_il(c, frame.f->data[i], frame.f->linesize[i],
753  avctx->width, avctx->height,
754  c->slices, 0);
755  }
756  } else if (c->frame_pred == PRED_GRADIENT) {
757  if (!c->interlaced) {
758  restore_gradient_planar(c, frame.f->data[i], frame.f->linesize[i],
759  avctx->width, avctx->height,
760  c->slices, 0);
761  } else {
762  restore_gradient_planar_il(c, frame.f->data[i], frame.f->linesize[i],
763  avctx->width, avctx->height,
764  c->slices, 0);
765  }
766  }
767  }
768  break;
769  case AV_PIX_FMT_YUV422P10:
770  for (i = 0; i < 3; i++) {
771  ret = decode_plane10(c, i, (uint16_t *)frame.f->data[i], 1, frame.f->linesize[i] / 2,
772  avctx->width >> !!i, avctx->height,
773  plane_start[i], plane_start[i + 1] - 1024, c->frame_pred == PRED_LEFT);
774  if (ret)
775  return ret;
776  }
777  break;
778  }
779 
780  frame.f->key_frame = 1;
781  frame.f->pict_type = AV_PICTURE_TYPE_I;
782  frame.f->interlaced_frame = !!c->interlaced;
783 
784  *got_frame = 1;
785 
786  /* always report that the buffer was completely consumed */
787  return buf_size;
788 }
789 
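/*
 * Descriptive note: decode_init() maps the Ut Video FOURCC to a pixel format
 * and plane count (the UQ?? tags select the 10-bit "pro" variant) and reads
 * the encoder extradata: for the original format the 32-bit flags word
 * carries the slice count minus one in its top byte, the interlacing flag in
 * bit 11 and the compression flag in bit 0.
 */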
790 static av_cold int decode_init(AVCodecContext *avctx)
791 {
792  UtvideoContext * const c = avctx->priv_data;
793  int h_shift, v_shift;
794 
795  c->avctx = avctx;
796 
797  ff_utvideodsp_init(&c->utdsp);
798  ff_bswapdsp_init(&c->bdsp);
799  ff_llviddsp_init(&c->llviddsp);
800 
801  c->slice_bits_size = 0;
802 
803  switch (avctx->codec_tag) {
804  case MKTAG('U', 'L', 'R', 'G'):
805  c->planes = 3;
806  avctx->pix_fmt = AV_PIX_FMT_GBRP;
807  break;
808  case MKTAG('U', 'L', 'R', 'A'):
809  c->planes = 4;
810  avctx->pix_fmt = AV_PIX_FMT_GBRAP;
811  break;
812  case MKTAG('U', 'L', 'Y', '0'):
813  c->planes = 3;
814  avctx->pix_fmt = AV_PIX_FMT_YUV420P;
815  avctx->colorspace = AVCOL_SPC_BT470BG;
816  break;
817  case MKTAG('U', 'L', 'Y', '2'):
818  c->planes = 3;
819  avctx->pix_fmt = AV_PIX_FMT_YUV422P;
820  avctx->colorspace = AVCOL_SPC_BT470BG;
821  break;
822  case MKTAG('U', 'L', 'Y', '4'):
823  c->planes = 3;
824  avctx->pix_fmt = AV_PIX_FMT_YUV444P;
825  avctx->colorspace = AVCOL_SPC_BT470BG;
826  break;
827  case MKTAG('U', 'Q', 'Y', '2'):
828  c->planes = 3;
829  c->pro = 1;
830  avctx->pix_fmt = AV_PIX_FMT_YUV422P10;
831  break;
832  case MKTAG('U', 'Q', 'R', 'G'):
833  c->planes = 3;
834  c->pro = 1;
835  avctx->pix_fmt = AV_PIX_FMT_GBRP10;
836  break;
837  case MKTAG('U', 'Q', 'R', 'A'):
838  c->planes = 4;
839  c->pro = 1;
840  avctx->pix_fmt = AV_PIX_FMT_GBRAP10;
841  break;
842  case MKTAG('U', 'L', 'H', '0'):
843  c->planes = 3;
844  avctx->pix_fmt = AV_PIX_FMT_YUV420P;
845  avctx->colorspace = AVCOL_SPC_BT709;
846  break;
847  case MKTAG('U', 'L', 'H', '2'):
848  c->planes = 3;
849  avctx->pix_fmt = AV_PIX_FMT_YUV422P;
850  avctx->colorspace = AVCOL_SPC_BT709;
851  break;
852  case MKTAG('U', 'L', 'H', '4'):
853  c->planes = 3;
854  avctx->pix_fmt = AV_PIX_FMT_YUV444P;
855  avctx->colorspace = AVCOL_SPC_BT709;
856  break;
857  default:
858  av_log(avctx, AV_LOG_ERROR, "Unknown Ut Video FOURCC provided (%08X)\n",
859  avctx->codec_tag);
860  return AVERROR_INVALIDDATA;
861  }
862 
863  av_pix_fmt_get_chroma_sub_sample(avctx->pix_fmt, &h_shift, &v_shift);
864  if ((avctx->width & ((1<<h_shift)-1)) ||
865  (avctx->height & ((1<<v_shift)-1))) {
866  avpriv_request_sample(avctx, "Odd dimensions");
867  return AVERROR_PATCHWELCOME;
868  }
869 
870  if (!c->pro && avctx->extradata_size >= 16) {
871  av_log(avctx, AV_LOG_DEBUG, "Encoder version %d.%d.%d.%d\n",
872  avctx->extradata[3], avctx->extradata[2],
873  avctx->extradata[1], avctx->extradata[0]);
874  av_log(avctx, AV_LOG_DEBUG, "Original format %"PRIX32"\n",
875  AV_RB32(avctx->extradata + 4));
876  c->frame_info_size = AV_RL32(avctx->extradata + 8);
877  c->flags = AV_RL32(avctx->extradata + 12);
878 
879  if (c->frame_info_size != 4)
880  avpriv_request_sample(avctx, "Frame info not 4 bytes");
881  av_log(avctx, AV_LOG_DEBUG, "Encoding parameters %08"PRIX32"\n", c->flags);
882  c->slices = (c->flags >> 24) + 1;
883  c->compression = c->flags & 1;
884  c->interlaced = c->flags & 0x800;
885  } else if (c->pro && avctx->extradata_size == 8) {
886  av_log(avctx, AV_LOG_DEBUG, "Encoder version %d.%d.%d.%d\n",
887  avctx->extradata[3], avctx->extradata[2],
888  avctx->extradata[1], avctx->extradata[0]);
889  av_log(avctx, AV_LOG_DEBUG, "Original format %"PRIX32"\n",
890  AV_RB32(avctx->extradata + 4));
891  c->interlaced = 0;
892  c->frame_info_size = 4;
893  } else {
894  av_log(avctx, AV_LOG_ERROR,
895  "Insufficient extradata size %d, should be at least 16\n",
896  avctx->extradata_size);
897  return AVERROR_INVALIDDATA;
898  }
899 
900  return 0;
901 }
902 
903 static av_cold int decode_end(AVCodecContext *avctx)
904 {
905  UtvideoContext * const c = avctx->priv_data;
906 
907  av_freep(&c->slice_bits);
908 
909  return 0;
910 }
911 
912 AVCodec ff_utvideo_decoder = {
913  .name = "utvideo",
914  .long_name = NULL_IF_CONFIG_SMALL("Ut Video"),
915  .type = AVMEDIA_TYPE_VIDEO,
916  .id = AV_CODEC_ID_UTVIDEO,
917  .priv_data_size = sizeof(UtvideoContext),
918  .init = decode_init,
919  .close = decode_end,
920  .decode = decode_frame,
921  .capabilities = AV_CODEC_CAP_DR1 | AV_CODEC_CAP_FRAME_THREADS,
922  .caps_internal = FF_CODEC_CAP_INIT_THREADSAFE,
923 };