vp9_decodframe.c 38.8 KB
Newer Older
John Koleszar's avatar
John Koleszar committed
1
/*
2
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
John Koleszar's avatar
John Koleszar committed
3
 *
4
 *  Use of this source code is governed by a BSD-style license
5
6
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
7
 *  in the file PATENTS.  All contributing project authors may
8
 *  be found in the AUTHORS file in the root of the source tree.
John Koleszar's avatar
John Koleszar committed
9
10
 */

11
#include <assert.h>
John Koleszar's avatar
John Koleszar committed
12

13
14
15
16
#include "./vp9_rtcd.h"
#include "vpx_mem/vpx_mem.h"
#include "vpx_scale/vpx_scale.h"

Dmitry Kovalev's avatar
Dmitry Kovalev committed
17
#include "vp9/common/vp9_alloccommon.h"
Ronald S. Bultje's avatar
Ronald S. Bultje committed
18
#include "vp9/common/vp9_common.h"
Yaowu Xu's avatar
Yaowu Xu committed
19
#include "vp9/common/vp9_entropy.h"
20
#include "vp9/common/vp9_entropymode.h"
Dmitry Kovalev's avatar
Dmitry Kovalev committed
21
#include "vp9/common/vp9_extend.h"
22
#include "vp9/common/vp9_idct.h"
Dmitry Kovalev's avatar
Dmitry Kovalev committed
23
#include "vp9/common/vp9_pred_common.h"
24
#include "vp9/common/vp9_quant_common.h"
Dmitry Kovalev's avatar
Dmitry Kovalev committed
25
26
#include "vp9/common/vp9_reconintra.h"
#include "vp9/common/vp9_reconinter.h"
27
28
#include "vp9/common/vp9_seg_common.h"
#include "vp9/common/vp9_tile_common.h"
29
30

#include "vp9/decoder/vp9_dboolhuff.h"
31
32
33
#include "vp9/decoder/vp9_decodframe.h"
#include "vp9/decoder/vp9_detokenize.h"
#include "vp9/decoder/vp9_decodemv.h"
34
#include "vp9/decoder/vp9_dsubexp.h"
35
36
#include "vp9/decoder/vp9_onyxd_int.h"
#include "vp9/decoder/vp9_read_bit_buffer.h"
37
#include "vp9/decoder/vp9_thread.h"
Dmitry Kovalev's avatar
Dmitry Kovalev committed
38
#include "vp9/decoder/vp9_treereader.h"
39

40
41
// Read a 32-bit big-endian value from p.
// Accumulate in uint32_t: the original `p[0] << 24` promotes to int, and
// shifting a byte with the top bit set into bit 31 of a signed int is
// undefined behavior. The final narrowing back to int preserves the
// historical return type for callers.
static int read_be32(const uint8_t *p) {
  const uint32_t v = ((uint32_t)p[0] << 24) | ((uint32_t)p[1] << 16) |
                     ((uint32_t)p[2] << 8) | (uint32_t)p[3];
  return (int)v;
}

44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
// Compound prediction requires at least two reference frames whose sign
// biases differ; returns nonzero when such a pair exists.
static int is_compound_prediction_allowed(const VP9_COMMON *cm) {
  int ref;
  for (ref = 1; ref < ALLOWED_REFS_PER_FRAME; ++ref) {
    if (cm->ref_frame_sign_bias[ref + 1] != cm->ref_frame_sign_bias[1])
      return 1;
  }
  return 0;
}

// Choose the fixed reference (the one whose sign bias differs from the
// other two) and the two variable references used for compound prediction.
static void setup_compound_prediction(VP9_COMMON *cm) {
  const int last_bias = cm->ref_frame_sign_bias[LAST_FRAME];

  if (last_bias == cm->ref_frame_sign_bias[GOLDEN_FRAME]) {
    cm->comp_fixed_ref = ALTREF_FRAME;
    cm->comp_var_ref[0] = LAST_FRAME;
    cm->comp_var_ref[1] = GOLDEN_FRAME;
  } else if (last_bias == cm->ref_frame_sign_bias[ALTREF_FRAME]) {
    cm->comp_fixed_ref = GOLDEN_FRAME;
    cm->comp_var_ref[0] = LAST_FRAME;
    cm->comp_var_ref[1] = ALTREF_FRAME;
  } else {
    cm->comp_fixed_ref = LAST_FRAME;
    cm->comp_var_ref[0] = GOLDEN_FRAME;
    cm->comp_var_ref[1] = ALTREF_FRAME;
  }
}

71
// len == 0 is not allowed
// Returns nonzero when [start, start + len) lies entirely within
// [start, end). The check is done on the distance `end - start` rather than
// by forming `start + len`, because computing a pointer more than one past
// the end of the buffer (or wrapping the address space) is undefined
// behavior and the UB check `start + len > start` can be optimized away.
static int read_is_valid(const uint8_t *start, size_t len, const uint8_t *end) {
  return start <= end && len != 0 && len <= (size_t)(end - start);
}

76
77
78
79
80
// Decode an unsigned literal wide enough to hold `max`, clamping the result
// so it never exceeds max.
static int decode_unsigned_max(struct vp9_read_bit_buffer *rb, int max) {
  int value = vp9_rb_read_literal(rb, get_unsigned_bits(max));
  if (value > max)
    value = max;
  return value;
}

81
82
83
84
85
// Read the frame transform mode: two bits select up to ALLOW_32X32; when the
// two-bit value equals ALLOW_32X32 an extra bit distinguishes the select mode.
static TX_MODE read_tx_mode(vp9_reader *r) {
  const TX_MODE mode = vp9_read_literal(r, 2);
  return (mode == ALLOW_32X32) ? mode + vp9_read_bit(r) : mode;
}

88
// Read per-context probability updates for the 8x8, 16x16 and 32x32
// transform-size trees (each tree one node deeper than the last).
static void read_tx_probs(struct tx_probs *tx_probs, vp9_reader *r) {
  int ctx, node;

  for (ctx = 0; ctx < TX_SIZE_CONTEXTS; ++ctx)
    for (node = 0; node < TX_SIZES - 3; ++node)
      vp9_diff_update_prob(r, &tx_probs->p8x8[ctx][node]);

  for (ctx = 0; ctx < TX_SIZE_CONTEXTS; ++ctx)
    for (node = 0; node < TX_SIZES - 2; ++node)
      vp9_diff_update_prob(r, &tx_probs->p16x16[ctx][node]);

  for (ctx = 0; ctx < TX_SIZE_CONTEXTS; ++ctx)
    for (node = 0; node < TX_SIZES - 1; ++node)
      vp9_diff_update_prob(r, &tx_probs->p32x32[ctx][node]);
}

104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
// Read probability updates for the switchable interpolation filter tree,
// one set per filter context.
static void read_switchable_interp_probs(FRAME_CONTEXT *fc, vp9_reader *r) {
  int ctx, node;
  for (ctx = 0; ctx < SWITCHABLE_FILTERS + 1; ++ctx)
    for (node = 0; node < SWITCHABLE_FILTERS - 1; ++node)
      vp9_diff_update_prob(r, &fc->switchable_interp_prob[ctx][node]);
}

// Read probability updates for the inter-mode tree, per mode context.
static void read_inter_mode_probs(FRAME_CONTEXT *fc, vp9_reader *r) {
  int ctx, node;
  for (ctx = 0; ctx < INTER_MODE_CONTEXTS; ++ctx)
    for (node = 0; node < INTER_MODES - 1; ++node)
      vp9_diff_update_prob(r, &fc->inter_mode_probs[ctx][node]);
}

// Read the compound prediction mode: a zero first bit means single-reference
// only; otherwise a second bit picks compound-only vs hybrid.
static INLINE COMPPREDMODE_TYPE read_comp_pred_mode(vp9_reader *r) {
  const COMPPREDMODE_TYPE mode = vp9_read_bit(r);
  return mode ? mode + vp9_read_bit(r) : mode;
}

// Read the frame's compound-prediction configuration and the associated
// inter/single/compound reference probability updates.
static void read_comp_pred(VP9_COMMON *cm, vp9_reader *r) {
  int ctx;
  const int allow_comp = is_compound_prediction_allowed(cm);

  // The prediction mode is only coded when compound prediction is possible.
  cm->comp_pred_mode = allow_comp ? read_comp_pred_mode(r)
                                  : SINGLE_PREDICTION_ONLY;
  if (allow_comp)
    setup_compound_prediction(cm);

  if (cm->comp_pred_mode == HYBRID_PREDICTION) {
    for (ctx = 0; ctx < COMP_INTER_CONTEXTS; ++ctx)
      vp9_diff_update_prob(r, &cm->fc.comp_inter_prob[ctx]);
  }

  if (cm->comp_pred_mode != COMP_PREDICTION_ONLY) {
    for (ctx = 0; ctx < REF_CONTEXTS; ++ctx) {
      vp9_diff_update_prob(r, &cm->fc.single_ref_prob[ctx][0]);
      vp9_diff_update_prob(r, &cm->fc.single_ref_prob[ctx][1]);
    }
  }

  if (cm->comp_pred_mode != SINGLE_PREDICTION_ONLY) {
    for (ctx = 0; ctx < REF_CONTEXTS; ++ctx)
      vp9_diff_update_prob(r, &cm->fc.comp_ref_prob[ctx]);
  }
}

// Conditionally update one MV probability; the new value is coded as a
// 7-bit literal and stored as an odd value (2 * v + 1).
static void update_mv(vp9_reader *r, vp9_prob *p) {
  if (vp9_read(r, NMV_UPDATE_PROB)) {
    const int value = vp9_read_literal(r, 7);
    *p = (vp9_prob)(2 * value + 1);
  }
}

// Read all motion-vector probability updates, in bitstream order:
// joints, then per-component integer parts, then per-component fractional
// parts, and finally (when allowed) the high-precision bits.
static void read_mv_probs(vp9_reader *r, nmv_context *mvc, int allow_hp) {
  int c, idx, fr;

  for (idx = 0; idx < MV_JOINTS - 1; ++idx)
    update_mv(r, &mvc->joints[idx]);

  for (c = 0; c < 2; ++c) {
    nmv_component *const comp = &mvc->comps[c];

    update_mv(r, &comp->sign);
    for (idx = 0; idx < MV_CLASSES - 1; ++idx)
      update_mv(r, &comp->classes[idx]);
    for (idx = 0; idx < CLASS0_SIZE - 1; ++idx)
      update_mv(r, &comp->class0[idx]);
    for (idx = 0; idx < MV_OFFSET_BITS; ++idx)
      update_mv(r, &comp->bits[idx]);
  }

  for (c = 0; c < 2; ++c) {
    nmv_component *const comp = &mvc->comps[c];

    for (idx = 0; idx < CLASS0_SIZE; ++idx)
      for (fr = 0; fr < 3; ++fr)
        update_mv(r, &comp->class0_fp[idx][fr]);
    for (fr = 0; fr < 3; ++fr)
      update_mv(r, &comp->fp[fr]);
  }

  if (allow_hp) {
    for (c = 0; c < 2; ++c) {
      update_mv(r, &mvc->comps[c].class0_hp);
      update_mv(r, &mvc->comps[c].hp);
    }
  }
}

194
// Point each plane's dequant table at the entry for q_index: plane 0 (luma)
// uses the luma table, every remaining plane shares the chroma table.
static void setup_plane_dequants(VP9_COMMON *cm, MACROBLOCKD *xd, int q_index) {
  int plane;

  xd->plane[0].dequant = cm->y_dequant[q_index];
  for (plane = 1; plane < MAX_MB_PLANE; ++plane)
    xd->plane[plane].dequant = cm->uv_dequant[q_index];
}

202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
// Allocate storage for each tile column.
// TODO(jzern): when max_threads <= 1 the same storage could be used for each
// tile.
static void alloc_tile_storage(VP9D_COMP *pbi, int tile_cols) {
  VP9_COMMON *const cm = &pbi->common;
  int col;

  CHECK_MEM_ERROR(cm, pbi->mi_streams,
                  vpx_realloc(pbi->mi_streams,
                              tile_cols * sizeof(*pbi->mi_streams)));
  // Each column's stream starts at the mode-info entry for that tile's
  // first column.
  for (col = 0; col < tile_cols; ++col) {
    vp9_get_tile_col_offsets(cm, col);
    pbi->mi_streams[col] = &cm->mi[cm->mi_rows * cm->cur_tile_mi_col_start];
  }
}

219
static void decode_block(int plane, int block, BLOCK_SIZE plane_bsize,
220
                         TX_SIZE tx_size, void *arg) {
221
  MACROBLOCKD* const xd = arg;
222
  struct macroblockd_plane *const pd = &xd->plane[plane];
223
  int16_t* const qcoeff = BLOCK_OFFSET(pd->qcoeff, block);
224
  const int stride = pd->dst.stride;
225
  const int eob = pd->eobs[block];
226
227
228
229
230
231
232
233
234
235
236
237
  if (eob > 0) {
    TX_TYPE tx_type;
    const int raster_block = txfrm_block_to_raster_block(plane_bsize, tx_size,
                                                         block);
    uint8_t* const dst = raster_block_offset_uint8(plane_bsize, raster_block,
                                                   pd->dst.buf, stride);
    switch (tx_size) {
      case TX_4X4:
        tx_type = get_tx_type_4x4(pd->plane_type, xd, raster_block);
        if (tx_type == DCT_DCT)
          xd->itxm_add(qcoeff, dst, stride, eob);
        else
238
          vp9_iht4x4_add(tx_type, qcoeff, dst, stride, eob);
239
240
241
        break;
      case TX_8X8:
        tx_type = get_tx_type_8x8(pd->plane_type, xd);
242
        vp9_iht8x8_add(tx_type, qcoeff, dst, stride, eob);
243
244
245
        break;
      case TX_16X16:
        tx_type = get_tx_type_16x16(pd->plane_type, xd);
246
        vp9_iht16x16_add(tx_type, qcoeff, dst, stride, eob);
247
248
249
        break;
      case TX_32X32:
        tx_type = DCT_DCT;
250
        vp9_idct32x32_add(qcoeff, dst, stride, eob);
251
252
253
254
255
256
257
258
259
260
        break;
      default:
        assert(!"Invalid transform size");
    }

    if (eob == 1) {
      *((int32_t *)qcoeff) = 0;
    } else {
      if (tx_type == DCT_DCT && tx_size <= TX_16X16 && eob <= 10)
        vpx_memset(qcoeff, 0, 4 * (4 << tx_size) * sizeof(qcoeff[0]));
261
      else
262
        vpx_memset(qcoeff, 0, (16 << (tx_size << 1)) * sizeof(qcoeff[0]));
263
    }
264
265
266
  }
}

267
static void decode_block_intra(int plane, int block, BLOCK_SIZE plane_bsize,
268
                               TX_SIZE tx_size, void *arg) {
269
  MACROBLOCKD* const xd = arg;
270
  struct macroblockd_plane *const pd = &xd->plane[plane];
271
  MODE_INFO *const mi = xd->mi_8x8[0];
272
273
274
  const int raster_block = txfrm_block_to_raster_block(plane_bsize, tx_size,
                                                       block);
  uint8_t* const dst = raster_block_offset_uint8(plane_bsize, raster_block,
275
                                                 pd->dst.buf, pd->dst.stride);
276
277
278
279
  const MB_PREDICTION_MODE mode = (plane == 0)
        ? ((mi->mbmi.sb_type < BLOCK_8X8) ? mi->bmi[raster_block].as_mode
                                          : mi->mbmi.mode)
        : mi->mbmi.uv_mode;
280

281
  if (xd->mb_to_right_edge < 0 || xd->mb_to_bottom_edge < 0)
282
    extend_for_intra(xd, plane_bsize, plane, block, tx_size);
283

284
285
286
  vp9_predict_intra_block(xd, raster_block >> tx_size,
                          b_width_log2(plane_bsize), tx_size, mode,
                          dst, pd->dst.stride, dst, pd->dst.stride);
287

Dmitry Kovalev's avatar
Dmitry Kovalev committed
288
289
  if (!mi->mbmi.skip_coeff)
    decode_block(plane, block, plane_bsize, tx_size, arg);
290
291
}

292
293
// Decode the coefficient tokens for one block. Returns the total eob count,
// or -1 when the block is signalled as skipped (no tokens present).
static int decode_tokens(VP9_COMMON *const cm, MACROBLOCKD *const xd,
                         BLOCK_SIZE bsize, vp9_reader *r) {
  MB_MODE_INFO *const mbmi = &xd->mi_8x8[0]->mbmi;

  if (mbmi->skip_coeff) {
    reset_skip_context(xd, bsize);
    return -1;
  }

  // With segmentation enabled the quantizer may differ per segment.
  if (cm->seg.enabled)
    setup_plane_dequants(cm, xd, vp9_get_qindex(&cm->seg, mbmi->segment_id,
                                                cm->base_qindex));

  // TODO(dkovalev) if (!vp9_reader_has_error(r))
  return vp9_decode_tokens(cm, xd, &cm->seg, r, bsize);
}

309
// Position the decoder's macroblockd state at (mi_row, mi_col): mode-info
// pointers, skip/partition contexts, edge distances and destination planes.
static void set_offsets(VP9D_COMP *pbi, BLOCK_SIZE bsize,
                        int mi_row, int mi_col) {
  VP9_COMMON *const cm = &pbi->common;
  MACROBLOCKD *const xd = &pbi->mb;
  const int block_high = num_8x8_blocks_high_lookup[bsize];
  const int block_wide = num_8x8_blocks_wide_lookup[bsize];
  const int mi_offset = mi_row * cm->mode_info_stride + mi_col;

  xd->mode_info_stride = cm->mode_info_stride;

  xd->mi_8x8 = cm->mi_grid_visible + mi_offset;
  xd->prev_mi_8x8 = cm->prev_mi_grid_visible + mi_offset;

  // we are using the mode info context stream here
  xd->mi_8x8[0] = xd->mi_stream;
  xd->mi_8x8[0]->mbmi.sb_type = bsize;
  ++xd->mi_stream;

  // Special case: if prev_mi is NULL, the previous mode info context
  // cannot be used.
  xd->last_mi = cm->prev_mi ? xd->prev_mi_8x8[0] : NULL;

  set_skip_context(xd, cm->above_context, cm->left_context, mi_row, mi_col);

  // Distance of Mb to the various image edges. These are specified to 8th pel
  // as they are always compared to values that are in 1/8th pel units
  set_mi_row_col(cm, xd, mi_row, block_high, mi_col, block_wide);

  setup_dst_planes(xd, &cm->yv12_fb[cm->new_fb_idx], mi_row, mi_col);
}
John Koleszar's avatar
John Koleszar committed
339

340
341
// Bind reference `idx` (0 or 1) of the current block: validate its scale
// factors, set up the prediction source planes and propagate corruption.
static void set_ref(VP9_COMMON *const cm, MACROBLOCKD *const xd,
                    int idx, int mi_row, int mi_col) {
  MB_MODE_INFO *const mbmi = &xd->mi_8x8[0]->mbmi;
  const int frame = mbmi->ref_frame[idx] - LAST_FRAME;
  const YV12_BUFFER_CONFIG *cfg = get_frame_ref_buffer(cm, frame);
  const struct scale_factors_common *sfc = &cm->active_ref_scale_comm[frame];

  if (!vp9_is_valid_scale(sfc))
    vpx_internal_error(&cm->error, VPX_CODEC_UNSUP_BITSTREAM,
                       "Invalid scale factors");

  xd->scale_factor[idx].sfc = sfc;
  setup_pre_planes(xd, idx, cfg, mi_row, mi_col, &xd->scale_factor[idx]);
  xd->corrupted |= cfg->corrupted;
}
John Koleszar's avatar
John Koleszar committed
354

355
static void decode_modes_b(VP9D_COMP *pbi, int mi_row, int mi_col,
356
                           vp9_reader *r, BLOCK_SIZE bsize, int index) {
357
  VP9_COMMON *const cm = &pbi->common;
358
  MACROBLOCKD *const xd = &pbi->mb;
359
  const int less8x8 = bsize < BLOCK_8X8;
360
  MB_MODE_INFO *mbmi;
361
  int eobtotal;
362

363
  if (less8x8)
364
    if (index > 0)
365
      return;
Dmitry Kovalev's avatar
Dmitry Kovalev committed
366

367
  set_offsets(pbi, bsize, mi_row, mi_col);
368
  vp9_read_mode_info(cm, xd, mi_row, mi_col, r);
369

370
  if (less8x8)
371
    bsize = BLOCK_8X8;
372
373

  // Has to be called after set_offsets
374
  mbmi = &xd->mi_8x8[0]->mbmi;
375
  eobtotal = decode_tokens(cm, xd, bsize, r);
376

377
  if (!is_inter_block(mbmi)) {
378
379
    // Intra reconstruction
    foreach_transformed_block(xd, bsize, decode_block_intra, xd);
380
  } else {
381
    // Inter reconstruction
382
383
384
385
386
    const int decode_blocks = (eobtotal > 0);

    if (!less8x8) {
      assert(mbmi->sb_type == bsize);
      if (eobtotal == 0)
387
        mbmi->skip_coeff = 1;  // skip loopfilter
388
    }
389

390
    set_ref(cm, xd, 0, mi_row, mi_col);
Dmitry Kovalev's avatar
Dmitry Kovalev committed
391
    if (has_second_ref(mbmi))
392
      set_ref(cm, xd, 1, mi_row, mi_col);
393

394
395
    xd->subpix.filter_x = xd->subpix.filter_y =
        vp9_get_filter_kernel(mbmi->interp_filter);
396
    vp9_build_inter_predictors_sb(xd, mi_row, mi_col, bsize);
397
398
399

    if (decode_blocks)
      foreach_transformed_block(xd, bsize, decode_block, xd);
400
  }
401
  xd->corrupted |= vp9_reader_has_error(r);
402
403
}

404
static void decode_modes_sb(VP9D_COMP *pbi, int mi_row, int mi_col,
405
                            vp9_reader* r, BLOCK_SIZE bsize, int index) {
406
  VP9_COMMON *const cm = &pbi->common;
Dmitry Kovalev's avatar
Dmitry Kovalev committed
407
  const int hbs = num_8x8_blocks_wide_lookup[bsize] / 2;
408
  PARTITION_TYPE partition = PARTITION_NONE;
409
  BLOCK_SIZE subsize;
410

411
  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols)
412
413
    return;

414
  if (bsize < BLOCK_8X8) {
415
    if (index > 0)
416
      return;
417
  } else {
418
    int pl;
419
    const int idx = check_bsize_coverage(hbs, cm->mi_rows, cm->mi_cols,
Dmitry Kovalev's avatar
Dmitry Kovalev committed
420
                                         mi_row, mi_col);
421
422
    pl = partition_plane_context(cm->above_seg_context, cm->left_seg_context,
                                 mi_row, mi_col, bsize);
423
424
425

    if (idx == 0)
      partition = treed_read(r, vp9_partition_tree,
426
                             cm->fc.partition_prob[cm->frame_type][pl]);
427
    else if (idx > 0 &&
428
        !vp9_read(r, cm->fc.partition_prob[cm->frame_type][pl][idx]))
429
430
431
432
      partition = (idx == 1) ? PARTITION_HORZ : PARTITION_VERT;
    else
      partition = PARTITION_SPLIT;

433
434
    if (!cm->frame_parallel_decoding_mode)
      ++cm->counts.partition[pl][partition];
435
436
  }

437
  subsize = get_subsize(bsize, partition);
438

439
440
  switch (partition) {
    case PARTITION_NONE:
441
      decode_modes_b(pbi, mi_row, mi_col, r, subsize, 0);
442
443
      break;
    case PARTITION_HORZ:
444
      decode_modes_b(pbi, mi_row, mi_col, r, subsize, 0);
445
      if (mi_row + hbs < cm->mi_rows)
446
        decode_modes_b(pbi, mi_row + hbs, mi_col, r, subsize, 1);
447
448
      break;
    case PARTITION_VERT:
449
      decode_modes_b(pbi, mi_row, mi_col, r, subsize, 0);
450
      if (mi_col + hbs < cm->mi_cols)
451
        decode_modes_b(pbi, mi_row, mi_col + hbs, r, subsize, 1);
452
      break;
Dmitry Kovalev's avatar
Dmitry Kovalev committed
453
454
    case PARTITION_SPLIT: {
      int n;
455
      for (n = 0; n < 4; n++) {
Dmitry Kovalev's avatar
Dmitry Kovalev committed
456
        const int j = n >> 1, i = n & 1;
457
        decode_modes_sb(pbi, mi_row + j * hbs, mi_col + i * hbs,
458
                        r, subsize, n);
459
      }
Dmitry Kovalev's avatar
Dmitry Kovalev committed
460
    } break;
461
    default:
462
      assert(!"Invalid partition type");
463
  }
464

465
  // update partition context
466
  if (bsize >= BLOCK_8X8 &&
467
      (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT))
468
469
    update_partition_context(cm->above_seg_context, cm->left_seg_context,
                             mi_row, mi_col, subsize, bsize);
470
471
}

472
473
474
475
static void setup_token_decoder(const uint8_t *data,
                                const uint8_t *data_end,
                                size_t read_size,
                                struct vpx_internal_error_info *error_info,
476
                                vp9_reader *r) {
Dmitry Kovalev's avatar
Dmitry Kovalev committed
477
478
479
  // Validate the calculated partition length. If the buffer
  // described by the partition can't be fully read, then restrict
  // it to the portion that can be (for EC mode) or throw an error.
480
  if (!read_is_valid(data, read_size, data_end))
481
    vpx_internal_error(error_info, VPX_CODEC_CORRUPT_FRAME,
482
                       "Truncated packet or corrupt tile length");
John Koleszar's avatar
John Koleszar committed
483

484
  if (vp9_reader_init(r, data, read_size))
485
    vpx_internal_error(error_info, VPX_CODEC_MEM_ERROR,
John Koleszar's avatar
John Koleszar committed
486
                       "Failed to allocate bool decoder %d", 1);
John Koleszar's avatar
John Koleszar committed
487
488
}

489
static void read_coef_probs_common(vp9_coeff_probs_model *coef_probs,
490
                                   vp9_reader *r) {
491
492
493
494
495
496
497
498
499
  int i, j, k, l, m;

  if (vp9_read_bit(r))
    for (i = 0; i < BLOCK_TYPES; i++)
      for (j = 0; j < REF_TYPES; j++)
        for (k = 0; k < COEF_BANDS; k++)
          for (l = 0; l < PREV_COEF_CONTEXTS; l++)
            if (k > 0 || l < 3)
              for (m = 0; m < UNCONSTRAINED_NODES; m++)
500
                vp9_diff_update_prob(r, &coef_probs[i][j][k][l][m]);
501
}
502

503
static void read_coef_probs(FRAME_CONTEXT *fc, TX_MODE tx_mode,
504
                            vp9_reader *r) {
505
  read_coef_probs_common(fc->coef_probs[TX_4X4], r);
506

507
  if (tx_mode > ONLY_4X4)
508
    read_coef_probs_common(fc->coef_probs[TX_8X8], r);
Dmitry Kovalev's avatar
Dmitry Kovalev committed
509

510
  if (tx_mode > ALLOW_8X8)
511
    read_coef_probs_common(fc->coef_probs[TX_16X16], r);
Dmitry Kovalev's avatar
Dmitry Kovalev committed
512

513
  if (tx_mode > ALLOW_16X16)
514
    read_coef_probs_common(fc->coef_probs[TX_32X32], r);
515
516
}

517
518
static void setup_segmentation(struct segmentation *seg,
                               struct vp9_read_bit_buffer *rb) {
519
520
  int i, j;

521
522
  seg->update_map = 0;
  seg->update_data = 0;
523

524
525
  seg->enabled = vp9_rb_read_bit(rb);
  if (!seg->enabled)
526
527
528
    return;

  // Segmentation map update
529
530
  seg->update_map = vp9_rb_read_bit(rb);
  if (seg->update_map) {
Paul Wilkins's avatar
Paul Wilkins committed
531
    for (i = 0; i < SEG_TREE_PROBS; i++)
532
533
      seg->tree_probs[i] = vp9_rb_read_bit(rb) ? vp9_rb_read_literal(rb, 8)
                                               : MAX_PROB;
534

535
536
    seg->temporal_update = vp9_rb_read_bit(rb);
    if (seg->temporal_update) {
537
      for (i = 0; i < PREDICTION_PROBS; i++)
538
539
        seg->pred_probs[i] = vp9_rb_read_bit(rb) ? vp9_rb_read_literal(rb, 8)
                                                 : MAX_PROB;
540
541
    } else {
      for (i = 0; i < PREDICTION_PROBS; i++)
542
        seg->pred_probs[i] = MAX_PROB;
543
    }
544
  }
545

546
  // Segmentation data update
547
548
549
  seg->update_data = vp9_rb_read_bit(rb);
  if (seg->update_data) {
    seg->abs_delta = vp9_rb_read_bit(rb);
550

551
    vp9_clearall_segfeatures(seg);
552

Paul Wilkins's avatar
Paul Wilkins committed
553
    for (i = 0; i < MAX_SEGMENTS; i++) {
554
555
      for (j = 0; j < SEG_LVL_MAX; j++) {
        int data = 0;
556
        const int feature_enabled = vp9_rb_read_bit(rb);
557
        if (feature_enabled) {
558
          vp9_enable_segfeature(seg, i, j);
559
          data = decode_unsigned_max(rb, vp9_seg_feature_data_max(j));
560
          if (vp9_is_segfeature_signed(j))
561
            data = vp9_rb_read_bit(rb) ? -data : data;
562
        }
563
        vp9_set_segdata(seg, i, j, data);
564
565
566
567
568
      }
    }
  }
}

569
570
571
572
static void setup_loopfilter(struct loopfilter *lf,
                             struct vp9_read_bit_buffer *rb) {
  lf->filter_level = vp9_rb_read_literal(rb, 6);
  lf->sharpness_level = vp9_rb_read_literal(rb, 3);
573
574
575

  // Read in loop filter deltas applied at the MB level based on mode or ref
  // frame.
576
  lf->mode_ref_delta_update = 0;
577

578
579
580
581
  lf->mode_ref_delta_enabled = vp9_rb_read_bit(rb);
  if (lf->mode_ref_delta_enabled) {
    lf->mode_ref_delta_update = vp9_rb_read_bit(rb);
    if (lf->mode_ref_delta_update) {
582
583
      int i;

584
585
      for (i = 0; i < MAX_REF_LF_DELTAS; i++)
        if (vp9_rb_read_bit(rb))
586
          lf->ref_deltas[i] = vp9_rb_read_signed_literal(rb, 6);
587

588
589
      for (i = 0; i < MAX_MODE_LF_DELTAS; i++)
        if (vp9_rb_read_bit(rb))
590
          lf->mode_deltas[i] = vp9_rb_read_signed_literal(rb, 6);
591
592
593
594
    }
  }
}

595
596
// Read an optional signed 4-bit delta-q value (zero when absent).
// Returns nonzero when the stored value changed.
static int read_delta_q(struct vp9_read_bit_buffer *rb, int *delta_q) {
  const int previous = *delta_q;
  *delta_q = vp9_rb_read_bit(rb) ? vp9_rb_read_signed_literal(rb, 4) : 0;
  return previous != *delta_q;
}
600

601
// Parse quantization syntax: base q index plus optional per-plane DC/AC
// deltas; rebuild the dequant tables only when something changed.
static void setup_quantization(VP9D_COMP *pbi, struct vp9_read_bit_buffer *rb) {
  MACROBLOCKD *const xd = &pbi->mb;
  VP9_COMMON *const cm = &pbi->common;
  int changed = 0;

  cm->base_qindex = vp9_rb_read_literal(rb, QINDEX_BITS);
  changed |= read_delta_q(rb, &cm->y_dc_delta_q);
  changed |= read_delta_q(rb, &cm->uv_dc_delta_q);
  changed |= read_delta_q(rb, &cm->uv_ac_delta_q);
  if (changed)
    vp9_init_dequantizer(cm);

  // Lossless mode requires a zero base index and all deltas zero.
  xd->lossless = cm->base_qindex == 0 &&
                 cm->y_dc_delta_q == 0 &&
                 cm->uv_dc_delta_q == 0 &&
                 cm->uv_ac_delta_q == 0;

  // Lossless uses the Walsh-Hadamard inverse transform instead of the DCT.
  xd->itxm_add = xd->lossless ? vp9_iwht4x4_add : vp9_idct4x4_add;
}

621
622
623
624
625
626
static INTERPOLATION_TYPE read_interp_filter_type(
                              struct vp9_read_bit_buffer *rb) {
  const INTERPOLATION_TYPE literal_to_type[] = { EIGHTTAP_SMOOTH,
                                                 EIGHTTAP,
                                                 EIGHTTAP_SHARP,
                                                 BILINEAR };
627
  return vp9_rb_read_bit(rb) ? SWITCHABLE
628
                             : literal_to_type[vp9_rb_read_literal(rb, 2)];
629
630
}

631
// Read a frame size: width then height, each coded minus one in 16 bits.
static void read_frame_size(struct vp9_read_bit_buffer *rb,
                            int *width, int *height) {
  *width = vp9_rb_read_literal(rb, 16) + 1;
  *height = vp9_rb_read_literal(rb, 16) + 1;
}

639
// The display size defaults to the coded size; one bit signals an explicit
// display size that follows.
static void setup_display_size(VP9_COMMON *cm, struct vp9_read_bit_buffer *rb) {
  cm->display_width = cm->width;
  cm->display_height = cm->height;
  if (vp9_rb_read_bit(rb))
    read_frame_size(rb, &cm->display_width, &cm->display_height);
}
645

646
647
// Apply a new coded frame size: allocate buffers on the first sized frame,
// reject growth beyond the initial allocation afterwards, and (re)size the
// current output frame buffer.
static void apply_frame_size(VP9D_COMP *pbi, int width, int height) {
  VP9_COMMON *cm = &pbi->common;

  if (cm->width != width || cm->height != height) {
    if (!pbi->initial_width || !pbi->initial_height) {
      // First sized frame: allocate and remember the maximum dimensions.
      if (vp9_alloc_frame_buffers(cm, width, height))
        vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
                           "Failed to allocate frame buffers");
      pbi->initial_width = width;
      pbi->initial_height = height;
    } else {
      // Later frames may shrink but must not exceed the initial allocation.
      if (width > pbi->initial_width)
        vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
                           "Frame width too large");
      if (height > pbi->initial_height)
        vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
                           "Frame height too large");
    }

    cm->width = width;
    cm->height = height;

    vp9_update_frame_size(cm);
  }

  vp9_realloc_frame_buffer(&cm->yv12_fb[cm->new_fb_idx], cm->width, cm->height,
                           cm->subsampling_x, cm->subsampling_y,
                           VP9BORDERINPIXELS);
}

677
678
679
// Read an explicit frame size, apply it, then parse the optional display
// size that follows it in the header.
static void setup_frame_size(VP9D_COMP *pbi,
                             struct vp9_read_bit_buffer *rb) {
  int width, height;

  read_frame_size(rb, &width, &height);
  apply_frame_size(pbi, width, height);
  setup_display_size(&pbi->common, rb);
}

685
686
687
688
689
690
691
692
// Determine the frame size from the references: one bit per reference, and
// the first set bit selects that reference's dimensions. If no bit is set,
// an explicit size follows in the header.
static void setup_frame_size_with_refs(VP9D_COMP *pbi,
                                       struct vp9_read_bit_buffer *rb) {
  VP9_COMMON *const cm = &pbi->common;
  int width = 0, height = 0;
  int found = 0, ref;

  for (ref = 0; ref < ALLOWED_REFS_PER_FRAME && !found; ++ref) {
    if (vp9_rb_read_bit(rb)) {
      YV12_BUFFER_CONFIG *const cfg = get_frame_ref_buffer(cm, ref);
      width = cfg->y_crop_width;
      height = cfg->y_crop_height;
      found = 1;
    }
  }

  if (!found)
    read_frame_size(rb, &width, &height);

  if (!width || !height)
    vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
                       "Referenced frame with invalid size");

  apply_frame_size(pbi, width, height);
  setup_display_size(cm, rb);
}

712
// Decode one tile column superblock-row by superblock-row. When inline loop
// filtering is enabled, the filter trails decoding by one superblock row and
// the remaining rows are finished after the tile completes.
static void decode_tile(VP9D_COMP *pbi, vp9_reader *r, int tile_col) {
  const int num_threads = pbi->oxcf.max_threads;
  VP9_COMMON *const cm = &pbi->common;
  int mi_row, mi_col;
  YV12_BUFFER_CONFIG *const fb = &cm->yv12_fb[cm->new_fb_idx];
  MACROBLOCKD *xd = &pbi->mb;

  xd->mi_stream = pbi->mi_streams[tile_col];

  if (pbi->do_loopfilter_inline) {
    LFWorkerData *const lf_data = (LFWorkerData*)pbi->lf_worker.data1;
    lf_data->frame_buffer = fb;
    lf_data->cm = cm;
    lf_data->xd = pbi->mb;
    lf_data->stop = 0;
    lf_data->y_only = 0;
    vp9_loop_filter_frame_init(cm, cm->lf.filter_level);
  }

  for (mi_row = cm->cur_tile_mi_row_start; mi_row < cm->cur_tile_mi_row_end;
       mi_row += MI_BLOCK_SIZE) {
    // For a SB there are 2 left contexts, each pertaining to a MB row within
    vp9_zero(cm->left_context);
    vp9_zero(cm->left_seg_context);
    for (mi_col = cm->cur_tile_mi_col_start; mi_col < cm->cur_tile_mi_col_end;
         mi_col += MI_BLOCK_SIZE)
      decode_modes_sb(pbi, mi_row, mi_col, r, BLOCK_64X64, 0);

    if (pbi->do_loopfilter_inline) {
      // Filter one superblock row behind decoding; the final rows of the
      // tile are left for the cleanup pass below.
      const int lf_start = mi_row - MI_BLOCK_SIZE;
      if (lf_start >= 0 &&
          mi_row + MI_BLOCK_SIZE < cm->cur_tile_mi_row_end) {
        LFWorkerData *const lf_data = (LFWorkerData*)pbi->lf_worker.data1;

        vp9_worker_sync(&pbi->lf_worker);
        lf_data->start = lf_start;
        lf_data->stop = mi_row;
        if (num_threads > 1)
          vp9_worker_launch(&pbi->lf_worker);
        else
          vp9_worker_execute(&pbi->lf_worker);
      }
    }
  }

  if (pbi->do_loopfilter_inline) {
    // Finish filtering the rows that were held back during decoding.
    LFWorkerData *const lf_data = (LFWorkerData*)pbi->lf_worker.data1;

    vp9_worker_sync(&pbi->lf_worker);
    lf_data->start = lf_data->stop;
    lf_data->stop = cm->mi_rows;
    vp9_worker_execute(&pbi->lf_worker);
  }
}

771
// Parse the tile configuration: a unary-coded column increment over the
// minimum log2 column count, then up to two bits for the row count.
static void setup_tile_info(VP9_COMMON *cm, struct vp9_read_bit_buffer *rb) {
  int min_log2_tile_cols, max_log2_tile_cols, max_ones;
  vp9_get_tile_n_bits(cm->mi_cols, &min_log2_tile_cols, &max_log2_tile_cols);

  // columns
  cm->log2_tile_cols = min_log2_tile_cols;
  max_ones = max_log2_tile_cols - min_log2_tile_cols;
  while (max_ones-- && vp9_rb_read_bit(rb))
    cm->log2_tile_cols++;

  // rows
  cm->log2_tile_rows = vp9_rb_read_bit(rb);
  if (cm->log2_tile_rows)
    cm->log2_tile_rows += vp9_rb_read_bit(rb);
}

787
788
789
static const uint8_t *decode_tiles(VP9D_COMP *pbi, const uint8_t *data) {
  vp9_reader residual_bc;

790
  VP9_COMMON *const cm = &pbi->common;
791

792
  const uint8_t *const data_end = pbi->source + pbi->source_sz;
793
794
795
  const int aligned_mi_cols = mi_cols_aligned_to_sb(cm->mi_cols);
  const int tile_cols = 1 << cm->log2_tile_cols;
  const int tile_rows = 1 << cm->log2_tile_rows;
796
  int tile_row, tile_col;
797

798
799
  // Note: this memset assumes above_context[0], [1] and [2]
  // are allocated as part of the same buffer.
800
  vpx_memset(cm->above_context[0], 0,
801
             sizeof(ENTROPY_CONTEXT) * MAX_MB_PLANE * (2 * aligned_mi_cols));
802

803
  vpx_memset(cm->above_seg_context, 0,
804
             sizeof(PARTITION_CONTEXT) * aligned_mi_cols);
805
806
807

  if (pbi->oxcf.inv_tile_order) {
    const uint8_t *data_ptr2[4][1 << 6];
808
    vp9_reader bc_bak = {0};
809
810

    // pre-initialize the offsets, we're going to read in inverse order
811
    data_ptr2[0][0] = data;
Dmitry Kovalev's avatar
Dmitry Kovalev committed
812
    for (tile_row = 0; tile_row < tile_rows; tile_row++) {
813
      if (tile_row) {
Dmitry Kovalev's avatar
Dmitry Kovalev committed
814
815
816
        const int size = read_be32(data_ptr2[tile_row - 1][tile_cols - 1]);
        data_ptr2[tile_row - 1][tile_cols - 1] += 4;
        data_ptr2[tile_row][0] = data_ptr2[tile_row - 1][tile_cols - 1] + size;
817
818
      }

Dmitry Kovalev's avatar
Dmitry Kovalev committed
819
      for (tile_col = 1; tile_col < tile_cols; tile_col++) {
820
        const int size = read_be32(data_ptr2[tile_row][tile_col - 1]);
821
822
823
824
825
826
        data_ptr2[tile_row][tile_col - 1] += 4;
        data_ptr2[tile_row][tile_col] =
            data_ptr2[tile_row][tile_col - 1] + size;
      }
    }

Dmitry Kovalev's avatar
Dmitry Kovalev committed
827
    for (tile_row = 0; tile_row < tile_rows; tile_row++) {
828
      vp9_get_tile_row_offsets(cm, tile_row);
Dmitry Kovalev's avatar
Dmitry Kovalev committed
829
      for (tile_col = tile_cols - 1; tile_col >= 0; tile_col--) {
830
        vp9_get_tile_col_offsets(cm, tile_col);
831
        setup_token_decoder(data_ptr2[tile_row][tile_col], data_end,
832
                            data_end - data_ptr2[tile_row][tile_col],
833
                            &cm->error, &residual_bc);
834
        decode_tile(pbi, &residual_bc, tile_col);
Dmitry Kovalev's avatar
Dmitry Kovalev committed
835
        if (tile_row == tile_rows - 1 && tile_col == tile_cols - 1)
836
          bc_bak = residual_bc;
837
838
      }
    }
839
    residual_bc = bc_bak;
840
841
842
  } else {
    int has_more;

Dmitry Kovalev's avatar
Dmitry Kovalev committed
843
    for (tile_row = 0; tile_row < tile_rows; tile_row++) {
844
      vp9_get_tile_row_offsets(cm, tile_row);
Dmitry Kovalev's avatar
Dmitry Kovalev committed
845
      for (tile_col = 0; tile_col < tile_cols; tile_col++) {
846
847
        size_t size;

848
        vp9_get_tile_col_offsets(cm, tile_col);
849

Dmitry Kovalev's avatar
Dmitry Kovalev committed
850
        has_more = tile_col < tile_cols - 1 || tile_row < tile_rows - 1;
851
        if (has_more) {