/*
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include <assert.h>
#include <stdlib.h>  // qsort()

#include "./vp9_rtcd.h"
#include "./vpx_scale_rtcd.h"

#include "vpx_mem/vpx_mem.h"
#include "vpx_ports/mem_ops.h"
#include "vpx_scale/vpx_scale.h"

#include "vp9/common/vp9_alloccommon.h"
#include "vp9/common/vp9_common.h"
#include "vp9/common/vp9_entropy.h"
#include "vp9/common/vp9_entropymode.h"
#include "vp9/common/vp9_idct.h"
#include "vp9/common/vp9_pred_common.h"
#include "vp9/common/vp9_quant_common.h"
#include "vp9/common/vp9_reconintra.h"
#include "vp9/common/vp9_reconinter.h"
#include "vp9/common/vp9_seg_common.h"
#include "vp9/common/vp9_thread.h"
#include "vp9/common/vp9_tile_common.h"

#include "vp9/decoder/vp9_decodeframe.h"
#include "vp9/decoder/vp9_detokenize.h"
#include "vp9/decoder/vp9_decodemv.h"
#include "vp9/decoder/vp9_decoder.h"
#include "vp9/decoder/vp9_dsubexp.h"
#include "vp9/decoder/vp9_dthread.h"
#include "vp9/decoder/vp9_read_bit_buffer.h"
#include "vp9/decoder/vp9_reader.h"

#define MAX_VP9_HEADER_SIZE 80

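// Compound prediction requires at least one reference frame whose sign bias
// differs from LAST_FRAME's.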
static int is_compound_reference_allowed(const VP9_COMMON *cm) {
  int i;
  for (i = 1; i < REFS_PER_FRAME; ++i)
    if (cm->ref_frame_sign_bias[i + 1] != cm->ref_frame_sign_bias[1])
      return 1;

  return 0;
}

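// Choose the fixed reference and the two variable references used for
// compound prediction, based on the reference sign biases.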
static void setup_compound_reference_mode(VP9_COMMON *cm) {
  if (cm->ref_frame_sign_bias[LAST_FRAME] ==
          cm->ref_frame_sign_bias[GOLDEN_FRAME]) {
    cm->comp_fixed_ref = ALTREF_FRAME;
    cm->comp_var_ref[0] = LAST_FRAME;
    cm->comp_var_ref[1] = GOLDEN_FRAME;
  } else if (cm->ref_frame_sign_bias[LAST_FRAME] ==
                 cm->ref_frame_sign_bias[ALTREF_FRAME]) {
    cm->comp_fixed_ref = GOLDEN_FRAME;
    cm->comp_var_ref[0] = LAST_FRAME;
    cm->comp_var_ref[1] = ALTREF_FRAME;
  } else {
    cm->comp_fixed_ref = LAST_FRAME;
    cm->comp_var_ref[0] = GOLDEN_FRAME;
    cm->comp_var_ref[1] = ALTREF_FRAME;
  }
}

static int read_is_valid(const uint8_t *start, size_t len, const uint8_t *end) {
  return len != 0 && len <= (size_t)(end - start);
}

static int decode_unsigned_max(struct vp9_read_bit_buffer *rb, int max) {
  const int data = vp9_rb_read_literal(rb, get_unsigned_bits(max));
  return data > max ? max : data;
}

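// The transform mode is coded as a 2-bit literal, plus one extra bit to
// distinguish ALLOW_32X32 from TX_MODE_SELECT.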
static TX_MODE read_tx_mode(vp9_reader *r) {
  TX_MODE tx_mode = vp9_read_literal(r, 2);
  if (tx_mode == ALLOW_32X32)
    tx_mode += vp9_read_bit(r);
  return tx_mode;
}

static void read_tx_mode_probs(struct tx_probs *tx_probs, vp9_reader *r) {
  int i, j;

  for (i = 0; i < TX_SIZE_CONTEXTS; ++i)
    for (j = 0; j < TX_SIZES - 3; ++j)
      vp9_diff_update_prob(r, &tx_probs->p8x8[i][j]);

  for (i = 0; i < TX_SIZE_CONTEXTS; ++i)
    for (j = 0; j < TX_SIZES - 2; ++j)
      vp9_diff_update_prob(r, &tx_probs->p16x16[i][j]);

  for (i = 0; i < TX_SIZE_CONTEXTS; ++i)
    for (j = 0; j < TX_SIZES - 1; ++j)
      vp9_diff_update_prob(r, &tx_probs->p32x32[i][j]);
}

static void read_switchable_interp_probs(FRAME_CONTEXT *fc, vp9_reader *r) {
  int i, j;
  for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j)
    for (i = 0; i < SWITCHABLE_FILTERS - 1; ++i)
      vp9_diff_update_prob(r, &fc->switchable_interp_prob[j][i]);
}

static void read_inter_mode_probs(FRAME_CONTEXT *fc, vp9_reader *r) {
  int i, j;
  for (i = 0; i < INTER_MODE_CONTEXTS; ++i)
    for (j = 0; j < INTER_MODES - 1; ++j)
      vp9_diff_update_prob(r, &fc->inter_mode_probs[i][j]);
}

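// Read whether this frame uses single, compound, or per-block (select)
// reference prediction.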
static REFERENCE_MODE read_frame_reference_mode(const VP9_COMMON *cm,
                                                vp9_reader *r) {
  if (is_compound_reference_allowed(cm)) {
    return vp9_read_bit(r) ? (vp9_read_bit(r) ? REFERENCE_MODE_SELECT
                                              : COMPOUND_REFERENCE)
                           : SINGLE_REFERENCE;
  } else {
    return SINGLE_REFERENCE;
  }
}

static void read_frame_reference_mode_probs(VP9_COMMON *cm, vp9_reader *r) {
  FRAME_CONTEXT *const fc = &cm->fc;
  int i;

  if (cm->reference_mode == REFERENCE_MODE_SELECT)
    for (i = 0; i < COMP_INTER_CONTEXTS; ++i)
      vp9_diff_update_prob(r, &fc->comp_inter_prob[i]);

  if (cm->reference_mode != COMPOUND_REFERENCE)
    for (i = 0; i < REF_CONTEXTS; ++i) {
      vp9_diff_update_prob(r, &fc->single_ref_prob[i][0]);
      vp9_diff_update_prob(r, &fc->single_ref_prob[i][1]);
    }

  if (cm->reference_mode != SINGLE_REFERENCE)
    for (i = 0; i < REF_CONTEXTS; ++i)
      vp9_diff_update_prob(r, &fc->comp_ref_prob[i]);
}

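// Each MV probability is updated with probability MV_UPDATE_PROB; the new
// value is sent as a 7-bit literal and mapped to an odd 8-bit probability.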
static void update_mv_probs(vp9_prob *p, int n, vp9_reader *r) {
  int i;
  for (i = 0; i < n; ++i)
    if (vp9_read(r, MV_UPDATE_PROB))
      p[i] = (vp9_read_literal(r, 7) << 1) | 1;
}

static void read_mv_probs(nmv_context *ctx, int allow_hp, vp9_reader *r) {
  int i, j;

  update_mv_probs(ctx->joints, MV_JOINTS - 1, r);

  for (i = 0; i < 2; ++i) {
    nmv_component *const comp_ctx = &ctx->comps[i];
    update_mv_probs(&comp_ctx->sign, 1, r);
    update_mv_probs(comp_ctx->classes, MV_CLASSES - 1, r);
    update_mv_probs(comp_ctx->class0, CLASS0_SIZE - 1, r);
    update_mv_probs(comp_ctx->bits, MV_OFFSET_BITS, r);
  }

  for (i = 0; i < 2; ++i) {
    nmv_component *const comp_ctx = &ctx->comps[i];
    for (j = 0; j < CLASS0_SIZE; ++j)
      update_mv_probs(comp_ctx->class0_fp[j], MV_FP_SIZE - 1, r);
    update_mv_probs(comp_ctx->fp, 3, r);
  }

  if (allow_hp) {
    for (i = 0; i < 2; ++i) {
      nmv_component *const comp_ctx = &ctx->comps[i];
      update_mv_probs(&comp_ctx->class0_hp, 1, r);
      update_mv_probs(&comp_ctx->hp, 1, r);
    }
  }
}

static void setup_plane_dequants(VP9_COMMON *cm, MACROBLOCKD *xd, int q_index) {
  int i;
  xd->plane[0].dequant = cm->y_dequant[q_index];

  for (i = 1; i < MAX_MB_PLANE; i++)
    xd->plane[i].dequant = cm->uv_dequant[q_index];
}

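// Inverse transform one block, add the result to the destination buffer, and
// clear only the dequantized coefficients that could have been used (bounded
// by eob) so the buffer is ready for the next block.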
static void inverse_transform_block(MACROBLOCKD* xd, int plane, int block,
                                    TX_SIZE tx_size, uint8_t *dst, int stride,
                                    int eob) {
  struct macroblockd_plane *const pd = &xd->plane[plane];
  if (eob > 0) {
    TX_TYPE tx_type = DCT_DCT;
    tran_low_t *const dqcoeff = BLOCK_OFFSET(pd->dqcoeff, block);
#if CONFIG_VP9_HIGHBITDEPTH
    if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
      if (xd->lossless) {
        tx_type = DCT_DCT;
        vp9_high_iwht4x4_add(dqcoeff, dst, stride, eob, xd->bd);
      } else {
        const PLANE_TYPE plane_type = pd->plane_type;
        switch (tx_size) {
          case TX_4X4:
            tx_type = get_tx_type_4x4(plane_type, xd, block);
            vp9_high_iht4x4_add(tx_type, dqcoeff, dst, stride, eob, xd->bd);
            break;
          case TX_8X8:
            tx_type = get_tx_type(plane_type, xd);
            vp9_high_iht8x8_add(tx_type, dqcoeff, dst, stride, eob, xd->bd);
            break;
          case TX_16X16:
            tx_type = get_tx_type(plane_type, xd);
            vp9_high_iht16x16_add(tx_type, dqcoeff, dst, stride, eob, xd->bd);
            break;
          case TX_32X32:
            tx_type = DCT_DCT;
            vp9_high_idct32x32_add(dqcoeff, dst, stride, eob, xd->bd);
            break;
          default:
            assert(0 && "Invalid transform size");
        }
      }
    } else {
      if (xd->lossless) {
        tx_type = DCT_DCT;
        vp9_iwht4x4_add(dqcoeff, dst, stride, eob);
      } else {
        const PLANE_TYPE plane_type = pd->plane_type;
        switch (tx_size) {
          case TX_4X4:
            tx_type = get_tx_type_4x4(plane_type, xd, block);
            vp9_iht4x4_add(tx_type, dqcoeff, dst, stride, eob);
            break;
          case TX_8X8:
            tx_type = get_tx_type(plane_type, xd);
            vp9_iht8x8_add(tx_type, dqcoeff, dst, stride, eob);
            break;
          case TX_16X16:
            tx_type = get_tx_type(plane_type, xd);
            vp9_iht16x16_add(tx_type, dqcoeff, dst, stride, eob);
            break;
          case TX_32X32:
            tx_type = DCT_DCT;
            vp9_idct32x32_add(dqcoeff, dst, stride, eob);
            break;
          default:
            assert(0 && "Invalid transform size");
            return;
        }
      }
    }
#else
    if (xd->lossless) {
      tx_type = DCT_DCT;
      vp9_iwht4x4_add(dqcoeff, dst, stride, eob);
    } else {
      const PLANE_TYPE plane_type = pd->plane_type;
      switch (tx_size) {
        case TX_4X4:
          tx_type = get_tx_type_4x4(plane_type, xd, block);
          vp9_iht4x4_add(tx_type, dqcoeff, dst, stride, eob);
          break;
        case TX_8X8:
          tx_type = get_tx_type(plane_type, xd);
          vp9_iht8x8_add(tx_type, dqcoeff, dst, stride, eob);
          break;
        case TX_16X16:
          tx_type = get_tx_type(plane_type, xd);
          vp9_iht16x16_add(tx_type, dqcoeff, dst, stride, eob);
          break;
        case TX_32X32:
          tx_type = DCT_DCT;
          vp9_idct32x32_add(dqcoeff, dst, stride, eob);
          break;
        default:
          assert(0 && "Invalid transform size");
          return;
      }
    }
#endif  // CONFIG_VP9_HIGHBITDEPTH

    if (eob == 1) {
      vpx_memset(dqcoeff, 0, 2 * sizeof(dqcoeff[0]));
    } else {
      if (tx_type == DCT_DCT && tx_size <= TX_16X16 && eob <= 10)
        vpx_memset(dqcoeff, 0, 4 * (4 << tx_size) * sizeof(dqcoeff[0]));
      else if (tx_size == TX_32X32 && eob <= 34)
        vpx_memset(dqcoeff, 0, 256 * sizeof(dqcoeff[0]));
      else
        vpx_memset(dqcoeff, 0, (16 << (tx_size << 1)) * sizeof(dqcoeff[0]));
    }
  }
}

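// Intra path: predict a block from already-reconstructed neighbors, then
// decode and add the residual unless the block is coded as skipped.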
struct intra_args {
  VP9_COMMON *cm;
  MACROBLOCKD *xd;
  vp9_reader *r;
};

static void predict_and_reconstruct_intra_block(int plane, int block,
                                                BLOCK_SIZE plane_bsize,
                                                TX_SIZE tx_size, void *arg) {
  struct intra_args *const args = (struct intra_args *)arg;
  VP9_COMMON *const cm = args->cm;
  MACROBLOCKD *const xd = args->xd;
  struct macroblockd_plane *const pd = &xd->plane[plane];
  MODE_INFO *const mi = xd->mi[0].src_mi;
  const PREDICTION_MODE mode = (plane == 0) ? get_y_mode(mi, block)
                                            : mi->mbmi.uv_mode;
  int x, y;
  uint8_t *dst;
  txfrm_block_to_raster_xy(plane_bsize, tx_size, block, &x, &y);
  dst = &pd->dst.buf[4 * y * pd->dst.stride + 4 * x];

  vp9_predict_intra_block(xd, block >> (tx_size << 1),
                          b_width_log2(plane_bsize), tx_size, mode,
                          dst, pd->dst.stride, dst, pd->dst.stride,
                          x, y, plane);

  if (!mi->mbmi.skip) {
    const int eob = vp9_decode_block_tokens(cm, xd, plane, block,
                                            plane_bsize, x, y, tx_size,
                                            args->r);
    inverse_transform_block(xd, plane, block, tx_size, dst, pd->dst.stride,
                            eob);
  }
}

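// Inter path: decode the residual for one transform block, add its inverse
// transform on top of the inter prediction, and accumulate the eob total.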
struct inter_args {
  VP9_COMMON *cm;
  MACROBLOCKD *xd;
  vp9_reader *r;
  int *eobtotal;
};

static void reconstruct_inter_block(int plane, int block,
                                    BLOCK_SIZE plane_bsize,
                                    TX_SIZE tx_size, void *arg) {
  struct inter_args *args = (struct inter_args *)arg;
  VP9_COMMON *const cm = args->cm;
  MACROBLOCKD *const xd = args->xd;
  struct macroblockd_plane *const pd = &xd->plane[plane];
  int x, y, eob;
  txfrm_block_to_raster_xy(plane_bsize, tx_size, block, &x, &y);
  eob = vp9_decode_block_tokens(cm, xd, plane, block, plane_bsize, x, y,
                                tx_size, args->r);
  inverse_transform_block(xd, plane, block, tx_size,
                          &pd->dst.buf[4 * y * pd->dst.stride + 4 * x],
                          pd->dst.stride, eob);
  *args->eobtotal += eob;
}

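// Point xd->mi at the mode-info grid for this block, mark the mi units it
// covers, and set up the skip context, edge distances and destination planes.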
static MB_MODE_INFO *set_offsets(VP9_COMMON *const cm, MACROBLOCKD *const xd,
                                 const TileInfo *const tile,
                                 BLOCK_SIZE bsize, int mi_row, int mi_col) {
  const int bw = num_8x8_blocks_wide_lookup[bsize];
  const int bh = num_8x8_blocks_high_lookup[bsize];
  const int x_mis = MIN(bw, cm->mi_cols - mi_col);
  const int y_mis = MIN(bh, cm->mi_rows - mi_row);
  const int offset = mi_row * cm->mi_stride + mi_col;
  int x, y;

  xd->mi = cm->mi + offset;
  xd->mi[0].src_mi = &xd->mi[0];  // Point to self.
  xd->mi[0].mbmi.sb_type = bsize;

  for (y = 0; y < y_mis; ++y)
    for (x = !y; x < x_mis; ++x) {
      xd->mi[y * cm->mi_stride + x].src_mi = &xd->mi[0];
    }

  set_skip_context(xd, mi_row, mi_col);

  // Distance of Mb to the various image edges. These are specified to 8th pel
  // as they are always compared to values that are in 1/8th pel units
  set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw, cm->mi_rows, cm->mi_cols);

  vp9_setup_dst_planes(xd->plane, get_frame_new_buffer(cm), mi_row, mi_col);
  return &xd->mi[0].mbmi;
}

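// Set up scale factors and prediction planes for reference 'idx' of the
// current block, reporting corrupt or unscalable references.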
static void set_ref(VP9_COMMON *const cm, MACROBLOCKD *const xd,
                    int idx, int mi_row, int mi_col) {
  MB_MODE_INFO *const mbmi = &xd->mi[0].src_mi->mbmi;
  RefBuffer *ref_buffer = &cm->frame_refs[mbmi->ref_frame[idx] - LAST_FRAME];
  xd->block_refs[idx] = ref_buffer;
  if (!vp9_is_valid_scale(&ref_buffer->sf))
    vpx_internal_error(&cm->error, VPX_CODEC_UNSUP_BITSTREAM,
                       "Invalid scale factors");
  if (ref_buffer->buf->corrupted)
    vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
                       "Block reference is corrupt");
  vp9_setup_pre_planes(xd, idx, ref_buffer->buf, mi_row, mi_col,
                       &ref_buffer->sf);
  xd->corrupted |= ref_buffer->buf->corrupted;
}

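// Decode a single prediction block: read its mode info, then run either the
// intra or the inter predict/reconstruct path.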
static void decode_block(VP9_COMMON *const cm, MACROBLOCKD *const xd,
                         const TileInfo *const tile,
                         int mi_row, int mi_col,
                         vp9_reader *r, BLOCK_SIZE bsize) {
  const int less8x8 = bsize < BLOCK_8X8;
  MB_MODE_INFO *mbmi = set_offsets(cm, xd, tile, bsize, mi_row, mi_col);
  vp9_read_mode_info(cm, xd, tile, mi_row, mi_col, r);

  if (less8x8)
    bsize = BLOCK_8X8;

  if (mbmi->skip) {
    reset_skip_context(xd, bsize);
  } else {
    if (cm->seg.enabled)
      setup_plane_dequants(cm, xd, vp9_get_qindex(&cm->seg, mbmi->segment_id,
                                                  cm->base_qindex));
  }

  if (!is_inter_block(mbmi)) {
    struct intra_args arg = { cm, xd, r };
    vp9_foreach_transformed_block(xd, bsize,
                                  predict_and_reconstruct_intra_block, &arg);
  } else {
    // Setup
    set_ref(cm, xd, 0, mi_row, mi_col);
    if (has_second_ref(mbmi))
      set_ref(cm, xd, 1, mi_row, mi_col);

    // Prediction
    vp9_dec_build_inter_predictors_sb(xd, mi_row, mi_col, bsize);

    // Reconstruction
    if (!mbmi->skip) {
      int eobtotal = 0;
      struct inter_args arg = { cm, xd, r, &eobtotal };
      vp9_foreach_transformed_block(xd, bsize, reconstruct_inter_block, &arg);
      if (!less8x8 && eobtotal == 0)
        mbmi->skip = 1;  // skip loopfilter
    }
  }

  xd->corrupted |= vp9_reader_has_error(r);
}

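// Read the partition type for a block, constrained by whether the block
// extends past the right or bottom edge of the frame.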
static PARTITION_TYPE read_partition(VP9_COMMON *cm, MACROBLOCKD *xd, int hbs,
                                     int mi_row, int mi_col, BLOCK_SIZE bsize,
                                     vp9_reader *r) {
  const int ctx = partition_plane_context(xd, mi_row, mi_col, bsize);
  const vp9_prob *const probs = get_partition_probs(cm, ctx);
  const int has_rows = (mi_row + hbs) < cm->mi_rows;
  const int has_cols = (mi_col + hbs) < cm->mi_cols;
  PARTITION_TYPE p;

  if (has_rows && has_cols)
    p = (PARTITION_TYPE)vp9_read_tree(r, vp9_partition_tree, probs);
  else if (!has_rows && has_cols)
    p = vp9_read(r, probs[1]) ? PARTITION_SPLIT : PARTITION_HORZ;
  else if (has_rows && !has_cols)
    p = vp9_read(r, probs[2]) ? PARTITION_SPLIT : PARTITION_VERT;
  else
    p = PARTITION_SPLIT;

  if (!cm->frame_parallel_decoding_mode)
    ++cm->counts.partition[ctx][p];

  return p;
}

static void decode_partition(VP9_COMMON *const cm, MACROBLOCKD *const xd,
                             const TileInfo *const tile,
                             int mi_row, int mi_col,
                             vp9_reader* r, BLOCK_SIZE bsize) {
  const int hbs = num_8x8_blocks_wide_lookup[bsize] / 2;
  PARTITION_TYPE partition;
  BLOCK_SIZE subsize, uv_subsize;

  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols)
    return;

  partition = read_partition(cm, xd, hbs, mi_row, mi_col, bsize, r);
  subsize = get_subsize(bsize, partition);
  uv_subsize = ss_size_lookup[subsize][cm->subsampling_x][cm->subsampling_y];
  if (subsize >= BLOCK_8X8 && uv_subsize == BLOCK_INVALID)
    vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
                       "Invalid block size.");
  if (subsize < BLOCK_8X8) {
    decode_block(cm, xd, tile, mi_row, mi_col, r, subsize);
  } else {
    switch (partition) {
      case PARTITION_NONE:
        decode_block(cm, xd, tile, mi_row, mi_col, r, subsize);
        break;
      case PARTITION_HORZ:
        decode_block(cm, xd, tile, mi_row, mi_col, r, subsize);
        if (mi_row + hbs < cm->mi_rows)
          decode_block(cm, xd, tile, mi_row + hbs, mi_col, r, subsize);
        break;
      case PARTITION_VERT:
        decode_block(cm, xd, tile, mi_row, mi_col, r, subsize);
        if (mi_col + hbs < cm->mi_cols)
          decode_block(cm, xd, tile, mi_row, mi_col + hbs, r, subsize);
        break;
      case PARTITION_SPLIT:
        decode_partition(cm, xd, tile, mi_row,       mi_col,       r, subsize);
        decode_partition(cm, xd, tile, mi_row,       mi_col + hbs, r, subsize);
        decode_partition(cm, xd, tile, mi_row + hbs, mi_col,       r, subsize);
        decode_partition(cm, xd, tile, mi_row + hbs, mi_col + hbs, r, subsize);
        break;
      default:
        assert(0 && "Invalid partition type");
    }
  }

  // update partition context
  if (bsize >= BLOCK_8X8 &&
      (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT))
    update_partition_context(xd, mi_row, mi_col, subsize, bsize);
}

static void setup_token_decoder(const uint8_t *data,
                                const uint8_t *data_end,
                                size_t read_size,
                                struct vpx_internal_error_info *error_info,
                                vp9_reader *r,
                                vpx_decrypt_cb decrypt_cb,
                                void *decrypt_state) {
  // Validate the calculated partition length. If the buffer
  // described by the partition can't be fully read, then restrict
  // it to the portion that can be (for EC mode) or throw an error.
  if (!read_is_valid(data, read_size, data_end))
    vpx_internal_error(error_info, VPX_CODEC_CORRUPT_FRAME,
                       "Truncated packet or corrupt tile length");

  if (vp9_reader_init(r, data, read_size, decrypt_cb, decrypt_state))
    vpx_internal_error(error_info, VPX_CODEC_MEM_ERROR,
                       "Failed to allocate bool decoder %d", 1);
}

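// A single bit signals whether any coefficient probability updates follow for
// this transform size.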
static void read_coef_probs_common(vp9_coeff_probs_model *coef_probs,
                                   vp9_reader *r) {
  int i, j, k, l, m;

  if (vp9_read_bit(r))
    for (i = 0; i < PLANE_TYPES; ++i)
      for (j = 0; j < REF_TYPES; ++j)
        for (k = 0; k < COEF_BANDS; ++k)
          for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l)
            for (m = 0; m < UNCONSTRAINED_NODES; ++m)
              vp9_diff_update_prob(r, &coef_probs[i][j][k][l][m]);
}

static void read_coef_probs(FRAME_CONTEXT *fc, TX_MODE tx_mode,
                            vp9_reader *r) {
  const TX_SIZE max_tx_size = tx_mode_to_biggest_tx_size[tx_mode];
  TX_SIZE tx_size;
  for (tx_size = TX_4X4; tx_size <= max_tx_size; ++tx_size)
    read_coef_probs_common(fc->coef_probs[tx_size], r);
}

static void setup_segmentation(struct segmentation *seg,
                               struct vp9_read_bit_buffer *rb) {
  int i, j;

  seg->update_map = 0;
  seg->update_data = 0;

  seg->enabled = vp9_rb_read_bit(rb);
  if (!seg->enabled)
    return;

  // Segmentation map update
  seg->update_map = vp9_rb_read_bit(rb);
  if (seg->update_map) {
    for (i = 0; i < SEG_TREE_PROBS; i++)
      seg->tree_probs[i] = vp9_rb_read_bit(rb) ? vp9_rb_read_literal(rb, 8)
                                               : MAX_PROB;

    seg->temporal_update = vp9_rb_read_bit(rb);
    if (seg->temporal_update) {
      for (i = 0; i < PREDICTION_PROBS; i++)
        seg->pred_probs[i] = vp9_rb_read_bit(rb) ? vp9_rb_read_literal(rb, 8)
                                                 : MAX_PROB;
    } else {
      for (i = 0; i < PREDICTION_PROBS; i++)
        seg->pred_probs[i] = MAX_PROB;
    }
  }

  // Segmentation data update
  seg->update_data = vp9_rb_read_bit(rb);
  if (seg->update_data) {
    seg->abs_delta = vp9_rb_read_bit(rb);

    vp9_clearall_segfeatures(seg);

    for (i = 0; i < MAX_SEGMENTS; i++) {
      for (j = 0; j < SEG_LVL_MAX; j++) {
        int data = 0;
        const int feature_enabled = vp9_rb_read_bit(rb);
        if (feature_enabled) {
          vp9_enable_segfeature(seg, i, j);
          data = decode_unsigned_max(rb, vp9_seg_feature_data_max(j));
          if (vp9_is_segfeature_signed(j))
            data = vp9_rb_read_bit(rb) ? -data : data;
        }
        vp9_set_segdata(seg, i, j, data);
      }
    }
  }
}

static void setup_loopfilter(struct loopfilter *lf,
                             struct vp9_read_bit_buffer *rb) {
  lf->filter_level = vp9_rb_read_literal(rb, 6);
  lf->sharpness_level = vp9_rb_read_literal(rb, 3);

  // Read in loop filter deltas applied at the MB level based on mode or ref
  // frame.
  lf->mode_ref_delta_update = 0;

  lf->mode_ref_delta_enabled = vp9_rb_read_bit(rb);
  if (lf->mode_ref_delta_enabled) {
    lf->mode_ref_delta_update = vp9_rb_read_bit(rb);
    if (lf->mode_ref_delta_update) {
      int i;

      for (i = 0; i < MAX_REF_LF_DELTAS; i++)
        if (vp9_rb_read_bit(rb))
          lf->ref_deltas[i] = vp9_rb_read_signed_literal(rb, 6);

      for (i = 0; i < MAX_MODE_LF_DELTAS; i++)
        if (vp9_rb_read_bit(rb))
          lf->mode_deltas[i] = vp9_rb_read_signed_literal(rb, 6);
    }
  }
}

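// Delta-Q values are optional; each is sent as a signed 4-bit literal.
// Returns 1 if the stored value changed.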
static int read_delta_q(struct vp9_read_bit_buffer *rb, int *delta_q) {
  const int old = *delta_q;
  *delta_q = vp9_rb_read_bit(rb) ? vp9_rb_read_signed_literal(rb, 4) : 0;
  return old != *delta_q;
}

static void setup_quantization(VP9_COMMON *const cm, MACROBLOCKD *const xd,
                               struct vp9_read_bit_buffer *rb) {
  int update = 0;

  cm->base_qindex = vp9_rb_read_literal(rb, QINDEX_BITS);
  update |= read_delta_q(rb, &cm->y_dc_delta_q);
  update |= read_delta_q(rb, &cm->uv_dc_delta_q);
  update |= read_delta_q(rb, &cm->uv_ac_delta_q);
  if (update || cm->bit_depth != cm->dequant_bit_depth) {
    vp9_init_dequantizer(cm);
    cm->dequant_bit_depth = cm->bit_depth;
  }

  xd->lossless = cm->base_qindex == 0 &&
                 cm->y_dc_delta_q == 0 &&
                 cm->uv_dc_delta_q == 0 &&
                 cm->uv_ac_delta_q == 0;
#if CONFIG_VP9_HIGHBITDEPTH
  xd->bd = (int)cm->bit_depth;
#endif
}

static INTERP_FILTER read_interp_filter(struct vp9_read_bit_buffer *rb) {
  const INTERP_FILTER literal_to_filter[] = { EIGHTTAP_SMOOTH,
                                              EIGHTTAP,
                                              EIGHTTAP_SHARP,
                                              BILINEAR };
  return vp9_rb_read_bit(rb) ? SWITCHABLE
                             : literal_to_filter[vp9_rb_read_literal(rb, 2)];
}

void vp9_read_frame_size(struct vp9_read_bit_buffer *rb,
                         int *width, int *height) {
  const int w = vp9_rb_read_literal(rb, 16) + 1;
  const int h = vp9_rb_read_literal(rb, 16) + 1;
  *width = w;
  *height = h;
}

static void setup_display_size(VP9_COMMON *cm, struct vp9_read_bit_buffer *rb) {
  cm->display_width = cm->width;
  cm->display_height = cm->height;
  if (vp9_rb_read_bit(rb))
    vp9_read_frame_size(rb, &cm->display_width, &cm->display_height);
}

static void resize_context_buffers(VP9_COMMON *cm, int width, int height) {
#if CONFIG_SIZE_LIMIT
  if (width > DECODE_WIDTH_LIMIT || height > DECODE_HEIGHT_LIMIT)
    vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
                       "Width and height beyond allowed size.");
#endif
  if (cm->width != width || cm->height != height) {
    const int new_mi_rows =
        ALIGN_POWER_OF_TWO(height, MI_SIZE_LOG2) >> MI_SIZE_LOG2;
    const int new_mi_cols =
        ALIGN_POWER_OF_TWO(width,  MI_SIZE_LOG2) >> MI_SIZE_LOG2;

    // Allocations in vp9_alloc_context_buffers() depend on individual
    // dimensions as well as the overall size.
    if (new_mi_cols > cm->mi_cols || new_mi_rows > cm->mi_rows) {
      if (vp9_alloc_context_buffers(cm, width, height))
        vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
                           "Failed to allocate context buffers");
    } else {
      vp9_set_mb_mi(cm, width, height);
    }
    vp9_init_context_buffers(cm);
    cm->width = width;
    cm->height = height;
  }
}

static void setup_frame_size(VP9_COMMON *cm, struct vp9_read_bit_buffer *rb) {
  int width, height;
  vp9_read_frame_size(rb, &width, &height);
  resize_context_buffers(cm, width, height);
  setup_display_size(cm, rb);

  if (vp9_realloc_frame_buffer(
          get_frame_new_buffer(cm), cm->width, cm->height,
          cm->subsampling_x, cm->subsampling_y,
#if CONFIG_VP9_HIGHBITDEPTH
          cm->use_highbitdepth,
#endif
          VP9_DEC_BORDER_IN_PIXELS,
          &cm->frame_bufs[cm->new_fb_idx].raw_frame_buffer, cm->get_fb_cb,
          cm->cb_priv)) {
    vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
                       "Failed to allocate frame buffer");
  }
  cm->frame_bufs[cm->new_fb_idx].buf.subsampling_x = cm->subsampling_x;
  cm->frame_bufs[cm->new_fb_idx].buf.subsampling_y = cm->subsampling_y;
  cm->frame_bufs[cm->new_fb_idx].buf.bit_depth = (unsigned int)cm->bit_depth;
}

static INLINE int valid_ref_frame_img_fmt(vpx_bit_depth_t ref_bit_depth,
                                          int ref_xss, int ref_yss,
                                          vpx_bit_depth_t this_bit_depth,
                                          int this_xss, int this_yss) {
  return ref_bit_depth == this_bit_depth && ref_xss == this_xss &&
         ref_yss == this_yss;
}

static void setup_frame_size_with_refs(VP9_COMMON *cm,
                                       struct vp9_read_bit_buffer *rb) {
  int width, height;
  int found = 0, i;
  int has_valid_ref_frame = 0;
  for (i = 0; i < REFS_PER_FRAME; ++i) {
    if (vp9_rb_read_bit(rb)) {
      YV12_BUFFER_CONFIG *const buf = cm->frame_refs[i].buf;
      width = buf->y_crop_width;
      height = buf->y_crop_height;
      if (buf->corrupted) {
        vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
                           "Frame reference is corrupt");
      }
      found = 1;
      break;
    }
  }

  if (!found)
    vp9_read_frame_size(rb, &width, &height);

  if (width <= 0 || height <= 0)
    vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
                       "Invalid frame size");

  // Check to make sure that at least one of the frames that this frame
  // references has valid dimensions.
  for (i = 0; i < REFS_PER_FRAME; ++i) {
    RefBuffer *const ref_frame = &cm->frame_refs[i];
    has_valid_ref_frame |= valid_ref_frame_size(ref_frame->buf->y_crop_width,
                                                ref_frame->buf->y_crop_height,
                                                width, height);
  }
  if (!has_valid_ref_frame)
    vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
                       "Referenced frame has invalid size");
  for (i = 0; i < REFS_PER_FRAME; ++i) {
    RefBuffer *const ref_frame = &cm->frame_refs[i];
    if (!valid_ref_frame_img_fmt(
            ref_frame->buf->bit_depth,
            ref_frame->buf->subsampling_x,
            ref_frame->buf->subsampling_y,
            cm->bit_depth,
            cm->subsampling_x,
            cm->subsampling_y))
      vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
                         "Referenced frame has incompatible color space");
  }

  resize_context_buffers(cm, width, height);
  setup_display_size(cm, rb);

  if (vp9_realloc_frame_buffer(
          get_frame_new_buffer(cm), cm->width, cm->height,
          cm->subsampling_x, cm->subsampling_y,
#if CONFIG_VP9_HIGHBITDEPTH
          cm->use_highbitdepth,
#endif
          VP9_DEC_BORDER_IN_PIXELS,
          &cm->frame_bufs[cm->new_fb_idx].raw_frame_buffer, cm->get_fb_cb,
          cm->cb_priv)) {
    vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
                       "Failed to allocate frame buffer");
  }
  cm->frame_bufs[cm->new_fb_idx].buf.subsampling_x = cm->subsampling_x;
  cm->frame_bufs[cm->new_fb_idx].buf.subsampling_y = cm->subsampling_y;
  cm->frame_bufs[cm->new_fb_idx].buf.bit_depth = (unsigned int)cm->bit_depth;
}

static void setup_tile_info(VP9_COMMON *cm, struct vp9_read_bit_buffer *rb) {
  int min_log2_tile_cols, max_log2_tile_cols, max_ones;
  vp9_get_tile_n_bits(cm->mi_cols, &min_log2_tile_cols, &max_log2_tile_cols);

  // columns
  max_ones = max_log2_tile_cols - min_log2_tile_cols;
  cm->log2_tile_cols = min_log2_tile_cols;
  while (max_ones-- && vp9_rb_read_bit(rb))
    cm->log2_tile_cols++;

  if (cm->log2_tile_cols > 6)
    vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
                       "Invalid number of tile columns");

  // rows
  cm->log2_tile_rows = vp9_rb_read_bit(rb);
  if (cm->log2_tile_rows)
    cm->log2_tile_rows += vp9_rb_read_bit(rb);
}

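// Location and size of one tile's compressed data within the frame packet.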
typedef struct TileBuffer {
  const uint8_t *data;
  size_t size;
  int col;  // only used with multi-threaded decoding
} TileBuffer;

// Reads the next tile returning its size and adjusting '*data' accordingly
// based on 'is_last'.
static void get_tile_buffer(const uint8_t *const data_end,
                            int is_last,
                            struct vpx_internal_error_info *error_info,
                            const uint8_t **data,
                            vpx_decrypt_cb decrypt_cb, void *decrypt_state,
                            TileBuffer *buf) {
  size_t size;

  if (!is_last) {
    if (!read_is_valid(*data, 4, data_end))
      vpx_internal_error(error_info, VPX_CODEC_CORRUPT_FRAME,
                         "Truncated packet or corrupt tile length");

    if (decrypt_cb) {
      uint8_t be_data[4];
      decrypt_cb(decrypt_state, *data, be_data, 4);
      size = mem_get_be32(be_data);
    } else {
      size = mem_get_be32(*data);
    }
    *data += 4;

    if (size > (size_t)(data_end - *data))
      vpx_internal_error(error_info, VPX_CODEC_CORRUPT_FRAME,
                         "Truncated packet or corrupt tile size");
  } else {
    size = data_end - *data;
  }

  buf->data = *data;
  buf->size = size;

  *data += size;
}

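// Record the data pointer and size of every tile in the frame.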
static void get_tile_buffers(VP9Decoder *pbi,
                             const uint8_t *data, const uint8_t *data_end,
                             int tile_cols, int tile_rows,
                             TileBuffer (*tile_buffers)[1 << 6]) {
  int r, c;

  for (r = 0; r < tile_rows; ++r) {
    for (c = 0; c < tile_cols; ++c) {
      const int is_last = (r == tile_rows - 1) && (c == tile_cols - 1);
      TileBuffer *const buf = &tile_buffers[r][c];
      buf->col = c;
      get_tile_buffer(data_end, is_last, &pbi->common.error, &data,
                      pbi->decrypt_cb, pbi->decrypt_state, buf);
    }
  }
}

static const uint8_t *decode_tiles(VP9Decoder *pbi,
                                   const uint8_t *data,
                                   const uint8_t *data_end) {