vp9_decodeframe.c 74.4 KB
Newer Older
John Koleszar's avatar
John Koleszar committed
1
/*
2
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
John Koleszar's avatar
John Koleszar committed
3
 *
4
 *  Use of this source code is governed by a BSD-style license
5 6
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
7
 *  in the file PATENTS.  All contributing project authors may
8
 *  be found in the AUTHORS file in the root of the source tree.
John Koleszar's avatar
John Koleszar committed
9 10
 */

11
#include <assert.h>
12
#include <stdlib.h>  // qsort()
John Koleszar's avatar
John Koleszar committed
13

14
#include "./vp9_rtcd.h"
15 16
#include "./vpx_scale_rtcd.h"

17
#include "vpx_mem/vpx_mem.h"
18
#include "vpx_ports/mem.h"
19
#include "vpx_ports/mem_ops.h"
20 21
#include "vpx_scale/vpx_scale.h"

Dmitry Kovalev's avatar
Dmitry Kovalev committed
22
#include "vp9/common/vp9_alloccommon.h"
Ronald S. Bultje's avatar
Ronald S. Bultje committed
23
#include "vp9/common/vp9_common.h"
Yaowu Xu's avatar
Yaowu Xu committed
24
#include "vp9/common/vp9_entropy.h"
25
#include "vp9/common/vp9_entropymode.h"
26
#include "vp9/common/vp9_idct.h"
27
#include "vp9/common/vp9_thread_common.h"
Dmitry Kovalev's avatar
Dmitry Kovalev committed
28
#include "vp9/common/vp9_pred_common.h"
29
#include "vp9/common/vp9_quant_common.h"
Dmitry Kovalev's avatar
Dmitry Kovalev committed
30 31
#include "vp9/common/vp9_reconintra.h"
#include "vp9/common/vp9_reconinter.h"
32
#include "vp9/common/vp9_seg_common.h"
hkuang's avatar
hkuang committed
33
#include "vp9/common/vp9_thread.h"
34
#include "vp9/common/vp9_tile_common.h"
35

Yaowu Xu's avatar
Yaowu Xu committed
36
#include "vp9/decoder/vp9_decodeframe.h"
37 38
#include "vp9/decoder/vp9_detokenize.h"
#include "vp9/decoder/vp9_decodemv.h"
39
#include "vp9/decoder/vp9_decoder.h"
40
#include "vp9/decoder/vp9_dsubexp.h"
41
#include "vp9/decoder/vp9_read_bit_buffer.h"
42
#include "vp9/decoder/vp9_reader.h"
43

44 45
#define MAX_VP9_HEADER_SIZE 80

46
static int is_compound_reference_allowed(const VP9_COMMON *cm) {
47
  int i;
Dmitry Kovalev's avatar
Dmitry Kovalev committed
48
  for (i = 1; i < REFS_PER_FRAME; ++i)
49
    if (cm->ref_frame_sign_bias[i + 1] != cm->ref_frame_sign_bias[1])
50 51 52 53 54
      return 1;

  return 0;
}

55
// Derives the fixed and variable references used by compound prediction:
// the two references sharing a sign bias become the variable pair and the
// remaining one is the fixed reference.
static void setup_compound_reference_mode(VP9_COMMON *cm) {
  if (cm->ref_frame_sign_bias[LAST_FRAME] ==
          cm->ref_frame_sign_bias[GOLDEN_FRAME]) {
    cm->comp_fixed_ref = ALTREF_FRAME;
    cm->comp_var_ref[0] = LAST_FRAME;
    cm->comp_var_ref[1] = GOLDEN_FRAME;
  } else if (cm->ref_frame_sign_bias[LAST_FRAME] ==
                 cm->ref_frame_sign_bias[ALTREF_FRAME]) {
    cm->comp_fixed_ref = GOLDEN_FRAME;
    cm->comp_var_ref[0] = LAST_FRAME;
    cm->comp_var_ref[1] = ALTREF_FRAME;
  } else {
    cm->comp_fixed_ref = LAST_FRAME;
    cm->comp_var_ref[0] = GOLDEN_FRAME;
    cm->comp_var_ref[1] = ALTREF_FRAME;
  }
}

73
// Returns nonzero iff [start, start + len) lies entirely within the buffer
// bounded by end, with len > 0.  Used to validate partition sizes before
// handing them to the bool decoder.
static int read_is_valid(const uint8_t *start, size_t len, const uint8_t *end) {
  return len != 0 && len <= (size_t)(end - start);
}

77 78 79 80 81
// Reads an unsigned literal wide enough to hold max and clamps the result to
// max, so corrupt bitstream values cannot exceed the feature's legal range.
static int decode_unsigned_max(struct vp9_read_bit_buffer *rb, int max) {
  const int data = vp9_rb_read_literal(rb, get_unsigned_bits(max));
  return data > max ? max : data;
}

82 83 84 85 86
// Reads the frame transform mode: 2 bits, plus one extra bit that
// distinguishes ALLOW_32X32 from TX_MODE_SELECT.
static TX_MODE read_tx_mode(vp9_reader *r) {
  TX_MODE tx_mode = vp9_read_literal(r, 2);
  if (tx_mode == ALLOW_32X32)
    tx_mode += vp9_read_bit(r);
  return tx_mode;
}

89
// Reads differential updates for the transform-size probability tables.
// Each tree has one fewer node per allowed size (8x8: 1, 16x16: 2, 32x32: 3).
static void read_tx_mode_probs(struct tx_probs *tx_probs, vp9_reader *r) {
  int i, j;

  for (i = 0; i < TX_SIZE_CONTEXTS; ++i)
    for (j = 0; j < TX_SIZES - 3; ++j)
      vp9_diff_update_prob(r, &tx_probs->p8x8[i][j]);

  for (i = 0; i < TX_SIZE_CONTEXTS; ++i)
    for (j = 0; j < TX_SIZES - 2; ++j)
      vp9_diff_update_prob(r, &tx_probs->p16x16[i][j]);

  for (i = 0; i < TX_SIZE_CONTEXTS; ++i)
    for (j = 0; j < TX_SIZES - 1; ++j)
      vp9_diff_update_prob(r, &tx_probs->p32x32[i][j]);
}

105 106
// Reads differential updates for the switchable interpolation filter probs.
static void read_switchable_interp_probs(FRAME_CONTEXT *fc, vp9_reader *r) {
  int i, j;
  for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j)
    for (i = 0; i < SWITCHABLE_FILTERS - 1; ++i)
      vp9_diff_update_prob(r, &fc->switchable_interp_prob[j][i]);
}

// Reads differential updates for the inter prediction mode probabilities.
static void read_inter_mode_probs(FRAME_CONTEXT *fc, vp9_reader *r) {
  int i, j;
  for (i = 0; i < INTER_MODE_CONTEXTS; ++i)
    for (j = 0; j < INTER_MODES - 1; ++j)
      vp9_diff_update_prob(r, &fc->inter_mode_probs[i][j]);
}

119 120
// Reads the frame reference mode.  When compound prediction is not allowed,
// no bits are consumed and SINGLE_REFERENCE is implied.
static REFERENCE_MODE read_frame_reference_mode(const VP9_COMMON *cm,
                                                vp9_reader *r) {
  if (is_compound_reference_allowed(cm)) {
    return vp9_read_bit(r) ? (vp9_read_bit(r) ? REFERENCE_MODE_SELECT
                                              : COMPOUND_REFERENCE)
                           : SINGLE_REFERENCE;
  } else {
    return SINGLE_REFERENCE;
  }
}

130
// Reads probability updates for reference-frame selection.  Which tables are
// updated depends on the previously decoded reference mode: comp_inter probs
// only for REFERENCE_MODE_SELECT, single_ref probs unless purely compound,
// comp_ref probs unless purely single.
static void read_frame_reference_mode_probs(VP9_COMMON *cm, vp9_reader *r) {
  FRAME_CONTEXT *const fc = cm->fc;
  int i;

  if (cm->reference_mode == REFERENCE_MODE_SELECT)
    for (i = 0; i < COMP_INTER_CONTEXTS; ++i)
      vp9_diff_update_prob(r, &fc->comp_inter_prob[i]);

  if (cm->reference_mode != COMPOUND_REFERENCE)
    for (i = 0; i < REF_CONTEXTS; ++i) {
      vp9_diff_update_prob(r, &fc->single_ref_prob[i][0]);
      vp9_diff_update_prob(r, &fc->single_ref_prob[i][1]);
    }

  if (cm->reference_mode != SINGLE_REFERENCE)
    for (i = 0; i < REF_CONTEXTS; ++i)
      vp9_diff_update_prob(r, &fc->comp_ref_prob[i]);
}

149 150 151
// Conditionally updates n motion-vector probabilities.  Each updated prob is
// read as a 7-bit value, then mapped to an odd 8-bit value ((v << 1) | 1).
static void update_mv_probs(vp9_prob *p, int n, vp9_reader *r) {
  int i;
  for (i = 0; i < n; ++i)
    if (vp9_read(r, MV_UPDATE_PROB))
      p[i] = (vp9_read_literal(r, 7) << 1) | 1;
}

156 157
// Reads motion-vector probability updates: joint probs, then per-component
// sign/class/class0/bits, then fractional-pel probs, and finally the
// high-precision probs when allow_hp is set.
static void read_mv_probs(nmv_context *ctx, int allow_hp, vp9_reader *r) {
  int i, j;

  update_mv_probs(ctx->joints, MV_JOINTS - 1, r);

  for (i = 0; i < 2; ++i) {
    nmv_component *const comp_ctx = &ctx->comps[i];
    update_mv_probs(&comp_ctx->sign, 1, r);
    update_mv_probs(comp_ctx->classes, MV_CLASSES - 1, r);
    update_mv_probs(comp_ctx->class0, CLASS0_SIZE - 1, r);
    update_mv_probs(comp_ctx->bits, MV_OFFSET_BITS, r);
  }

  for (i = 0; i < 2; ++i) {
    nmv_component *const comp_ctx = &ctx->comps[i];
    for (j = 0; j < CLASS0_SIZE; ++j)
      update_mv_probs(comp_ctx->class0_fp[j], MV_FP_SIZE - 1, r);
    update_mv_probs(comp_ctx->fp, 3, r);
  }

  if (allow_hp) {
    for (i = 0; i < 2; ++i) {
      nmv_component *const comp_ctx = &ctx->comps[i];
      update_mv_probs(&comp_ctx->class0_hp, 1, r);
      update_mv_probs(&comp_ctx->hp, 1, r);
    }
  }
}

185
static void inverse_transform_block(MACROBLOCKD* xd, int plane, int block,
186 187
                                    TX_SIZE tx_size, uint8_t *dst, int stride,
                                    int eob) {
188
  struct macroblockd_plane *const pd = &xd->plane[plane];
189
  if (eob > 0) {
190
    TX_TYPE tx_type = DCT_DCT;
191
    tran_low_t *const dqcoeff = BLOCK_OFFSET(pd->dqcoeff, block);
192 193 194 195
#if CONFIG_VP9_HIGHBITDEPTH
    if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
      if (xd->lossless) {
        tx_type = DCT_DCT;
196
        vp9_highbd_iwht4x4_add(dqcoeff, dst, stride, eob, xd->bd);
197 198 199 200 201
      } else {
        const PLANE_TYPE plane_type = pd->plane_type;
        switch (tx_size) {
          case TX_4X4:
            tx_type = get_tx_type_4x4(plane_type, xd, block);
202
            vp9_highbd_iht4x4_add(tx_type, dqcoeff, dst, stride, eob, xd->bd);
203 204 205
            break;
          case TX_8X8:
            tx_type = get_tx_type(plane_type, xd);
206
            vp9_highbd_iht8x8_add(tx_type, dqcoeff, dst, stride, eob, xd->bd);
207 208 209
            break;
          case TX_16X16:
            tx_type = get_tx_type(plane_type, xd);
210
            vp9_highbd_iht16x16_add(tx_type, dqcoeff, dst, stride, eob, xd->bd);
211 212 213
            break;
          case TX_32X32:
            tx_type = DCT_DCT;
214
            vp9_highbd_idct32x32_add(dqcoeff, dst, stride, eob, xd->bd);
215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249
            break;
          default:
            assert(0 && "Invalid transform size");
        }
      }
    } else {
      if (xd->lossless) {
        tx_type = DCT_DCT;
        vp9_iwht4x4_add(dqcoeff, dst, stride, eob);
      } else {
        const PLANE_TYPE plane_type = pd->plane_type;
        switch (tx_size) {
          case TX_4X4:
            tx_type = get_tx_type_4x4(plane_type, xd, block);
            vp9_iht4x4_add(tx_type, dqcoeff, dst, stride, eob);
            break;
          case TX_8X8:
            tx_type = get_tx_type(plane_type, xd);
            vp9_iht8x8_add(tx_type, dqcoeff, dst, stride, eob);
            break;
          case TX_16X16:
            tx_type = get_tx_type(plane_type, xd);
            vp9_iht16x16_add(tx_type, dqcoeff, dst, stride, eob);
            break;
          case TX_32X32:
            tx_type = DCT_DCT;
            vp9_idct32x32_add(dqcoeff, dst, stride, eob);
            break;
          default:
            assert(0 && "Invalid transform size");
            return;
        }
      }
    }
#else
250 251 252 253 254 255 256 257
    if (xd->lossless) {
      tx_type = DCT_DCT;
      vp9_iwht4x4_add(dqcoeff, dst, stride, eob);
    } else {
      const PLANE_TYPE plane_type = pd->plane_type;
      switch (tx_size) {
        case TX_4X4:
          tx_type = get_tx_type_4x4(plane_type, xd, block);
Dmitry Kovalev's avatar
Dmitry Kovalev committed
258
          vp9_iht4x4_add(tx_type, dqcoeff, dst, stride, eob);
259 260 261 262 263 264 265 266 267 268 269 270 271 272 273
          break;
        case TX_8X8:
          tx_type = get_tx_type(plane_type, xd);
          vp9_iht8x8_add(tx_type, dqcoeff, dst, stride, eob);
          break;
        case TX_16X16:
          tx_type = get_tx_type(plane_type, xd);
          vp9_iht16x16_add(tx_type, dqcoeff, dst, stride, eob);
          break;
        case TX_32X32:
          tx_type = DCT_DCT;
          vp9_idct32x32_add(dqcoeff, dst, stride, eob);
          break;
        default:
          assert(0 && "Invalid transform size");
274
          return;
275
      }
276
    }
277
#endif  // CONFIG_VP9_HIGHBITDEPTH
278 279

    if (eob == 1) {
James Zern's avatar
James Zern committed
280
      memset(dqcoeff, 0, 2 * sizeof(dqcoeff[0]));
281
    } else {
282
      if (tx_type == DCT_DCT && tx_size <= TX_16X16 && eob <= 10)
James Zern's avatar
James Zern committed
283
        memset(dqcoeff, 0, 4 * (4 << tx_size) * sizeof(dqcoeff[0]));
284
      else if (tx_size == TX_32X32 && eob <= 34)
James Zern's avatar
James Zern committed
285
        memset(dqcoeff, 0, 256 * sizeof(dqcoeff[0]));
286
      else
James Zern's avatar
James Zern committed
287
        memset(dqcoeff, 0, (16 << (tx_size << 1)) * sizeof(dqcoeff[0]));
288
    }
289 290 291
  }
}

292 293 294
struct intra_args {
  VP9_COMMON *cm;
  MACROBLOCKD *xd;
295
  FRAME_COUNTS *counts;
296
  vp9_reader *r;
297
  int seg_id;
298 299 300 301 302
};

// Per-transform-block callback: runs intra prediction for one block and, when
// the block is not skipped, decodes its tokens and adds the inverse-transform
// residual on top of the prediction.
static void predict_and_reconstruct_intra_block(int plane, int block,
                                                BLOCK_SIZE plane_bsize,
                                                TX_SIZE tx_size, void *arg) {
  struct intra_args *const args = (struct intra_args *)arg;
  VP9_COMMON *const cm = args->cm;
  MACROBLOCKD *const xd = args->xd;
  struct macroblockd_plane *const pd = &xd->plane[plane];
  MODE_INFO *const mi = xd->mi[0];
  // Luma sub-8x8 blocks carry their own mode; chroma always uses uv_mode.
  const PREDICTION_MODE mode = (plane == 0) ? get_y_mode(mi, block)
                                            : mi->mbmi.uv_mode;
  int x, y;
  uint8_t *dst;
  txfrm_block_to_raster_xy(plane_bsize, tx_size, block, &x, &y);
  dst = &pd->dst.buf[4 * y * pd->dst.stride + 4 * x];

  vp9_predict_intra_block(xd, block >> (tx_size << 1),
                          b_width_log2_lookup[plane_bsize], tx_size, mode,
                          dst, pd->dst.stride, dst, pd->dst.stride,
                          x, y, plane);

  if (!mi->mbmi.skip) {
    const int eob = vp9_decode_block_tokens(cm, xd, args->counts, plane, block,
                                            plane_bsize, x, y, tx_size,
                                            args->r, args->seg_id);
    inverse_transform_block(xd, plane, block, tx_size, dst, pd->dst.stride,
                            eob);
  }
}

329 330 331 332
struct inter_args {
  VP9_COMMON *cm;
  MACROBLOCKD *xd;
  vp9_reader *r;
333
  FRAME_COUNTS *counts;
334
  int *eobtotal;
335
  int seg_id;
336 337 338 339 340
};

// Per-transform-block callback for inter blocks: decodes the tokens, applies
// the inverse transform onto the (already predicted) destination, and
// accumulates the eob so the caller can detect an all-zero residual.
static void reconstruct_inter_block(int plane, int block,
                                    BLOCK_SIZE plane_bsize,
                                    TX_SIZE tx_size, void *arg) {
  struct inter_args *args = (struct inter_args *)arg;
  VP9_COMMON *const cm = args->cm;
  MACROBLOCKD *const xd = args->xd;
  struct macroblockd_plane *const pd = &xd->plane[plane];
  int x, y, eob;
  txfrm_block_to_raster_xy(plane_bsize, tx_size, block, &x, &y);
  eob = vp9_decode_block_tokens(cm, xd, args->counts, plane, block, plane_bsize,
                                x, y, tx_size, args->r, args->seg_id);
  inverse_transform_block(xd, plane, block, tx_size,
                          &pd->dst.buf[4 * y * pd->dst.stride + 4 * x],
                          pd->dst.stride, eob);
  *args->eobtotal += eob;
}

355 356 357
// Points xd at the mode-info entry for (mi_row, mi_col), replicates that
// entry across the (clipped) block extent in the mi grid, and sets up edge
// distances, skip context and destination plane pointers.  Returns the
// block's MB_MODE_INFO for the caller to fill in.
static MB_MODE_INFO *set_offsets(VP9_COMMON *const cm, MACROBLOCKD *const xd,
                                 const TileInfo *const tile,
                                 BLOCK_SIZE bsize, int mi_row, int mi_col) {
  const int bw = num_8x8_blocks_wide_lookup[bsize];
  const int bh = num_8x8_blocks_high_lookup[bsize];
  // Clip the block extent to the frame so edge blocks don't write outside
  // the mi grid.
  const int x_mis = MIN(bw, cm->mi_cols - mi_col);
  const int y_mis = MIN(bh, cm->mi_rows - mi_row);
  const int offset = mi_row * cm->mi_stride + mi_col;
  int x, y;

  xd->mi = cm->mi_grid_visible + offset;
  xd->mi[0] = &cm->mi[offset];
  xd->mi[0]->mbmi.sb_type = bsize;
  // x starts at !y so the (0, 0) entry, just assigned above, is not rewritten.
  for (y = 0; y < y_mis; ++y)
    for (x = !y; x < x_mis; ++x) {
      xd->mi[y * cm->mi_stride + x] = xd->mi[0];
    }

  set_skip_context(xd, mi_row, mi_col);

  // Distance of Mb to the various image edges. These are specified to 8th pel
  // as they are always compared to values that are in 1/8th pel units
  set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw, cm->mi_rows, cm->mi_cols);

  vp9_setup_dst_planes(xd->plane, get_frame_new_buffer(cm), mi_row, mi_col);
  return &xd->mi[0]->mbmi;
}
John Koleszar's avatar
John Koleszar committed
382

383
static void decode_block(VP9Decoder *const pbi, MACROBLOCKD *const xd,
384
                         FRAME_COUNTS *counts,
385 386 387
                         const TileInfo *const tile,
                         int mi_row, int mi_col,
                         vp9_reader *r, BLOCK_SIZE bsize) {
388
  VP9_COMMON *const cm = &pbi->common;
389
  const int less8x8 = bsize < BLOCK_8X8;
390
  MB_MODE_INFO *mbmi = set_offsets(cm, xd, tile, bsize, mi_row, mi_col);
391
  vp9_read_mode_info(pbi, xd, counts, tile, mi_row, mi_col, r);
392

393
  if (less8x8)
394
    bsize = BLOCK_8X8;
395

396
  if (mbmi->skip) {
397 398
    reset_skip_context(xd, bsize);
  }
399

400
  if (!is_inter_block(mbmi)) {
401
    struct intra_args arg = {cm, xd, counts, r, mbmi->segment_id};
402 403
    vp9_foreach_transformed_block(xd, bsize,
                                  predict_and_reconstruct_intra_block, &arg);
404 405
  } else {
    // Prediction
406
    vp9_dec_build_inter_predictors_sb(pbi, xd, mi_row, mi_col, bsize);
407

408
    // Reconstruction
409
    if (!mbmi->skip) {
410
      int eobtotal = 0;
411
      struct inter_args arg = {cm, xd, r, counts, &eobtotal, mbmi->segment_id};
412
      vp9_foreach_transformed_block(xd, bsize, reconstruct_inter_block, &arg);
413
      if (!less8x8 && eobtotal == 0)
414
        mbmi->skip = 1;  // skip loopfilter
415
    }
416
  }
417

418
  xd->corrupted |= vp9_reader_has_error(r);
419 420
}

421 422
static PARTITION_TYPE read_partition(VP9_COMMON *cm, MACROBLOCKD *xd,
                                     FRAME_COUNTS *counts, int hbs,
423
                                     int mi_row, int mi_col, BLOCK_SIZE bsize,
424
                                     vp9_reader *r) {
425
  const int ctx = partition_plane_context(xd, mi_row, mi_col, bsize);
426 427 428 429
  const vp9_prob *const probs = get_partition_probs(cm, ctx);
  const int has_rows = (mi_row + hbs) < cm->mi_rows;
  const int has_cols = (mi_col + hbs) < cm->mi_cols;
  PARTITION_TYPE p;
430 431

  if (has_rows && has_cols)
Dmitry Kovalev's avatar
Dmitry Kovalev committed
432
    p = (PARTITION_TYPE)vp9_read_tree(r, vp9_partition_tree, probs);
433
  else if (!has_rows && has_cols)
434
    p = vp9_read(r, probs[1]) ? PARTITION_SPLIT : PARTITION_HORZ;
435
  else if (has_rows && !has_cols)
436
    p = vp9_read(r, probs[2]) ? PARTITION_SPLIT : PARTITION_VERT;
437
  else
438 439 440
    p = PARTITION_SPLIT;

  if (!cm->frame_parallel_decoding_mode)
441
    ++counts->partition[ctx][p];
442 443

  return p;
444 445
}

446
static void decode_partition(VP9Decoder *const pbi, MACROBLOCKD *const xd,
447
                             FRAME_COUNTS *counts,
448 449 450
                             const TileInfo *const tile,
                             int mi_row, int mi_col,
                             vp9_reader* r, BLOCK_SIZE bsize) {
451
  VP9_COMMON *const cm = &pbi->common;
Dmitry Kovalev's avatar
Dmitry Kovalev committed
452
  const int hbs = num_8x8_blocks_wide_lookup[bsize] / 2;
453
  PARTITION_TYPE partition;
454
  BLOCK_SIZE subsize, uv_subsize;
455

456
  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols)
457 458
    return;

459
  partition = read_partition(cm, xd, counts, hbs, mi_row, mi_col, bsize, r);
460
  subsize = get_subsize(bsize, partition);
461 462
  uv_subsize = ss_size_lookup[subsize][cm->subsampling_x][cm->subsampling_y];
  if (subsize >= BLOCK_8X8 && uv_subsize == BLOCK_INVALID)
463 464
    vpx_internal_error(xd->error_info,
                       VPX_CODEC_CORRUPT_FRAME, "Invalid block size.");
465
  if (subsize < BLOCK_8X8) {
466
    decode_block(pbi, xd, counts, tile, mi_row, mi_col, r, subsize);
467 468 469
  } else {
    switch (partition) {
      case PARTITION_NONE:
470
        decode_block(pbi, xd, counts, tile, mi_row, mi_col, r, subsize);
471 472
        break;
      case PARTITION_HORZ:
473
        decode_block(pbi, xd, counts, tile, mi_row, mi_col, r, subsize);
474
        if (mi_row + hbs < cm->mi_rows)
475
          decode_block(pbi, xd, counts, tile, mi_row + hbs, mi_col, r, subsize);
476 477
        break;
      case PARTITION_VERT:
478
        decode_block(pbi, xd, counts, tile, mi_row, mi_col, r, subsize);
479
        if (mi_col + hbs < cm->mi_cols)
480
          decode_block(pbi, xd, counts, tile, mi_row, mi_col + hbs, r, subsize);
481 482
        break;
      case PARTITION_SPLIT:
483 484 485 486 487 488 489
        decode_partition(pbi, xd, counts, tile, mi_row, mi_col, r, subsize);
        decode_partition(pbi, xd, counts, tile, mi_row, mi_col + hbs, r,
                         subsize);
        decode_partition(pbi, xd, counts, tile, mi_row + hbs, mi_col, r,
                         subsize);
        decode_partition(pbi, xd, counts, tile, mi_row + hbs, mi_col + hbs, r,
                         subsize);
490 491
        break;
      default:
James Zern's avatar
James Zern committed
492
        assert(0 && "Invalid partition type");
493
    }
494
  }
495

496
  // update partition context
497
  if (bsize >= BLOCK_8X8 &&
498
      (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT))
499
    update_partition_context(xd, mi_row, mi_col, subsize, bsize);
500 501
}

502 503 504 505
static void setup_token_decoder(const uint8_t *data,
                                const uint8_t *data_end,
                                size_t read_size,
                                struct vpx_internal_error_info *error_info,
506 507 508
                                vp9_reader *r,
                                vpx_decrypt_cb decrypt_cb,
                                void *decrypt_state) {
Dmitry Kovalev's avatar
Dmitry Kovalev committed
509 510 511
  // Validate the calculated partition length. If the buffer
  // described by the partition can't be fully read, then restrict
  // it to the portion that can be (for EC mode) or throw an error.
512
  if (!read_is_valid(data, read_size, data_end))
513
    vpx_internal_error(error_info, VPX_CODEC_CORRUPT_FRAME,
514
                       "Truncated packet or corrupt tile length");
John Koleszar's avatar
John Koleszar committed
515

516
  if (vp9_reader_init(r, data, read_size, decrypt_cb, decrypt_state))
517
    vpx_internal_error(error_info, VPX_CODEC_MEM_ERROR,
John Koleszar's avatar
John Koleszar committed
518
                       "Failed to allocate bool decoder %d", 1);
John Koleszar's avatar
John Koleszar committed
519 520
}

521
static void read_coef_probs_common(vp9_coeff_probs_model *coef_probs,
522
                                   vp9_reader *r) {
523 524 525
  int i, j, k, l, m;

  if (vp9_read_bit(r))
526
    for (i = 0; i < PLANE_TYPES; ++i)
527 528 529 530 531
      for (j = 0; j < REF_TYPES; ++j)
        for (k = 0; k < COEF_BANDS; ++k)
          for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l)
            for (m = 0; m < UNCONSTRAINED_NODES; ++m)
              vp9_diff_update_prob(r, &coef_probs[i][j][k][l][m]);
532
}
533

534
static void read_coef_probs(FRAME_CONTEXT *fc, TX_MODE tx_mode,
535
                            vp9_reader *r) {
Yaowu Xu's avatar
Yaowu Xu committed
536 537 538 539
    const TX_SIZE max_tx_size = tx_mode_to_biggest_tx_size[tx_mode];
    TX_SIZE tx_size;
    for (tx_size = TX_4X4; tx_size <= max_tx_size; ++tx_size)
      read_coef_probs_common(fc->coef_probs[tx_size], r);
540 541
}

542 543
static void setup_segmentation(struct segmentation *seg,
                               struct vp9_read_bit_buffer *rb) {
544 545
  int i, j;

546 547
  seg->update_map = 0;
  seg->update_data = 0;
548

549 550
  seg->enabled = vp9_rb_read_bit(rb);
  if (!seg->enabled)
551 552 553
    return;

  // Segmentation map update
554 555
  seg->update_map = vp9_rb_read_bit(rb);
  if (seg->update_map) {
Paul Wilkins's avatar
Paul Wilkins committed
556
    for (i = 0; i < SEG_TREE_PROBS; i++)
557 558
      seg->tree_probs[i] = vp9_rb_read_bit(rb) ? vp9_rb_read_literal(rb, 8)
                                               : MAX_PROB;
559

560 561
    seg->temporal_update = vp9_rb_read_bit(rb);
    if (seg->temporal_update) {
562
      for (i = 0; i < PREDICTION_PROBS; i++)
563 564
        seg->pred_probs[i] = vp9_rb_read_bit(rb) ? vp9_rb_read_literal(rb, 8)
                                                 : MAX_PROB;
565 566
    } else {
      for (i = 0; i < PREDICTION_PROBS; i++)
567
        seg->pred_probs[i] = MAX_PROB;
568
    }
569
  }
570

571
  // Segmentation data update
572 573 574
  seg->update_data = vp9_rb_read_bit(rb);
  if (seg->update_data) {
    seg->abs_delta = vp9_rb_read_bit(rb);
575

576
    vp9_clearall_segfeatures(seg);
577

Paul Wilkins's avatar
Paul Wilkins committed
578
    for (i = 0; i < MAX_SEGMENTS; i++) {
579 580
      for (j = 0; j < SEG_LVL_MAX; j++) {
        int data = 0;
581
        const int feature_enabled = vp9_rb_read_bit(rb);
582
        if (feature_enabled) {
583
          vp9_enable_segfeature(seg, i, j);
584
          data = decode_unsigned_max(rb, vp9_seg_feature_data_max(j));
585
          if (vp9_is_segfeature_signed(j))
586
            data = vp9_rb_read_bit(rb) ? -data : data;
587
        }
588
        vp9_set_segdata(seg, i, j, data);
589 590 591 592 593
      }
    }
  }
}

594 595 596 597
static void setup_loopfilter(struct loopfilter *lf,
                             struct vp9_read_bit_buffer *rb) {
  lf->filter_level = vp9_rb_read_literal(rb, 6);
  lf->sharpness_level = vp9_rb_read_literal(rb, 3);
598 599 600

  // Read in loop filter deltas applied at the MB level based on mode or ref
  // frame.
601
  lf->mode_ref_delta_update = 0;
602

603 604 605 606
  lf->mode_ref_delta_enabled = vp9_rb_read_bit(rb);
  if (lf->mode_ref_delta_enabled) {
    lf->mode_ref_delta_update = vp9_rb_read_bit(rb);
    if (lf->mode_ref_delta_update) {
607 608
      int i;

609 610
      for (i = 0; i < MAX_REF_LF_DELTAS; i++)
        if (vp9_rb_read_bit(rb))
611
          lf->ref_deltas[i] = vp9_rb_read_signed_literal(rb, 6);
612

613 614
      for (i = 0; i < MAX_MODE_LF_DELTAS; i++)
        if (vp9_rb_read_bit(rb))
615
          lf->mode_deltas[i] = vp9_rb_read_signed_literal(rb, 6);
616 617 618 619
    }
  }
}

620 621
// Reads an optional signed 4-bit quantizer delta; returns 0 when absent.
static INLINE int read_delta_q(struct vp9_read_bit_buffer *rb) {
  return vp9_rb_read_bit(rb) ? vp9_rb_read_signed_literal(rb, 4) : 0;
}
623

624 625
// Parses the quantization parameters from the uncompressed header and derives
// the lossless flag (all-zero base index and deltas means lossless coding).
static void setup_quantization(VP9_COMMON *const cm, MACROBLOCKD *const xd,
                               struct vp9_read_bit_buffer *rb) {
  cm->base_qindex = vp9_rb_read_literal(rb, QINDEX_BITS);
  cm->y_dc_delta_q = read_delta_q(rb);
  cm->uv_dc_delta_q = read_delta_q(rb);
  cm->uv_ac_delta_q = read_delta_q(rb);
  // Remember the bit depth the dequant tables will be built with.
  cm->dequant_bit_depth = cm->bit_depth;
  xd->lossless = cm->base_qindex == 0 &&
                 cm->y_dc_delta_q == 0 &&
                 cm->uv_dc_delta_q == 0 &&
                 cm->uv_ac_delta_q == 0;

#if CONFIG_VP9_HIGHBITDEPTH
  xd->bd = (int)cm->bit_depth;
#endif
}

641 642 643 644 645 646 647 648 649 650 651 652 653 654 655 656 657 658 659 660 661 662 663 664 665 666 667
// Builds the y/uv dequantization tables.  With segmentation enabled each
// segment gets its own qindex-derived values; otherwise only entry 0 is used.
static void setup_segmentation_dequant(VP9_COMMON *const cm) {
  // Build y/uv dequant values based on segmentation.
  if (cm->seg.enabled) {
    int i;
    for (i = 0; i < MAX_SEGMENTS; ++i) {
      const int qindex = vp9_get_qindex(&cm->seg, i, cm->base_qindex);
      cm->y_dequant[i][0] = vp9_dc_quant(qindex, cm->y_dc_delta_q,
                                         cm->bit_depth);
      cm->y_dequant[i][1] = vp9_ac_quant(qindex, 0, cm->bit_depth);
      cm->uv_dequant[i][0] = vp9_dc_quant(qindex, cm->uv_dc_delta_q,
                                          cm->bit_depth);
      cm->uv_dequant[i][1] = vp9_ac_quant(qindex, cm->uv_ac_delta_q,
                                          cm->bit_depth);
    }
  } else {
    const int qindex = cm->base_qindex;
    // When segmentation is disabled, only the first value is used.  The
    // remaining are don't cares.
    cm->y_dequant[0][0] = vp9_dc_quant(qindex, cm->y_dc_delta_q, cm->bit_depth);
    cm->y_dequant[0][1] = vp9_ac_quant(qindex, 0, cm->bit_depth);
    cm->uv_dequant[0][0] = vp9_dc_quant(qindex, cm->uv_dc_delta_q,
                                        cm->bit_depth);
    cm->uv_dequant[0][1] = vp9_ac_quant(qindex, cm->uv_ac_delta_q,
                                        cm->bit_depth);
  }
}

668 669 670 671 672
static INTERP_FILTER read_interp_filter(struct vp9_read_bit_buffer *rb) {
  const INTERP_FILTER literal_to_filter[] = { EIGHTTAP_SMOOTH,
                                              EIGHTTAP,
                                              EIGHTTAP_SHARP,
                                              BILINEAR };
673
  return vp9_rb_read_bit(rb) ? SWITCHABLE
674
                             : literal_to_filter[vp9_rb_read_literal(rb, 2)];
675 676
}

677 678
// Reads a frame size from the header.  The bitstream encodes size minus one,
// so a stored 16-bit value of 0 means a dimension of 1.
void vp9_read_frame_size(struct vp9_read_bit_buffer *rb,
                         int *width, int *height) {
  *width = vp9_rb_read_literal(rb, 16) + 1;
  *height = vp9_rb_read_literal(rb, 16) + 1;
}

683
// Reads the optional display size; defaults to the coded frame size when the
// render-size bit is absent.
static void setup_display_size(VP9_COMMON *cm, struct vp9_read_bit_buffer *rb) {
  cm->display_width = cm->width;
  cm->display_height = cm->height;
  if (vp9_rb_read_bit(rb))
    vp9_read_frame_size(rb, &cm->display_width, &cm->display_height);
}
689

690 691 692 693 694 695 696 697
// Reallocates the current frame's motion-vector buffer to match the new mi
// grid.  Allocation failure is reported through cm->error rather than leaving
// a NULL pointer for later dereference.
static void resize_mv_buffer(VP9_COMMON *cm) {
  vpx_free(cm->cur_frame->mvs);
  cm->cur_frame->mi_rows = cm->mi_rows;
  cm->cur_frame->mi_cols = cm->mi_cols;
  cm->cur_frame->mvs = (MV_REF *)vpx_calloc(cm->mi_rows * cm->mi_cols,
                                            sizeof(*cm->cur_frame->mvs));
  if (cm->cur_frame->mvs == NULL)
    vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
                       "Failed to allocate mv buffer");
}

Adrian Grange's avatar
Adrian Grange committed
698
// Resizes mi/context buffers when the coded frame size changes.  Buffers are
// only reallocated when either mi dimension grows; shrinking just updates the
// bookkeeping via vp9_set_mb_mi().
static void resize_context_buffers(VP9_COMMON *cm, int width, int height) {
#if CONFIG_SIZE_LIMIT
  if (width > DECODE_WIDTH_LIMIT || height > DECODE_HEIGHT_LIMIT)
    vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
                       "Dimensions of %dx%d beyond allowed size of %dx%d.",
                       width, height, DECODE_WIDTH_LIMIT, DECODE_HEIGHT_LIMIT);
#endif
  if (cm->width != width || cm->height != height) {
    const int new_mi_rows =
        ALIGN_POWER_OF_TWO(height, MI_SIZE_LOG2) >> MI_SIZE_LOG2;
    const int new_mi_cols =
        ALIGN_POWER_OF_TWO(width,  MI_SIZE_LOG2) >> MI_SIZE_LOG2;

    // Allocations in vp9_alloc_context_buffers() depend on individual
    // dimensions as well as the overall size.
    if (new_mi_cols > cm->mi_cols || new_mi_rows > cm->mi_rows) {
      if (vp9_alloc_context_buffers(cm, width, height))
        vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
                           "Failed to allocate context buffers");
    } else {
      vp9_set_mb_mi(cm, width, height);
    }
    vp9_init_context_buffers(cm);
    cm->width = width;
    cm->height = height;
  }
  if (cm->cur_frame->mvs == NULL || cm->mi_rows > cm->cur_frame->mi_rows ||
      cm->mi_cols > cm->cur_frame->mi_cols) {
    resize_mv_buffer(cm);
  }
}

// Reads the coded and display sizes, resizes context buffers accordingly,
// and (under the buffer-pool lock) reallocates the new frame buffer,
// propagating format metadata into the pool entry.
static void setup_frame_size(VP9_COMMON *cm, struct vp9_read_bit_buffer *rb) {
  int width, height;
  BufferPool *const pool = cm->buffer_pool;
  vp9_read_frame_size(rb, &width, &height);
  resize_context_buffers(cm, width, height);
  setup_display_size(cm, rb);

  lock_buffer_pool(pool);
  if (vp9_realloc_frame_buffer(
          get_frame_new_buffer(cm), cm->width, cm->height,
          cm->subsampling_x, cm->subsampling_y,
#if CONFIG_VP9_HIGHBITDEPTH
          cm->use_highbitdepth,
#endif
          VP9_DEC_BORDER_IN_PIXELS,
          cm->byte_alignment,
          &pool->frame_bufs[cm->new_fb_idx].raw_frame_buffer, pool->get_fb_cb,
          pool->cb_priv)) {
    // Unlock before raising the error: vpx_internal_error longjmps out and
    // would otherwise leave the pool locked.
    unlock_buffer_pool(pool);
    vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
                       "Failed to allocate frame buffer");
  }
  unlock_buffer_pool(pool);

  pool->frame_bufs[cm->new_fb_idx].buf.subsampling_x = cm->subsampling_x;
  pool->frame_bufs[cm->new_fb_idx].buf.subsampling_y = cm->subsampling_y;
  pool->frame_bufs[cm->new_fb_idx].buf.bit_depth = (unsigned int)cm->bit_depth;
  pool->frame_bufs[cm->new_fb_idx].buf.color_space = cm->color_space;
}

// A reference frame is format-compatible only if its bit depth and chroma
// subsampling match the current frame's.
static INLINE int valid_ref_frame_img_fmt(vpx_bit_depth_t ref_bit_depth,
                                          int ref_xss, int ref_yss,
                                          vpx_bit_depth_t this_bit_depth,
                                          int this_xss, int this_yss) {
  return ref_bit_depth == this_bit_depth && ref_xss == this_xss &&
         ref_yss == this_yss;
}

768
static void setup_frame_size_with_refs(VP9_COMMON *cm,
769 770 771
                                       struct vp9_read_bit_buffer *rb) {
  int width, height;
  int found = 0, i;
772
  int has_valid_ref_frame = 0;
773
  BufferPool *const pool = cm