/*
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include <assert.h>
#include <stdlib.h>  // qsort()

#include "./vp9_rtcd.h"
#include "./vpx_scale_rtcd.h"

#include "vpx_mem/vpx_mem.h"
#include "vpx_ports/mem_ops.h"
#include "vpx_scale/vpx_scale.h"

#include "vp9/common/vp9_alloccommon.h"
#include "vp9/common/vp9_common.h"
#include "vp9/common/vp9_entropy.h"
#include "vp9/common/vp9_entropymode.h"
#include "vp9/common/vp9_idct.h"
#include "vp9/common/vp9_thread_common.h"
#include "vp9/common/vp9_pred_common.h"
#include "vp9/common/vp9_quant_common.h"
#include "vp9/common/vp9_reconintra.h"
#include "vp9/common/vp9_reconinter.h"
#include "vp9/common/vp9_seg_common.h"
#include "vp9/common/vp9_thread.h"
#include "vp9/common/vp9_tile_common.h"

#include "vp9/decoder/vp9_decodeframe.h"
#include "vp9/decoder/vp9_detokenize.h"
#include "vp9/decoder/vp9_decodemv.h"
#include "vp9/decoder/vp9_decoder.h"
#include "vp9/decoder/vp9_dsubexp.h"
#include "vp9/decoder/vp9_read_bit_buffer.h"
#include "vp9/decoder/vp9_reader.h"

#define MAX_VP9_HEADER_SIZE 80

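// Compound prediction is only allowed when at least one reference frame has
// a sign bias different from LAST_FRAME, i.e. the references point in both
// temporal directions.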
static int is_compound_reference_allowed(const VP9_COMMON *cm) {
  int i;
  for (i = 1; i < REFS_PER_FRAME; ++i)
    if (cm->ref_frame_sign_bias[i + 1] != cm->ref_frame_sign_bias[1])
      return 1;

  return 0;
}

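// Picks the fixed reference (the one whose sign bias differs from the other
// two) and the two variable references used by compound prediction.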
static void setup_compound_reference_mode(VP9_COMMON *cm) {
  if (cm->ref_frame_sign_bias[LAST_FRAME] ==
          cm->ref_frame_sign_bias[GOLDEN_FRAME]) {
    cm->comp_fixed_ref = ALTREF_FRAME;
    cm->comp_var_ref[0] = LAST_FRAME;
    cm->comp_var_ref[1] = GOLDEN_FRAME;
  } else if (cm->ref_frame_sign_bias[LAST_FRAME] ==
                 cm->ref_frame_sign_bias[ALTREF_FRAME]) {
    cm->comp_fixed_ref = GOLDEN_FRAME;
    cm->comp_var_ref[0] = LAST_FRAME;
    cm->comp_var_ref[1] = ALTREF_FRAME;
  } else {
    cm->comp_fixed_ref = LAST_FRAME;
    cm->comp_var_ref[0] = GOLDEN_FRAME;
    cm->comp_var_ref[1] = ALTREF_FRAME;
  }
}

static int read_is_valid(const uint8_t *start, size_t len, const uint8_t *end) {
  return len != 0 && len <= (size_t)(end - start);
}

static int decode_unsigned_max(struct vp9_read_bit_buffer *rb, int max) {
  const int data = vp9_rb_read_literal(rb, get_unsigned_bits(max));
  return data > max ? max : data;
}

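// The transform mode is coded as a 2-bit literal; the value ALLOW_32X32 is
// extended by one more bit to distinguish it from TX_MODE_SELECT.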
static TX_MODE read_tx_mode(vp9_reader *r) {
  TX_MODE tx_mode = vp9_read_literal(r, 2);
  if (tx_mode == ALLOW_32X32)
    tx_mode += vp9_read_bit(r);
  return tx_mode;
}

static void read_tx_mode_probs(struct tx_probs *tx_probs, vp9_reader *r) {
  int i, j;

  for (i = 0; i < TX_SIZE_CONTEXTS; ++i)
    for (j = 0; j < TX_SIZES - 3; ++j)
      vp9_diff_update_prob(r, &tx_probs->p8x8[i][j]);

  for (i = 0; i < TX_SIZE_CONTEXTS; ++i)
    for (j = 0; j < TX_SIZES - 2; ++j)
      vp9_diff_update_prob(r, &tx_probs->p16x16[i][j]);

  for (i = 0; i < TX_SIZE_CONTEXTS; ++i)
    for (j = 0; j < TX_SIZES - 1; ++j)
      vp9_diff_update_prob(r, &tx_probs->p32x32[i][j]);
}

static void read_switchable_interp_probs(FRAME_CONTEXT *fc, vp9_reader *r) {
  int i, j;
  for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j)
    for (i = 0; i < SWITCHABLE_FILTERS - 1; ++i)
      vp9_diff_update_prob(r, &fc->switchable_interp_prob[j][i]);
}

static void read_inter_mode_probs(FRAME_CONTEXT *fc, vp9_reader *r) {
  int i, j;
  for (i = 0; i < INTER_MODE_CONTEXTS; ++i)
    for (j = 0; j < INTER_MODES - 1; ++j)
      vp9_diff_update_prob(r, &fc->inter_mode_probs[i][j]);
}

static REFERENCE_MODE read_frame_reference_mode(const VP9_COMMON *cm,
                                                vp9_reader *r) {
  if (is_compound_reference_allowed(cm)) {
    return vp9_read_bit(r) ? (vp9_read_bit(r) ? REFERENCE_MODE_SELECT
                                              : COMPOUND_REFERENCE)
                           : SINGLE_REFERENCE;
  } else {
    return SINGLE_REFERENCE;
  }
}

static void read_frame_reference_mode_probs(VP9_COMMON *cm, vp9_reader *r) {
  FRAME_CONTEXT *const fc = cm->fc;
  int i;

  if (cm->reference_mode == REFERENCE_MODE_SELECT)
    for (i = 0; i < COMP_INTER_CONTEXTS; ++i)
      vp9_diff_update_prob(r, &fc->comp_inter_prob[i]);

  if (cm->reference_mode != COMPOUND_REFERENCE)
    for (i = 0; i < REF_CONTEXTS; ++i) {
      vp9_diff_update_prob(r, &fc->single_ref_prob[i][0]);
      vp9_diff_update_prob(r, &fc->single_ref_prob[i][1]);
    }

  if (cm->reference_mode != SINGLE_REFERENCE)
    for (i = 0; i < REF_CONTEXTS; ++i)
      vp9_diff_update_prob(r, &fc->comp_ref_prob[i]);
}

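// Each MV probability is updated with probability MV_UPDATE_PROB; the new
// value is coded as a 7-bit literal and mapped to an odd 8-bit probability.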
static void update_mv_probs(vp9_prob *p, int n, vp9_reader *r) {
  int i;
  for (i = 0; i < n; ++i)
    if (vp9_read(r, MV_UPDATE_PROB))
      p[i] = (vp9_read_literal(r, 7) << 1) | 1;
}

static void read_mv_probs(nmv_context *ctx, int allow_hp, vp9_reader *r) {
  int i, j;

  update_mv_probs(ctx->joints, MV_JOINTS - 1, r);

  for (i = 0; i < 2; ++i) {
    nmv_component *const comp_ctx = &ctx->comps[i];
    update_mv_probs(&comp_ctx->sign, 1, r);
    update_mv_probs(comp_ctx->classes, MV_CLASSES - 1, r);
    update_mv_probs(comp_ctx->class0, CLASS0_SIZE - 1, r);
    update_mv_probs(comp_ctx->bits, MV_OFFSET_BITS, r);
  }

  for (i = 0; i < 2; ++i) {
    nmv_component *const comp_ctx = &ctx->comps[i];
    for (j = 0; j < CLASS0_SIZE; ++j)
      update_mv_probs(comp_ctx->class0_fp[j], MV_FP_SIZE - 1, r);
    update_mv_probs(comp_ctx->fp, 3, r);
  }

  if (allow_hp) {
    for (i = 0; i < 2; ++i) {
      nmv_component *const comp_ctx = &ctx->comps[i];
      update_mv_probs(&comp_ctx->class0_hp, 1, r);
      update_mv_probs(&comp_ctx->hp, 1, r);
    }
  }
}

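// Applies the inverse transform for one block and adds the result to the
// destination buffer, then clears the dequantized coefficients the transform
// consumed so the buffer is ready for the next block.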
static void inverse_transform_block(MACROBLOCKD *xd, int plane, int block,
                                    TX_SIZE tx_size, uint8_t *dst, int stride,
                                    int eob) {
  struct macroblockd_plane *const pd = &xd->plane[plane];
  if (eob > 0) {
    TX_TYPE tx_type = DCT_DCT;
    tran_low_t *const dqcoeff = BLOCK_OFFSET(pd->dqcoeff, block);
#if CONFIG_VP9_HIGHBITDEPTH
    if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
      if (xd->lossless) {
        tx_type = DCT_DCT;
        vp9_highbd_iwht4x4_add(dqcoeff, dst, stride, eob, xd->bd);
      } else {
        const PLANE_TYPE plane_type = pd->plane_type;
        switch (tx_size) {
          case TX_4X4:
            tx_type = get_tx_type_4x4(plane_type, xd, block);
            vp9_highbd_iht4x4_add(tx_type, dqcoeff, dst, stride, eob, xd->bd);
            break;
          case TX_8X8:
            tx_type = get_tx_type(plane_type, xd);
            vp9_highbd_iht8x8_add(tx_type, dqcoeff, dst, stride, eob, xd->bd);
            break;
          case TX_16X16:
            tx_type = get_tx_type(plane_type, xd);
            vp9_highbd_iht16x16_add(tx_type, dqcoeff, dst, stride, eob, xd->bd);
            break;
          case TX_32X32:
            tx_type = DCT_DCT;
            vp9_highbd_idct32x32_add(dqcoeff, dst, stride, eob, xd->bd);
            break;
          default:
            assert(0 && "Invalid transform size");
        }
      }
    } else {
      if (xd->lossless) {
        tx_type = DCT_DCT;
        vp9_iwht4x4_add(dqcoeff, dst, stride, eob);
      } else {
        const PLANE_TYPE plane_type = pd->plane_type;
        switch (tx_size) {
          case TX_4X4:
            tx_type = get_tx_type_4x4(plane_type, xd, block);
            vp9_iht4x4_add(tx_type, dqcoeff, dst, stride, eob);
            break;
          case TX_8X8:
            tx_type = get_tx_type(plane_type, xd);
            vp9_iht8x8_add(tx_type, dqcoeff, dst, stride, eob);
            break;
          case TX_16X16:
            tx_type = get_tx_type(plane_type, xd);
            vp9_iht16x16_add(tx_type, dqcoeff, dst, stride, eob);
            break;
          case TX_32X32:
            tx_type = DCT_DCT;
            vp9_idct32x32_add(dqcoeff, dst, stride, eob);
            break;
          default:
            assert(0 && "Invalid transform size");
            return;
        }
      }
    }
#else
    if (xd->lossless) {
      tx_type = DCT_DCT;
      vp9_iwht4x4_add(dqcoeff, dst, stride, eob);
    } else {
      const PLANE_TYPE plane_type = pd->plane_type;
      switch (tx_size) {
        case TX_4X4:
          tx_type = get_tx_type_4x4(plane_type, xd, block);
          vp9_iht4x4_add(tx_type, dqcoeff, dst, stride, eob);
          break;
        case TX_8X8:
          tx_type = get_tx_type(plane_type, xd);
          vp9_iht8x8_add(tx_type, dqcoeff, dst, stride, eob);
          break;
        case TX_16X16:
          tx_type = get_tx_type(plane_type, xd);
          vp9_iht16x16_add(tx_type, dqcoeff, dst, stride, eob);
          break;
        case TX_32X32:
          tx_type = DCT_DCT;
          vp9_idct32x32_add(dqcoeff, dst, stride, eob);
          break;
        default:
          assert(0 && "Invalid transform size");
          return;
      }
    }
#endif  // CONFIG_VP9_HIGHBITDEPTH

    if (eob == 1) {
      memset(dqcoeff, 0, 2 * sizeof(dqcoeff[0]));
    } else {
      if (tx_type == DCT_DCT && tx_size <= TX_16X16 && eob <= 10)
        memset(dqcoeff, 0, 4 * (4 << tx_size) * sizeof(dqcoeff[0]));
      else if (tx_size == TX_32X32 && eob <= 34)
        memset(dqcoeff, 0, 256 * sizeof(dqcoeff[0]));
      else
        memset(dqcoeff, 0, (16 << (tx_size << 1)) * sizeof(dqcoeff[0]));
    }
  }
}

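// Arguments passed through vp9_foreach_transformed_block() when decoding
// intra blocks.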
struct intra_args {
  VP9_COMMON *cm;
  MACROBLOCKD *xd;
  FRAME_COUNTS *counts;
  vp9_reader *r;
  int seg_id;
};

static void predict_and_reconstruct_intra_block(int plane, int block,
                                                BLOCK_SIZE plane_bsize,
                                                TX_SIZE tx_size, void *arg) {
  struct intra_args *const args = (struct intra_args *)arg;
  VP9_COMMON *const cm = args->cm;
  MACROBLOCKD *const xd = args->xd;
  struct macroblockd_plane *const pd = &xd->plane[plane];
  MODE_INFO *const mi = xd->mi[0];
  const PREDICTION_MODE mode = (plane == 0) ? get_y_mode(mi, block)
                                            : mi->mbmi.uv_mode;
  int x, y;
  uint8_t *dst;
  txfrm_block_to_raster_xy(plane_bsize, tx_size, block, &x, &y);
  dst = &pd->dst.buf[4 * y * pd->dst.stride + 4 * x];

  vp9_predict_intra_block(xd, block >> (tx_size << 1),
                          b_width_log2_lookup[plane_bsize], tx_size, mode,
                          dst, pd->dst.stride, dst, pd->dst.stride,
                          x, y, plane);

  if (!mi->mbmi.skip) {
    const int eob = vp9_decode_block_tokens(cm, xd, args->counts, plane, block,
                                            plane_bsize, x, y, tx_size,
                                            args->r, args->seg_id);
    inverse_transform_block(xd, plane, block, tx_size, dst, pd->dst.stride,
                            eob);
  }
}

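// Arguments passed through vp9_foreach_transformed_block() when
// reconstructing inter block residuals.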
struct inter_args {
  VP9_COMMON *cm;
  MACROBLOCKD *xd;
  vp9_reader *r;
  FRAME_COUNTS *counts;
  int *eobtotal;
  int seg_id;
};

static void reconstruct_inter_block(int plane, int block,
                                    BLOCK_SIZE plane_bsize,
                                    TX_SIZE tx_size, void *arg) {
  struct inter_args *args = (struct inter_args *)arg;
  VP9_COMMON *const cm = args->cm;
  MACROBLOCKD *const xd = args->xd;
  struct macroblockd_plane *const pd = &xd->plane[plane];
  int x, y, eob;
  txfrm_block_to_raster_xy(plane_bsize, tx_size, block, &x, &y);
  eob = vp9_decode_block_tokens(cm, xd, args->counts, plane, block, plane_bsize,
                                x, y, tx_size, args->r, args->seg_id);
  inverse_transform_block(xd, plane, block, tx_size,
                          &pd->dst.buf[4 * y * pd->dst.stride + 4 * x],
                          pd->dst.stride, eob);
  *args->eobtotal += eob;
}

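// Points xd->mi at the mode-info grid for this block, replicates the
// top-left mode-info pointer across the block's extent, and sets up the
// destination planes. Returns the block's MB_MODE_INFO.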
static MB_MODE_INFO *set_offsets(VP9_COMMON *const cm, MACROBLOCKD *const xd,
                                 const TileInfo *const tile,
                                 BLOCK_SIZE bsize, int mi_row, int mi_col) {
  const int bw = num_8x8_blocks_wide_lookup[bsize];
  const int bh = num_8x8_blocks_high_lookup[bsize];
  const int x_mis = MIN(bw, cm->mi_cols - mi_col);
  const int y_mis = MIN(bh, cm->mi_rows - mi_row);
  const int offset = mi_row * cm->mi_stride + mi_col;
  int x, y;

  xd->mi = cm->mi_grid_visible + offset;
  xd->mi[0] = &cm->mi[offset];
  xd->mi[0]->mbmi.sb_type = bsize;
  for (y = 0; y < y_mis; ++y)
    for (x = !y; x < x_mis; ++x) {
      xd->mi[y * cm->mi_stride + x] = xd->mi[0];
    }

  set_skip_context(xd, mi_row, mi_col);

  // Distance of the MB to the various image edges. These are specified in
  // 1/8th pel units as they are always compared to values in 1/8th pel units.
  set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw, cm->mi_rows, cm->mi_cols);

  vp9_setup_dst_planes(xd->plane, get_frame_new_buffer(cm), mi_row, mi_col);
  return &xd->mi[0]->mbmi;
}

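// Decodes a single block: reads its mode info, builds the intra or inter
// prediction, reconstructs the residual, and records any reader error.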
static void decode_block(VP9Decoder *const pbi, MACROBLOCKD *const xd,
                         FRAME_COUNTS *counts,
                         const TileInfo *const tile,
                         int mi_row, int mi_col,
                         vp9_reader *r, BLOCK_SIZE bsize) {
  VP9_COMMON *const cm = &pbi->common;
  const int less8x8 = bsize < BLOCK_8X8;
  MB_MODE_INFO *mbmi = set_offsets(cm, xd, tile, bsize, mi_row, mi_col);
  vp9_read_mode_info(pbi, xd, counts, tile, mi_row, mi_col, r);

  if (less8x8)
    bsize = BLOCK_8X8;

  if (mbmi->skip) {
    reset_skip_context(xd, bsize);
  }

  if (!is_inter_block(mbmi)) {
    struct intra_args arg = {cm, xd, counts, r, mbmi->segment_id};
    vp9_foreach_transformed_block(xd, bsize,
                                  predict_and_reconstruct_intra_block, &arg);
  } else {
    // Prediction
    vp9_dec_build_inter_predictors_sb(pbi, xd, mi_row, mi_col, bsize);

    // Reconstruction
    if (!mbmi->skip) {
      int eobtotal = 0;
      struct inter_args arg = {cm, xd, r, counts, &eobtotal, mbmi->segment_id};
      vp9_foreach_transformed_block(xd, bsize, reconstruct_inter_block, &arg);
      if (!less8x8 && eobtotal == 0)
        mbmi->skip = 1;  // skip loopfilter
    }
  }

  xd->corrupted |= vp9_reader_has_error(r);
}

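// Reads a partition type for the current block, restricting the choices when
// the block extends past the right or bottom frame edge.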
static PARTITION_TYPE read_partition(VP9_COMMON *cm, MACROBLOCKD *xd,
                                     FRAME_COUNTS *counts, int hbs,
                                     int mi_row, int mi_col, BLOCK_SIZE bsize,
                                     vp9_reader *r) {
  const int ctx = partition_plane_context(xd, mi_row, mi_col, bsize);
  const vp9_prob *const probs = get_partition_probs(cm, ctx);
  const int has_rows = (mi_row + hbs) < cm->mi_rows;
  const int has_cols = (mi_col + hbs) < cm->mi_cols;
  PARTITION_TYPE p;

  if (has_rows && has_cols)
    p = (PARTITION_TYPE)vp9_read_tree(r, vp9_partition_tree, probs);
  else if (!has_rows && has_cols)
    p = vp9_read(r, probs[1]) ? PARTITION_SPLIT : PARTITION_HORZ;
  else if (has_rows && !has_cols)
    p = vp9_read(r, probs[2]) ? PARTITION_SPLIT : PARTITION_VERT;
  else
    p = PARTITION_SPLIT;

  if (!cm->frame_parallel_decoding_mode)
    ++counts->partition[ctx][p];

  return p;
}

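// Recursively decodes a superblock by reading the partition tree and decoding
// the resulting blocks.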
static void decode_partition(VP9Decoder *const pbi, MACROBLOCKD *const xd,
                             FRAME_COUNTS *counts,
                             const TileInfo *const tile,
                             int mi_row, int mi_col,
                             vp9_reader *r, BLOCK_SIZE bsize) {
  VP9_COMMON *const cm = &pbi->common;
  const int hbs = num_8x8_blocks_wide_lookup[bsize] / 2;
  PARTITION_TYPE partition;
  BLOCK_SIZE subsize, uv_subsize;

  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols)
    return;

  partition = read_partition(cm, xd, counts, hbs, mi_row, mi_col, bsize, r);
  subsize = get_subsize(bsize, partition);
  uv_subsize = ss_size_lookup[subsize][cm->subsampling_x][cm->subsampling_y];
  if (subsize >= BLOCK_8X8 && uv_subsize == BLOCK_INVALID)
    vpx_internal_error(xd->error_info,
                       VPX_CODEC_CORRUPT_FRAME, "Invalid block size.");
  if (subsize < BLOCK_8X8) {
    decode_block(pbi, xd, counts, tile, mi_row, mi_col, r, subsize);
  } else {
    switch (partition) {
      case PARTITION_NONE:
        decode_block(pbi, xd, counts, tile, mi_row, mi_col, r, subsize);
        break;
      case PARTITION_HORZ:
        decode_block(pbi, xd, counts, tile, mi_row, mi_col, r, subsize);
        if (mi_row + hbs < cm->mi_rows)
          decode_block(pbi, xd, counts, tile, mi_row + hbs, mi_col, r, subsize);
        break;
      case PARTITION_VERT:
        decode_block(pbi, xd, counts, tile, mi_row, mi_col, r, subsize);
        if (mi_col + hbs < cm->mi_cols)
          decode_block(pbi, xd, counts, tile, mi_row, mi_col + hbs, r, subsize);
        break;
      case PARTITION_SPLIT:
        decode_partition(pbi, xd, counts, tile, mi_row, mi_col, r, subsize);
        decode_partition(pbi, xd, counts, tile, mi_row, mi_col + hbs, r,
                         subsize);
        decode_partition(pbi, xd, counts, tile, mi_row + hbs, mi_col, r,
                         subsize);
        decode_partition(pbi, xd, counts, tile, mi_row + hbs, mi_col + hbs, r,
                         subsize);
        break;
      default:
        assert(0 && "Invalid partition type");
    }
  }

  // update partition context
  if (bsize >= BLOCK_8X8 &&
      (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT))
    update_partition_context(xd, mi_row, mi_col, subsize, bsize);
}

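// Initializes a bool decoder for one tile's token partition after validating
// that the partition fits inside the input buffer.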
static void setup_token_decoder(const uint8_t *data,
                                const uint8_t *data_end,
                                size_t read_size,
                                struct vpx_internal_error_info *error_info,
                                vp9_reader *r,
                                vpx_decrypt_cb decrypt_cb,
                                void *decrypt_state) {
  // Validate the calculated partition length. If the buffer
  // described by the partition can't be fully read, then restrict
  // it to the portion that can be (for EC mode) or throw an error.
  if (!read_is_valid(data, read_size, data_end))
    vpx_internal_error(error_info, VPX_CODEC_CORRUPT_FRAME,
                       "Truncated packet or corrupt tile length");

  if (vp9_reader_init(r, data, read_size, decrypt_cb, decrypt_state))
    vpx_internal_error(error_info, VPX_CODEC_MEM_ERROR,
                       "Failed to allocate bool decoder %d", 1);
}

static void read_coef_probs_common(vp9_coeff_probs_model *coef_probs,
                                   vp9_reader *r) {
  int i, j, k, l, m;

  if (vp9_read_bit(r))
    for (i = 0; i < PLANE_TYPES; ++i)
      for (j = 0; j < REF_TYPES; ++j)
        for (k = 0; k < COEF_BANDS; ++k)
          for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l)
            for (m = 0; m < UNCONSTRAINED_NODES; ++m)
              vp9_diff_update_prob(r, &coef_probs[i][j][k][l][m]);
}

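// Coefficient probability updates are read for every transform size allowed
// by the current transform mode.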
static void read_coef_probs(FRAME_CONTEXT *fc, TX_MODE tx_mode,
                            vp9_reader *r) {
  const TX_SIZE max_tx_size = tx_mode_to_biggest_tx_size[tx_mode];
  TX_SIZE tx_size;
  for (tx_size = TX_4X4; tx_size <= max_tx_size; ++tx_size)
    read_coef_probs_common(fc->coef_probs[tx_size], r);
}

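// Reads the segmentation syntax: the enable flag, optional tree/prediction
// probabilities for the segment map, and optional per-segment feature data.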
static void setup_segmentation(struct segmentation *seg,
                               struct vp9_read_bit_buffer *rb) {
  int i, j;

  seg->update_map = 0;
  seg->update_data = 0;

  seg->enabled = vp9_rb_read_bit(rb);
  if (!seg->enabled)
    return;

  // Segmentation map update
  seg->update_map = vp9_rb_read_bit(rb);
  if (seg->update_map) {
    for (i = 0; i < SEG_TREE_PROBS; i++)
      seg->tree_probs[i] = vp9_rb_read_bit(rb) ? vp9_rb_read_literal(rb, 8)
                                               : MAX_PROB;

    seg->temporal_update = vp9_rb_read_bit(rb);
    if (seg->temporal_update) {
      for (i = 0; i < PREDICTION_PROBS; i++)
        seg->pred_probs[i] = vp9_rb_read_bit(rb) ? vp9_rb_read_literal(rb, 8)
                                                 : MAX_PROB;
    } else {
      for (i = 0; i < PREDICTION_PROBS; i++)
        seg->pred_probs[i] = MAX_PROB;
    }
  }

  // Segmentation data update
  seg->update_data = vp9_rb_read_bit(rb);
  if (seg->update_data) {
    seg->abs_delta = vp9_rb_read_bit(rb);

    vp9_clearall_segfeatures(seg);

    for (i = 0; i < MAX_SEGMENTS; i++) {
      for (j = 0; j < SEG_LVL_MAX; j++) {
        int data = 0;
        const int feature_enabled = vp9_rb_read_bit(rb);
        if (feature_enabled) {
          vp9_enable_segfeature(seg, i, j);
          data = decode_unsigned_max(rb, vp9_seg_feature_data_max(j));
          if (vp9_is_segfeature_signed(j))
            data = vp9_rb_read_bit(rb) ? -data : data;
        }
        vp9_set_segdata(seg, i, j, data);
      }
    }
  }
}

static void setup_loopfilter(struct loopfilter *lf,
                             struct vp9_read_bit_buffer *rb) {
  lf->filter_level = vp9_rb_read_literal(rb, 6);
  lf->sharpness_level = vp9_rb_read_literal(rb, 3);

  // Read in loop filter deltas applied at the MB level based on mode or ref
  // frame.
  lf->mode_ref_delta_update = 0;

  lf->mode_ref_delta_enabled = vp9_rb_read_bit(rb);
  if (lf->mode_ref_delta_enabled) {
    lf->mode_ref_delta_update = vp9_rb_read_bit(rb);
    if (lf->mode_ref_delta_update) {
      int i;

      for (i = 0; i < MAX_REF_LF_DELTAS; i++)
        if (vp9_rb_read_bit(rb))
          lf->ref_deltas[i] = vp9_rb_read_signed_literal(rb, 6);

      for (i = 0; i < MAX_MODE_LF_DELTAS; i++)
        if (vp9_rb_read_bit(rb))
          lf->mode_deltas[i] = vp9_rb_read_signed_literal(rb, 6);
    }
  }
}

static INLINE int read_delta_q(struct vp9_read_bit_buffer *rb) {
  return vp9_rb_read_bit(rb) ? vp9_rb_read_signed_literal(rb, 4) : 0;
}

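// Reads the base quantizer index and delta-Q values; the frame is lossless
// only when the base index and all deltas are zero.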
static void setup_quantization(VP9_COMMON *const cm, MACROBLOCKD *const xd,
                               struct vp9_read_bit_buffer *rb) {
  cm->base_qindex = vp9_rb_read_literal(rb, QINDEX_BITS);
  cm->y_dc_delta_q = read_delta_q(rb);
  cm->uv_dc_delta_q = read_delta_q(rb);
  cm->uv_ac_delta_q = read_delta_q(rb);
  cm->dequant_bit_depth = cm->bit_depth;
  xd->lossless = cm->base_qindex == 0 &&
                 cm->y_dc_delta_q == 0 &&
                 cm->uv_dc_delta_q == 0 &&
                 cm->uv_ac_delta_q == 0;

#if CONFIG_VP9_HIGHBITDEPTH
  xd->bd = (int)cm->bit_depth;
#endif
}

static void setup_segmentation_dequant(VP9_COMMON *const cm) {
  // Build y/uv dequant values based on segmentation.
  if (cm->seg.enabled) {
    int i;
    for (i = 0; i < MAX_SEGMENTS; ++i) {
      const int qindex = vp9_get_qindex(&cm->seg, i, cm->base_qindex);
      cm->y_dequant[i][0] = vp9_dc_quant(qindex, cm->y_dc_delta_q,
                                         cm->bit_depth);
      cm->y_dequant[i][1] = vp9_ac_quant(qindex, 0, cm->bit_depth);
      cm->uv_dequant[i][0] = vp9_dc_quant(qindex, cm->uv_dc_delta_q,
                                          cm->bit_depth);
      cm->uv_dequant[i][1] = vp9_ac_quant(qindex, cm->uv_ac_delta_q,
                                          cm->bit_depth);
    }
  } else {
    const int qindex = cm->base_qindex;
    // When segmentation is disabled, only the first value is used.  The
    // remaining are don't cares.
    cm->y_dequant[0][0] = vp9_dc_quant(qindex, cm->y_dc_delta_q, cm->bit_depth);
    cm->y_dequant[0][1] = vp9_ac_quant(qindex, 0, cm->bit_depth);
    cm->uv_dequant[0][0] = vp9_dc_quant(qindex, cm->uv_dc_delta_q,
                                        cm->bit_depth);
    cm->uv_dequant[0][1] = vp9_ac_quant(qindex, cm->uv_ac_delta_q,
                                        cm->bit_depth);
  }
}

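// The bitstream's 2-bit filter literal does not match the enum order, so it
// is mapped through literal_to_filter[]; a leading set bit selects SWITCHABLE
// instead of a coded filter.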
static INTERP_FILTER read_interp_filter(struct vp9_read_bit_buffer *rb) {
  const INTERP_FILTER literal_to_filter[] = { EIGHTTAP_SMOOTH,
                                              EIGHTTAP,
                                              EIGHTTAP_SHARP,
                                              BILINEAR };
  return vp9_rb_read_bit(rb) ? SWITCHABLE
                             : literal_to_filter[vp9_rb_read_literal(rb, 2)];
}

void vp9_read_frame_size(struct vp9_read_bit_buffer *rb,
                         int *width, int *height) {
  *width = vp9_rb_read_literal(rb, 16) + 1;
  *height = vp9_rb_read_literal(rb, 16) + 1;
}

static void setup_display_size(VP9_COMMON *cm, struct vp9_read_bit_buffer *rb) {
  cm->display_width = cm->width;
  cm->display_height = cm->height;
  if (vp9_rb_read_bit(rb))
    vp9_read_frame_size(rb, &cm->display_width, &cm->display_height);
}

static void resize_mv_buffer(VP9_COMMON *cm) {
  vpx_free(cm->cur_frame->mvs);
  cm->cur_frame->mi_rows = cm->mi_rows;
  cm->cur_frame->mi_cols = cm->mi_cols;
  cm->cur_frame->mvs = (MV_REF *)vpx_calloc(cm->mi_rows * cm->mi_cols,
                                            sizeof(*cm->cur_frame->mvs));
}

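// Resizes mode-info and context buffers when the coded frame size changes,
// enforcing the optional CONFIG_SIZE_LIMIT bounds.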
static void resize_context_buffers(VP9_COMMON *cm, int width, int height) {
#if CONFIG_SIZE_LIMIT
  if (width > DECODE_WIDTH_LIMIT || height > DECODE_HEIGHT_LIMIT)
    vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
                       "Width and height beyond allowed size.");
#endif
  if (cm->width != width || cm->height != height) {
    const int new_mi_rows =
        ALIGN_POWER_OF_TWO(height, MI_SIZE_LOG2) >> MI_SIZE_LOG2;
    const int new_mi_cols =
        ALIGN_POWER_OF_TWO(width,  MI_SIZE_LOG2) >> MI_SIZE_LOG2;

    // Allocations in vp9_alloc_context_buffers() depend on individual
    // dimensions as well as the overall size.
    if (new_mi_cols > cm->mi_cols || new_mi_rows > cm->mi_rows) {
      if (vp9_alloc_context_buffers(cm, width, height))
        vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
                           "Failed to allocate context buffers");
    } else {
      vp9_set_mb_mi(cm, width, height);
    }
    vp9_init_context_buffers(cm);
    cm->width = width;
    cm->height = height;
  }
  if (cm->cur_frame->mvs == NULL || cm->mi_rows > cm->cur_frame->mi_rows ||
      cm->mi_cols > cm->cur_frame->mi_cols) {
    resize_mv_buffer(cm);
  }
}

static void setup_frame_size(VP9_COMMON *cm, struct vp9_read_bit_buffer *rb) {
  int width, height;
  BufferPool *const pool = cm->buffer_pool;
  vp9_read_frame_size(rb, &width, &height);
  resize_context_buffers(cm, width, height);
  setup_display_size(cm, rb);

  lock_buffer_pool(pool);
  if (vp9_realloc_frame_buffer(
          get_frame_new_buffer(cm), cm->width, cm->height,
          cm->subsampling_x, cm->subsampling_y,
#if CONFIG_VP9_HIGHBITDEPTH
          cm->use_highbitdepth,
#endif
          VP9_DEC_BORDER_IN_PIXELS,
          cm->byte_alignment,
          &pool->frame_bufs[cm->new_fb_idx].raw_frame_buffer, pool->get_fb_cb,
          pool->cb_priv)) {
    unlock_buffer_pool(pool);