/*
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include <assert.h>
#include <stdlib.h>  // qsort()

#include "./vp9_rtcd.h"
#include "./vpx_scale_rtcd.h"

#include "vpx_mem/vpx_mem.h"
#include "vpx_ports/mem_ops.h"
#include "vpx_scale/vpx_scale.h"

#include "vp9/common/vp9_alloccommon.h"
#include "vp9/common/vp9_common.h"
#include "vp9/common/vp9_entropy.h"
#include "vp9/common/vp9_entropymode.h"
#include "vp9/common/vp9_idct.h"
#include "vp9/common/vp9_loopfilter_thread.h"
#include "vp9/common/vp9_pred_common.h"
#include "vp9/common/vp9_quant_common.h"
#include "vp9/common/vp9_reconintra.h"
#include "vp9/common/vp9_reconinter.h"
#include "vp9/common/vp9_seg_common.h"
#include "vp9/common/vp9_thread.h"
#include "vp9/common/vp9_tile_common.h"

#include "vp9/decoder/vp9_decodeframe.h"
#include "vp9/decoder/vp9_detokenize.h"
#include "vp9/decoder/vp9_decodemv.h"
#include "vp9/decoder/vp9_decoder.h"
#include "vp9/decoder/vp9_dsubexp.h"
#include "vp9/decoder/vp9_read_bit_buffer.h"
#include "vp9/decoder/vp9_reader.h"

#define MAX_VP9_HEADER_SIZE 80

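// Compound (two-reference) prediction is allowed only when the reference
// frames do not all share the same sign bias.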
static int is_compound_reference_allowed(const VP9_COMMON *cm) {
  int i;
  for (i = 1; i < REFS_PER_FRAME; ++i)
    if (cm->ref_frame_sign_bias[i + 1] != cm->ref_frame_sign_bias[1])
      return 1;

  return 0;
}

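// Choose the fixed reference and the two variable references used for
// compound prediction, based on which references share a sign bias.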
static void setup_compound_reference_mode(VP9_COMMON *cm) {
  if (cm->ref_frame_sign_bias[LAST_FRAME] ==
          cm->ref_frame_sign_bias[GOLDEN_FRAME]) {
    cm->comp_fixed_ref = ALTREF_FRAME;
    cm->comp_var_ref[0] = LAST_FRAME;
    cm->comp_var_ref[1] = GOLDEN_FRAME;
  } else if (cm->ref_frame_sign_bias[LAST_FRAME] ==
                 cm->ref_frame_sign_bias[ALTREF_FRAME]) {
    cm->comp_fixed_ref = GOLDEN_FRAME;
    cm->comp_var_ref[0] = LAST_FRAME;
    cm->comp_var_ref[1] = ALTREF_FRAME;
  } else {
    cm->comp_fixed_ref = LAST_FRAME;
    cm->comp_var_ref[0] = GOLDEN_FRAME;
    cm->comp_var_ref[1] = ALTREF_FRAME;
  }
}

static int read_is_valid(const uint8_t *start, size_t len, const uint8_t *end) {
  return len != 0 && len <= (size_t)(end - start);
}

static int decode_unsigned_max(struct vp9_read_bit_buffer *rb, int max) {
  const int data = vp9_rb_read_literal(rb, get_unsigned_bits(max));
  return data > max ? max : data;
}

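// Read the transform mode: a 2-bit literal, plus one extra bit to
// distinguish ALLOW_32X32 from TX_MODE_SELECT.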
static TX_MODE read_tx_mode(vp9_reader *r) {
  TX_MODE tx_mode = vp9_read_literal(r, 2);
  if (tx_mode == ALLOW_32X32)
    tx_mode += vp9_read_bit(r);
  return tx_mode;
}

static void read_tx_mode_probs(struct tx_probs *tx_probs, vp9_reader *r) {
  int i, j;

  for (i = 0; i < TX_SIZE_CONTEXTS; ++i)
    for (j = 0; j < TX_SIZES - 3; ++j)
      vp9_diff_update_prob(r, &tx_probs->p8x8[i][j]);

  for (i = 0; i < TX_SIZE_CONTEXTS; ++i)
    for (j = 0; j < TX_SIZES - 2; ++j)
      vp9_diff_update_prob(r, &tx_probs->p16x16[i][j]);

  for (i = 0; i < TX_SIZE_CONTEXTS; ++i)
    for (j = 0; j < TX_SIZES - 1; ++j)
      vp9_diff_update_prob(r, &tx_probs->p32x32[i][j]);
}

static void read_switchable_interp_probs(FRAME_CONTEXT *fc, vp9_reader *r) {
  int i, j;
  for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j)
    for (i = 0; i < SWITCHABLE_FILTERS - 1; ++i)
      vp9_diff_update_prob(r, &fc->switchable_interp_prob[j][i]);
}

static void read_inter_mode_probs(FRAME_CONTEXT *fc, vp9_reader *r) {
  int i, j;
  for (i = 0; i < INTER_MODE_CONTEXTS; ++i)
    for (j = 0; j < INTER_MODES - 1; ++j)
      vp9_diff_update_prob(r, &fc->inter_mode_probs[i][j]);
}

static REFERENCE_MODE read_frame_reference_mode(const VP9_COMMON *cm,
                                                vp9_reader *r) {
  if (is_compound_reference_allowed(cm)) {
    return vp9_read_bit(r) ? (vp9_read_bit(r) ? REFERENCE_MODE_SELECT
                                              : COMPOUND_REFERENCE)
                           : SINGLE_REFERENCE;
  } else {
    return SINGLE_REFERENCE;
  }
}

static void read_frame_reference_mode_probs(VP9_COMMON *cm, vp9_reader *r) {
  FRAME_CONTEXT *const fc = cm->fc;
  int i;

  if (cm->reference_mode == REFERENCE_MODE_SELECT)
    for (i = 0; i < COMP_INTER_CONTEXTS; ++i)
      vp9_diff_update_prob(r, &fc->comp_inter_prob[i]);

  if (cm->reference_mode != COMPOUND_REFERENCE)
    for (i = 0; i < REF_CONTEXTS; ++i) {
      vp9_diff_update_prob(r, &fc->single_ref_prob[i][0]);
      vp9_diff_update_prob(r, &fc->single_ref_prob[i][1]);
    }

  if (cm->reference_mode != SINGLE_REFERENCE)
    for (i = 0; i < REF_CONTEXTS; ++i)
      vp9_diff_update_prob(r, &fc->comp_ref_prob[i]);
}

static void update_mv_probs(vp9_prob *p, int n, vp9_reader *r) {
  int i;
  for (i = 0; i < n; ++i)
    if (vp9_read(r, MV_UPDATE_PROB))
      p[i] = (vp9_read_literal(r, 7) << 1) | 1;
}

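// Read updates to the motion vector probability tables: joint, sign, class,
// integer bits, fractional bits, and (if enabled) high-precision bits for
// each of the two MV components.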
static void read_mv_probs(nmv_context *ctx, int allow_hp, vp9_reader *r) {
  int i, j;

  update_mv_probs(ctx->joints, MV_JOINTS - 1, r);

  for (i = 0; i < 2; ++i) {
    nmv_component *const comp_ctx = &ctx->comps[i];
    update_mv_probs(&comp_ctx->sign, 1, r);
    update_mv_probs(comp_ctx->classes, MV_CLASSES - 1, r);
    update_mv_probs(comp_ctx->class0, CLASS0_SIZE - 1, r);
    update_mv_probs(comp_ctx->bits, MV_OFFSET_BITS, r);
  }

  for (i = 0; i < 2; ++i) {
    nmv_component *const comp_ctx = &ctx->comps[i];
    for (j = 0; j < CLASS0_SIZE; ++j)
      update_mv_probs(comp_ctx->class0_fp[j], MV_FP_SIZE - 1, r);
    update_mv_probs(comp_ctx->fp, 3, r);
  }

  if (allow_hp) {
    for (i = 0; i < 2; ++i) {
      nmv_component *const comp_ctx = &ctx->comps[i];
      update_mv_probs(&comp_ctx->class0_hp, 1, r);
      update_mv_probs(&comp_ctx->hp, 1, r);
    }
  }
}

static void setup_plane_dequants(VP9_COMMON *cm, MACROBLOCKD *xd, int q_index) {
  int i;
  xd->plane[0].dequant = cm->y_dequant[q_index];

  for (i = 1; i < MAX_MB_PLANE; i++)
    xd->plane[i].dequant = cm->uv_dequant[q_index];
}

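// Apply the inverse transform to one block if it has any non-zero
// coefficients: WHT 4x4 in lossless mode, otherwise the DCT/ADST hybrid
// selected by the transform size and prediction mode. The consumed
// dequantized coefficients are then cleared so the buffer is ready for the
// next block.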
static void inverse_transform_block(MACROBLOCKD* xd, int plane, int block,
                                    TX_SIZE tx_size, uint8_t *dst, int stride,
                                    int eob) {
  struct macroblockd_plane *const pd = &xd->plane[plane];
  if (eob > 0) {
    TX_TYPE tx_type = DCT_DCT;
    tran_low_t *const dqcoeff = BLOCK_OFFSET(pd->dqcoeff, block);
#if CONFIG_VP9_HIGHBITDEPTH
    if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
      if (xd->lossless) {
        tx_type = DCT_DCT;
        vp9_highbd_iwht4x4_add(dqcoeff, dst, stride, eob, xd->bd);
      } else {
        const PLANE_TYPE plane_type = pd->plane_type;
        switch (tx_size) {
          case TX_4X4:
            tx_type = get_tx_type_4x4(plane_type, xd, block);
            vp9_highbd_iht4x4_add(tx_type, dqcoeff, dst, stride, eob, xd->bd);
            break;
          case TX_8X8:
            tx_type = get_tx_type(plane_type, xd);
            vp9_highbd_iht8x8_add(tx_type, dqcoeff, dst, stride, eob, xd->bd);
            break;
          case TX_16X16:
            tx_type = get_tx_type(plane_type, xd);
            vp9_highbd_iht16x16_add(tx_type, dqcoeff, dst, stride, eob, xd->bd);
            break;
          case TX_32X32:
            tx_type = DCT_DCT;
            vp9_highbd_idct32x32_add(dqcoeff, dst, stride, eob, xd->bd);
            break;
          default:
            assert(0 && "Invalid transform size");
        }
      }
    } else {
      if (xd->lossless) {
        tx_type = DCT_DCT;
        vp9_iwht4x4_add(dqcoeff, dst, stride, eob);
      } else {
        const PLANE_TYPE plane_type = pd->plane_type;
        switch (tx_size) {
          case TX_4X4:
            tx_type = get_tx_type_4x4(plane_type, xd, block);
            vp9_iht4x4_add(tx_type, dqcoeff, dst, stride, eob);
            break;
          case TX_8X8:
            tx_type = get_tx_type(plane_type, xd);
            vp9_iht8x8_add(tx_type, dqcoeff, dst, stride, eob);
            break;
          case TX_16X16:
            tx_type = get_tx_type(plane_type, xd);
            vp9_iht16x16_add(tx_type, dqcoeff, dst, stride, eob);
            break;
          case TX_32X32:
            tx_type = DCT_DCT;
            vp9_idct32x32_add(dqcoeff, dst, stride, eob);
            break;
          default:
            assert(0 && "Invalid transform size");
            return;
        }
      }
    }
#else
    if (xd->lossless) {
      tx_type = DCT_DCT;
      vp9_iwht4x4_add(dqcoeff, dst, stride, eob);
    } else {
      const PLANE_TYPE plane_type = pd->plane_type;
      switch (tx_size) {
        case TX_4X4:
          tx_type = get_tx_type_4x4(plane_type, xd, block);
          vp9_iht4x4_add(tx_type, dqcoeff, dst, stride, eob);
          break;
        case TX_8X8:
          tx_type = get_tx_type(plane_type, xd);
          vp9_iht8x8_add(tx_type, dqcoeff, dst, stride, eob);
          break;
        case TX_16X16:
          tx_type = get_tx_type(plane_type, xd);
          vp9_iht16x16_add(tx_type, dqcoeff, dst, stride, eob);
          break;
        case TX_32X32:
          tx_type = DCT_DCT;
          vp9_idct32x32_add(dqcoeff, dst, stride, eob);
          break;
        default:
          assert(0 && "Invalid transform size");
          return;
      }
    }
#endif  // CONFIG_VP9_HIGHBITDEPTH

    if (eob == 1) {
      vpx_memset(dqcoeff, 0, 2 * sizeof(dqcoeff[0]));
    } else {
      if (tx_type == DCT_DCT && tx_size <= TX_16X16 && eob <= 10)
        vpx_memset(dqcoeff, 0, 4 * (4 << tx_size) * sizeof(dqcoeff[0]));
      else if (tx_size == TX_32X32 && eob <= 34)
        vpx_memset(dqcoeff, 0, 256 * sizeof(dqcoeff[0]));
      else
        vpx_memset(dqcoeff, 0, (16 << (tx_size << 1)) * sizeof(dqcoeff[0]));
    }
  }
}

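// Intra path: form the intra prediction for one transform block and, unless
// the block is coded as skip, decode its tokens and add the
// inverse-transformed residual on top of the prediction.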
struct intra_args {
  VP9_COMMON *cm;
  MACROBLOCKD *xd;
  vp9_reader *r;
};

static void predict_and_reconstruct_intra_block(int plane, int block,
                                                BLOCK_SIZE plane_bsize,
                                                TX_SIZE tx_size, void *arg) {
  struct intra_args *const args = (struct intra_args *)arg;
  VP9_COMMON *const cm = args->cm;
  MACROBLOCKD *const xd = args->xd;
  struct macroblockd_plane *const pd = &xd->plane[plane];
  MODE_INFO *const mi = xd->mi[0].src_mi;
  const PREDICTION_MODE mode = (plane == 0) ? get_y_mode(mi, block)
                                            : mi->mbmi.uv_mode;
  int x, y;
  uint8_t *dst;
  txfrm_block_to_raster_xy(plane_bsize, tx_size, block, &x, &y);
  dst = &pd->dst.buf[4 * y * pd->dst.stride + 4 * x];

  vp9_predict_intra_block(xd, block >> (tx_size << 1),
                          b_width_log2_lookup[plane_bsize], tx_size, mode,
                          dst, pd->dst.stride, dst, pd->dst.stride,
                          x, y, plane);

  if (!mi->mbmi.skip) {
    const int eob = vp9_decode_block_tokens(cm, xd, plane, block,
                                            plane_bsize, x, y, tx_size,
                                            args->r);
    inverse_transform_block(xd, plane, block, tx_size, dst, pd->dst.stride,
                            eob);
  }
}

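// Inter path: decode the tokens for one transform block and add the
// inverse-transformed residual to the inter prediction that has already been
// built for the whole block; eobtotal accumulates the per-block eobs.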
struct inter_args {
  VP9_COMMON *cm;
  MACROBLOCKD *xd;
  vp9_reader *r;
  int *eobtotal;
};

static void reconstruct_inter_block(int plane, int block,
                                    BLOCK_SIZE plane_bsize,
                                    TX_SIZE tx_size, void *arg) {
  struct inter_args *args = (struct inter_args *)arg;
  VP9_COMMON *const cm = args->cm;
  MACROBLOCKD *const xd = args->xd;
  struct macroblockd_plane *const pd = &xd->plane[plane];
  int x, y, eob;
  txfrm_block_to_raster_xy(plane_bsize, tx_size, block, &x, &y);
  eob = vp9_decode_block_tokens(cm, xd, plane, block, plane_bsize, x, y,
                                tx_size, args->r);
  inverse_transform_block(xd, plane, block, tx_size,
                          &pd->dst.buf[4 * y * pd->dst.stride + 4 * x],
                          pd->dst.stride, eob);
  *args->eobtotal += eob;
}

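// Point xd->mi at the mode info for this block, mark every mi unit the block
// covers, set the skip context and the distances to the frame edges, and set
// up the destination planes. Returns the block's MB_MODE_INFO.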
static MB_MODE_INFO *set_offsets(VP9_COMMON *const cm, MACROBLOCKD *const xd,
                                 const TileInfo *const tile,
                                 BLOCK_SIZE bsize, int mi_row, int mi_col) {
  const int bw = num_8x8_blocks_wide_lookup[bsize];
  const int bh = num_8x8_blocks_high_lookup[bsize];
  const int x_mis = MIN(bw, cm->mi_cols - mi_col);
  const int y_mis = MIN(bh, cm->mi_rows - mi_row);
  const int offset = mi_row * cm->mi_stride + mi_col;
  int x, y;

  xd->mi = cm->mi + offset;
  xd->mi[0].src_mi = &xd->mi[0];  // Point to self.
  xd->mi[0].mbmi.sb_type = bsize;

  for (y = 0; y < y_mis; ++y)
    for (x = !y; x < x_mis; ++x) {
      xd->mi[y * cm->mi_stride + x].src_mi = &xd->mi[0];
    }

  set_skip_context(xd, mi_row, mi_col);

  // Distance of Mb to the various image edges. These are specified to 8th pel
  // as they are always compared to values that are in 1/8th pel units
  set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw, cm->mi_rows, cm->mi_cols);

  vp9_setup_dst_planes(xd->plane, get_frame_new_buffer(cm), mi_row, mi_col);
  return &xd->mi[0].mbmi;
}

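// Decode a single coded block: read its mode info, then either form the
// intra prediction and residual, or build the inter predictors and add the
// residual. Sub-8x8 blocks are reconstructed at 8x8 granularity, and any
// bitstream error is accumulated into xd->corrupted.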
static void decode_block(VP9Decoder *const pbi, MACROBLOCKD *const xd,
                         const TileInfo *const tile,
                         int mi_row, int mi_col,
                         vp9_reader *r, BLOCK_SIZE bsize) {
  VP9_COMMON *const cm = &pbi->common;
  const int less8x8 = bsize < BLOCK_8X8;
  MB_MODE_INFO *mbmi = set_offsets(cm, xd, tile, bsize, mi_row, mi_col);
  vp9_read_mode_info(pbi, xd, tile, mi_row, mi_col, r);

  if (less8x8)
    bsize = BLOCK_8X8;

  if (mbmi->skip) {
    reset_skip_context(xd, bsize);
  } else {
    if (cm->seg.enabled)
      setup_plane_dequants(cm, xd, vp9_get_qindex(&cm->seg, mbmi->segment_id,
                                                  cm->base_qindex));
  }

  if (!is_inter_block(mbmi)) {
    struct intra_args arg = { cm, xd, r };
    vp9_foreach_transformed_block(xd, bsize,
                                  predict_and_reconstruct_intra_block, &arg);
  } else {
    // Prediction
    vp9_dec_build_inter_predictors_sb(pbi, xd, mi_row, mi_col, bsize);

    // Reconstruction
    if (!mbmi->skip) {
      int eobtotal = 0;
      struct inter_args arg = { cm, xd, r, &eobtotal };
      vp9_foreach_transformed_block(xd, bsize, reconstruct_inter_block, &arg);
      if (!less8x8 && eobtotal == 0)
        mbmi->skip = 1;  // skip loopfilter
    }
  }

  xd->corrupted |= vp9_reader_has_error(r);
}

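// Read the partition type for a block. The full partition tree is used only
// when both the bottom and right halves lie inside the frame; at the frame
// edges the choice collapses to SPLIT vs. HORZ/VERT, or to SPLIT alone.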
static PARTITION_TYPE read_partition(VP9_COMMON *cm, MACROBLOCKD *xd, int hbs,
                                     int mi_row, int mi_col, BLOCK_SIZE bsize,
                                     vp9_reader *r) {
  const int ctx = partition_plane_context(xd, mi_row, mi_col, bsize);
  const vp9_prob *const probs = get_partition_probs(cm, ctx);
  const int has_rows = (mi_row + hbs) < cm->mi_rows;
  const int has_cols = (mi_col + hbs) < cm->mi_cols;
  PARTITION_TYPE p;

  if (has_rows && has_cols)
    p = (PARTITION_TYPE)vp9_read_tree(r, vp9_partition_tree, probs);
  else if (!has_rows && has_cols)
    p = vp9_read(r, probs[1]) ? PARTITION_SPLIT : PARTITION_HORZ;
  else if (has_rows && !has_cols)
    p = vp9_read(r, probs[2]) ? PARTITION_SPLIT : PARTITION_VERT;
  else
    p = PARTITION_SPLIT;

  if (!cm->frame_parallel_decoding_mode)
    ++cm->counts.partition[ctx][p];

  return p;
}

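// Recursively decode one partition: read the partition type, decode the
// resulting sub-blocks (NONE, HORZ, VERT) or recurse on the four quadrants
// (SPLIT), then update the partition context.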
static void decode_partition(VP9Decoder *const pbi, MACROBLOCKD *const xd,
                             const TileInfo *const tile,
                             int mi_row, int mi_col,
                             vp9_reader* r, BLOCK_SIZE bsize) {
  VP9_COMMON *const cm = &pbi->common;
  const int hbs = num_8x8_blocks_wide_lookup[bsize] / 2;
  PARTITION_TYPE partition;
  BLOCK_SIZE subsize, uv_subsize;

  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols)
    return;

  partition = read_partition(cm, xd, hbs, mi_row, mi_col, bsize, r);
  subsize = get_subsize(bsize, partition);
  uv_subsize = ss_size_lookup[subsize][cm->subsampling_x][cm->subsampling_y];
  if (subsize >= BLOCK_8X8 && uv_subsize == BLOCK_INVALID)
    vpx_internal_error(xd->error_info,
                       VPX_CODEC_CORRUPT_FRAME, "Invalid block size.");
  if (subsize < BLOCK_8X8) {
    decode_block(pbi, xd, tile, mi_row, mi_col, r, subsize);
  } else {
    switch (partition) {
      case PARTITION_NONE:
        decode_block(pbi, xd, tile, mi_row, mi_col, r, subsize);
        break;
      case PARTITION_HORZ:
        decode_block(pbi, xd, tile, mi_row, mi_col, r, subsize);
        if (mi_row + hbs < cm->mi_rows)
          decode_block(pbi, xd, tile, mi_row + hbs, mi_col, r, subsize);
        break;
      case PARTITION_VERT:
        decode_block(pbi, xd, tile, mi_row, mi_col, r, subsize);
        if (mi_col + hbs < cm->mi_cols)
          decode_block(pbi, xd, tile, mi_row, mi_col + hbs, r, subsize);
        break;
      case PARTITION_SPLIT:
        decode_partition(pbi, xd, tile, mi_row,       mi_col,       r, subsize);
        decode_partition(pbi, xd, tile, mi_row,       mi_col + hbs, r, subsize);
        decode_partition(pbi, xd, tile, mi_row + hbs, mi_col,       r, subsize);
        decode_partition(pbi, xd, tile, mi_row + hbs, mi_col + hbs, r, subsize);
        break;
      default:
        assert(0 && "Invalid partition type");
    }
  }

  // update partition context
  if (bsize >= BLOCK_8X8 &&
      (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT))
    update_partition_context(xd, mi_row, mi_col, subsize, bsize);
}

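// Validate that the tile's data lies within the frame buffer and initialize
// a bool decoder over it; any failure raises an internal error.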
static void setup_token_decoder(const uint8_t *data,
                                const uint8_t *data_end,
                                size_t read_size,
                                struct vpx_internal_error_info *error_info,
                                vp9_reader *r,
                                vpx_decrypt_cb decrypt_cb,
                                void *decrypt_state) {
  // Validate the calculated partition length. If the buffer
  // described by the partition can't be fully read, then restrict
  // it to the portion that can be (for EC mode) or throw an error.
  if (!read_is_valid(data, read_size, data_end))
    vpx_internal_error(error_info, VPX_CODEC_CORRUPT_FRAME,
                       "Truncated packet or corrupt tile length");

  if (vp9_reader_init(r, data, read_size, decrypt_cb, decrypt_state))
    vpx_internal_error(error_info, VPX_CODEC_MEM_ERROR,
                       "Failed to allocate bool decoder %d", 1);
}

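// Coefficient probability updates: for each transform size up to the largest
// allowed by the transform mode, a single bit signals whether the
// per-plane/band/context model probabilities are updated.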
static void read_coef_probs_common(vp9_coeff_probs_model *coef_probs,
                                   vp9_reader *r) {
  int i, j, k, l, m;

  if (vp9_read_bit(r))
    for (i = 0; i < PLANE_TYPES; ++i)
      for (j = 0; j < REF_TYPES; ++j)
        for (k = 0; k < COEF_BANDS; ++k)
          for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l)
            for (m = 0; m < UNCONSTRAINED_NODES; ++m)
              vp9_diff_update_prob(r, &coef_probs[i][j][k][l][m]);
}

static void read_coef_probs(FRAME_CONTEXT *fc, TX_MODE tx_mode,
                            vp9_reader *r) {
  const TX_SIZE max_tx_size = tx_mode_to_biggest_tx_size[tx_mode];
  TX_SIZE tx_size;
  for (tx_size = TX_4X4; tx_size <= max_tx_size; ++tx_size)
    read_coef_probs_common(fc->coef_probs[tx_size], r);
}

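// Parse the segmentation syntax: the enable flag, optional updates to the
// segment-map tree and temporal-prediction probabilities, and optional
// per-segment feature data.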
static void setup_segmentation(struct segmentation *seg,
                               struct vp9_read_bit_buffer *rb) {
  int i, j;

  seg->update_map = 0;
  seg->update_data = 0;

  seg->enabled = vp9_rb_read_bit(rb);
  if (!seg->enabled)
    return;

  // Segmentation map update
  seg->update_map = vp9_rb_read_bit(rb);
  if (seg->update_map) {
    for (i = 0; i < SEG_TREE_PROBS; i++)
      seg->tree_probs[i] = vp9_rb_read_bit(rb) ? vp9_rb_read_literal(rb, 8)
                                               : MAX_PROB;

    seg->temporal_update = vp9_rb_read_bit(rb);
    if (seg->temporal_update) {
      for (i = 0; i < PREDICTION_PROBS; i++)
        seg->pred_probs[i] = vp9_rb_read_bit(rb) ? vp9_rb_read_literal(rb, 8)
                                                 : MAX_PROB;
    } else {
      for (i = 0; i < PREDICTION_PROBS; i++)
        seg->pred_probs[i] = MAX_PROB;
    }
  }

  // Segmentation data update
  seg->update_data = vp9_rb_read_bit(rb);
  if (seg->update_data) {
    seg->abs_delta = vp9_rb_read_bit(rb);

    vp9_clearall_segfeatures(seg);

    for (i = 0; i < MAX_SEGMENTS; i++) {
      for (j = 0; j < SEG_LVL_MAX; j++) {
        int data = 0;
        const int feature_enabled = vp9_rb_read_bit(rb);
        if (feature_enabled) {
          vp9_enable_segfeature(seg, i, j);
          data = decode_unsigned_max(rb, vp9_seg_feature_data_max(j));
          if (vp9_is_segfeature_signed(j))
            data = vp9_rb_read_bit(rb) ? -data : data;
        }
        vp9_set_segdata(seg, i, j, data);
      }
    }
  }
}

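// Parse the loop filter syntax: filter level, sharpness, and optional
// per-reference-frame and per-mode filter level deltas.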
static void setup_loopfilter(struct loopfilter *lf,
                             struct vp9_read_bit_buffer *rb) {
  lf->filter_level = vp9_rb_read_literal(rb, 6);
  lf->sharpness_level = vp9_rb_read_literal(rb, 3);

  // Read in loop filter deltas applied at the MB level based on mode or ref
  // frame.
  lf->mode_ref_delta_update = 0;

  lf->mode_ref_delta_enabled = vp9_rb_read_bit(rb);
  if (lf->mode_ref_delta_enabled) {
    lf->mode_ref_delta_update = vp9_rb_read_bit(rb);
    if (lf->mode_ref_delta_update) {
      int i;

      for (i = 0; i < MAX_REF_LF_DELTAS; i++)
        if (vp9_rb_read_bit(rb))
          lf->ref_deltas[i] = vp9_rb_read_signed_literal(rb, 6);

      for (i = 0; i < MAX_MODE_LF_DELTAS; i++)
        if (vp9_rb_read_bit(rb))
          lf->mode_deltas[i] = vp9_rb_read_signed_literal(rb, 6);
    }
  }
}

static int read_delta_q(struct vp9_read_bit_buffer *rb, int *delta_q) {
  const int old = *delta_q;
  *delta_q = vp9_rb_read_bit(rb) ? vp9_rb_read_signed_literal(rb, 4) : 0;
  return old != *delta_q;
}

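// Parse the quantization parameters: base q index plus the luma DC and
// chroma DC/AC delta q values. The dequantizer tables are rebuilt when a
// delta changes or the bit depth differs from the one they were built for,
// and lossless mode is flagged when the base index and all deltas are zero.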
static void setup_quantization(VP9_COMMON *const cm, MACROBLOCKD *const xd,
                               struct vp9_read_bit_buffer *rb) {
  int update = 0;

  cm->base_qindex = vp9_rb_read_literal(rb, QINDEX_BITS);
  update |= read_delta_q(rb, &cm->y_dc_delta_q);
  update |= read_delta_q(rb, &cm->uv_dc_delta_q);
  update |= read_delta_q(rb, &cm->uv_ac_delta_q);
  if (update || cm->bit_depth != cm->dequant_bit_depth) {
    vp9_init_dequantizer(cm);
    cm->dequant_bit_depth = cm->bit_depth;
  }

  xd->lossless = cm->base_qindex == 0 &&
                 cm->y_dc_delta_q == 0 &&
                 cm->uv_dc_delta_q == 0 &&
                 cm->uv_ac_delta_q == 0;
#if CONFIG_VP9_HIGHBITDEPTH
  xd->bd = (int)cm->bit_depth;
#endif
}

static INTERP_FILTER read_interp_filter(struct vp9_read_bit_buffer *rb) {
  const INTERP_FILTER literal_to_filter[] = { EIGHTTAP_SMOOTH,
                                              EIGHTTAP,
                                              EIGHTTAP_SHARP,
                                              BILINEAR };
  return vp9_rb_read_bit(rb) ? SWITCHABLE
                             : literal_to_filter[vp9_rb_read_literal(rb, 2)];
}

void vp9_read_frame_size(struct vp9_read_bit_buffer *rb,
                         int *width, int *height) {
  *width = vp9_rb_read_literal(rb, 16) + 1;
  *height = vp9_rb_read_literal(rb, 16) + 1;
}

static void setup_display_size(VP9_COMMON *cm, struct vp9_read_bit_buffer *rb) {
  cm->display_width = cm->width;
  cm->display_height = cm->height;
  if (vp9_rb_read_bit(rb))
    vp9_read_frame_size(rb, &cm->display_width, &cm->display_height);
}

static void resize_mv_buffer(VP9_COMMON *cm) {
  vpx_free(cm->cur_frame->mvs);
  cm->cur_frame->mi_rows = cm->mi_rows;
  cm->cur_frame->mi_cols = cm->mi_cols;
  cm->cur_frame->mvs = (MV_REF *)vpx_calloc(cm->mi_rows * cm->mi_cols,
                                            sizeof(*cm->cur_frame->mvs));
}

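// Grow the mode-info and context buffers when the coded frame size changes.
// A fresh allocation is only needed when the new mi grid is larger in either
// dimension; otherwise the existing buffers are reused with updated
// geometry. The per-frame MV buffer is also reallocated when it is too small.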
static void resize_context_buffers(VP9_COMMON *cm, int width, int height) {
#if CONFIG_SIZE_LIMIT
  if (width > DECODE_WIDTH_LIMIT || height > DECODE_HEIGHT_LIMIT)
    vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
                       "Width and height beyond allowed size.");
#endif
  if (cm->width != width || cm->height != height) {
    const int new_mi_rows =
        ALIGN_POWER_OF_TWO(height, MI_SIZE_LOG2) >> MI_SIZE_LOG2;
    const int new_mi_cols =
        ALIGN_POWER_OF_TWO(width,  MI_SIZE_LOG2) >> MI_SIZE_LOG2;

    // Allocations in vp9_alloc_context_buffers() depend on individual
    // dimensions as well as the overall size.
    if (new_mi_cols > cm->mi_cols || new_mi_rows > cm->mi_rows) {
      if (vp9_alloc_context_buffers(cm, width, height))
        vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
                           "Failed to allocate context buffers");
    } else {
      vp9_set_mb_mi(cm, width, height);
    }
    vp9_init_context_buffers(cm);
    cm->width = width;
    cm->height = height;
  }
  if (cm->cur_frame->mvs == NULL || cm->mi_rows > cm->cur_frame->mi_rows ||
      cm->mi_cols > cm->cur_frame->mi_cols) {
    resize_mv_buffer(cm);
  }
}

static void setup_frame_size(VP9_COMMON *cm, struct vp9_read_bit_buffer *rb) {
  int width, height;
  BufferPool *const pool = cm->buffer_pool;
  vp9_read_frame_size(rb, &width, &height);
  resize_context_buffers(cm, width, height);
  setup_display_size(cm, rb);

  lock_buffer_pool(pool);
  if (vp9_realloc_frame_buffer(
          get_frame_new_buffer(cm), cm->width, cm->height,
          cm->subsampling_x, cm->subsampling_y,
#if CONFIG_VP9_HIGHBITDEPTH
          cm->use_highbitdepth,
#endif
          VP9_DEC_BORDER_IN_PIXELS,
          cm->byte_alignment,
          &pool->frame_bufs[cm->new_fb_idx].raw_frame_buffer, pool->get_fb_cb,
          pool->cb_priv)) {
    unlock_buffer_pool(pool);
    vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
                       "Failed to allocate frame buffer");
  }
  unlock_buffer_pool(pool);

  pool->frame_bufs[cm->new_fb_idx].buf.subsampling_x = cm->subsampling_x;
  pool->frame_bufs[cm->new_fb_idx].buf.subsampling_y = cm->subsampling_y;
  pool->frame_bufs[cm->new_fb_idx].buf.bit_depth = (unsigned int)cm->bit_depth;
}

static INLINE int valid_ref_frame_img_fmt(vpx_bit_depth_t ref_bit_depth,
                                          int ref_xss, int ref_yss,
                                          vpx_bit_depth_t this_bit_depth,
                                          int this_xss, int this_yss) {
  return ref_bit_depth == this_bit_depth && ref_xss == this_xss &&
         ref_yss == this_yss;
}

749 750 751
                                       struct vp9_read_bit_buffer *rb) {
  int width, height;
  int found = 0, i;
752
  int has_valid_ref_frame = 0;
753
  BufferPool *const pool = cm->buffer_pool;
Dmitry Kovalev's avatar
Dmitry Kovalev committed
754
  for (i = 0; i < REFS_PER_FRAME; ++i) {
755
    if (vp9_rb_read_bit(rb)) {
Dmitry Kovalev's avatar
Dmitry Kovalev committed
756 757 758
      YV12_BUFFER_CONFIG *const buf = cm->frame_refs[i].buf;
      width = buf->y_crop_width;
      height = buf->y_crop_height;
759 760 761 762 763 764
      found = 1;
      break;
    }
  }

  if (!found)
765
    vp9_read_frame_size(rb, &width, &height);
766

hkuang's avatar
hkuang committed
767
  if (width <= 0 || height <= 0)
768 769 770 771 772
    vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
                       "Invalid frame size");

  // Check to make sure at least one of frames that this frame references
  // has valid dimensions.
773 774
  for (i = 0; i < REFS_PER_FRAME; ++i) {
    RefBuffer *const ref_frame = &cm->frame_refs[i];
775 776
    has_valid_ref_frame |= valid_ref_frame_size(ref_frame->buf->y_crop_width,
                                                ref_frame->buf->y_crop_height,
777
                                                width, height);