/*
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include <assert.h>
#include <stdlib.h>  // qsort()

#include "./vp9_rtcd.h"
#include "./vpx_scale_rtcd.h"

#include "vpx_mem/vpx_mem.h"
#include "vpx_ports/mem_ops.h"
#include "vpx_scale/vpx_scale.h"

#include "vp9/common/vp9_alloccommon.h"
#include "vp9/common/vp9_common.h"
#include "vp9/common/vp9_entropy.h"
#include "vp9/common/vp9_entropymode.h"
#include "vp9/common/vp9_idct.h"
#include "vp9/common/vp9_pred_common.h"
#include "vp9/common/vp9_quant_common.h"
#include "vp9/common/vp9_reconintra.h"
#include "vp9/common/vp9_reconinter.h"
#include "vp9/common/vp9_seg_common.h"
#include "vp9/common/vp9_thread.h"
#include "vp9/common/vp9_tile_common.h"

#include "vp9/decoder/vp9_decodeframe.h"
#include "vp9/decoder/vp9_detokenize.h"
#include "vp9/decoder/vp9_decodemv.h"
#include "vp9/decoder/vp9_decoder.h"
#include "vp9/decoder/vp9_dsubexp.h"
#include "vp9/decoder/vp9_read_bit_buffer.h"
#include "vp9/decoder/vp9_reader.h"

#define MAX_VP9_HEADER_SIZE 80

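// Compound (two-reference) prediction is only allowed when at least one of
// the non-LAST references has a sign bias different from LAST_FRAME's.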
static int is_compound_reference_allowed(const VP9_COMMON *cm) {
  int i;
  for (i = 1; i < REFS_PER_FRAME; ++i)
    if (cm->ref_frame_sign_bias[i + 1] != cm->ref_frame_sign_bias[1])
      return 1;

  return 0;
}

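// Given the reference sign biases, choose which reference acts as the fixed
// compound reference and which two may vary from block to block.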
static void setup_compound_reference_mode(VP9_COMMON *cm) {
  if (cm->ref_frame_sign_bias[LAST_FRAME] ==
          cm->ref_frame_sign_bias[GOLDEN_FRAME]) {
    cm->comp_fixed_ref = ALTREF_FRAME;
    cm->comp_var_ref[0] = LAST_FRAME;
    cm->comp_var_ref[1] = GOLDEN_FRAME;
  } else if (cm->ref_frame_sign_bias[LAST_FRAME] ==
                 cm->ref_frame_sign_bias[ALTREF_FRAME]) {
    cm->comp_fixed_ref = GOLDEN_FRAME;
    cm->comp_var_ref[0] = LAST_FRAME;
    cm->comp_var_ref[1] = ALTREF_FRAME;
  } else {
    cm->comp_fixed_ref = LAST_FRAME;
    cm->comp_var_ref[0] = GOLDEN_FRAME;
    cm->comp_var_ref[1] = ALTREF_FRAME;
  }
}

static int read_is_valid(const uint8_t *start, size_t len, const uint8_t *end) {
  return len != 0 && len <= (size_t)(end - start);
}

static int decode_unsigned_max(struct vp9_read_bit_buffer *rb, int max) {
  const int data = vp9_rb_read_literal(rb, get_unsigned_bits(max));
  return data > max ? max : data;
}

static TX_MODE read_tx_mode(vp9_reader *r) {
  TX_MODE tx_mode = vp9_read_literal(r, 2);
  if (tx_mode == ALLOW_32X32)
    tx_mode += vp9_read_bit(r);
  return tx_mode;
}

static void read_tx_mode_probs(struct tx_probs *tx_probs, vp9_reader *r) {
  int i, j;

  for (i = 0; i < TX_SIZE_CONTEXTS; ++i)
    for (j = 0; j < TX_SIZES - 3; ++j)
      vp9_diff_update_prob(r, &tx_probs->p8x8[i][j]);

  for (i = 0; i < TX_SIZE_CONTEXTS; ++i)
    for (j = 0; j < TX_SIZES - 2; ++j)
      vp9_diff_update_prob(r, &tx_probs->p16x16[i][j]);

  for (i = 0; i < TX_SIZE_CONTEXTS; ++i)
    for (j = 0; j < TX_SIZES - 1; ++j)
      vp9_diff_update_prob(r, &tx_probs->p32x32[i][j]);
}

static void read_switchable_interp_probs(FRAME_CONTEXT *fc, vp9_reader *r) {
  int i, j;
  for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j)
    for (i = 0; i < SWITCHABLE_FILTERS - 1; ++i)
      vp9_diff_update_prob(r, &fc->switchable_interp_prob[j][i]);
}

static void read_inter_mode_probs(FRAME_CONTEXT *fc, vp9_reader *r) {
  int i, j;
  for (i = 0; i < INTER_MODE_CONTEXTS; ++i)
    for (j = 0; j < INTER_MODES - 1; ++j)
      vp9_diff_update_prob(r, &fc->inter_mode_probs[i][j]);
}

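// When compound prediction is possible, the first bit distinguishes single
// reference coding from the compound modes; a second bit then selects between
// compound-only coding and per-block reference mode selection.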
static REFERENCE_MODE read_frame_reference_mode(const VP9_COMMON *cm,
                                                vp9_reader *r) {
  if (is_compound_reference_allowed(cm)) {
    return vp9_read_bit(r) ? (vp9_read_bit(r) ? REFERENCE_MODE_SELECT
                                              : COMPOUND_REFERENCE)
                           : SINGLE_REFERENCE;
  } else {
    return SINGLE_REFERENCE;
  }
}

static void read_frame_reference_mode_probs(VP9_COMMON *cm, vp9_reader *r) {
  FRAME_CONTEXT *const fc = cm->fc;
  int i;

  if (cm->reference_mode == REFERENCE_MODE_SELECT)
    for (i = 0; i < COMP_INTER_CONTEXTS; ++i)
      vp9_diff_update_prob(r, &fc->comp_inter_prob[i]);

  if (cm->reference_mode != COMPOUND_REFERENCE)
    for (i = 0; i < REF_CONTEXTS; ++i) {
      vp9_diff_update_prob(r, &fc->single_ref_prob[i][0]);
      vp9_diff_update_prob(r, &fc->single_ref_prob[i][1]);
    }

  if (cm->reference_mode != SINGLE_REFERENCE)
    for (i = 0; i < REF_CONTEXTS; ++i)
      vp9_diff_update_prob(r, &fc->comp_ref_prob[i]);
}

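// Each motion vector probability is updated individually: a flag coded with
// MV_UPDATE_PROB signals whether a new 7-bit value follows for that entry.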
static void update_mv_probs(vp9_prob *p, int n, vp9_reader *r) {
  int i;
  for (i = 0; i < n; ++i)
    if (vp9_read(r, MV_UPDATE_PROB))
      p[i] = (vp9_read_literal(r, 7) << 1) | 1;
}

static void read_mv_probs(nmv_context *ctx, int allow_hp, vp9_reader *r) {
  int i, j;

  update_mv_probs(ctx->joints, MV_JOINTS - 1, r);

  for (i = 0; i < 2; ++i) {
    nmv_component *const comp_ctx = &ctx->comps[i];
    update_mv_probs(&comp_ctx->sign, 1, r);
    update_mv_probs(comp_ctx->classes, MV_CLASSES - 1, r);
    update_mv_probs(comp_ctx->class0, CLASS0_SIZE - 1, r);
    update_mv_probs(comp_ctx->bits, MV_OFFSET_BITS, r);
  }

  for (i = 0; i < 2; ++i) {
    nmv_component *const comp_ctx = &ctx->comps[i];
    for (j = 0; j < CLASS0_SIZE; ++j)
      update_mv_probs(comp_ctx->class0_fp[j], MV_FP_SIZE - 1, r);
    update_mv_probs(comp_ctx->fp, 3, r);
  }

  if (allow_hp) {
    for (i = 0; i < 2; ++i) {
      nmv_component *const comp_ctx = &ctx->comps[i];
      update_mv_probs(&comp_ctx->class0_hp, 1, r);
      update_mv_probs(&comp_ctx->hp, 1, r);
    }
  }
}
183
static void setup_plane_dequants(VP9_COMMON *cm, MACROBLOCKD *xd, int q_index) {
John Koleszar's avatar
John Koleszar committed
184
  int i;
185
  xd->plane[0].dequant = cm->y_dequant[q_index];
186

187
  for (i = 1; i < MAX_MB_PLANE; i++)
188
    xd->plane[i].dequant = cm->uv_dequant[q_index];
John Koleszar's avatar
John Koleszar committed
189 190
}

191
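// Inverse transform one block of dequantized coefficients and add the result
// to the prediction in dst. The transform type depends on the plane, block
// position and lossless mode; afterwards the coefficient buffer is cleared
// (only as much as the eob requires) so it is ready for the next block.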
static void inverse_transform_block(MACROBLOCKD* xd, int plane, int block,
192 193
                                    TX_SIZE tx_size, uint8_t *dst, int stride,
                                    int eob) {
194
  struct macroblockd_plane *const pd = &xd->plane[plane];
195
  if (eob > 0) {
196
    TX_TYPE tx_type = DCT_DCT;
197
    tran_low_t *const dqcoeff = BLOCK_OFFSET(pd->dqcoeff, block);
198 199 200 201
#if CONFIG_VP9_HIGHBITDEPTH
    if (xd->cur_buf->flags & YV12_FLAG_HIGHBITDEPTH) {
      if (xd->lossless) {
        tx_type = DCT_DCT;
202
        vp9_highbd_iwht4x4_add(dqcoeff, dst, stride, eob, xd->bd);
203 204 205 206 207
      } else {
        const PLANE_TYPE plane_type = pd->plane_type;
        switch (tx_size) {
          case TX_4X4:
            tx_type = get_tx_type_4x4(plane_type, xd, block);
208
            vp9_highbd_iht4x4_add(tx_type, dqcoeff, dst, stride, eob, xd->bd);
209 210 211
            break;
          case TX_8X8:
            tx_type = get_tx_type(plane_type, xd);
212
            vp9_highbd_iht8x8_add(tx_type, dqcoeff, dst, stride, eob, xd->bd);
213 214 215
            break;
          case TX_16X16:
            tx_type = get_tx_type(plane_type, xd);
216
            vp9_highbd_iht16x16_add(tx_type, dqcoeff, dst, stride, eob, xd->bd);
217 218 219
            break;
          case TX_32X32:
            tx_type = DCT_DCT;
220
            vp9_highbd_idct32x32_add(dqcoeff, dst, stride, eob, xd->bd);
221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246 247 248 249 250 251 252 253 254 255
            break;
          default:
            assert(0 && "Invalid transform size");
        }
      }
    } else {
      if (xd->lossless) {
        tx_type = DCT_DCT;
        vp9_iwht4x4_add(dqcoeff, dst, stride, eob);
      } else {
        const PLANE_TYPE plane_type = pd->plane_type;
        switch (tx_size) {
          case TX_4X4:
            tx_type = get_tx_type_4x4(plane_type, xd, block);
            vp9_iht4x4_add(tx_type, dqcoeff, dst, stride, eob);
            break;
          case TX_8X8:
            tx_type = get_tx_type(plane_type, xd);
            vp9_iht8x8_add(tx_type, dqcoeff, dst, stride, eob);
            break;
          case TX_16X16:
            tx_type = get_tx_type(plane_type, xd);
            vp9_iht16x16_add(tx_type, dqcoeff, dst, stride, eob);
            break;
          case TX_32X32:
            tx_type = DCT_DCT;
            vp9_idct32x32_add(dqcoeff, dst, stride, eob);
            break;
          default:
            assert(0 && "Invalid transform size");
            return;
        }
      }
    }
#else
256 257 258 259 260 261 262 263
    if (xd->lossless) {
      tx_type = DCT_DCT;
      vp9_iwht4x4_add(dqcoeff, dst, stride, eob);
    } else {
      const PLANE_TYPE plane_type = pd->plane_type;
      switch (tx_size) {
        case TX_4X4:
          tx_type = get_tx_type_4x4(plane_type, xd, block);
Dmitry Kovalev's avatar
Dmitry Kovalev committed
264
          vp9_iht4x4_add(tx_type, dqcoeff, dst, stride, eob);
265 266 267 268 269 270 271 272 273 274 275 276 277 278 279
          break;
        case TX_8X8:
          tx_type = get_tx_type(plane_type, xd);
          vp9_iht8x8_add(tx_type, dqcoeff, dst, stride, eob);
          break;
        case TX_16X16:
          tx_type = get_tx_type(plane_type, xd);
          vp9_iht16x16_add(tx_type, dqcoeff, dst, stride, eob);
          break;
        case TX_32X32:
          tx_type = DCT_DCT;
          vp9_idct32x32_add(dqcoeff, dst, stride, eob);
          break;
        default:
          assert(0 && "Invalid transform size");
280
          return;
281
      }
282
    }
283
#endif  // CONFIG_VP9_HIGHBITDEPTH
284 285

    if (eob == 1) {
286
      vpx_memset(dqcoeff, 0, 2 * sizeof(dqcoeff[0]));
287
    } else {
288
      if (tx_type == DCT_DCT && tx_size <= TX_16X16 && eob <= 10)
289
        vpx_memset(dqcoeff, 0, 4 * (4 << tx_size) * sizeof(dqcoeff[0]));
290
      else if (tx_size == TX_32X32 && eob <= 34)
291
        vpx_memset(dqcoeff, 0, 256 * sizeof(dqcoeff[0]));
292
      else
293
        vpx_memset(dqcoeff, 0, (16 << (tx_size << 1)) * sizeof(dqcoeff[0]));
294
    }
295 296 297
  }
}

298 299 300 301 302 303 304 305 306
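// vp9_foreach_transformed_block() callback for intra blocks: run intra
// prediction for one transform block and, unless the block is coded as skip,
// decode its tokens and add the inverse transform on top of the predictor.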
struct intra_args {
  VP9_COMMON *cm;
  MACROBLOCKD *xd;
  vp9_reader *r;
};

static void predict_and_reconstruct_intra_block(int plane, int block,
                                                BLOCK_SIZE plane_bsize,
                                                TX_SIZE tx_size, void *arg) {
Dmitry Kovalev's avatar
Dmitry Kovalev committed
307
  struct intra_args *const args = (struct intra_args *)arg;
308 309
  VP9_COMMON *const cm = args->cm;
  MACROBLOCKD *const xd = args->xd;
310
  struct macroblockd_plane *const pd = &xd->plane[plane];
hkuang's avatar
hkuang committed
311
  MODE_INFO *const mi = xd->mi[0].src_mi;
312 313
  const PREDICTION_MODE mode = (plane == 0) ? get_y_mode(mi, block)
                                            : mi->mbmi.uv_mode;
314 315 316 317
  int x, y;
  uint8_t *dst;
  txfrm_block_to_raster_xy(plane_bsize, tx_size, block, &x, &y);
  dst = &pd->dst.buf[4 * y * pd->dst.stride + 4 * x];
318

319
  vp9_predict_intra_block(xd, block >> (tx_size << 1),
320
                          b_width_log2_lookup[plane_bsize], tx_size, mode,
321 322
                          dst, pd->dst.stride, dst, pd->dst.stride,
                          x, y, plane);
323

324
  if (!mi->mbmi.skip) {
325 326 327 328 329
    const int eob = vp9_decode_block_tokens(cm, xd, plane, block,
                                            plane_bsize, x, y, tx_size,
                                            args->r);
    inverse_transform_block(xd, plane, block, tx_size, dst, pd->dst.stride,
                            eob);
330
  }
331 332
}

333 334 335 336 337 338 339 340 341 342
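// vp9_foreach_transformed_block() callback for inter blocks: decode the
// residual tokens for one transform block, add its inverse transform to the
// inter predictor, and accumulate the eob total for the caller.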
struct inter_args {
  VP9_COMMON *cm;
  MACROBLOCKD *xd;
  vp9_reader *r;
  int *eobtotal;
};

static void reconstruct_inter_block(int plane, int block,
                                    BLOCK_SIZE plane_bsize,
                                    TX_SIZE tx_size, void *arg) {
Dmitry Kovalev's avatar
Dmitry Kovalev committed
343
  struct inter_args *args = (struct inter_args *)arg;
344 345
  VP9_COMMON *const cm = args->cm;
  MACROBLOCKD *const xd = args->xd;
346
  struct macroblockd_plane *const pd = &xd->plane[plane];
347
  int x, y, eob;
348
  txfrm_block_to_raster_xy(plane_bsize, tx_size, block, &x, &y);
349 350
  eob = vp9_decode_block_tokens(cm, xd, plane, block, plane_bsize, x, y,
                                tx_size, args->r);
351 352
  inverse_transform_block(xd, plane, block, tx_size,
                          &pd->dst.buf[4 * y * pd->dst.stride + 4 * x],
353 354
                          pd->dst.stride, eob);
  *args->eobtotal += eob;
355 356
}

357 358 359
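// Point xd->mi at the mode info for the block at (mi_row, mi_col), make all
// covered mode-info units reference it, and set up the skip context, row/col
// limits and destination planes used while decoding the block.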
static MB_MODE_INFO *set_offsets(VP9_COMMON *const cm, MACROBLOCKD *const xd,
                                 const TileInfo *const tile,
                                 BLOCK_SIZE bsize, int mi_row, int mi_col) {
Dmitry Kovalev's avatar
Dmitry Kovalev committed
360
  const int bw = num_8x8_blocks_wide_lookup[bsize];
361 362 363
  const int bh = num_8x8_blocks_high_lookup[bsize];
  const int x_mis = MIN(bw, cm->mi_cols - mi_col);
  const int y_mis = MIN(bh, cm->mi_rows - mi_row);
364
  const int offset = mi_row * cm->mi_stride + mi_col;
365
  int x, y;
366

hkuang's avatar
hkuang committed
367 368 369 370
  xd->mi = cm->mi + offset;
  xd->mi[0].src_mi = &xd->mi[0];  // Point to self.
  xd->mi[0].mbmi.sb_type = bsize;

371
  for (y = 0; y < y_mis; ++y)
hkuang's avatar
hkuang committed
372 373 374
    for (x = !y; x < x_mis; ++x) {
      xd->mi[y * cm->mi_stride + x].src_mi = &xd->mi[0];
    }
375

376
  set_skip_context(xd, mi_row, mi_col);
Ronald S. Bultje's avatar
Ronald S. Bultje committed
377

378 379
  // Distance of Mb to the various image edges. These are specified to 8th pel
  // as they are always compared to values that are in 1/8th pel units
James Zern's avatar
James Zern committed
380
  set_mi_row_col(xd, tile, mi_row, bh, mi_col, bw, cm->mi_rows, cm->mi_cols);
Ronald S. Bultje's avatar
Ronald S. Bultje committed
381

382
  vp9_setup_dst_planes(xd->plane, get_frame_new_buffer(cm), mi_row, mi_col);
hkuang's avatar
hkuang committed
383
  return &xd->mi[0].mbmi;
Ronald S. Bultje's avatar
Ronald S. Bultje committed
384
}
John Koleszar's avatar
John Koleszar committed
385

386
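// Decode one block: read its mode info, then either intra predict and
// reconstruct each transform block or build the inter predictors and add the
// decoded residual. For blocks of 8x8 and larger, an all-zero inter residual
// re-enables the skip flag so the loop filter treats the block as skipped.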
static void decode_block(VP9_COMMON *const cm, MACROBLOCKD *const xd,
387 388 389
                         const TileInfo *const tile,
                         int mi_row, int mi_col,
                         vp9_reader *r, BLOCK_SIZE bsize) {
390
  const int less8x8 = bsize < BLOCK_8X8;
391
  MB_MODE_INFO *mbmi = set_offsets(cm, xd, tile, bsize, mi_row, mi_col);
392
  vp9_read_mode_info(cm, xd, tile, mi_row, mi_col, r);
393

394
  if (less8x8)
395
    bsize = BLOCK_8X8;
396

397
  if (mbmi->skip) {
398
    reset_skip_context(xd, bsize);
399
  } else {
400 401 402 403
    if (cm->seg.enabled)
      setup_plane_dequants(cm, xd, vp9_get_qindex(&cm->seg, mbmi->segment_id,
                                                  cm->base_qindex));
  }
404

405
  if (!is_inter_block(mbmi)) {
406
    struct intra_args arg = { cm, xd, r };
407 408
    vp9_foreach_transformed_block(xd, bsize,
                                  predict_and_reconstruct_intra_block, &arg);
409 410
  } else {
    // Prediction
411
    vp9_dec_build_inter_predictors_sb(xd, mi_row, mi_col, bsize);
412

413
    // Reconstruction
414
    if (!mbmi->skip) {
415
      int eobtotal = 0;
416
      struct inter_args arg = { cm, xd, r, &eobtotal };
417
      vp9_foreach_transformed_block(xd, bsize, reconstruct_inter_block, &arg);
418
      if (!less8x8 && eobtotal == 0)
419
        mbmi->skip = 1;  // skip loopfilter
420
    }
421
  }
422

423
  xd->corrupted |= vp9_reader_has_error(r);
424 425
}

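// Read the partition type of a block. The full partition tree is used only
// when both halves fit inside the frame; otherwise the choice collapses to
// horizontal or vertical vs. split. Counts are accumulated unless the frame
// is coded in frame-parallel decoding mode.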
static PARTITION_TYPE read_partition(VP9_COMMON *cm, MACROBLOCKD *xd, int hbs,
                                     int mi_row, int mi_col, BLOCK_SIZE bsize,
                                     vp9_reader *r) {
  const int ctx = partition_plane_context(xd, mi_row, mi_col, bsize);
  const vp9_prob *const probs = get_partition_probs(cm, ctx);
  const int has_rows = (mi_row + hbs) < cm->mi_rows;
  const int has_cols = (mi_col + hbs) < cm->mi_cols;
  PARTITION_TYPE p;

  if (has_rows && has_cols)
    p = (PARTITION_TYPE)vp9_read_tree(r, vp9_partition_tree, probs);
  else if (!has_rows && has_cols)
    p = vp9_read(r, probs[1]) ? PARTITION_SPLIT : PARTITION_HORZ;
  else if (has_rows && !has_cols)
    p = vp9_read(r, probs[2]) ? PARTITION_SPLIT : PARTITION_VERT;
  else
    p = PARTITION_SPLIT;

  if (!cm->frame_parallel_decoding_mode)
    ++cm->counts.partition[ctx][p];

  return p;
}

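// Recursively decode the partition tree below a block, calling decode_block()
// for each leaf and checking that the resulting chroma block size is legal
// for the current subsampling.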
static void decode_partition(VP9_COMMON *const cm, MACROBLOCKD *const xd,
                             const TileInfo *const tile,
                             int mi_row, int mi_col,
                             vp9_reader* r, BLOCK_SIZE bsize) {
  const int hbs = num_8x8_blocks_wide_lookup[bsize] / 2;
  PARTITION_TYPE partition;
  BLOCK_SIZE subsize, uv_subsize;

  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols)
    return;

  partition = read_partition(cm, xd, hbs, mi_row, mi_col, bsize, r);
  subsize = get_subsize(bsize, partition);
  uv_subsize = ss_size_lookup[subsize][cm->subsampling_x][cm->subsampling_y];
  if (subsize >= BLOCK_8X8 && uv_subsize == BLOCK_INVALID)
    vpx_internal_error(xd->error_info,
                       VPX_CODEC_CORRUPT_FRAME, "Invalid block size.");
  if (subsize < BLOCK_8X8) {
    decode_block(cm, xd, tile, mi_row, mi_col, r, subsize);
  } else {
    switch (partition) {
      case PARTITION_NONE:
        decode_block(cm, xd, tile, mi_row, mi_col, r, subsize);
        break;
      case PARTITION_HORZ:
        decode_block(cm, xd, tile, mi_row, mi_col, r, subsize);
        if (mi_row + hbs < cm->mi_rows)
          decode_block(cm, xd, tile, mi_row + hbs, mi_col, r, subsize);
        break;
      case PARTITION_VERT:
        decode_block(cm, xd, tile, mi_row, mi_col, r, subsize);
        if (mi_col + hbs < cm->mi_cols)
          decode_block(cm, xd, tile, mi_row, mi_col + hbs, r, subsize);
        break;
      case PARTITION_SPLIT:
        decode_partition(cm, xd, tile, mi_row,       mi_col,       r, subsize);
        decode_partition(cm, xd, tile, mi_row,       mi_col + hbs, r, subsize);
        decode_partition(cm, xd, tile, mi_row + hbs, mi_col,       r, subsize);
        decode_partition(cm, xd, tile, mi_row + hbs, mi_col + hbs, r, subsize);
        break;
      default:
        assert(0 && "Invalid partition type");
    }
  }

  // update partition context
  if (bsize >= BLOCK_8X8 &&
      (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT))
    update_partition_context(xd, mi_row, mi_col, subsize, bsize);
}

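// Validate the coded partition size against the remaining buffer and
// initialize the boolean decoder that will read it.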
static void setup_token_decoder(const uint8_t *data,
                                const uint8_t *data_end,
                                size_t read_size,
                                struct vpx_internal_error_info *error_info,
                                vp9_reader *r,
                                vpx_decrypt_cb decrypt_cb,
                                void *decrypt_state) {
  // Validate the calculated partition length. If the buffer
  // described by the partition can't be fully read, then restrict
  // it to the portion that can be (for EC mode) or throw an error.
  if (!read_is_valid(data, read_size, data_end))
    vpx_internal_error(error_info, VPX_CODEC_CORRUPT_FRAME,
                       "Truncated packet or corrupt tile length");

  if (vp9_reader_init(r, data, read_size, decrypt_cb, decrypt_state))
    vpx_internal_error(error_info, VPX_CODEC_MEM_ERROR,
                       "Failed to allocate bool decoder %d", 1);
}

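// Coefficient probabilities are updated per transform size: a leading bit
// signals whether differential updates follow for the whole probability
// table of that size.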
static void read_coef_probs_common(vp9_coeff_probs_model *coef_probs,
                                   vp9_reader *r) {
  int i, j, k, l, m;

  if (vp9_read_bit(r))
    for (i = 0; i < PLANE_TYPES; ++i)
      for (j = 0; j < REF_TYPES; ++j)
        for (k = 0; k < COEF_BANDS; ++k)
          for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l)
            for (m = 0; m < UNCONSTRAINED_NODES; ++m)
              vp9_diff_update_prob(r, &coef_probs[i][j][k][l][m]);
}

static void read_coef_probs(FRAME_CONTEXT *fc, TX_MODE tx_mode,
                            vp9_reader *r) {
  const TX_SIZE max_tx_size = tx_mode_to_biggest_tx_size[tx_mode];
  TX_SIZE tx_size;
  for (tx_size = TX_4X4; tx_size <= max_tx_size; ++tx_size)
    read_coef_probs_common(fc->coef_probs[tx_size], r);
}

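// Parse the segmentation part of the uncompressed header: the enable flag,
// optional tree/prediction probabilities for the segment map, and the
// per-segment feature data.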
static void setup_segmentation(struct segmentation *seg,
                               struct vp9_read_bit_buffer *rb) {
  int i, j;

  seg->update_map = 0;
  seg->update_data = 0;

  seg->enabled = vp9_rb_read_bit(rb);
  if (!seg->enabled)
    return;

  // Segmentation map update
  seg->update_map = vp9_rb_read_bit(rb);
  if (seg->update_map) {
    for (i = 0; i < SEG_TREE_PROBS; i++)
      seg->tree_probs[i] = vp9_rb_read_bit(rb) ? vp9_rb_read_literal(rb, 8)
                                               : MAX_PROB;

    seg->temporal_update = vp9_rb_read_bit(rb);
    if (seg->temporal_update) {
      for (i = 0; i < PREDICTION_PROBS; i++)
        seg->pred_probs[i] = vp9_rb_read_bit(rb) ? vp9_rb_read_literal(rb, 8)
                                                 : MAX_PROB;
    } else {
      for (i = 0; i < PREDICTION_PROBS; i++)
        seg->pred_probs[i] = MAX_PROB;
    }
  }

  // Segmentation data update
  seg->update_data = vp9_rb_read_bit(rb);
  if (seg->update_data) {
    seg->abs_delta = vp9_rb_read_bit(rb);

    vp9_clearall_segfeatures(seg);

    for (i = 0; i < MAX_SEGMENTS; i++) {
      for (j = 0; j < SEG_LVL_MAX; j++) {
        int data = 0;
        const int feature_enabled = vp9_rb_read_bit(rb);
        if (feature_enabled) {
          vp9_enable_segfeature(seg, i, j);
          data = decode_unsigned_max(rb, vp9_seg_feature_data_max(j));
          if (vp9_is_segfeature_signed(j))
            data = vp9_rb_read_bit(rb) ? -data : data;
        }
        vp9_set_segdata(seg, i, j, data);
      }
    }
  }
}

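// Parse the loop filter level, sharpness, and the optional per-reference and
// per-mode filter level deltas.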
static void setup_loopfilter(struct loopfilter *lf,
                             struct vp9_read_bit_buffer *rb) {
  lf->filter_level = vp9_rb_read_literal(rb, 6);
  lf->sharpness_level = vp9_rb_read_literal(rb, 3);

  // Read in loop filter deltas applied at the MB level based on mode or ref
  // frame.
  lf->mode_ref_delta_update = 0;

  lf->mode_ref_delta_enabled = vp9_rb_read_bit(rb);
  if (lf->mode_ref_delta_enabled) {
    lf->mode_ref_delta_update = vp9_rb_read_bit(rb);
    if (lf->mode_ref_delta_update) {
      int i;

      for (i = 0; i < MAX_REF_LF_DELTAS; i++)
        if (vp9_rb_read_bit(rb))
          lf->ref_deltas[i] = vp9_rb_read_signed_literal(rb, 6);

      for (i = 0; i < MAX_MODE_LF_DELTAS; i++)
        if (vp9_rb_read_bit(rb))
          lf->mode_deltas[i] = vp9_rb_read_signed_literal(rb, 6);
    }
  }
}

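// Parse the base q index and delta-q values. The dequantizer tables are
// rebuilt when a value changed or the bit depth differs from the one they
// were last built for; lossless mode is flagged when the base index and all
// deltas are zero.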
static int read_delta_q(struct vp9_read_bit_buffer *rb, int *delta_q) {
  const int old = *delta_q;
  *delta_q = vp9_rb_read_bit(rb) ? vp9_rb_read_signed_literal(rb, 4) : 0;
  return old != *delta_q;
}

static void setup_quantization(VP9_COMMON *const cm, MACROBLOCKD *const xd,
                               struct vp9_read_bit_buffer *rb) {
  int update = 0;

  cm->base_qindex = vp9_rb_read_literal(rb, QINDEX_BITS);
  update |= read_delta_q(rb, &cm->y_dc_delta_q);
  update |= read_delta_q(rb, &cm->uv_dc_delta_q);
  update |= read_delta_q(rb, &cm->uv_ac_delta_q);
  if (update || cm->bit_depth != cm->dequant_bit_depth) {
    vp9_init_dequantizer(cm);
    cm->dequant_bit_depth = cm->bit_depth;
  }

  xd->lossless = cm->base_qindex == 0 &&
                 cm->y_dc_delta_q == 0 &&
                 cm->uv_dc_delta_q == 0 &&
                 cm->uv_ac_delta_q == 0;
#if CONFIG_VP9_HIGHBITDEPTH
  xd->bd = (int)cm->bit_depth;
#endif
}

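// The frame-level interpolation filter is either SWITCHABLE (signaled per
// block) or one of four fixed filters coded as a 2-bit literal.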
static INTERP_FILTER read_interp_filter(struct vp9_read_bit_buffer *rb) {
  const INTERP_FILTER literal_to_filter[] = { EIGHTTAP_SMOOTH,
                                              EIGHTTAP,
                                              EIGHTTAP_SHARP,
                                              BILINEAR };
  return vp9_rb_read_bit(rb) ? SWITCHABLE
                             : literal_to_filter[vp9_rb_read_literal(rb, 2)];
}

void vp9_read_frame_size(struct vp9_read_bit_buffer *rb,
                         int *width, int *height) {
  *width = vp9_rb_read_literal(rb, 16) + 1;
  *height = vp9_rb_read_literal(rb, 16) + 1;
}

static void setup_display_size(VP9_COMMON *cm, struct vp9_read_bit_buffer *rb) {
  cm->display_width = cm->width;
  cm->display_height = cm->height;
  if (vp9_rb_read_bit(rb))
    vp9_read_frame_size(rb, &cm->display_width, &cm->display_height);
}

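// Reallocate the mode-info and MV buffers when the coded frame size changes.
// The context buffers only grow; smaller frames reuse the existing
// allocation via vp9_set_mb_mi().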
static void resize_mv_buffer(VP9_COMMON *cm) {
  vpx_free(cm->cur_frame->mvs);
  cm->cur_frame->mi_rows = cm->mi_rows;
  cm->cur_frame->mi_cols = cm->mi_cols;
  cm->cur_frame->mvs = (MV_REF *)vpx_calloc(cm->mi_rows * cm->mi_cols,
                                            sizeof(*cm->cur_frame->mvs));
}

static void resize_context_buffers(VP9_COMMON *cm, int width, int height) {
#if CONFIG_SIZE_LIMIT
  if (width > DECODE_WIDTH_LIMIT || height > DECODE_HEIGHT_LIMIT)
    vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
                       "Width and height beyond allowed size.");
#endif
  if (cm->width != width || cm->height != height) {
    const int new_mi_rows =
        ALIGN_POWER_OF_TWO(height, MI_SIZE_LOG2) >> MI_SIZE_LOG2;
    const int new_mi_cols =
        ALIGN_POWER_OF_TWO(width,  MI_SIZE_LOG2) >> MI_SIZE_LOG2;

    // Allocations in vp9_alloc_context_buffers() depend on individual
    // dimensions as well as the overall size.
    if (new_mi_cols > cm->mi_cols || new_mi_rows > cm->mi_rows) {
      if (vp9_alloc_context_buffers(cm, width, height))
        vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
                           "Failed to allocate context buffers");
    } else {
      vp9_set_mb_mi(cm, width, height);
    }
    vp9_init_context_buffers(cm);
    cm->width = width;
    cm->height = height;
  }
  if (cm->cur_frame->mvs == NULL || cm->mi_rows > cm->cur_frame->mi_rows ||
      cm->mi_cols > cm->cur_frame->mi_cols) {
    resize_mv_buffer(cm);
  }
}

static void setup_frame_size(VP9_COMMON *cm, struct vp9_read_bit_buffer *rb) {
  int width, height;
  vp9_read_frame_size(rb, &width, &height);
  resize_context_buffers(cm, width, height);
  setup_display_size(cm, rb);

  if (vp9_realloc_frame_buffer(
          get_frame_new_buffer(cm), cm->width, cm->height,
          cm->subsampling_x, cm->subsampling_y,
#if CONFIG_VP9_HIGHBITDEPTH
          cm->use_highbitdepth,
#endif
          VP9_DEC_BORDER_IN_PIXELS,
          cm->byte_alignment,
          &cm->frame_bufs[cm->new_fb_idx].raw_frame_buffer, cm->get_fb_cb,
          cm->cb_priv)) {
    vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
                       "Failed to allocate frame buffer");
  }
  cm->frame_bufs[cm->new_fb_idx].buf.subsampling_x = cm->subsampling_x;
  cm->frame_bufs[cm->new_fb_idx].buf.subsampling_y = cm->subsampling_y;
  cm->frame_bufs[cm->new_fb_idx].buf.color_space =
      (vpx_color_space_t)cm->color_space;
  cm->frame_bufs[cm->new_fb_idx].buf.bit_depth = (unsigned int)cm->bit_depth;
}

static INLINE int valid_ref_frame_img_fmt(vpx_bit_depth_t ref_bit_depth,
                                          int ref_xss, int ref_yss,
                                          vpx_bit_depth_t this_bit_depth,
                                          int this_xss, int this_yss) {
  return ref_bit_depth == this_bit_depth && ref_xss == this_xss &&
         ref_yss == this_yss;
}

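// The frame size is either copied from the first reference whose "use ref
// size" bit is set or coded explicitly. At least one reference must have a
// valid size relative to the new frame, and every reference must match the
// frame's bit depth and subsampling.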
static void setup_frame_size_with_refs(VP9_COMMON *cm,
                                       struct vp9_read_bit_buffer *rb) {
  int width, height;
  int found = 0, i;
  int has_valid_ref_frame = 0;
  for (i = 0; i < REFS_PER_FRAME; ++i) {
    if (vp9_rb_read_bit(rb)) {
      YV12_BUFFER_CONFIG *const buf = cm->frame_refs[i].buf;
      width = buf->y_crop_width;
      height = buf->y_crop_height;
      found = 1;
      break;
    }
  }

  if (!found)
    vp9_read_frame_size(rb, &width, &height);

  if (width <= 0 || height <= 0)
    vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
                       "Invalid frame size");

  // Check to make sure at least one of the frames that this frame references
  // has valid dimensions.
  for (i = 0; i < REFS_PER_FRAME; ++i) {
    RefBuffer *const ref_frame = &cm->frame_refs[i];
    has_valid_ref_frame |= valid_ref_frame_size(ref_frame->buf->y_crop_width,
                                                ref_frame->buf->y_crop_height,
                                                width, height);
  }
  if (!has_valid_ref_frame)
    vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
                       "Referenced frame has invalid size");
  for (i = 0; i < REFS_PER_FRAME; ++i) {
    RefBuffer *const ref_frame = &cm->frame_refs[i];
    if (!valid_ref_frame_img_fmt(
            ref_frame->buf->bit_depth,
            ref_frame->buf->subsampling_x,
            ref_frame->buf->subsampling_y,
            cm->bit_depth,
            cm->subsampling_x,
            cm->subsampling_y))
      vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,