/*
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include <assert.h>
#include <stdio.h>

#include "vp9/decoder/vp9_onyxd_int.h"
#include "vp9/common/vp9_common.h"
#include "vp9/common/vp9_header.h"
#include "vp9/common/vp9_reconintra.h"
#include "vp9/common/vp9_reconinter.h"
#include "vp9/common/vp9_entropy.h"
#include "vp9/decoder/vp9_decodframe.h"
#include "vp9/decoder/vp9_detokenize.h"
#include "vp9/common/vp9_invtrans.h"
#include "vp9/common/vp9_alloccommon.h"
#include "vp9/common/vp9_entropymode.h"
#include "vp9/common/vp9_quant_common.h"
#include "vpx_scale/vpx_scale.h"
#include "vp9/common/vp9_setupintrarecon.h"

#include "vp9/decoder/vp9_decodemv.h"
#include "vp9/common/vp9_extend.h"
#include "vp9/common/vp9_modecont.h"
#include "vpx_mem/vpx_mem.h"
#include "vp9/decoder/vp9_dboolhuff.h"

#include "vp9/common/vp9_seg_common.h"
#include "vp9/common/vp9_tile_common.h"
#include "vp9_rtcd.h"

// #define DEC_DEBUG
#ifdef DEC_DEBUG
int dec_debug = 0;
#endif

static int read_le16(const uint8_t *p) {
  return (p[1] << 8) | p[0];
}

static int read_le32(const uint8_t *p) {
  return (p[3] << 24) | (p[2] << 16) | (p[1] << 8) | p[0];
}

// len == 0 is not allowed
static int read_is_valid(const uint8_t *start, size_t len,
                         const uint8_t *end) {
  return start + len > start && start + len <= end;
}

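// Reads the frame-level transform size mode. Lossless frames are forced to
// ONLY_4X4; otherwise a 2-bit literal (with one extension bit to tell
// ALLOW_32X32 from TX_MODE_SELECT) picks the mode, and TX_MODE_SELECT also
// carries three per-size selection probabilities.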
static void setup_txfm_mode(VP9_COMMON *pc, int lossless, vp9_reader *r) {
  if (lossless) {
    pc->txfm_mode = ONLY_4X4;
  } else {
    pc->txfm_mode = vp9_read_literal(r, 2);
    if (pc->txfm_mode == ALLOW_32X32)
      pc->txfm_mode += vp9_read_bit(r);

    if (pc->txfm_mode == TX_MODE_SELECT) {
      pc->prob_tx[0] = vp9_read_prob(r);
      pc->prob_tx[1] = vp9_read_prob(r);
      pc->prob_tx[2] = vp9_read_prob(r);
    }
  }
}

static int get_unsigned_bits(unsigned int num_values) {
  int cat = 0;
  if (num_values <= 1)
    return 0;
  num_values--;
  while (num_values > 0) {
    cat++;
    num_values >>= 1;
  }
  return cat;
}

static int inv_recenter_nonneg(int v, int m) {
  if (v > 2 * m)
    return v;

  return v % 2 ? m - (v + 1) / 2 : m + v / 2;
}

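// Near-uniform code for a value in [0, n): the first (1 << l) - n symbols
// use l - 1 bits, the remaining ones use l bits.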
static int decode_uniform(vp9_reader *r, int n) {
  int v;
  const int l = get_unsigned_bits(n);
  const int m = (1 << l) - n;
  if (!l)
    return 0;

  v = vp9_read_literal(r, l - 1);
  return v < m ?  v : (v << 1) - m + vp9_read_bit(r);
}

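// Decodes a value in [0, num_syms) coded with a terminated subexponential
// code of parameter k: literal ranges grow geometrically until the remaining
// symbols fit in a final near-uniform range.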
static int decode_term_subexp(vp9_reader *r, int k, int num_syms) {
  int i = 0, mk = 0, word;
  while (1) {
    const int b = i ? k + i - 1 : k;
    const int a = 1 << b;
    if (num_syms <= mk + 3 * a) {
      word = decode_uniform(r, num_syms - mk) + mk;
      break;
    } else {
      if (vp9_read_bit(r)) {
        i++;
        mk += a;
      } else {
        word = vp9_read_literal(r, b) + mk;
        break;
      }
    }
  }
  return word;
}

static int decode_unsigned_max(vp9_reader *r, int max) {
  int data = 0, bit = 0, lmax = max;

  while (lmax) {
    data |= vp9_read_bit(r) << bit++;
    lmax >>= 1;
  }
  return data > max ? max : data;
}

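// merge_index() and inv_remap_prob() invert the encoder-side remapping of
// probability updates (reordering modulo MODULUS_PARAM plus recentering
// around the previous probability), so that small, common probability
// changes receive short codes.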
static int merge_index(int v, int n, int modulus) {
  int max1 = (n - 1 - modulus / 2) / modulus + 1;
  if (v < max1) {
    v = v * modulus + modulus / 2;
  } else {
    int w;
    v -= max1;
    w = v;
    v += (v + modulus - modulus / 2) / modulus;
    while (v % modulus == modulus / 2 ||
           w != v - (v + modulus - modulus / 2) / modulus) v++;
  }
  return v;
}

static int inv_remap_prob(int v, int m) {
  const int n = 256;

  v = merge_index(v, n - 1, MODULUS_PARAM);
  if ((m << 1) <= n) {
    return inv_recenter_nonneg(v + 1, m);
  } else {
    return n - 1 - inv_recenter_nonneg(v + 1, n - 1 - m);
  }
}

static vp9_prob read_prob_diff_update(vp9_reader *r, int oldp) {
  int delp = decode_term_subexp(r, SUBEXP_PARAM, 255);
  return (vp9_prob)inv_remap_prob(delp, oldp);
}

void vp9_init_dequantizer(VP9_COMMON *pc) {
  int q, i;

  for (q = 0; q < QINDEX_RANGE; q++) {
    // DC value
    pc->y_dequant[q][0] = vp9_dc_quant(q, pc->y_dc_delta_q);
    pc->uv_dequant[q][0] = vp9_dc_quant(q, pc->uv_dc_delta_q);

    // AC values
    for (i = 1; i < 16; i++) {
      const int rc = vp9_default_zig_zag1d_4x4[i];

      pc->y_dequant[q][rc] = vp9_ac_quant(q, 0);
      pc->uv_dequant[q][rc] = vp9_ac_quant(q, pc->uv_ac_delta_q);
    }
  }
}

static int get_qindex(MACROBLOCKD *mb, int segment_id, int base_qindex) {
  // Set the Q baseline allowing for any segment level adjustment
  if (vp9_segfeature_active(mb, segment_id, SEG_LVL_ALT_Q)) {
    const int data = vp9_get_segdata(mb, segment_id, SEG_LVL_ALT_Q);
    return mb->mb_segment_abs_delta == SEGMENT_ABSDATA ?
               data :  // Abs value
               clamp(base_qindex + data, 0, MAXQ);  // Delta value
  } else {
    return base_qindex;
  }
}

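// Points the per-plane dequantization tables of the current macroblock at
// the entries for its (possibly segment-adjusted) q index.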
static void mb_init_dequantizer(VP9_COMMON *pc, MACROBLOCKD *xd) {
  int i;
  const int segment_id = xd->mode_info_context->mbmi.segment_id;
  xd->q_index = get_qindex(xd, segment_id, pc->base_qindex);

  xd->plane[0].dequant = pc->y_dequant[xd->q_index];
  for (i = 1; i < MAX_MB_PLANE; i++)
    xd->plane[i].dequant = pc->uv_dequant[xd->q_index];
}

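// Only compiled when the SB8X8 experiment is off: reconstructs the luma and
// chroma residuals of an I8X8_PRED macroblock using 8x8 transforms.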
#if !CONFIG_SB8X8
static void decode_8x8(MACROBLOCKD *xd) {
  const MB_PREDICTION_MODE mode = xd->mode_info_context->mbmi.mode;
  // luma
  // if the first one is DCT_DCT assume all the rest are as well
  TX_TYPE tx_type = get_tx_type_8x8(xd, 0);
  int i;
  assert(mode == I8X8_PRED);
  for (i = 0; i < 4; i++) {
    int ib = vp9_i8x8_block[i];
    int idx = (ib & 0x02) ? (ib + 2) : ib;
    int16_t *q  = BLOCK_OFFSET(xd->plane[0].qcoeff, idx, 16);
    uint8_t* const dst =
          raster_block_offset_uint8(xd, BLOCK_SIZE_MB16X16, 0, ib,
                                    xd->plane[0].dst.buf,
                                    xd->plane[0].dst.stride);
    int stride = xd->plane[0].dst.stride;
    if (mode == I8X8_PRED) {
      int i8x8mode = xd->mode_info_context->bmi[ib].as_mode.first;
      vp9_intra8x8_predict(xd, ib, i8x8mode, dst, stride);
    }
    tx_type = get_tx_type_8x8(xd, ib);
    vp9_iht_add_8x8_c(tx_type, q, dst, stride, xd->plane[0].eobs[idx]);
  }

  // chroma
  for (i = 0; i < 4; i++) {
    int ib = vp9_i8x8_block[i];
    int i8x8mode = xd->mode_info_context->bmi[ib].as_mode.first;
    uint8_t* dst;

    dst = raster_block_offset_uint8(xd, BLOCK_SIZE_MB16X16, 1, i,
                                    xd->plane[1].dst.buf,
                                    xd->plane[1].dst.stride);
    vp9_intra_uv4x4_predict(xd, 16 + i, i8x8mode,
                            dst, xd->plane[1].dst.stride);
    xd->itxm_add(BLOCK_OFFSET(xd->plane[1].qcoeff, i, 16),
                 dst, xd->plane[1].dst.stride,
                 xd->plane[1].eobs[i]);

    dst = raster_block_offset_uint8(xd, BLOCK_SIZE_MB16X16, 2, i,
                                    xd->plane[2].dst.buf,
                                    xd->plane[1].dst.stride);
    vp9_intra_uv4x4_predict(xd, 20 + i, i8x8mode,
                            dst, xd->plane[1].dst.stride);
    xd->itxm_add(BLOCK_OFFSET(xd->plane[2].qcoeff, i, 16),
                 dst, xd->plane[1].dst.stride,
                 xd->plane[2].eobs[i]);
  }
}
#endif

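// Applies the inverse 4x4 transform for one luma block and adds the result
// to the prediction, using the type-specific hybrid transform when tx_type
// is not DCT_DCT.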
static INLINE void dequant_add_y(MACROBLOCKD *xd, TX_TYPE tx_type, int idx) {
  struct macroblockd_plane *const y = &xd->plane[0];
  uint8_t* const dst = raster_block_offset_uint8(xd, BLOCK_SIZE_MB16X16, 0, idx,
                                                 xd->plane[0].dst.buf,
                                                 xd->plane[0].dst.stride);
  if (tx_type != DCT_DCT) {
    vp9_iht_add_c(tx_type, BLOCK_OFFSET(y->qcoeff, idx, 16),
                  dst, xd->plane[0].dst.stride, y->eobs[idx]);
  } else {
    xd->itxm_add(BLOCK_OFFSET(y->qcoeff, idx, 16),
                 dst, xd->plane[0].dst.stride, y->eobs[idx]);
  }
}

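// Non-SB8X8 counterpart of decode_8x8 for I8X8_PRED macroblocks coded with
// 4x4 transforms.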
#if !CONFIG_SB8X8
static void decode_4x4(VP9D_COMP *pbi, MACROBLOCKD *xd, vp9_reader *r) {
  TX_TYPE tx_type;
  int i = 0;
  const MB_PREDICTION_MODE mode = xd->mode_info_context->mbmi.mode;
  assert(mode == I8X8_PRED);
  for (i = 0; i < 4; i++) {
    int ib = vp9_i8x8_block[i];
    const int iblock[4] = {0, 1, 4, 5};
    int j;
    uint8_t* dst;
    int i8x8mode = xd->mode_info_context->bmi[ib].as_mode.first;

    dst = raster_block_offset_uint8(xd, BLOCK_SIZE_MB16X16, 0, ib,
                                    xd->plane[0].dst.buf,
                                    xd->plane[0].dst.stride);
    vp9_intra8x8_predict(xd, ib, i8x8mode, dst, xd->plane[0].dst.stride);
    for (j = 0; j < 4; j++) {
      tx_type = get_tx_type_4x4(xd, ib + iblock[j]);
      dequant_add_y(xd, tx_type, ib + iblock[j]);
    }
    dst = raster_block_offset_uint8(xd, BLOCK_SIZE_MB16X16, 1, i,
                                    xd->plane[1].dst.buf,
                                    xd->plane[1].dst.stride);
    vp9_intra_uv4x4_predict(xd, 16 + i, i8x8mode,
                            dst, xd->plane[1].dst.stride);
    xd->itxm_add(BLOCK_OFFSET(xd->plane[1].qcoeff, i, 16),
                 dst, xd->plane[1].dst.stride,
                 xd->plane[1].eobs[i]);
    dst = raster_block_offset_uint8(xd, BLOCK_SIZE_MB16X16, 2, i,
                                    xd->plane[2].dst.buf,
                                    xd->plane[2].dst.stride);
    vp9_intra_uv4x4_predict(xd, 20 + i, i8x8mode,
                            dst, xd->plane[1].dst.stride);
    xd->itxm_add(BLOCK_OFFSET(xd->plane[2].qcoeff, i, 16),
                 dst, xd->plane[1].dst.stride,
                 xd->plane[2].eobs[i]);
  }
}
#endif

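// Maps a transform block index within a plane to the raster index of its
// top-left 4x4 block, given the transform size (ss_txfrm_size is twice the
// TX_SIZE).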
static int txfrm_block_to_raster_block(MACROBLOCKD *xd,
                                       BLOCK_SIZE_TYPE bsize,
                                       int plane, int block,
                                       int ss_txfrm_size) {
  const int bwl = b_width_log2(bsize) - xd->plane[plane].subsampling_x;
  const int txwl = ss_txfrm_size / 2;
  const int tx_cols_lg2 = bwl - txwl;
  const int tx_cols = 1 << tx_cols_lg2;
  const int raster_mb = block >> ss_txfrm_size;
  const int x = (raster_mb & (tx_cols - 1)) << (txwl);
  const int y = raster_mb >> tx_cols_lg2 << (txwl);
  return x + (y << bwl);
}


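// foreach_transformed_block() callback: inverse-transforms the coefficients
// of a single block and adds them to the prediction, picking the transform
// type from the prediction mode for luma and DCT_DCT for chroma.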
static void decode_block(int plane, int block, BLOCK_SIZE_TYPE bsize,
                         int ss_txfrm_size, void *arg) {
  MACROBLOCKD* const xd = arg;
  int16_t* const qcoeff = BLOCK_OFFSET(xd->plane[plane].qcoeff, block, 16);
  const int stride = xd->plane[plane].dst.stride;
  const int raster_block = txfrm_block_to_raster_block(xd, bsize, plane,
                                                       block, ss_txfrm_size);
  uint8_t* const dst = raster_block_offset_uint8(xd, bsize, plane,
                                                 raster_block,
                                                 xd->plane[plane].dst.buf,
                                                 stride);

  TX_TYPE tx_type;

  switch (ss_txfrm_size / 2) {
    case TX_4X4:
      tx_type = plane == 0 ? get_tx_type_4x4(xd, raster_block) : DCT_DCT;
      if (tx_type == DCT_DCT)
        xd->itxm_add(qcoeff, dst, stride, xd->plane[plane].eobs[block]);
      else
        vp9_iht_add_c(tx_type, qcoeff, dst, stride,
                      xd->plane[plane].eobs[block]);
      break;
    case TX_8X8:
      tx_type = plane == 0 ? get_tx_type_8x8(xd, raster_block) : DCT_DCT;
      vp9_iht_add_8x8_c(tx_type, qcoeff, dst, stride,
                        xd->plane[plane].eobs[block]);
      break;
    case TX_16X16:
      tx_type = plane == 0 ? get_tx_type_16x16(xd, raster_block) : DCT_DCT;
      vp9_iht_add_16x16_c(tx_type, qcoeff, dst, stride,
                          xd->plane[plane].eobs[block]);
      break;
    case TX_32X32:
      vp9_idct_add_32x32(qcoeff, dst, stride, xd->plane[plane].eobs[block]);
      break;
  }
}

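// Per-4x4 intra reconstruction for small blocks: predicts each luma
// sub-block, (for the NEWBINTRAMODES experiment) decodes its coefficients,
// and adds the inverse-transformed residual, then handles the chroma planes
// through the generic decode_block() path.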
static void decode_atom_intra(VP9D_COMP *pbi, MACROBLOCKD *xd,
                              vp9_reader *r,
                              BLOCK_SIZE_TYPE bsize) {
  int i = 0;
  int bwl = b_width_log2(bsize), bhl = b_height_log2(bsize);
  int bc = 1 << (bwl + bhl);
  int tx_type;

  for (i = 0; i < bc; i++) {
    int b_mode = xd->mode_info_context->bmi[i].as_mode.first;
    uint8_t* dst;
    dst = raster_block_offset_uint8(xd, bsize, 0, i,
                                    xd->plane[0].dst.buf,
                                    xd->plane[0].dst.stride);
#if CONFIG_NEWBINTRAMODES
    xd->mode_info_context->bmi[i].as_mode.context =
        vp9_find_bpred_context(xd, i, dst, xd->plane[0].dst.stride);
    if (!xd->mode_info_context->mbmi.mb_skip_coeff)
      vp9_decode_coefs_4x4(pbi, xd, r, PLANE_TYPE_Y_WITH_DC, i);
#endif
    vp9_intra4x4_predict(xd, i, b_mode, dst, xd->plane[0].dst.stride);
    // TODO(jingning): refactor to use foreach_transformed_block_in_plane_
    tx_type = get_tx_type_4x4(xd, i);
    dequant_add_y(xd, tx_type, i);
  }
#if CONFIG_NEWBINTRAMODES
  if (!xd->mode_info_context->mbmi.mb_skip_coeff)
    vp9_decode_mb_tokens_4x4_uv(pbi, xd, r);
#endif
  foreach_transformed_block_uv(xd, bsize, decode_block, xd);
}

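// Decodes one small (sub-16x16) block: sets up interpolation filters, builds
// the intra or inter prediction, decodes tokens unless the block is skipped,
// and reconstructs the residual.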
static void decode_atom(VP9D_COMP *pbi, MACROBLOCKD *xd,
                        int mi_row, int mi_col,
                        vp9_reader *r, BLOCK_SIZE_TYPE bsize) {
  MB_MODE_INFO *const mbmi = &xd->mode_info_context->mbmi;

  if (pbi->common.frame_type != KEY_FRAME)
    vp9_setup_interp_filters(xd, mbmi->interp_filter, &pbi->common);

  // prediction
  if (mbmi->ref_frame == INTRA_FRAME)
    vp9_build_intra_predictors_sbuv_s(xd, bsize);
  else
    vp9_build_inter_predictors_sb(xd, mi_row, mi_col, bsize);

  if (mbmi->mb_skip_coeff) {
    vp9_reset_sb_tokens_context(xd, bsize);
  } else {
    // re-initialize macroblock dequantizer before detokenization
    if (xd->segmentation_enabled)
      mb_init_dequantizer(&pbi->common, xd);

    if (!vp9_reader_has_error(r)) {
#if CONFIG_NEWBINTRAMODES
    if (mbmi->mode != I4X4_PRED)
#endif
      vp9_decode_tokens(pbi, xd, r, bsize);
    }
  }

  if (mbmi->ref_frame == INTRA_FRAME)
    decode_atom_intra(pbi, xd, r, bsize);
  else
    foreach_transformed_block(xd, bsize, decode_block, xd);
}

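// Decodes one macroblock or superblock: builds the prediction, decodes the
// residual tokens (unless skipped), and applies the inverse transforms; if
// every coefficient is zero the skip flag is propagated so the loop filter
// can be bypassed.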
static void decode_sb(VP9D_COMP *pbi, MACROBLOCKD *xd, int mi_row, int mi_col,
                      vp9_reader *r, BLOCK_SIZE_TYPE bsize) {
  const int bwl = mi_width_log2(bsize), bhl = mi_height_log2(bsize);
  const int bw = 1 << bwl, bh = 1 << bhl;
  int n, eobtotal;
  VP9_COMMON *const pc = &pbi->common;
  MODE_INFO *const mi = xd->mode_info_context;
  MB_MODE_INFO *const mbmi = &mi->mbmi;
  const int mis = pc->mode_info_stride;

  assert(mbmi->sb_type == bsize);

  if (pbi->common.frame_type != KEY_FRAME)
    vp9_setup_interp_filters(xd, mbmi->interp_filter, pc);

  // generate prediction
  if (mbmi->ref_frame == INTRA_FRAME) {
    vp9_build_intra_predictors_sby_s(xd, bsize);
    vp9_build_intra_predictors_sbuv_s(xd, bsize);
  } else {
    vp9_build_inter_predictors_sb(xd, mi_row, mi_col, bsize);
  }

  if (mbmi->mb_skip_coeff) {
    vp9_reset_sb_tokens_context(xd, bsize);
  } else {
    // re-initialize macroblock dequantizer before detokenization
    if (xd->segmentation_enabled)
      mb_init_dequantizer(pc, xd);

    // dequantization and idct
    eobtotal = vp9_decode_tokens(pbi, xd, r, bsize);
    if (eobtotal == 0) {  // skip loopfilter
      for (n = 0; n < bw * bh; n++) {
        const int x_idx = n & (bw - 1), y_idx = n >> bwl;

        if (mi_col + x_idx < pc->mi_cols && mi_row + y_idx < pc->mi_rows)
          mi[y_idx * mis + x_idx].mbmi.mb_skip_coeff = 1;
      }
    } else {
      foreach_transformed_block(xd, bsize, decode_block, xd);
    }
  }
}

#if !CONFIG_SB8X8
// TODO(jingning): This only performs the I8X8_PRED decoding process, which
// will be covered automatically by decode_sb when SB8X8 is on.
static void decode_mb(VP9D_COMP *pbi, MACROBLOCKD *xd,
                     int mi_row, int mi_col,
                     vp9_reader *r) {
  MB_MODE_INFO *const mbmi = &xd->mode_info_context->mbmi;
  const int tx_size = mbmi->txfm_size;

  assert(mbmi->sb_type == BLOCK_SIZE_MB16X16);

  if (mbmi->mb_skip_coeff) {
    vp9_reset_sb_tokens_context(xd, BLOCK_SIZE_MB16X16);
  } else {
    // re-initialize macroblock dequantizer before detokenization
    if (xd->segmentation_enabled)
      mb_init_dequantizer(&pbi->common, xd);

    if (!vp9_reader_has_error(r))
      vp9_decode_tokens(pbi, xd, r, BLOCK_SIZE_MB16X16);
  }

  if (tx_size == TX_8X8)
    decode_8x8(xd);
  else
    decode_4x4(pbi, xd, r);
}
#endif

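// Reads an optional delta-q value (4-bit magnitude plus sign) and returns
// whether it changed, which triggers a dequantizer update.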
static int get_delta_q(vp9_reader *r, int *dq) {
  const int old_value = *dq;

  if (vp9_read_bit(r)) {  // Update bit
    const int value = vp9_read_literal(r, 4);
    *dq = vp9_read_and_apply_sign(r, value);
  }

  // Trigger a quantizer update if the delta-q value has changed
  return old_value != *dq;
}

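// Positions the mode info, context and destination-buffer pointers of the
// macroblockd at the block located at (mi_row, mi_col).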
static void set_offsets(VP9D_COMP *pbi, BLOCK_SIZE_TYPE bsize,
                        int mi_row, int mi_col) {
  const int bh = 1 << mi_height_log2(bsize);
  const int bw = 1 << mi_width_log2(bsize);
  VP9_COMMON *const cm = &pbi->common;
  MACROBLOCKD *const xd = &pbi->mb;
  int i;

  const int mi_idx = mi_row * cm->mode_info_stride + mi_col;
  const YV12_BUFFER_CONFIG *dst_fb = &cm->yv12_fb[cm->new_fb_idx];
  const int recon_yoffset =
      (MI_SIZE * mi_row) * dst_fb->y_stride + (MI_SIZE * mi_col);
  const int recon_uvoffset =
      (MI_UV_SIZE * mi_row) * dst_fb->uv_stride + (MI_UV_SIZE * mi_col);

  xd->mode_info_context = cm->mi + mi_idx;
  xd->mode_info_context->mbmi.sb_type = bsize;
  xd->prev_mode_info_context = cm->prev_mi + mi_idx;

  for (i = 0; i < MAX_MB_PLANE; i++) {
    xd->plane[i].above_context = cm->above_context[i] +
        (mi_col * 4 >> (xd->plane[i].subsampling_x + CONFIG_SB8X8));
    xd->plane[i].left_context = cm->left_context[i] +
        (((mi_row * 4 >> CONFIG_SB8X8) & 15) >> xd->plane[i].subsampling_y);
  }
  xd->above_seg_context = cm->above_seg_context + (mi_col >> CONFIG_SB8X8);
  xd->left_seg_context  = cm->left_seg_context + ((mi_row >> CONFIG_SB8X8) & 3);

  // Distance of Mb to the various image edges. These are specified to 8th pel
  // as they are always compared to values that are in 1/8th pel units
  set_mi_row_col(cm, xd, mi_row, bh, mi_col, bw);

  xd->plane[0].dst.buf = dst_fb->y_buffer + recon_yoffset;
  xd->plane[1].dst.buf = dst_fb->u_buffer + recon_uvoffset;
  xd->plane[2].dst.buf = dst_fb->v_buffer + recon_uvoffset;
}

static void set_refs(VP9D_COMP *pbi, int mi_row, int mi_col) {
  VP9_COMMON *const cm = &pbi->common;
  MACROBLOCKD *const xd = &pbi->mb;
  MB_MODE_INFO *const mbmi = &xd->mode_info_context->mbmi;

  if (mbmi->ref_frame > INTRA_FRAME) {
    // Select the appropriate reference frame for this MB
    const int fb_idx = cm->active_ref_idx[mbmi->ref_frame - 1];
    const YV12_BUFFER_CONFIG *cfg = &cm->yv12_fb[fb_idx];
    xd->scale_factor[0]    = cm->active_ref_scale[mbmi->ref_frame - 1];
    xd->scale_factor_uv[0] = cm->active_ref_scale[mbmi->ref_frame - 1];
    setup_pre_planes(xd, cfg, NULL, mi_row, mi_col,
                     xd->scale_factor, xd->scale_factor_uv);
    xd->corrupted |= cfg->corrupted;

    if (mbmi->second_ref_frame > INTRA_FRAME) {
      // Select the appropriate reference frame for this MB
      const int second_fb_idx = cm->active_ref_idx[mbmi->second_ref_frame - 1];
      const YV12_BUFFER_CONFIG *second_cfg = &cm->yv12_fb[second_fb_idx];
      xd->scale_factor[1]    = cm->active_ref_scale[mbmi->second_ref_frame - 1];
      xd->scale_factor_uv[1] = cm->active_ref_scale[mbmi->second_ref_frame - 1];
      setup_pre_planes(xd, NULL, second_cfg, mi_row, mi_col,
                       xd->scale_factor, xd->scale_factor_uv);
      xd->corrupted |= second_cfg->corrupted;
    }
  }
}

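// Decodes a single block: reads its mode/motion info, hooks up the reference
// frames, and dispatches to the appropriate reconstruction routine.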
static void decode_modes_b(VP9D_COMP *pbi, int mi_row, int mi_col,
                           vp9_reader *r, BLOCK_SIZE_TYPE bsize) {
  MACROBLOCKD *const xd = &pbi->mb;

  set_offsets(pbi, bsize, mi_row, mi_col);
  vp9_decode_mb_mode_mv(pbi, xd, mi_row, mi_col, r);
  set_refs(pbi, mi_row, mi_col);

#if CONFIG_SB8X8
  if (bsize >= BLOCK_SIZE_SB8X8)
    decode_sb(pbi, xd, mi_row, mi_col, r, bsize);
  else
    decode_atom(pbi, xd, mi_row, mi_col, r, BLOCK_SIZE_SB8X8);
#else
  // TODO(jingning): merge decode_sb_ and decode_mb_
  if (bsize > BLOCK_SIZE_MB16X16) {
    decode_sb(pbi, xd, mi_row, mi_col, r, bsize);
  } else {
    // TODO(jingning): In transition of separating the functionality of
    // decode_mb into decode_sb and decode_atom. Will remove decode_mb and
    // clean this up when SB8X8 is on.
    if (xd->mode_info_context->mbmi.mode == I4X4_PRED ||
        (xd->mode_info_context->mbmi.mode == SPLITMV &&
         xd->mode_info_context->mbmi.partitioning == PARTITIONING_4X4))
      decode_atom(pbi, xd, mi_row, mi_col, r, bsize);
    else if (xd->mode_info_context->mbmi.mode != I8X8_PRED)
      decode_sb(pbi, xd, mi_row, mi_col, r, bsize);
    else
      // TODO(jingning): decode_mb still carries the decoding process of
      // I8X8_PRED. This will be covered by decode_sb when SB8X8 is on.
      decode_mb(pbi, xd, mi_row, mi_col, r);
  }
#endif

  xd->corrupted |= vp9_reader_has_error(r);
}

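// Recursively decodes a superblock: reads the partition type (none, horz,
// vert or split), decodes the resulting sub-blocks, and updates the
// partition context.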
static void decode_modes_sb(VP9D_COMP *pbi, int mi_row, int mi_col,
                            vp9_reader* r, BLOCK_SIZE_TYPE bsize) {
  VP9_COMMON *const pc = &pbi->common;
  MACROBLOCKD *const xd = &pbi->mb;
  int bsl = mi_width_log2(bsize), bs = (1 << bsl) / 2;
  int n;
  PARTITION_TYPE partition = PARTITION_NONE;
  BLOCK_SIZE_TYPE subsize;

  if (mi_row >= pc->mi_rows || mi_col >= pc->mi_cols)
    return;

  if (bsize > BLOCK_SIZE_MB16X16) {
    int pl;
    // read the partition information
    xd->left_seg_context =
        pc->left_seg_context + ((mi_row >> CONFIG_SB8X8) & 3);
    xd->above_seg_context = pc->above_seg_context + (mi_col >> CONFIG_SB8X8);
    pl = partition_plane_context(xd, bsize);
    partition = treed_read(r, vp9_partition_tree,
                           pc->fc.partition_prob[pl]);
    pc->fc.partition_counts[pl][partition]++;
  }

  switch (partition) {
    case PARTITION_NONE:
      subsize = bsize;
      decode_modes_b(pbi, mi_row, mi_col, r, subsize);
      break;
    case PARTITION_HORZ:
      subsize = (bsize == BLOCK_SIZE_SB64X64) ? BLOCK_SIZE_SB64X32 :
                                                BLOCK_SIZE_SB32X16;
      decode_modes_b(pbi, mi_row, mi_col, r, subsize);
      if ((mi_row + bs) < pc->mi_rows)
        decode_modes_b(pbi, mi_row + bs, mi_col, r, subsize);
      break;
    case PARTITION_VERT:
      subsize = (bsize == BLOCK_SIZE_SB64X64) ? BLOCK_SIZE_SB32X64 :
                                                BLOCK_SIZE_SB16X32;
      decode_modes_b(pbi, mi_row, mi_col, r, subsize);
      if ((mi_col + bs) < pc->mi_cols)
        decode_modes_b(pbi, mi_row, mi_col + bs, r, subsize);
      break;
    case PARTITION_SPLIT:
      subsize = (bsize == BLOCK_SIZE_SB64X64) ? BLOCK_SIZE_SB32X32 :
                                                BLOCK_SIZE_MB16X16;
      for (n = 0; n < 4; n++) {
        int j = n >> 1, i = n & 0x01;
        if (subsize == BLOCK_SIZE_SB32X32)
          xd->sb_index = n;
        else
          xd->mb_index = n;
        decode_modes_sb(pbi, mi_row + j * bs, mi_col + i * bs, r, subsize);
      }
      break;
    default:
      assert(0);
  }
  // update partition context
  if ((partition == PARTITION_SPLIT) && (bsize > BLOCK_SIZE_SB32X32))
    return;

  xd->left_seg_context = pc->left_seg_context + ((mi_row >> CONFIG_SB8X8) & 3);
  xd->above_seg_context = pc->above_seg_context + (mi_col >> CONFIG_SB8X8);
  update_partition_context(xd, subsize, bsize);
}

static void setup_token_decoder(VP9D_COMP *pbi,
                                const uint8_t *data,
                                vp9_reader *r) {
  VP9_COMMON *pc = &pbi->common;
  const uint8_t *data_end = pbi->source + pbi->source_sz;
  const size_t partition_size = data_end - data;

  // Validate the calculated partition length. If the buffer
  // described by the partition can't be fully read, then restrict
  // it to the portion that can be (for EC mode) or throw an error.
  if (!read_is_valid(data, partition_size, data_end))
    vpx_internal_error(&pc->error, VPX_CODEC_CORRUPT_FRAME,
                       "Truncated packet or corrupt partition "
                       "%d length", 1);

  if (vp9_reader_init(r, data, partition_size))
    vpx_internal_error(&pc->error, VPX_CODEC_MEM_ERROR,
                       "Failed to allocate bool decoder %d", 1);
}

static void init_frame(VP9D_COMP *pbi) {
  VP9_COMMON *const pc = &pbi->common;
  MACROBLOCKD *const xd = &pbi->mb;

  if (pc->frame_type == KEY_FRAME) {
    vp9_setup_past_independence(pc, xd);
    // All buffers are implicitly updated on key frames.
    pbi->refresh_frame_flags = (1 << NUM_REF_FRAMES) - 1;
  } else if (pc->error_resilient_mode) {
    vp9_setup_past_independence(pc, xd);
  }

  xd->mode_info_context = pc->mi;
  xd->prev_mode_info_context = pc->prev_mi;
  xd->frame_type = pc->frame_type;
  xd->mode_info_context->mbmi.mode = DC_PRED;
  xd->mode_info_stride = pc->mode_info_stride;
}

#if CONFIG_CODE_ZEROGROUP
static void read_zpc_probs_common(VP9_COMMON *cm,
                                  vp9_reader* bc,
                                  TX_SIZE tx_size) {
  int r, b, p, n;
  vp9_zpc_probs *zpc_probs;
  vp9_prob upd = ZPC_UPDATE_PROB;
  if (!get_zpc_used(tx_size)) return;
  if (!vp9_read_bit(bc)) return;

  if (tx_size == TX_32X32) {
    zpc_probs = &cm->fc.zpc_probs_32x32;
  } else if (tx_size == TX_16X16) {
    zpc_probs = &cm->fc.zpc_probs_16x16;
  } else if (tx_size == TX_8X8) {
    zpc_probs = &cm->fc.zpc_probs_8x8;
  } else {
    zpc_probs = &cm->fc.zpc_probs_4x4;
  }
  for (r = 0; r < REF_TYPES; ++r) {
    for (b = 0; b < ZPC_BANDS; ++b) {
      for (p = 0; p < ZPC_PTOKS; ++p) {
        for (n = 0; n < ZPC_NODES; ++n) {
          vp9_prob *q = &(*zpc_probs)[r][b][p][n];
#if USE_ZPC_EXTRA == 0
          if (n == 1) continue;
#endif
          if (vp9_read(bc, upd)) {
            *q = read_prob_diff_update(bc, *q);
          }
        }
      }
    }
  }
}

static void read_zpc_probs(VP9_COMMON *cm,
                           vp9_reader* bc) {
  read_zpc_probs_common(cm, bc, TX_4X4);
  if (cm->txfm_mode > ONLY_4X4)
    read_zpc_probs_common(cm, bc, TX_8X8);
  if (cm->txfm_mode > ALLOW_8X8)
    read_zpc_probs_common(cm, bc, TX_16X16);
  if (cm->txfm_mode > ALLOW_16X16)
    read_zpc_probs_common(cm, bc, TX_32X32);
}
#endif  // CONFIG_CODE_ZEROGROUP

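// Reads the optional coefficient probability updates for one transform size;
// each updated node is coded as a subexponential delta from its current
// value.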
static void read_coef_probs_common(vp9_coeff_probs *coef_probs,
                                   TX_SIZE tx_size,
                                   vp9_reader *r) {
#if CONFIG_MODELCOEFPROB && MODEL_BASED_UPDATE
  const int entropy_nodes_update = UNCONSTRAINED_UPDATE_NODES;
#else
  const int entropy_nodes_update = ENTROPY_NODES;
#endif

  int i, j, k, l, m;

  if (vp9_read_bit(r)) {
    for (i = 0; i < BLOCK_TYPES; i++) {
      for (j = 0; j < REF_TYPES; j++) {
        for (k = 0; k < COEF_BANDS; k++) {
          for (l = 0; l < PREV_COEF_CONTEXTS; l++) {
            const int mstart = 0;
            if (l >= 3 && k == 0)
              continue;

            for (m = mstart; m < entropy_nodes_update; m++) {
              vp9_prob *const p = coef_probs[i][j][k][l] + m;

              if (vp9_read(r, vp9_coef_update_prob[m])) {
                *p = read_prob_diff_update(r, *p);
#if CONFIG_MODELCOEFPROB && MODEL_BASED_UPDATE
                if (m == UNCONSTRAINED_NODES - 1)
                  vp9_get_model_distribution(*p, coef_probs[i][j][k][l], i, j);
#endif
              }
            }
          }
        }
      }
    }
  }
}

static void read_coef_probs(VP9D_COMP *pbi, vp9_reader *r) {
  const TXFM_MODE mode = pbi->common.txfm_mode;
  FRAME_CONTEXT *const fc = &pbi->common.fc;

  read_coef_probs_common(fc->coef_probs_4x4, TX_4X4, r);

  if (mode > ONLY_4X4)
    read_coef_probs_common(fc->coef_probs_8x8, TX_8X8, r);

  if (mode > ALLOW_8X8)
    read_coef_probs_common(fc->coef_probs_16x16, TX_16X16, r);

  if (mode > ALLOW_16X16)
    read_coef_probs_common(fc->coef_probs_32x32, TX_32X32, r);
}

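// Reads the frame-level segmentation syntax, starting with the enable flag
// and the segmentation map update flag.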
static void setup_segmentation(VP9_COMMON *pc, MACROBLOCKD *xd, vp9_reader *r) {
  int i, j;

  xd->update_mb_segmentation_map = 0;
  xd->update_mb_segmentation_data = 0;
#if CONFIG_IMPLICIT_SEGMENTATION
  xd->allow_implicit_segment_update = 0;
#endif

  xd->segmentation_enabled = vp9_read_bit(r);
  if (!xd->segmentation_enabled)
    return;

  // Segmentation map update
  xd->update_mb_segmentation_map = vp9_read_bit(r);
#if CONFIG_IMPLICIT_SEGMENTATION
    xd->allow_implicit_segment_update = vp9_read_bit(r);
#endif