/*
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include <assert.h>
#include <stdint.h>

#include "./vp9_rtcd.h"
#include "vpx_mem/vpx_mem.h"
#include "vpx_scale/vpx_scale.h"

#include "vp9/common/vp9_alloccommon.h"
#include "vp9/common/vp9_common.h"
#include "vp9/common/vp9_entropy.h"
#include "vp9/common/vp9_entropymode.h"
#include "vp9/common/vp9_extend.h"
#include "vp9/common/vp9_idct.h"
#include "vp9/common/vp9_pred_common.h"
#include "vp9/common/vp9_quant_common.h"
#include "vp9/common/vp9_reconintra.h"
#include "vp9/common/vp9_reconinter.h"
#include "vp9/common/vp9_seg_common.h"
#include "vp9/common/vp9_tile_common.h"

#include "vp9/decoder/vp9_dboolhuff.h"
#include "vp9/decoder/vp9_decodframe.h"
#include "vp9/decoder/vp9_detokenize.h"
#include "vp9/decoder/vp9_decodemv.h"
#include "vp9/decoder/vp9_dsubexp.h"
#include "vp9/decoder/vp9_onyxd_int.h"
#include "vp9/decoder/vp9_read_bit_buffer.h"
#include "vp9/decoder/vp9_thread.h"
#include "vp9/decoder/vp9_treereader.h"
// Reads a 32-bit big-endian value from p.
static int read_be32(const uint8_t *p) {
  // Accumulate in unsigned arithmetic: on the original form, p[0] << 24
  // operates on a promoted (signed) int and is undefined behavior whenever
  // the top bit of p[0] is set.
  const uint32_t value = ((uint32_t)p[0] << 24) | ((uint32_t)p[1] << 16) |
                         ((uint32_t)p[2] << 8) | (uint32_t)p[3];
  return (int)value;
}

44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
// Compound prediction is only possible when the active references do not all
// share the same sign bias; returns 1 if at least one differs from the first.
static int is_compound_prediction_allowed(const VP9_COMMON *cm) {
  int ref = 1;
  while (ref < ALLOWED_REFS_PER_FRAME) {
    if (cm->ref_frame_sign_bias[ref + 1] != cm->ref_frame_sign_bias[1])
      return 1;
    ++ref;
  }
  return 0;
}

// Chooses the fixed and the two variable references for compound prediction.
// The two references sharing the same sign bias become the variable pair and
// the remaining one becomes the fixed reference.
static void setup_compound_prediction(VP9_COMMON *cm) {
  if (cm->ref_frame_sign_bias[LAST_FRAME] ==
          cm->ref_frame_sign_bias[GOLDEN_FRAME]) {
    // LAST and GOLDEN agree -> ALTREF is the fixed reference.
    cm->comp_fixed_ref = ALTREF_FRAME;
    cm->comp_var_ref[0] = LAST_FRAME;
    cm->comp_var_ref[1] = GOLDEN_FRAME;
  } else if (cm->ref_frame_sign_bias[LAST_FRAME] ==
                 cm->ref_frame_sign_bias[ALTREF_FRAME]) {
    // LAST and ALTREF agree -> GOLDEN is the fixed reference.
    cm->comp_fixed_ref = GOLDEN_FRAME;
    cm->comp_var_ref[0] = LAST_FRAME;
    cm->comp_var_ref[1] = ALTREF_FRAME;
  } else {
    // GOLDEN and ALTREF agree -> LAST is the fixed reference.
    cm->comp_fixed_ref = LAST_FRAME;
    cm->comp_var_ref[0] = GOLDEN_FRAME;
    cm->comp_var_ref[1] = ALTREF_FRAME;
  }
}

71
// Returns 1 iff [start, start + len) is a non-empty range lying entirely
// inside [start, end). len == 0 is not allowed.
static int read_is_valid(const uint8_t *start, size_t len, const uint8_t *end) {
  // Compare sizes instead of forming start + len: that pointer sum can
  // overflow (undefined behavior) when len comes from a corrupt bitstream.
  return len != 0 && start <= end && len <= (size_t)(end - start);
}

76
77
78
79
80
// Reads an unsigned value sized for [0, max] and clamps anything larger
// (which a corrupt stream can produce) down to max.
static int decode_unsigned_max(struct vp9_read_bit_buffer *rb, int max) {
  const int value = vp9_rb_read_literal(rb, get_unsigned_bits(max));
  if (value > max)
    return max;
  return value;
}

81
82
83
84
85
// Reads the frame-level transform mode: a 2-bit literal, with one extra bit
// distinguishing ALLOW_32X32 from TX_MODE_SELECT.
static TX_MODE read_tx_mode(vp9_reader *r) {
  const TX_MODE mode = vp9_read_literal(r, 2);
  return (mode == ALLOW_32X32) ? (TX_MODE)(mode + vp9_read_bit(r)) : mode;
}

88
// Reads differential updates for the three tx-size probability trees
// (8x8, 16x16, 32x32), in that order, one tree per size context.
static void read_tx_probs(struct tx_probs *tx_probs, vp9_reader *r) {
  int ctx, node;

  for (ctx = 0; ctx < TX_SIZE_CONTEXTS; ++ctx)
    for (node = 0; node < TX_SIZES - 3; ++node)
      vp9_diff_update_prob(r, &tx_probs->p8x8[ctx][node]);

  for (ctx = 0; ctx < TX_SIZE_CONTEXTS; ++ctx)
    for (node = 0; node < TX_SIZES - 2; ++node)
      vp9_diff_update_prob(r, &tx_probs->p16x16[ctx][node]);

  for (ctx = 0; ctx < TX_SIZE_CONTEXTS; ++ctx)
    for (node = 0; node < TX_SIZES - 1; ++node)
      vp9_diff_update_prob(r, &tx_probs->p32x32[ctx][node]);
}

104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
// Reads differential updates for the switchable interpolation-filter
// probabilities, one tree per context.
static void read_switchable_interp_probs(FRAME_CONTEXT *fc, vp9_reader *r) {
  int ctx, node;
  for (ctx = 0; ctx < SWITCHABLE_FILTERS + 1; ++ctx)
    for (node = 0; node < SWITCHABLE_FILTERS - 1; ++node)
      vp9_diff_update_prob(r, &fc->switchable_interp_prob[ctx][node]);
}

// Reads differential updates for the inter-mode probabilities,
// one tree per inter-mode context.
static void read_inter_mode_probs(FRAME_CONTEXT *fc, vp9_reader *r) {
  int ctx, node;
  for (ctx = 0; ctx < INTER_MODE_CONTEXTS; ++ctx)
    for (node = 0; node < INTER_MODES - 1; ++node)
      vp9_diff_update_prob(r, &fc->inter_mode_probs[ctx][node]);
}

// Decodes the compound prediction mode as a 1- or 2-bit code:
// 0 -> single only, 10 -> compound only, 11 -> hybrid.
static INLINE COMPPREDMODE_TYPE read_comp_pred_mode(vp9_reader *r) {
  COMPPREDMODE_TYPE mode = vp9_read_bit(r);
  return mode ? (COMPPREDMODE_TYPE)(mode + vp9_read_bit(r)) : mode;
}

// Reads the compound prediction mode and the associated probability updates
// (comp/inter selector, single-reference, and compound-reference probs).
static void read_comp_pred(VP9_COMMON *cm, vp9_reader *r) {
  int ctx;

  if (is_compound_prediction_allowed(cm)) {
    cm->comp_pred_mode = read_comp_pred_mode(r);
    setup_compound_prediction(cm);
  } else {
    cm->comp_pred_mode = SINGLE_PREDICTION_ONLY;
  }

  // Per-block single/compound selector probs, only used in hybrid mode.
  if (cm->comp_pred_mode == HYBRID_PREDICTION)
    for (ctx = 0; ctx < COMP_INTER_CONTEXTS; ctx++)
      vp9_diff_update_prob(r, &cm->fc.comp_inter_prob[ctx]);

  // Single-reference probs are needed unless everything is compound.
  if (cm->comp_pred_mode != COMP_PREDICTION_ONLY)
    for (ctx = 0; ctx < REF_CONTEXTS; ctx++) {
      vp9_diff_update_prob(r, &cm->fc.single_ref_prob[ctx][0]);
      vp9_diff_update_prob(r, &cm->fc.single_ref_prob[ctx][1]);
    }

  // Compound-reference probs are needed unless everything is single.
  if (cm->comp_pred_mode != SINGLE_PREDICTION_ONLY)
    for (ctx = 0; ctx < REF_CONTEXTS; ctx++)
      vp9_diff_update_prob(r, &cm->fc.comp_ref_prob[ctx]);
}

// Conditionally updates one MV probability: an update flag coded with
// NMV_UPDATE_PROB, then a 7-bit value stored as (value << 1) | 1 so the
// resulting probability is always odd and non-zero.
static void update_mv(vp9_reader *r, vp9_prob *p) {
  if (vp9_read(r, NMV_UPDATE_PROB)) {
    const int value = vp9_read_literal(r, 7);
    *p = (value << 1) | 1;
  }
}

// Reads conditional updates for the motion-vector probability model.
// The bitstream order is fixed: joints; then per-component sign/classes/
// class0/bits; then per-component fractional-pel probs; finally the
// high-precision probs when allow_hp is set.
static void read_mv_probs(vp9_reader *r, nmv_context *mvc, int allow_hp) {
  int comp_idx, node, frac;

  for (node = 0; node < MV_JOINTS - 1; ++node)
    update_mv(r, &mvc->joints[node]);

  for (comp_idx = 0; comp_idx < 2; ++comp_idx) {
    nmv_component *const component = &mvc->comps[comp_idx];

    update_mv(r, &component->sign);

    for (node = 0; node < MV_CLASSES - 1; ++node)
      update_mv(r, &component->classes[node]);

    for (node = 0; node < CLASS0_SIZE - 1; ++node)
      update_mv(r, &component->class0[node]);

    for (node = 0; node < MV_OFFSET_BITS; ++node)
      update_mv(r, &component->bits[node]);
  }

  for (comp_idx = 0; comp_idx < 2; ++comp_idx) {
    nmv_component *const component = &mvc->comps[comp_idx];

    for (node = 0; node < CLASS0_SIZE; ++node)
      for (frac = 0; frac < 3; ++frac)
        update_mv(r, &component->class0_fp[node][frac]);

    for (frac = 0; frac < 3; ++frac)
      update_mv(r, &component->fp[frac]);
  }

  if (allow_hp) {
    for (comp_idx = 0; comp_idx < 2; ++comp_idx) {
      update_mv(r, &mvc->comps[comp_idx].class0_hp);
      update_mv(r, &mvc->comps[comp_idx].hp);
    }
  }
}

194
// Points each plane at its dequantization table for the given q index:
// plane 0 uses the luma table, all remaining planes share the chroma table.
static void setup_plane_dequants(VP9_COMMON *cm, MACROBLOCKD *xd, int q_index) {
  int plane;
  xd->plane[0].dequant = cm->y_dequant[q_index];
  for (plane = 1; plane < MAX_MB_PLANE; plane++)
    xd->plane[plane].dequant = cm->uv_dequant[q_index];
}

202
static void decode_block(int plane, int block, BLOCK_SIZE plane_bsize,
203
                         TX_SIZE tx_size, void *arg) {
204
  MACROBLOCKD* const xd = arg;
205
  struct macroblockd_plane *const pd = &xd->plane[plane];
206
  int16_t* const qcoeff = BLOCK_OFFSET(pd->qcoeff, block);
207
  const int stride = pd->dst.stride;
208
  const int eob = pd->eobs[block];
209
210
211
212
213
214
215
216
217
218
219
220
  if (eob > 0) {
    TX_TYPE tx_type;
    const int raster_block = txfrm_block_to_raster_block(plane_bsize, tx_size,
                                                         block);
    uint8_t* const dst = raster_block_offset_uint8(plane_bsize, raster_block,
                                                   pd->dst.buf, stride);
    switch (tx_size) {
      case TX_4X4:
        tx_type = get_tx_type_4x4(pd->plane_type, xd, raster_block);
        if (tx_type == DCT_DCT)
          xd->itxm_add(qcoeff, dst, stride, eob);
        else
221
          vp9_iht4x4_add(tx_type, qcoeff, dst, stride, eob);
222
223
224
        break;
      case TX_8X8:
        tx_type = get_tx_type_8x8(pd->plane_type, xd);
225
        vp9_iht8x8_add(tx_type, qcoeff, dst, stride, eob);
226
227
228
        break;
      case TX_16X16:
        tx_type = get_tx_type_16x16(pd->plane_type, xd);
229
        vp9_iht16x16_add(tx_type, qcoeff, dst, stride, eob);
230
231
232
        break;
      case TX_32X32:
        tx_type = DCT_DCT;
233
        vp9_idct32x32_add(qcoeff, dst, stride, eob);
234
235
236
237
238
239
240
241
242
243
        break;
      default:
        assert(!"Invalid transform size");
    }

    if (eob == 1) {
      *((int32_t *)qcoeff) = 0;
    } else {
      if (tx_type == DCT_DCT && tx_size <= TX_16X16 && eob <= 10)
        vpx_memset(qcoeff, 0, 4 * (4 << tx_size) * sizeof(qcoeff[0]));
244
      else
245
        vpx_memset(qcoeff, 0, (16 << (tx_size << 1)) * sizeof(qcoeff[0]));
246
    }
247
248
249
  }
}

250
static void decode_block_intra(int plane, int block, BLOCK_SIZE plane_bsize,
251
                               TX_SIZE tx_size, void *arg) {
252
  MACROBLOCKD* const xd = arg;
253
  struct macroblockd_plane *const pd = &xd->plane[plane];
254
  MODE_INFO *const mi = xd->mi_8x8[0];
255
256
257
  const int raster_block = txfrm_block_to_raster_block(plane_bsize, tx_size,
                                                       block);
  uint8_t* const dst = raster_block_offset_uint8(plane_bsize, raster_block,
258
                                                 pd->dst.buf, pd->dst.stride);
259
260
261
262
  const MB_PREDICTION_MODE mode = (plane == 0)
        ? ((mi->mbmi.sb_type < BLOCK_8X8) ? mi->bmi[raster_block].as_mode
                                          : mi->mbmi.mode)
        : mi->mbmi.uv_mode;
263

264
  if (xd->mb_to_right_edge < 0 || xd->mb_to_bottom_edge < 0)
265
    extend_for_intra(xd, plane_bsize, plane, block, tx_size);
266

267
268
269
  vp9_predict_intra_block(xd, raster_block >> tx_size,
                          b_width_log2(plane_bsize), tx_size, mode,
                          dst, pd->dst.stride, dst, pd->dst.stride);
270

Dmitry Kovalev's avatar
Dmitry Kovalev committed
271
272
  if (!mi->mbmi.skip_coeff)
    decode_block(plane, block, plane_bsize, tx_size, arg);
273
274
}

275
276
// Decodes the residual tokens for one block. Returns the eob total, or -1
// when the block is coded as skipped (in which case the skip contexts are
// reset instead).
static int decode_tokens(VP9_COMMON *const cm, MACROBLOCKD *const xd,
                         BLOCK_SIZE bsize, vp9_reader *r) {
  MB_MODE_INFO *const mbmi = &xd->mi_8x8[0]->mbmi;

  if (mbmi->skip_coeff) {
    reset_skip_context(xd, bsize);
    return -1;
  }

  // With segmentation enabled the quantizer (and hence the dequant tables)
  // can differ per segment.
  if (cm->seg.enabled)
    setup_plane_dequants(cm, xd, vp9_get_qindex(&cm->seg, mbmi->segment_id,
                                                cm->base_qindex));

  // TODO(dkovalev) if (!vp9_reader_has_error(r))
  return vp9_decode_tokens(cm, xd, &cm->seg, r, bsize);
}

292
// Positions the macroblock descriptor at (mi_row, mi_col): wires up the mode
// info grids, pulls the next entry off the per-tile mode-info stream, and
// refreshes the skip/edge/destination-plane state for this block.
static void set_offsets(VP9D_COMP *pbi, BLOCK_SIZE bsize,
                        int mi_row, int mi_col) {
  VP9_COMMON *const cm = &pbi->common;
  MACROBLOCKD *const xd = &pbi->mb;
  const int block_rows = num_8x8_blocks_high_lookup[bsize];
  const int block_cols = num_8x8_blocks_wide_lookup[bsize];
  const int grid_offset = mi_row * cm->mode_info_stride + mi_col;

  xd->mode_info_stride = cm->mode_info_stride;

  xd->mi_8x8 = cm->mi_grid_visible + grid_offset;
  xd->prev_mi_8x8 = cm->prev_mi_grid_visible + grid_offset;

  // we are using the mode info context stream here
  xd->mi_8x8[0] = xd->mi_stream;
  xd->mi_8x8[0]->mbmi.sb_type = bsize;
  ++xd->mi_stream;

  // Special case: if prev_mi is NULL, the previous mode info context
  // cannot be used.
  xd->last_mi = cm->prev_mi ? xd->prev_mi_8x8[0] : NULL;

  set_skip_context(cm, xd, mi_row, mi_col);

  // Distance of Mb to the various image edges. These are specified to 8th pel
  // as they are always compared to values that are in 1/8th pel units
  set_mi_row_col(cm, xd, mi_row, block_rows, mi_col, block_cols);

  setup_dst_planes(xd, get_frame_new_buffer(cm), mi_row, mi_col);
}
John Koleszar's avatar
John Koleszar committed
322

323
324
// Binds prediction source `idx` (0 or 1) of the current block to its
// reference frame buffer and scale factors, validating the scale first.
static void set_ref(VP9_COMMON *const cm, MACROBLOCKD *const xd,
                    int idx, int mi_row, int mi_col) {
  MB_MODE_INFO *const mbmi = &xd->mi_8x8[0]->mbmi;
  const int ref = mbmi->ref_frame[idx] - LAST_FRAME;
  const YV12_BUFFER_CONFIG *cfg = get_frame_ref_buffer(cm, ref);
  const struct scale_factors_common *sfc = &cm->active_ref_scale_comm[ref];

  if (!vp9_is_valid_scale(sfc))
    vpx_internal_error(&cm->error, VPX_CODEC_UNSUP_BITSTREAM,
                       "Invalid scale factors");

  xd->scale_factor[idx].sfc = sfc;
  setup_pre_planes(xd, idx, cfg, mi_row, mi_col, &xd->scale_factor[idx]);

  // Corruption in the reference propagates to this frame.
  xd->corrupted |= cfg->corrupted;
}
John Koleszar's avatar
John Koleszar committed
337

338
static void decode_modes_b(VP9D_COMP *pbi, int mi_row, int mi_col,
339
                           vp9_reader *r, BLOCK_SIZE bsize, int index) {
340
  VP9_COMMON *const cm = &pbi->common;
341
  MACROBLOCKD *const xd = &pbi->mb;
342
  const int less8x8 = bsize < BLOCK_8X8;
343
  MB_MODE_INFO *mbmi;
344
  int eobtotal;
345

346
  if (less8x8)
347
    if (index > 0)
348
      return;
Dmitry Kovalev's avatar
Dmitry Kovalev committed
349

350
  set_offsets(pbi, bsize, mi_row, mi_col);
351
  vp9_read_mode_info(cm, xd, mi_row, mi_col, r);
352

353
  if (less8x8)
354
    bsize = BLOCK_8X8;
355
356

  // Has to be called after set_offsets
357
  mbmi = &xd->mi_8x8[0]->mbmi;
358
  eobtotal = decode_tokens(cm, xd, bsize, r);
359

360
  if (!is_inter_block(mbmi)) {
361
362
    // Intra reconstruction
    foreach_transformed_block(xd, bsize, decode_block_intra, xd);
363
  } else {
364
    // Inter reconstruction
365
366
367
368
369
    const int decode_blocks = (eobtotal > 0);

    if (!less8x8) {
      assert(mbmi->sb_type == bsize);
      if (eobtotal == 0)
370
        mbmi->skip_coeff = 1;  // skip loopfilter
371
    }
372

373
    set_ref(cm, xd, 0, mi_row, mi_col);
Dmitry Kovalev's avatar
Dmitry Kovalev committed
374
    if (has_second_ref(mbmi))
375
      set_ref(cm, xd, 1, mi_row, mi_col);
376

377
378
    xd->subpix.filter_x = xd->subpix.filter_y =
        vp9_get_filter_kernel(mbmi->interp_filter);
379
    vp9_build_inter_predictors_sb(xd, mi_row, mi_col, bsize);
380
381
382

    if (decode_blocks)
      foreach_transformed_block(xd, bsize, decode_block, xd);
383
  }
384
  xd->corrupted |= vp9_reader_has_error(r);
385
386
}

387
static void decode_modes_sb(VP9D_COMP *pbi, int mi_row, int mi_col,
388
                            vp9_reader* r, BLOCK_SIZE bsize, int index) {
389
  VP9_COMMON *const cm = &pbi->common;
Dmitry Kovalev's avatar
Dmitry Kovalev committed
390
  const int hbs = num_8x8_blocks_wide_lookup[bsize] / 2;
391
  PARTITION_TYPE partition = PARTITION_NONE;
392
  BLOCK_SIZE subsize;
393

394
  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols)
395
396
    return;

397
  if (bsize < BLOCK_8X8) {
398
    if (index > 0)
399
      return;
400
  } else {
401
    int pl;
402
    const int idx = check_bsize_coverage(hbs, cm->mi_rows, cm->mi_cols,
Dmitry Kovalev's avatar
Dmitry Kovalev committed
403
                                         mi_row, mi_col);
404
    pl = partition_plane_context(cm, mi_row, mi_col, bsize);
405
406
407

    if (idx == 0)
      partition = treed_read(r, vp9_partition_tree,
408
                             cm->fc.partition_prob[cm->frame_type][pl]);
409
    else if (idx > 0 &&
410
        !vp9_read(r, cm->fc.partition_prob[cm->frame_type][pl][idx]))
411
412
413
414
      partition = (idx == 1) ? PARTITION_HORZ : PARTITION_VERT;
    else
      partition = PARTITION_SPLIT;

415
416
    if (!cm->frame_parallel_decoding_mode)
      ++cm->counts.partition[pl][partition];
417
418
  }

419
  subsize = get_subsize(bsize, partition);
420

421
422
  switch (partition) {
    case PARTITION_NONE:
423
      decode_modes_b(pbi, mi_row, mi_col, r, subsize, 0);
424
425
      break;
    case PARTITION_HORZ:
426
      decode_modes_b(pbi, mi_row, mi_col, r, subsize, 0);
427
      if (mi_row + hbs < cm->mi_rows)
428
        decode_modes_b(pbi, mi_row + hbs, mi_col, r, subsize, 1);
429
430
      break;
    case PARTITION_VERT:
431
      decode_modes_b(pbi, mi_row, mi_col, r, subsize, 0);
432
      if (mi_col + hbs < cm->mi_cols)
433
        decode_modes_b(pbi, mi_row, mi_col + hbs, r, subsize, 1);
434
      break;
Dmitry Kovalev's avatar
Dmitry Kovalev committed
435
436
    case PARTITION_SPLIT: {
      int n;
437
      for (n = 0; n < 4; n++) {
Dmitry Kovalev's avatar
Dmitry Kovalev committed
438
        const int j = n >> 1, i = n & 1;
439
        decode_modes_sb(pbi, mi_row + j * hbs, mi_col + i * hbs,
440
                        r, subsize, n);
441
      }
Dmitry Kovalev's avatar
Dmitry Kovalev committed
442
    } break;
443
    default:
444
      assert(!"Invalid partition type");
445
  }
446

447
  // update partition context
448
  if (bsize >= BLOCK_8X8 &&
449
450
      (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT))
    update_partition_context(cm, mi_row, mi_col, subsize, bsize);
451
452
}

453
454
455
456
static void setup_token_decoder(const uint8_t *data,
                                const uint8_t *data_end,
                                size_t read_size,
                                struct vpx_internal_error_info *error_info,
457
                                vp9_reader *r) {
Dmitry Kovalev's avatar
Dmitry Kovalev committed
458
459
460
  // Validate the calculated partition length. If the buffer
  // described by the partition can't be fully read, then restrict
  // it to the portion that can be (for EC mode) or throw an error.
461
  if (!read_is_valid(data, read_size, data_end))
462
    vpx_internal_error(error_info, VPX_CODEC_CORRUPT_FRAME,
463
                       "Truncated packet or corrupt tile length");
John Koleszar's avatar
John Koleszar committed
464

465
  if (vp9_reader_init(r, data, read_size))
466
    vpx_internal_error(error_info, VPX_CODEC_MEM_ERROR,
John Koleszar's avatar
John Koleszar committed
467
                       "Failed to allocate bool decoder %d", 1);
John Koleszar's avatar
John Koleszar committed
468
469
}

470
static void read_coef_probs_common(vp9_coeff_probs_model *coef_probs,
471
                                   vp9_reader *r) {
472
473
474
475
476
477
478
479
480
  int i, j, k, l, m;

  if (vp9_read_bit(r))
    for (i = 0; i < BLOCK_TYPES; i++)
      for (j = 0; j < REF_TYPES; j++)
        for (k = 0; k < COEF_BANDS; k++)
          for (l = 0; l < PREV_COEF_CONTEXTS; l++)
            if (k > 0 || l < 3)
              for (m = 0; m < UNCONSTRAINED_NODES; m++)
481
                vp9_diff_update_prob(r, &coef_probs[i][j][k][l][m]);
482
}
483

484
static void read_coef_probs(FRAME_CONTEXT *fc, TX_MODE tx_mode,
485
                            vp9_reader *r) {
486
  read_coef_probs_common(fc->coef_probs[TX_4X4], r);
487

488
  if (tx_mode > ONLY_4X4)
489
    read_coef_probs_common(fc->coef_probs[TX_8X8], r);
Dmitry Kovalev's avatar
Dmitry Kovalev committed
490

491
  if (tx_mode > ALLOW_8X8)
492
    read_coef_probs_common(fc->coef_probs[TX_16X16], r);
Dmitry Kovalev's avatar
Dmitry Kovalev committed
493

494
  if (tx_mode > ALLOW_16X16)
495
    read_coef_probs_common(fc->coef_probs[TX_32X32], r);
496
497
}

498
499
static void setup_segmentation(struct segmentation *seg,
                               struct vp9_read_bit_buffer *rb) {
500
501
  int i, j;

502
503
  seg->update_map = 0;
  seg->update_data = 0;
504

505
506
  seg->enabled = vp9_rb_read_bit(rb);
  if (!seg->enabled)
507
508
509
    return;

  // Segmentation map update
510
511
  seg->update_map = vp9_rb_read_bit(rb);
  if (seg->update_map) {
Paul Wilkins's avatar
Paul Wilkins committed
512
    for (i = 0; i < SEG_TREE_PROBS; i++)
513
514
      seg->tree_probs[i] = vp9_rb_read_bit(rb) ? vp9_rb_read_literal(rb, 8)
                                               : MAX_PROB;
515

516
517
    seg->temporal_update = vp9_rb_read_bit(rb);
    if (seg->temporal_update) {
518
      for (i = 0; i < PREDICTION_PROBS; i++)
519
520
        seg->pred_probs[i] = vp9_rb_read_bit(rb) ? vp9_rb_read_literal(rb, 8)
                                                 : MAX_PROB;
521
522
    } else {
      for (i = 0; i < PREDICTION_PROBS; i++)
523
        seg->pred_probs[i] = MAX_PROB;
524
    }
525
  }
526

527
  // Segmentation data update
528
529
530
  seg->update_data = vp9_rb_read_bit(rb);
  if (seg->update_data) {
    seg->abs_delta = vp9_rb_read_bit(rb);
531

532
    vp9_clearall_segfeatures(seg);
533

Paul Wilkins's avatar
Paul Wilkins committed
534
    for (i = 0; i < MAX_SEGMENTS; i++) {
535
536
      for (j = 0; j < SEG_LVL_MAX; j++) {
        int data = 0;
537
        const int feature_enabled = vp9_rb_read_bit(rb);
538
        if (feature_enabled) {
539
          vp9_enable_segfeature(seg, i, j);
540
          data = decode_unsigned_max(rb, vp9_seg_feature_data_max(j));
541
          if (vp9_is_segfeature_signed(j))
542
            data = vp9_rb_read_bit(rb) ? -data : data;
543
        }
544
        vp9_set_segdata(seg, i, j, data);
545
546
547
548
549
      }
    }
  }
}

550
551
552
553
static void setup_loopfilter(struct loopfilter *lf,
                             struct vp9_read_bit_buffer *rb) {
  lf->filter_level = vp9_rb_read_literal(rb, 6);
  lf->sharpness_level = vp9_rb_read_literal(rb, 3);
554
555
556

  // Read in loop filter deltas applied at the MB level based on mode or ref
  // frame.
557
  lf->mode_ref_delta_update = 0;
558

559
560
561
562
  lf->mode_ref_delta_enabled = vp9_rb_read_bit(rb);
  if (lf->mode_ref_delta_enabled) {
    lf->mode_ref_delta_update = vp9_rb_read_bit(rb);
    if (lf->mode_ref_delta_update) {
563
564
      int i;

565
566
      for (i = 0; i < MAX_REF_LF_DELTAS; i++)
        if (vp9_rb_read_bit(rb))
567
          lf->ref_deltas[i] = vp9_rb_read_signed_literal(rb, 6);
568

569
570
      for (i = 0; i < MAX_MODE_LF_DELTAS; i++)
        if (vp9_rb_read_bit(rb))
571
          lf->mode_deltas[i] = vp9_rb_read_signed_literal(rb, 6);
572
573
574
575
    }
  }
}

576
577
// Reads an optional 4-bit signed quantizer delta into *delta_q (absent means
// zero). Returns 1 iff the stored value actually changed.
static int read_delta_q(struct vp9_read_bit_buffer *rb, int *delta_q) {
  const int previous = *delta_q;
  if (vp9_rb_read_bit(rb))
    *delta_q = vp9_rb_read_signed_literal(rb, 4);
  else
    *delta_q = 0;
  return previous != *delta_q;
}
581

582
// Parses the quantization parameters, rebuilding the dequant tables only
// when one of the deltas changed, and selects the lossless 4x4 transform
// when everything is zero.
static void setup_quantization(VP9D_COMP *pbi, struct vp9_read_bit_buffer *rb) {
  MACROBLOCKD *const xd = &pbi->mb;
  VP9_COMMON *const cm = &pbi->common;
  int changed = 0;

  cm->base_qindex = vp9_rb_read_literal(rb, QINDEX_BITS);
  changed |= read_delta_q(rb, &cm->y_dc_delta_q);
  changed |= read_delta_q(rb, &cm->uv_dc_delta_q);
  changed |= read_delta_q(rb, &cm->uv_ac_delta_q);
  if (changed)
    vp9_init_dequantizer(cm);

  // Lossless mode is signalled by an all-zero quantizer.
  xd->lossless = cm->base_qindex == 0 &&
                 cm->y_dc_delta_q == 0 &&
                 cm->uv_dc_delta_q == 0 &&
                 cm->uv_ac_delta_q == 0;

  xd->itxm_add = xd->lossless ? vp9_iwht4x4_add : vp9_idct4x4_add;
}

602
603
604
605
606
607
static INTERPOLATION_TYPE read_interp_filter_type(
                              struct vp9_read_bit_buffer *rb) {
  const INTERPOLATION_TYPE literal_to_type[] = { EIGHTTAP_SMOOTH,
                                                 EIGHTTAP,
                                                 EIGHTTAP_SHARP,
                                                 BILINEAR };
608
  return vp9_rb_read_bit(rb) ? SWITCHABLE
609
                             : literal_to_type[vp9_rb_read_literal(rb, 2)];
610
611
}

612
// Reads a frame size as two 16-bit values stored minus one (width first).
static void read_frame_size(struct vp9_read_bit_buffer *rb,
                            int *width, int *height) {
  *width = vp9_rb_read_literal(rb, 16) + 1;
  *height = vp9_rb_read_literal(rb, 16) + 1;
}

620
// Sets the display size: defaults to the coded size, optionally overridden
// by an explicit size in the bitstream.
static void setup_display_size(VP9_COMMON *cm, struct vp9_read_bit_buffer *rb) {
  cm->display_width = cm->width;
  cm->display_height = cm->height;
  if (vp9_rb_read_bit(rb))
    read_frame_size(rb, &cm->display_width, &cm->display_height);
}
626

627
628
// Applies a (possibly new) coded frame size. The first size seen allocates
// the frame buffers and becomes the upper bound for all later frames; the
// output buffer is (re)sized every call.
static void apply_frame_size(VP9D_COMP *pbi, int width, int height) {
  VP9_COMMON *cm = &pbi->common;

  if (cm->width != width || cm->height != height) {
    if (!pbi->initial_width || !pbi->initial_height) {
      // First sized frame: allocate and remember the maximum dimensions.
      if (vp9_alloc_frame_buffers(cm, width, height))
        vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
                           "Failed to allocate frame buffers");
      pbi->initial_width = width;
      pbi->initial_height = height;
    } else {
      // Later frames may shrink but never exceed the initial allocation.
      if (width > pbi->initial_width)
        vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
                           "Frame width too large");

      if (height > pbi->initial_height)
        vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
                           "Frame height too large");
    }

    cm->width = width;
    cm->height = height;

    vp9_update_frame_size(cm);
  }

  vp9_realloc_frame_buffer(get_frame_new_buffer(cm), cm->width, cm->height,
                           cm->subsampling_x, cm->subsampling_y,
                           VP9BORDERINPIXELS);
}

658
659
660
// Reads an explicit frame size from the header, applies it, then reads the
// optional display size.
static void setup_frame_size(VP9D_COMP *pbi,
                             struct vp9_read_bit_buffer *rb) {
  int width, height;
  read_frame_size(rb, &width, &height);
  apply_frame_size(pbi, width, height);
  setup_display_size(&pbi->common, rb);
}

666
667
668
669
670
671
672
673
// Determines the frame size for an inter frame: one flag per allowed
// reference selects "same size as this reference"; if no flag is set the
// size is coded explicitly. Validates before applying.
static void setup_frame_size_with_refs(VP9D_COMP *pbi,
                                       struct vp9_read_bit_buffer *rb) {
  VP9_COMMON *const cm = &pbi->common;

  int width = 0, height = 0;
  int found = 0, i;

  // Stop reading flags as soon as one is set, matching the bit layout.
  for (i = 0; i < ALLOWED_REFS_PER_FRAME && !found; ++i) {
    if (vp9_rb_read_bit(rb)) {
      const YV12_BUFFER_CONFIG *const cfg = get_frame_ref_buffer(cm, i);
      width = cfg->y_crop_width;
      height = cfg->y_crop_height;
      found = 1;
    }
  }

  if (!found)
    read_frame_size(rb, &width, &height);

  if (!width || !height)
    vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
                       "Referenced frame with invalid size");

  apply_frame_size(pbi, width, height);
  setup_display_size(cm, rb);
}

693
// Decodes one tile, superblock row by superblock row. When inline loop
// filtering is enabled, filtering of each row is pipelined one row behind
// decoding (optionally on a worker thread) and the tail rows are finished
// after decoding completes.
static void decode_tile(VP9D_COMP *pbi, vp9_reader *r, int tile_col) {
  const int num_threads = pbi->oxcf.max_threads;
  VP9_COMMON *const cm = &pbi->common;
  int mi_row, mi_col;
  MACROBLOCKD *xd = &pbi->mb;

  xd->mi_stream = pbi->mi_streams[tile_col];

  if (pbi->do_loopfilter_inline) {
    LFWorkerData *const lfd = (LFWorkerData*)pbi->lf_worker.data1;
    lfd->frame_buffer = get_frame_new_buffer(cm);
    lfd->cm = cm;
    lfd->xd = pbi->mb;
    lfd->stop = 0;
    lfd->y_only = 0;
    vp9_loop_filter_frame_init(cm, cm->lf.filter_level);
  }

  for (mi_row = cm->cur_tile_mi_row_start; mi_row < cm->cur_tile_mi_row_end;
       mi_row += MI_BLOCK_SIZE) {
    // For a SB there are 2 left contexts, each pertaining to a MB row within
    vp9_zero(cm->left_context);
    vp9_zero(cm->left_seg_context);

    for (mi_col = cm->cur_tile_mi_col_start; mi_col < cm->cur_tile_mi_col_end;
         mi_col += MI_BLOCK_SIZE)
      decode_modes_sb(pbi, mi_row, mi_col, r, BLOCK_64X64, 0);

    if (pbi->do_loopfilter_inline) {
      const int lf_start = mi_row - MI_BLOCK_SIZE;
      LFWorkerData *const lfd = (LFWorkerData*)pbi->lf_worker.data1;

      // delay the loopfilter by 1 macroblock row.
      if (lf_start < 0) continue;

      // decoding has completed: finish up the loop filter in this thread.
      if (mi_row + MI_BLOCK_SIZE >= cm->cur_tile_mi_row_end) continue;

      vp9_worker_sync(&pbi->lf_worker);
      lfd->start = lf_start;
      lfd->stop = mi_row;
      if (num_threads > 1) {
        vp9_worker_launch(&pbi->lf_worker);
      } else {
        vp9_worker_execute(&pbi->lf_worker);
      }
    }
  }

  if (pbi->do_loopfilter_inline) {
    // Filter the remaining rows synchronously.
    LFWorkerData *const lfd = (LFWorkerData*)pbi->lf_worker.data1;

    vp9_worker_sync(&pbi->lf_worker);
    lfd->start = lfd->stop;
    lfd->stop = cm->mi_rows;
    vp9_worker_execute(&pbi->lf_worker);
  }
}

751
// Parses the tile configuration: log2 of the column count is coded as the
// minimum plus a unary run of bits; log2 of the row count is 0, 1 or 2.
static void setup_tile_info(VP9_COMMON *cm, struct vp9_read_bit_buffer *rb) {
  int min_log2_tile_cols, max_log2_tile_cols, remaining;
  vp9_get_tile_n_bits(cm->mi_cols, &min_log2_tile_cols, &max_log2_tile_cols);

  // columns
  remaining = max_log2_tile_cols - min_log2_tile_cols;
  cm->log2_tile_cols = min_log2_tile_cols;
  while (remaining-- && vp9_rb_read_bit(rb))
    cm->log2_tile_cols++;

  // rows
  cm->log2_tile_rows = vp9_rb_read_bit(rb);
  if (cm->log2_tile_rows)
    cm->log2_tile_rows += vp9_rb_read_bit(rb);
}

767
768
769
static const uint8_t *decode_tiles(VP9D_COMP *pbi, const uint8_t *data) {
  vp9_reader residual_bc;

770
  VP9_COMMON *const cm = &pbi->common;
771

772
  const uint8_t *const data_end = pbi->source + pbi->source_sz;
773
774
775
  const int aligned_mi_cols = mi_cols_aligned_to_sb(cm->mi_cols);
  const int tile_cols = 1 << cm->log2_tile_cols;
  const int tile_rows = 1 << cm->log2_tile_rows;
776
  int tile_row, tile_col;
777

778
779
  // Note: this memset assumes above_context[0], [1] and [2]
  // are allocated as part of the same buffer.
780
  vpx_memset(cm->above_context[0], 0,
781
             sizeof(ENTROPY_CONTEXT) * MAX_MB_PLANE * (2 * aligned_mi_cols));
782

783
  vpx_memset(cm->above_seg_context, 0,
784
             sizeof(PARTITION_CONTEXT) * aligned_mi_cols);
785
786
787

  if (pbi->oxcf.inv_tile_order) {
    const uint8_t *data_ptr2[4][1 << 6];
788
    vp9_reader bc_bak = {0};
789
790

    // pre-initialize the offsets, we're going to read in inverse order
791
    data_ptr2[0][0] = data;
Dmitry Kovalev's avatar
Dmitry Kovalev committed
792
    for (tile_row = 0; tile_row < tile_rows; tile_row++) {
793
      if (tile_row) {
Dmitry Kovalev's avatar
Dmitry Kovalev committed
794
795
796
        const int size = read_be32(data_ptr2[tile_row - 1][tile_cols - 1]);
        data_ptr2[tile_row - 1][tile_cols - 1] += 4;
        data_ptr2[tile_row][0] = data_ptr2[tile_row - 1][tile_cols - 1] + size;
797
798
      }

Dmitry Kovalev's avatar
Dmitry Kovalev committed
799
      for (tile_col = 1; tile_col < tile_cols; tile_col++) {
800
        const int size = read_be32(data_ptr2[tile_row][tile_col - 1]);
801
802
803
804
805
806
        data_ptr2[tile_row][tile_col - 1] += 4;
        data_ptr2[tile_row][tile_col] =
            data_ptr2[tile_row][tile_col - 1] + size;
      }
    }

Dmitry Kovalev's avatar
Dmitry Kovalev committed
807
    for (tile_row = 0; tile_row < tile_rows; tile_row++) {
808
      vp9_get_tile_row_offsets(cm, tile_row);
Dmitry Kovalev's avatar
Dmitry Kovalev committed
809
      for (tile_col = tile_cols - 1; tile_col >= 0; tile_col--) {
810
        vp9_get_tile_col_offsets(cm, tile_col);
811
        setup_token_decoder(data_ptr2[tile_row][tile_col], data_end,
812
                            data_end - data_ptr2[tile_row][tile_col],
813
                            &cm->error, &residual_bc);
814
        decode_tile(pbi, &residual_bc, tile_col);
Dmitry Kovalev's avatar
Dmitry Kovalev committed
815
        if (tile_row == tile_rows - 1 && tile_col == tile_cols - 1)