vp9_decodframe.c 38.4 KB
Newer Older
John Koleszar's avatar
John Koleszar committed
1
/*
2
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
John Koleszar's avatar
John Koleszar committed
3
 *
4
 *  Use of this source code is governed by a BSD-style license
5
6
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
7
 *  in the file PATENTS.  All contributing project authors may
8
 *  be found in the AUTHORS file in the root of the source tree.
John Koleszar's avatar
John Koleszar committed
9
10
 */

11
#include <assert.h>
John Koleszar's avatar
John Koleszar committed
12

13
14
15
16
#include "./vp9_rtcd.h"
#include "vpx_mem/vpx_mem.h"
#include "vpx_scale/vpx_scale.h"

Dmitry Kovalev's avatar
Dmitry Kovalev committed
17
#include "vp9/common/vp9_alloccommon.h"
Ronald S. Bultje's avatar
Ronald S. Bultje committed
18
#include "vp9/common/vp9_common.h"
Yaowu Xu's avatar
Yaowu Xu committed
19
#include "vp9/common/vp9_entropy.h"
20
#include "vp9/common/vp9_entropymode.h"
Dmitry Kovalev's avatar
Dmitry Kovalev committed
21
#include "vp9/common/vp9_extend.h"
22
#include "vp9/common/vp9_idct.h"
Dmitry Kovalev's avatar
Dmitry Kovalev committed
23
#include "vp9/common/vp9_pred_common.h"
24
#include "vp9/common/vp9_quant_common.h"
Dmitry Kovalev's avatar
Dmitry Kovalev committed
25
26
#include "vp9/common/vp9_reconintra.h"
#include "vp9/common/vp9_reconinter.h"
27
28
#include "vp9/common/vp9_seg_common.h"
#include "vp9/common/vp9_tile_common.h"
29
30

#include "vp9/decoder/vp9_dboolhuff.h"
31
32
33
#include "vp9/decoder/vp9_decodframe.h"
#include "vp9/decoder/vp9_detokenize.h"
#include "vp9/decoder/vp9_decodemv.h"
34
#include "vp9/decoder/vp9_dsubexp.h"
35
36
#include "vp9/decoder/vp9_onyxd_int.h"
#include "vp9/decoder/vp9_read_bit_buffer.h"
37
#include "vp9/decoder/vp9_thread.h"
Dmitry Kovalev's avatar
Dmitry Kovalev committed
38
#include "vp9/decoder/vp9_treereader.h"
39

40
41
// Reads a 32-bit big-endian value from the 4 bytes at p.
// The arithmetic is done in uint32_t: the original `p[0] << 24` left-shifts
// an int into its sign bit whenever p[0] >= 0x80, which is undefined
// behavior in C. The final cast back to int preserves the existing
// interface (callers treat tile sizes as int).
static int read_be32(const uint8_t *p) {
  return (int)(((uint32_t)p[0] << 24) | ((uint32_t)p[1] << 16) |
               ((uint32_t)p[2] << 8) | (uint32_t)p[3]);
}

// len == 0 is not allowed
// Returns nonzero when the range [start, start + len) fits entirely inside
// the buffer ending at `end`. Implemented without materializing
// `start + len`: forming a pointer past the end of the underlying object
// is undefined behavior when a corrupt bitstream supplies a huge length.
static int read_is_valid(const uint8_t *start, size_t len, const uint8_t *end) {
  return len != 0 && start <= end && len <= (size_t)(end - start);
}

49
50
51
52
53
// Reads a value coded in get_unsigned_bits(max) bits and clamps the
// result to max, so corrupt bitstreams cannot produce out-of-range data.
static int decode_unsigned_max(struct vp9_read_bit_buffer *rb, int max) {
  const int bits = get_unsigned_bits(max);
  const int value = vp9_rb_read_literal(rb, bits);
  if (value > max)
    return max;
  return value;
}

54
55
56
57
58
// Reads the frame transform mode: a 2-bit literal, plus one extra bit to
// distinguish ALLOW_32X32 from TX_MODE_SELECT.
static TX_MODE read_tx_mode(vp9_reader *r) {
  TX_MODE mode = vp9_read_literal(r, 2);
  if (mode != ALLOW_32X32)
    return mode;
  return mode + vp9_read_bit(r);
}

61
// Reads conditional updates to the per-context transform-size split
// probabilities for the 8x8, 16x16 and 32x32 decision trees, in that
// order (the read order must match the encoder's write order).
static void read_tx_probs(struct tx_probs *tx_probs, vp9_reader *r) {
  int ctx, node;

  for (ctx = 0; ctx < TX_SIZE_CONTEXTS; ++ctx) {
    for (node = 0; node < TX_SIZES - 3; ++node)
      vp9_diff_update_prob(r, &tx_probs->p8x8[ctx][node]);
  }

  for (ctx = 0; ctx < TX_SIZE_CONTEXTS; ++ctx) {
    for (node = 0; node < TX_SIZES - 2; ++node)
      vp9_diff_update_prob(r, &tx_probs->p16x16[ctx][node]);
  }

  for (ctx = 0; ctx < TX_SIZE_CONTEXTS; ++ctx) {
    for (node = 0; node < TX_SIZES - 1; ++node)
      vp9_diff_update_prob(r, &tx_probs->p32x32[ctx][node]);
  }
}

77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
// Reads conditional updates to the switchable interpolation filter
// probabilities, one tree per filter context.
static void read_switchable_interp_probs(FRAME_CONTEXT *fc, vp9_reader *r) {
  int ctx, node;
  for (ctx = 0; ctx < SWITCHABLE_FILTERS + 1; ++ctx) {
    for (node = 0; node < SWITCHABLE_FILTERS - 1; ++node)
      vp9_diff_update_prob(r, &fc->switchable_interp_prob[ctx][node]);
  }
}

// Reads conditional updates to the inter prediction mode probabilities,
// one tree per inter-mode context.
static void read_inter_mode_probs(FRAME_CONTEXT *fc, vp9_reader *r) {
  int ctx, node;
  for (ctx = 0; ctx < INTER_MODE_CONTEXTS; ++ctx) {
    for (node = 0; node < INTER_MODES - 1; ++node)
      vp9_diff_update_prob(r, &fc->inter_mode_probs[ctx][node]);
  }
}

// Reads the compound prediction mode: a first bit of 0 means single
// prediction only; otherwise a second bit selects between compound-only
// and hybrid prediction.
static INLINE COMPPREDMODE_TYPE read_comp_pred_mode(vp9_reader *r) {
  const int first_bit = vp9_read_bit(r);
  if (!first_bit)
    return first_bit;
  return first_bit + vp9_read_bit(r);
}

// Reads the frame-level compound prediction mode and the probability
// updates it implies: the per-context single/compound selector (hybrid
// mode only), the single-reference trees, and the compound-reference
// probabilities.
static void read_comp_pred(VP9_COMMON *cm, vp9_reader *r) {
  int i;

  // Compound inter-inter prediction is only signalled when the active
  // reference frames allow it; otherwise force single prediction.
  cm->comp_pred_mode = cm->allow_comp_inter_inter ? read_comp_pred_mode(r)
                                                  : SINGLE_PREDICTION_ONLY;

  // Per-block single/compound selector probs exist only in hybrid mode.
  if (cm->comp_pred_mode == HYBRID_PREDICTION)
    for (i = 0; i < COMP_INTER_CONTEXTS; i++)
      vp9_diff_update_prob(r, &cm->fc.comp_inter_prob[i]);

  // Single-reference probs are needed unless every block is compound.
  if (cm->comp_pred_mode != COMP_PREDICTION_ONLY)
    for (i = 0; i < REF_CONTEXTS; i++) {
      vp9_diff_update_prob(r, &cm->fc.single_ref_prob[i][0]);
      vp9_diff_update_prob(r, &cm->fc.single_ref_prob[i][1]);
    }

  // Compound-reference probs are needed unless every block is single.
  if (cm->comp_pred_mode != SINGLE_PREDICTION_ONLY)
    for (i = 0; i < REF_CONTEXTS; i++)
      vp9_diff_update_prob(r, &cm->fc.comp_ref_prob[i]);
}

// Conditionally updates one motion vector probability: an update flag
// coded with NMV_UPDATE_PROB gates a 7-bit value that is stored as an
// odd 8-bit probability ((v << 1) | 1).
static void update_mv(vp9_reader *r, vp9_prob *p) {
  if (!vp9_read(r, NMV_UPDATE_PROB))
    return;
  *p = (vp9_read_literal(r, 7) << 1) | 1;
}

// Reads conditional updates to the motion vector entropy context:
// joint probabilities, then per-component sign/class/class0/bit probs,
// then per-component fractional-pel probs, and finally the high-precision
// probs when allow_hp is set. The read order must match the encoder.
static void read_mv_probs(vp9_reader *r, nmv_context *mvc, int allow_hp) {
  int i, j, k;

  for (j = 0; j < MV_JOINTS - 1; ++j)
    update_mv(r, &mvc->joints[j]);

  // Integer-pel portion for each of the two (row/col) components.
  for (i = 0; i < 2; ++i) {
    nmv_component *const comp = &mvc->comps[i];

    update_mv(r, &comp->sign);

    for (j = 0; j < MV_CLASSES - 1; ++j)
      update_mv(r, &comp->classes[j]);

    for (j = 0; j < CLASS0_SIZE - 1; ++j)
      update_mv(r, &comp->class0[j]);

    for (j = 0; j < MV_OFFSET_BITS; ++j)
      update_mv(r, &comp->bits[j]);
  }

  // Fractional-pel portion for each component.
  for (i = 0; i < 2; ++i) {
    nmv_component *const comp = &mvc->comps[i];

    for (j = 0; j < CLASS0_SIZE; ++j)
      for (k = 0; k < 3; ++k)
        update_mv(r, &comp->class0_fp[j][k]);

    for (j = 0; j < 3; ++j)
      update_mv(r, &comp->fp[j]);
  }

  // 1/8-pel probs are only coded when high precision MVs are allowed.
  if (allow_hp) {
    for (i = 0; i < 2; ++i) {
      update_mv(r, &mvc->comps[i].class0_hp);
      update_mv(r, &mvc->comps[i].hp);
    }
  }
}

164
// Points each plane's dequantization table at the entries for q_index:
// plane 0 uses the luma table, every remaining plane shares the chroma
// table.
static void setup_plane_dequants(VP9_COMMON *cm, MACROBLOCKD *xd, int q_index) {
  int plane;
  for (plane = 0; plane < MAX_MB_PLANE; plane++) {
    xd->plane[plane].dequant = (plane == 0) ? cm->y_dequant[q_index]
                                            : cm->uv_dequant[q_index];
  }
}

172
// Applies the inverse transform for one transform block and adds the
// result into the destination plane, then clears the consumed quantized
// coefficients so the buffer is zero for the next block.
// `arg` is the MACROBLOCKD (this is a foreach_transformed_block visitor).
static void decode_block(int plane, int block, BLOCK_SIZE plane_bsize,
                         TX_SIZE tx_size, void *arg) {
  MACROBLOCKD* const xd = arg;
  struct macroblockd_plane *const pd = &xd->plane[plane];
  int16_t* const qcoeff = BLOCK_OFFSET(pd->qcoeff, block);
  const int stride = pd->dst.stride;
  const int eob = pd->eobs[block];
  if (eob > 0) {
    // NOTE(review): tx_type is only assigned in the four valid cases below;
    // if the default branch were reached with NDEBUG it would be read
    // uninitialized in the eob > 1 path — confirm tx_size is always valid.
    TX_TYPE tx_type;
    const int raster_block = txfrm_block_to_raster_block(plane_bsize, tx_size,
                                                         block);
    uint8_t* const dst = raster_block_offset_uint8(plane_bsize, raster_block,
                                                   pd->dst.buf, stride);
    switch (tx_size) {
      case TX_4X4:
        tx_type = get_tx_type_4x4(pd->plane_type, xd, raster_block);
        if (tx_type == DCT_DCT)
          // itxm_add is the lossless/lossy 4x4 DCT hook chosen in
          // setup_quantization().
          xd->itxm_add(qcoeff, dst, stride, eob);
        else
          vp9_iht4x4_add(tx_type, qcoeff, dst, stride, eob);
        break;
      case TX_8X8:
        tx_type = get_tx_type_8x8(pd->plane_type, xd);
        vp9_iht8x8_add(tx_type, qcoeff, dst, stride, eob);
        break;
      case TX_16X16:
        tx_type = get_tx_type_16x16(pd->plane_type, xd);
        vp9_iht16x16_add(tx_type, qcoeff, dst, stride, eob);
        break;
      case TX_32X32:
        // 32x32 blocks only support the plain DCT.
        tx_type = DCT_DCT;
        vp9_idct32x32_add(qcoeff, dst, stride, eob);
        break;
      default:
        assert(!"Invalid transform size");
    }

    // Clear only as many coefficients as could be nonzero, which is
    // cheaper than wiping the whole block every time.
    if (eob == 1) {
      // Single coefficient: clear the first two int16 slots in one store.
      *((int32_t *)qcoeff) = 0;
    } else {
      // With a DCT and few coefficients, only the first rows can hold
      // nonzero values; otherwise clear the full transform block.
      if (tx_type == DCT_DCT && tx_size <= TX_16X16 && eob <= 10)
        vpx_memset(qcoeff, 0, 4 * (4 << tx_size) * sizeof(qcoeff[0]));
      else
        vpx_memset(qcoeff, 0, (16 << (tx_size << 1)) * sizeof(qcoeff[0]));
    }
  }
}

220
// Reconstructs one intra-coded transform block: runs the intra predictor
// into the destination buffer, then (unless the block is coded as skip)
// adds the inverse-transformed residual via decode_block().
// `arg` is the MACROBLOCKD (this is a foreach_transformed_block visitor).
static void decode_block_intra(int plane, int block, BLOCK_SIZE plane_bsize,
                               TX_SIZE tx_size, void *arg) {
  MACROBLOCKD* const xd = arg;
  struct macroblockd_plane *const pd = &xd->plane[plane];
  MODE_INFO *const mi = xd->mi_8x8[0];
  const int raster_block = txfrm_block_to_raster_block(plane_bsize, tx_size,
                                                       block);
  uint8_t* const dst = raster_block_offset_uint8(plane_bsize, raster_block,
                                                 pd->dst.buf, pd->dst.stride);
  // Luma blocks below 8x8 carry a mode per 4x4 sub-block; chroma always
  // uses the single uv_mode.
  const MB_PREDICTION_MODE mode = (plane == 0)
        ? ((mi->mbmi.sb_type < BLOCK_8X8) ? mi->bmi[raster_block].as_mode
                                          : mi->mbmi.mode)
        : mi->mbmi.uv_mode;

  // Pad edge pixels when the block extends past the frame boundary so the
  // predictor has valid neighbors to read.
  if (xd->mb_to_right_edge < 0 || xd->mb_to_bottom_edge < 0)
    extend_for_intra(xd, plane_bsize, plane, block, tx_size);

  vp9_predict_intra_block(xd, raster_block >> tx_size,
                          b_width_log2(plane_bsize), tx_size, mode,
                          dst, pd->dst.stride, dst, pd->dst.stride);

  if (!mi->mbmi.skip_coeff)
    decode_block(plane, block, plane_bsize, tx_size, arg);
}

245
246
// Decodes the coefficient tokens for one block. Returns the total eob
// count from vp9_decode_tokens(), or -1 when the block is coded as skip
// (in which case only the skip entropy contexts are reset).
static int decode_tokens(VP9_COMMON *const cm, MACROBLOCKD *const xd,
                         BLOCK_SIZE bsize, vp9_reader *r) {
  MB_MODE_INFO *const mbmi = &xd->mi_8x8[0]->mbmi;

  if (mbmi->skip_coeff) {
    reset_skip_context(xd, bsize);
    return -1;
  } else {
    // With segmentation the quantizer can differ per segment, so reload
    // the dequant tables for this block's segment.
    if (cm->seg.enabled)
      setup_plane_dequants(cm, xd, vp9_get_qindex(&cm->seg, mbmi->segment_id,
                                                  cm->base_qindex));

    // TODO(dkovalev) if (!vp9_reader_has_error(r))
    return vp9_decode_tokens(cm, xd, &cm->seg, r, bsize);
  }
}

262
// Positions the macroblock decoder state at (mi_row, mi_col): wires up the
// mode-info grid pointers, allocates the next mode-info struct from the
// per-tile stream, and configures contexts, edge distances and
// destination plane pointers for a block of the given size.
static void set_offsets(VP9D_COMP *pbi, BLOCK_SIZE bsize,
                        int mi_row, int mi_col) {
  VP9_COMMON *const cm = &pbi->common;
  MACROBLOCKD *const xd = &pbi->mb;
  const int bh = num_8x8_blocks_high_lookup[bsize];
  const int bw = num_8x8_blocks_wide_lookup[bsize];
  const int offset = mi_row * cm->mode_info_stride + mi_col;

  xd->mode_info_stride = cm->mode_info_stride;

  xd->mi_8x8 = cm->mi_grid_visible + offset;
  xd->prev_mi_8x8 = cm->prev_mi_grid_visible + offset;

  // we are using the mode info context stream here
  xd->mi_8x8[0] = xd->mi_stream;
  xd->mi_8x8[0]->mbmi.sb_type = bsize;
  ++xd->mi_stream;

  // Special case: if prev_mi is NULL, the previous mode info context
  // cannot be used.
  xd->last_mi = cm->prev_mi ? xd->prev_mi_8x8[0] : NULL;

  set_skip_context(cm, xd, mi_row, mi_col);

  // Distance of Mb to the various image edges. These are specified to 8th pel
  // as they are always compared to values that are in 1/8th pel units
  set_mi_row_col(cm, xd, mi_row, bh, mi_col, bw);

  setup_dst_planes(xd, &cm->yv12_fb[cm->new_fb_idx], mi_row, mi_col);
}
John Koleszar's avatar
John Koleszar committed
292

293
294
// Binds reference frame slot `idx` (0 or 1) of the current block: copies
// the scale factors, points the prediction source planes at the reference
// buffer, and propagates the reference's corruption flag.
// Errors out if the reference's scale factors are unusable.
static void set_ref(VP9_COMMON *const cm, MACROBLOCKD *const xd,
                    int idx, int mi_row, int mi_col) {
  MB_MODE_INFO *const mbmi = &xd->mi_8x8[0]->mbmi;
  const int ref = mbmi->ref_frame[idx] - LAST_FRAME;
  const YV12_BUFFER_CONFIG *cfg = &cm->yv12_fb[cm->active_ref_idx[ref]];
  const struct scale_factors *sf = &cm->active_ref_scale[ref];
  if (!vp9_is_valid_scale(sf))
    vpx_internal_error(&cm->error, VPX_CODEC_UNSUP_BITSTREAM,
                       "Invalid scale factors");

  xd->scale_factor[idx] = *sf;
  setup_pre_planes(xd, idx, cfg, mi_row, mi_col, sf);
  // A corrupt reference makes the current frame corrupt too.
  xd->corrupted |= cfg->corrupted;
}
John Koleszar's avatar
John Koleszar committed
307

308
// Decodes one leaf block: reads its mode info, decodes coefficient
// tokens, and performs intra or inter reconstruction. For sub-8x8
// partitions only index 0 does the work (the whole 8x8 is handled once).
static void decode_modes_b(VP9D_COMP *pbi, int mi_row, int mi_col,
                           vp9_reader *r, BLOCK_SIZE bsize, int index) {
  VP9_COMMON *const cm = &pbi->common;
  MACROBLOCKD *const xd = &pbi->mb;
  const int less8x8 = bsize < BLOCK_8X8;
  MB_MODE_INFO *mbmi;
  int eobtotal;

  // Sub-8x8 partitions are decoded once for the whole 8x8 unit.
  if (less8x8)
    if (index > 0)
      return;

  set_offsets(pbi, bsize, mi_row, mi_col);
  vp9_read_mode_info(cm, xd, mi_row, mi_col, r);

  // Reconstruction below always operates on at least an 8x8 unit.
  if (less8x8)
    bsize = BLOCK_8X8;

  // Has to be called after set_offsets
  mbmi = &xd->mi_8x8[0]->mbmi;
  eobtotal = decode_tokens(cm, xd, bsize, r);

  if (!is_inter_block(mbmi)) {
    // Intra reconstruction
    foreach_transformed_block(xd, bsize, decode_block_intra, xd);
  } else {
    // Inter reconstruction
    const int decode_blocks = (eobtotal > 0);

    if (!less8x8) {
      assert(mbmi->sb_type == bsize);
      if (eobtotal == 0)
        mbmi->skip_coeff = 1;  // skip loopfilter
    }

    set_ref(cm, xd, 0, mi_row, mi_col);
    if (has_second_ref(mbmi))
      set_ref(cm, xd, 1, mi_row, mi_col);

    xd->subpix.filter_x = xd->subpix.filter_y =
        vp9_get_filter_kernel(mbmi->interp_filter);
    vp9_build_inter_predictors_sb(xd, mi_row, mi_col, bsize);

    // Residual is only added when some coefficients were decoded.
    if (decode_blocks)
      foreach_transformed_block(xd, bsize, decode_block, xd);
  }
  xd->corrupted |= vp9_reader_has_error(r);
}

357
// Recursively decodes a superblock partition tree: reads the partition
// type for this node (when signalled), decodes the resulting child
// blocks, and updates the partition context afterwards.
static void decode_modes_sb(VP9D_COMP *pbi, int mi_row, int mi_col,
                            vp9_reader* r, BLOCK_SIZE bsize, int index) {
  VP9_COMMON *const cm = &pbi->common;
  const int hbs = num_8x8_blocks_wide_lookup[bsize] / 2;
  PARTITION_TYPE partition = PARTITION_NONE;
  BLOCK_SIZE subsize;

  // Nothing to decode outside the visible frame area.
  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols)
    return;

  if (bsize < BLOCK_8X8) {
    // Sub-8x8 nodes carry no partition syntax; only index 0 recurses.
    if (index > 0)
      return;
  } else {
    int pl;
    // idx encodes how much of the block lies inside the frame, which
    // restricts which partition types can be signalled.
    const int idx = check_bsize_coverage(hbs, cm->mi_rows, cm->mi_cols,
                                         mi_row, mi_col);
    pl = partition_plane_context(cm, mi_row, mi_col, bsize);

    if (idx == 0)
      // Fully inside the frame: full partition tree is coded.
      partition = treed_read(r, vp9_partition_tree,
                             cm->fc.partition_prob[cm->frame_type][pl]);
    else if (idx > 0 &&
        !vp9_read(r, cm->fc.partition_prob[cm->frame_type][pl][idx]))
      // Partially inside: only one split direction is possible.
      partition = (idx == 1) ? PARTITION_HORZ : PARTITION_VERT;
    else
      partition = PARTITION_SPLIT;

    // Counts feed backward adaptation; skipped in parallel decoding mode.
    if (!cm->frame_parallel_decoding_mode)
      ++cm->counts.partition[pl][partition];
  }

  subsize = get_subsize(bsize, partition);

  switch (partition) {
    case PARTITION_NONE:
      decode_modes_b(pbi, mi_row, mi_col, r, subsize, 0);
      break;
    case PARTITION_HORZ:
      decode_modes_b(pbi, mi_row, mi_col, r, subsize, 0);
      if (mi_row + hbs < cm->mi_rows)
        decode_modes_b(pbi, mi_row + hbs, mi_col, r, subsize, 1);
      break;
    case PARTITION_VERT:
      decode_modes_b(pbi, mi_row, mi_col, r, subsize, 0);
      if (mi_col + hbs < cm->mi_cols)
        decode_modes_b(pbi, mi_row, mi_col + hbs, r, subsize, 1);
      break;
    case PARTITION_SPLIT: {
      int n;
      for (n = 0; n < 4; n++) {
        // Quadrant order: top-left, top-right, bottom-left, bottom-right.
        const int j = n >> 1, i = n & 1;
        decode_modes_sb(pbi, mi_row + j * hbs, mi_col + i * hbs,
                        r, subsize, n);
      }
    } break;
    default:
      assert(!"Invalid partition type");
  }

  // update partition context
  if (bsize >= BLOCK_8X8 &&
      (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT))
    update_partition_context(cm, mi_row, mi_col, subsize, bsize);
}

423
424
425
426
// Initializes the boolean decoder `r` over [data, data + read_size),
// erroring out via `error_info` when the advertised size does not fit in
// the buffer or the reader cannot be initialized.
static void setup_token_decoder(const uint8_t *data,
                                const uint8_t *data_end,
                                size_t read_size,
                                struct vpx_internal_error_info *error_info,
                                vp9_reader *r) {
  // Validate the calculated partition length. If the buffer
  // described by the partition can't be fully read, then restrict
  // it to the portion that can be (for EC mode) or throw an error.
  if (!read_is_valid(data, read_size, data_end))
    vpx_internal_error(error_info, VPX_CODEC_CORRUPT_FRAME,
                       "Truncated packet or corrupt tile length");

  if (vp9_reader_init(r, data, read_size))
    vpx_internal_error(error_info, VPX_CODEC_MEM_ERROR,
                       "Failed to allocate bool decoder %d", 1);
}

440
static void read_coef_probs_common(vp9_coeff_probs_model *coef_probs,
441
                                   vp9_reader *r) {
442
443
444
445
446
447
448
449
450
  int i, j, k, l, m;

  if (vp9_read_bit(r))
    for (i = 0; i < BLOCK_TYPES; i++)
      for (j = 0; j < REF_TYPES; j++)
        for (k = 0; k < COEF_BANDS; k++)
          for (l = 0; l < PREV_COEF_CONTEXTS; l++)
            if (k > 0 || l < 3)
              for (m = 0; m < UNCONSTRAINED_NODES; m++)
451
                vp9_diff_update_prob(r, &coef_probs[i][j][k][l][m]);
452
}
453

454
// Reads coefficient probability updates for every transform size the
// frame's tx_mode permits; larger sizes are only coded when allowed.
static void read_coef_probs(FRAME_CONTEXT *fc, TX_MODE tx_mode,
                            vp9_reader *r) {
  // 4x4 is always available.
  read_coef_probs_common(fc->coef_probs[TX_4X4], r);

  if (tx_mode > ONLY_4X4)
    read_coef_probs_common(fc->coef_probs[TX_8X8], r);

  if (tx_mode > ALLOW_8X8)
    read_coef_probs_common(fc->coef_probs[TX_16X16], r);

  if (tx_mode > ALLOW_16X16)
    read_coef_probs_common(fc->coef_probs[TX_32X32], r);
}

468
469
// Parses the segmentation syntax from the uncompressed frame header:
// the enable flag, optional segment-map tree/prediction probabilities,
// and optional per-segment feature data.
static void setup_segmentation(struct segmentation *seg,
                               struct vp9_read_bit_buffer *rb) {
  int i, j;

  seg->update_map = 0;
  seg->update_data = 0;

  seg->enabled = vp9_rb_read_bit(rb);
  if (!seg->enabled)
    return;

  // Segmentation map update
  seg->update_map = vp9_rb_read_bit(rb);
  if (seg->update_map) {
    // Each tree prob is either explicitly coded or defaults to MAX_PROB.
    for (i = 0; i < SEG_TREE_PROBS; i++)
      seg->tree_probs[i] = vp9_rb_read_bit(rb) ? vp9_rb_read_literal(rb, 8)
                                               : MAX_PROB;

    seg->temporal_update = vp9_rb_read_bit(rb);
    if (seg->temporal_update) {
      for (i = 0; i < PREDICTION_PROBS; i++)
        seg->pred_probs[i] = vp9_rb_read_bit(rb) ? vp9_rb_read_literal(rb, 8)
                                                 : MAX_PROB;
    } else {
      // No temporal prediction: force all prediction probs to MAX_PROB.
      for (i = 0; i < PREDICTION_PROBS; i++)
        seg->pred_probs[i] = MAX_PROB;
    }
  }

  // Segmentation data update
  seg->update_data = vp9_rb_read_bit(rb);
  if (seg->update_data) {
    seg->abs_delta = vp9_rb_read_bit(rb);

    vp9_clearall_segfeatures(seg);

    for (i = 0; i < MAX_SEGMENTS; i++) {
      for (j = 0; j < SEG_LVL_MAX; j++) {
        int data = 0;
        const int feature_enabled = vp9_rb_read_bit(rb);
        if (feature_enabled) {
          vp9_enable_segfeature(seg, i, j);
          // Value is clamped to the feature's maximum; signed features
          // carry an extra sign bit.
          data = decode_unsigned_max(rb, vp9_seg_feature_data_max(j));
          if (vp9_is_segfeature_signed(j))
            data = vp9_rb_read_bit(rb) ? -data : data;
        }
        vp9_set_segdata(seg, i, j, data);
      }
    }
  }
}

520
521
522
523
// Parses the loop filter syntax from the uncompressed frame header:
// filter level, sharpness, and the optional per-reference / per-mode
// filter level deltas.
static void setup_loopfilter(struct loopfilter *lf,
                             struct vp9_read_bit_buffer *rb) {
  lf->filter_level = vp9_rb_read_literal(rb, 6);
  lf->sharpness_level = vp9_rb_read_literal(rb, 3);

  // Read in loop filter deltas applied at the MB level based on mode or ref
  // frame.
  lf->mode_ref_delta_update = 0;

  lf->mode_ref_delta_enabled = vp9_rb_read_bit(rb);
  if (lf->mode_ref_delta_enabled) {
    lf->mode_ref_delta_update = vp9_rb_read_bit(rb);
    if (lf->mode_ref_delta_update) {
      int i;

      // Each delta is updated only when its own flag bit is set.
      for (i = 0; i < MAX_REF_LF_DELTAS; i++)
        if (vp9_rb_read_bit(rb))
          lf->ref_deltas[i] = vp9_rb_read_signed_literal(rb, 6);

      for (i = 0; i < MAX_MODE_LF_DELTAS; i++)
        if (vp9_rb_read_bit(rb))
          lf->mode_deltas[i] = vp9_rb_read_signed_literal(rb, 6);
    }
  }
}

546
547
// Reads an optional 4-bit signed delta-q value (absent means 0).
// Returns nonzero when the stored value changed.
static int read_delta_q(struct vp9_read_bit_buffer *rb, int *delta_q) {
  const int previous = *delta_q;
  int value = 0;
  if (vp9_rb_read_bit(rb))
    value = vp9_rb_read_signed_literal(rb, 4);
  *delta_q = value;
  return previous != value;
}
551

552
// Parses the quantization parameters from the uncompressed header:
// base q index plus the three delta-q values. Rebuilds the dequantizer
// tables only when a delta changed, then selects the lossless (WHT) or
// lossy (DCT) 4x4 inverse transform accordingly.
static void setup_quantization(VP9D_COMP *pbi, struct vp9_read_bit_buffer *rb) {
  MACROBLOCKD *const xd = &pbi->mb;
  VP9_COMMON *const cm = &pbi->common;
  int update = 0;

  cm->base_qindex = vp9_rb_read_literal(rb, QINDEX_BITS);
  update |= read_delta_q(rb, &cm->y_dc_delta_q);
  update |= read_delta_q(rb, &cm->uv_dc_delta_q);
  update |= read_delta_q(rb, &cm->uv_ac_delta_q);
  if (update)
    vp9_init_dequantizer(cm);

  // Lossless coding is signalled by a base q of 0 with no deltas.
  xd->lossless = cm->base_qindex == 0 &&
                 cm->y_dc_delta_q == 0 &&
                 cm->uv_dc_delta_q == 0 &&
                 cm->uv_ac_delta_q == 0;

  xd->itxm_add = xd->lossless ? vp9_iwht4x4_add : vp9_idct4x4_add;
}

572
573
574
575
576
577
static INTERPOLATION_TYPE read_interp_filter_type(
                              struct vp9_read_bit_buffer *rb) {
  const INTERPOLATION_TYPE literal_to_type[] = { EIGHTTAP_SMOOTH,
                                                 EIGHTTAP,
                                                 EIGHTTAP_SHARP,
                                                 BILINEAR };
578
  return vp9_rb_read_bit(rb) ? SWITCHABLE
579
                             : literal_to_type[vp9_rb_read_literal(rb, 2)];
580
581
}

582
// Reads a frame size coded as (dimension - 1) in two 16-bit literals,
// width first.
static void read_frame_size(struct vp9_read_bit_buffer *rb,
                            int *width, int *height) {
  *width = vp9_rb_read_literal(rb, 16) + 1;
  *height = vp9_rb_read_literal(rb, 16) + 1;
}

590
// Sets the display size: defaults to the coded frame size, overridden by
// an optional explicit display size in the bitstream.
static void setup_display_size(VP9_COMMON *cm, struct vp9_read_bit_buffer *rb) {
  cm->display_width = cm->width;
  cm->display_height = cm->height;
  if (vp9_rb_read_bit(rb))
    read_frame_size(rb, &cm->display_width, &cm->display_height);
}
596

597
598
// Applies a new coded frame size: on the first size change the frame
// buffers are allocated and the size recorded as the maximum; later
// changes may only shrink relative to that maximum. Always (re)sizes the
// current output buffer.
static void apply_frame_size(VP9D_COMP *pbi, int width, int height) {
  VP9_COMMON *cm = &pbi->common;

  if (cm->width != width || cm->height != height) {
    if (!pbi->initial_width || !pbi->initial_height) {
      // First sizing: allocate buffers and remember the ceiling.
      if (vp9_alloc_frame_buffers(cm, width, height))
        vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
                           "Failed to allocate frame buffers");
      pbi->initial_width = width;
      pbi->initial_height = height;
    } else {
      // Buffers were sized for the initial dimensions; growing past them
      // would overflow, so it is rejected as a corrupt frame.
      if (width > pbi->initial_width)
        vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
                           "Frame width too large");

      if (height > pbi->initial_height)
        vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
                           "Frame height too large");
    }

    cm->width = width;
    cm->height = height;

    vp9_update_frame_size(cm);
  }

  vp9_realloc_frame_buffer(&cm->yv12_fb[cm->new_fb_idx], cm->width, cm->height,
                           cm->subsampling_x, cm->subsampling_y,
                           VP9BORDERINPIXELS);
}

628
629
630
// Reads an explicit frame size from the header, applies it, then reads
// the optional display size.
static void setup_frame_size(VP9D_COMP *pbi,
                             struct vp9_read_bit_buffer *rb) {
  int width, height;
  read_frame_size(rb, &width, &height);
  apply_frame_size(pbi, width, height);
  setup_display_size(&pbi->common, rb);
}

636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
// Determines the frame size for an inter frame: one flag bit per allowed
// reference selects "same size as that reference"; if no flag is set an
// explicit size follows. A zero dimension is rejected as corrupt.
static void setup_frame_size_with_refs(VP9D_COMP *pbi,
                                       struct vp9_read_bit_buffer *rb) {
  VP9_COMMON *const cm = &pbi->common;

  int width, height;
  int found = 0, i;
  for (i = 0; i < ALLOWED_REFS_PER_FRAME; ++i) {
    if (vp9_rb_read_bit(rb)) {
      // Inherit the crop size of the first flagged reference.
      YV12_BUFFER_CONFIG *cfg = &cm->yv12_fb[cm->active_ref_idx[i]];
      width = cfg->y_crop_width;
      height = cfg->y_crop_height;
      found = 1;
      break;
    }
  }

  if (!found)
    read_frame_size(rb, &width, &height);

  if (!width || !height)
    vpx_internal_error(&cm->error, VPX_CODEC_CORRUPT_FRAME,
                       "Referenced frame with invalid size");

  apply_frame_size(pbi, width, height);
  setup_display_size(cm, rb);
}

663
// Decodes one tile: iterates its superblock rows/columns calling
// decode_modes_sb(), optionally running the loop filter inline (lagging
// one superblock row behind decoding, finished after the loop).
static void decode_tile(VP9D_COMP *pbi, vp9_reader *r, int tile_col) {
  const int num_threads = pbi->oxcf.max_threads;
  VP9_COMMON *const cm = &pbi->common;
  int mi_row, mi_col;
  YV12_BUFFER_CONFIG *const fb = &cm->yv12_fb[cm->new_fb_idx];
  MACROBLOCKD *xd = &pbi->mb;

  // Each tile column allocates mode info from its own stream.
  xd->mi_stream = pbi->mi_streams[tile_col];

  if (pbi->do_loopfilter_inline) {
    LFWorkerData *const lf_data = (LFWorkerData*)pbi->lf_worker.data1;
    lf_data->frame_buffer = fb;
    lf_data->cm = cm;
    lf_data->xd = pbi->mb;
    lf_data->stop = 0;
    lf_data->y_only = 0;
    vp9_loop_filter_frame_init(cm, cm->lf.filter_level);
  }

  for (mi_row = cm->cur_tile_mi_row_start; mi_row < cm->cur_tile_mi_row_end;
       mi_row += MI_BLOCK_SIZE) {
    // For a SB there are 2 left contexts, each pertaining to a MB row within
    vp9_zero(cm->left_context);
    vp9_zero(cm->left_seg_context);
    for (mi_col = cm->cur_tile_mi_col_start; mi_col < cm->cur_tile_mi_col_end;
         mi_col += MI_BLOCK_SIZE)
      decode_modes_sb(pbi, mi_row, mi_col, r, BLOCK_64X64, 0);

    if (pbi->do_loopfilter_inline) {
      const int lf_start = mi_row - MI_BLOCK_SIZE;
      LFWorkerData *const lf_data = (LFWorkerData*)pbi->lf_worker.data1;

      // delay the loopfilter by 1 macroblock row.
      if (lf_start < 0) continue;

      // decoding has completed: finish up the loop filter in this thread.
      if (mi_row + MI_BLOCK_SIZE >= cm->cur_tile_mi_row_end) continue;

      // Wait for any previous filtering job, then hand over the next span.
      vp9_worker_sync(&pbi->lf_worker);
      lf_data->start = lf_start;
      lf_data->stop = mi_row;
      if (num_threads > 1) {
        vp9_worker_launch(&pbi->lf_worker);
      } else {
        vp9_worker_execute(&pbi->lf_worker);
      }
    }
  }

  if (pbi->do_loopfilter_inline) {
    LFWorkerData *const lf_data = (LFWorkerData*)pbi->lf_worker.data1;

    // Filter the remaining rows synchronously now that decoding is done.
    vp9_worker_sync(&pbi->lf_worker);
    lf_data->start = lf_data->stop;
    lf_data->stop = cm->mi_rows;
    vp9_worker_execute(&pbi->lf_worker);
  }
}

722
// Reads the tile configuration: log2 tile columns is coded in unary above
// the frame-size-dependent minimum; log2 tile rows is 0, 1 or 2 coded in
// up to two bits.
static void setup_tile_info(VP9_COMMON *cm, struct vp9_read_bit_buffer *rb) {
  int min_log2_tile_cols, max_log2_tile_cols;
  int extra_bits;
  vp9_get_tile_n_bits(cm->mi_cols, &min_log2_tile_cols, &max_log2_tile_cols);

  // columns: count unary continuation bits above the minimum.
  cm->log2_tile_cols = min_log2_tile_cols;
  extra_bits = max_log2_tile_cols - min_log2_tile_cols;
  while (extra_bits > 0 && vp9_rb_read_bit(rb)) {
    cm->log2_tile_cols++;
    extra_bits--;
  }

  // rows: a first bit, and a second one only when the first is set.
  cm->log2_tile_rows = vp9_rb_read_bit(rb);
  if (cm->log2_tile_rows)
    cm->log2_tile_rows += vp9_rb_read_bit(rb);
}

738
739
740
static const uint8_t *decode_tiles(VP9D_COMP *pbi, const uint8_t *data) {
  vp9_reader residual_bc;

741
  VP9_COMMON *const cm = &pbi->common;
742

743
  const uint8_t *const data_end = pbi->source + pbi->source_sz;
744
745
746
  const int aligned_mi_cols = mi_cols_aligned_to_sb(cm->mi_cols);
  const int tile_cols = 1 << cm->log2_tile_cols;
  const int tile_rows = 1 << cm->log2_tile_rows;
747
  int tile_row, tile_col;
748

749
750
  // Note: this memset assumes above_context[0], [1] and [2]
  // are allocated as part of the same buffer.
751
  vpx_memset(cm->above_context[0], 0,
752
             sizeof(ENTROPY_CONTEXT) * MAX_MB_PLANE * (2 * aligned_mi_cols));
753

754
  vpx_memset(cm->above_seg_context, 0,
755
             sizeof(PARTITION_CONTEXT) * aligned_mi_cols);
756
757
758

  if (pbi->oxcf.inv_tile_order) {
    const uint8_t *data_ptr2[4][1 << 6];
759
    vp9_reader bc_bak = {0};
760
761

    // pre-initialize the offsets, we're going to read in inverse order
762
    data_ptr2[0][0] = data;
Dmitry Kovalev's avatar
Dmitry Kovalev committed
763
    for (tile_row = 0; tile_row < tile_rows; tile_row++) {
764
      if (tile_row) {
Dmitry Kovalev's avatar
Dmitry Kovalev committed
765
766
767
        const int size = read_be32(data_ptr2[tile_row - 1][tile_cols - 1]);
        data_ptr2[tile_row - 1][tile_cols - 1] += 4;
        data_ptr2[tile_row][0] = data_ptr2[tile_row - 1][tile_cols - 1] + size;
768
769
      }

Dmitry Kovalev's avatar
Dmitry Kovalev committed
770
      for (tile_col = 1; tile_col < tile_cols; tile_col++) {
771
        const int size = read_be32(data_ptr2[tile_row][tile_col - 1]);
772
773
774
775
776
777
        data_ptr2[tile_row][tile_col - 1] += 4;
        data_ptr2[tile_row][tile_col] =
            data_ptr2[tile_row][tile_col - 1] + size;
      }
    }

Dmitry Kovalev's avatar
Dmitry Kovalev committed
778
    for (tile_row = 0; tile_row < tile_rows; tile_row++) {
779
      vp9_get_tile_row_offsets(cm, tile_row);
Dmitry Kovalev's avatar
Dmitry Kovalev committed
780
      for (tile_col = tile_cols - 1; tile_col >= 0; tile_col--) {
781
        vp9_get_tile_col_offsets(cm, tile_col);
782
        setup_token_decoder(data_ptr2[tile_row][tile_col], data_end,
783
                            data_end - data_ptr2[tile_row][tile_col],
784
                            &cm->error, &residual_bc);
785
        decode_tile(pbi, &residual_bc, tile_col);
Dmitry Kovalev's avatar
Dmitry Kovalev committed
786
        if (tile_row == tile_rows - 1 && tile_col == tile_cols - 1)
787
          bc_bak = residual_bc;
788
789
      }
    }
790
    residual_bc = bc_bak;
791
792
793
  } else {
    int has_more;

Dmitry Kovalev's avatar
Dmitry Kovalev committed
794
    for (tile_row = 0; tile_row < tile_rows; tile_row++) {
795
      vp9_get_tile_row_offsets(cm, tile_row);
Dmitry Kovalev's avatar
Dmitry Kovalev committed
796
      for (tile_col = 0; tile_col < tile_cols; tile_col++) {
797
798
        size_t size;

799
        vp9_get_tile_col_offsets(cm, tile_col);
800

Dmitry Kovalev's avatar
Dmitry Kovalev committed
801
        has_more = tile_col < tile_cols - 1 || tile_row < tile_rows - 1;
802
        if (has_more) {
803