/*
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

11
12
13
#include <assert.h>
#include <stdio.h>
#include <limits.h>
John Koleszar's avatar
John Koleszar committed
14

15
16
#include "vpx/vpx_encoder.h"
#include "vpx_mem/vpx_mem.h"
17
#include "vpx_ports/mem_ops.h"
18

19
#include "vp9/common/vp9_entropy.h"
20
#include "vp9/common/vp9_entropymode.h"
21
#include "vp9/common/vp9_entropymv.h"
22
#include "vp9/common/vp9_mvref_common.h"
23
#include "vp9/common/vp9_pragmas.h"
24
25
26
27
#include "vp9/common/vp9_pred_common.h"
#include "vp9/common/vp9_seg_common.h"
#include "vp9/common/vp9_systemdependent.h"
#include "vp9/common/vp9_tile_common.h"
28

Dmitry Kovalev's avatar
Dmitry Kovalev committed
29
#include "vp9/encoder/vp9_cost.h"
30
#include "vp9/encoder/vp9_bitstream.h"
31
32
#include "vp9/encoder/vp9_encodemv.h"
#include "vp9/encoder/vp9_mcomp.h"
33
#include "vp9/encoder/vp9_segmentation.h"
34
#include "vp9/encoder/vp9_subexp.h"
35
#include "vp9/encoder/vp9_tokenize.h"
36
37
#include "vp9/encoder/vp9_write_bit_buffer.h"

38
39
40
41
42
43
44
45
46
47
48
49
50
51
// Per-symbol token tables derived from the fixed VP9 coding trees.  They are
// filled in once by vp9_entropy_mode_init() and read by the write_* helpers
// below.
static struct vp9_token intra_mode_encodings[INTRA_MODES];
static struct vp9_token switchable_interp_encodings[SWITCHABLE_FILTERS];
static struct vp9_token partition_encodings[PARTITION_TYPES];
static struct vp9_token inter_mode_encodings[INTER_MODES];

// Build the token encodings from the shared coding trees.  Must be called
// once before any of the mode-writing helpers in this file are used.
// Note: "(void)" gives the definition a proper prototype; "()" would declare
// an old-style unprototyped function in C.
void vp9_entropy_mode_init(void) {
  vp9_tokens_from_tree(intra_mode_encodings, vp9_intra_mode_tree);
  vp9_tokens_from_tree(switchable_interp_encodings, vp9_switchable_interp_tree);
  vp9_tokens_from_tree(partition_encodings, vp9_partition_tree);
  vp9_tokens_from_tree(inter_mode_encodings, vp9_inter_mode_tree);
}

// Emit an intra prediction mode as a token of the intra-mode tree using the
// supplied probability set.
static void write_intra_mode(vp9_writer *w, MB_PREDICTION_MODE mode,
                             const vp9_prob *probs) {
  const struct vp9_token *const token = &intra_mode_encodings[mode];
  vp9_write_token(w, vp9_intra_mode_tree, probs, token);
}

// Emit an inter prediction mode as a token of the inter-mode tree.  The mode
// is re-based with INTER_OFFSET() because the token table only covers the
// inter modes.
static void write_inter_mode(vp9_writer *w, MB_PREDICTION_MODE mode,
                             const vp9_prob *probs) {
  assert(is_inter_mode(mode));
  {
    const struct vp9_token *const token =
        &inter_mode_encodings[INTER_OFFSET(mode)];
    vp9_write_token(w, vp9_inter_mode_tree, probs, token);
  }
}

62
63
// Write 'data' to the bit buffer using just enough bits to represent any
// value in [0, max].
static void encode_unsigned_max(struct vp9_write_bit_buffer *wb,
                                int data, int max) {
  const int nbits = get_unsigned_bits(max);
  vp9_wb_write_literal(wb, data, nbits);
}

67
68
69
70
71
72
// Derive per-branch counts for 'tree' from the observed token counts and
// conditionally signal a differential update for each of the n - 1 internal
// node probabilities.
static void prob_diff_update(const vp9_tree_index *tree,
                             vp9_prob probs[/*n - 1*/],
                             const unsigned int counts[/*n - 1*/],
                             int n, vp9_writer *w) {
  int i;
  // Fixed-size scratch space for the branch counts.
  unsigned int branch_ct[32][2];

  // Assuming max number of probabilities <= 32
  assert(n <= 32);

  vp9_tree_probs_from_distribution(tree, branch_ct, counts);
  for (i = 0; i < n - 1; ++i)
    vp9_cond_prob_diff_update(w, &probs[i], branch_ct[i]);
}

82
// Signal the transform size chosen for this block.  The size is coded as a
// sequence of up to three "is it larger than X?" bits, truncated by the
// largest transform the block size allows (max_tx_size).
static void write_selected_tx_size(const VP9_COMP *cpi,
                                   TX_SIZE tx_size, BLOCK_SIZE bsize,
                                   vp9_writer *w) {
  const TX_SIZE max_tx_size = max_txsize_lookup[bsize];
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  // Context-dependent probabilities for the tx-size bits.
  const vp9_prob *const tx_probs = get_tx_probs2(max_tx_size, xd,
                                                 &cpi->common.fc.tx_probs);
  vp9_write(w, tx_size != TX_4X4, tx_probs[0]);
  if (tx_size != TX_4X4 && max_tx_size >= TX_16X16) {
    vp9_write(w, tx_size != TX_8X8, tx_probs[1]);
    if (tx_size != TX_8X8 && max_tx_size >= TX_32X32)
      vp9_write(w, tx_size != TX_16X16, tx_probs[2]);
  }
}

97
static int write_skip(const VP9_COMP *cpi, int segment_id, const MODE_INFO *mi,
98
                      vp9_writer *w) {
99
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
100
  if (vp9_segfeature_active(&cpi->common.seg, segment_id, SEG_LVL_SKIP)) {
101
102
    return 1;
  } else {
103
    const int skip = mi->mbmi.skip;
104
105
    vp9_write(w, skip, vp9_get_skip_prob(&cpi->common, xd));
    return skip;
106
107
108
  }
}

109
// Conditionally signal differential updates for the per-context skip-flag
// probabilities based on this frame's skip counts.
static void update_skip_probs(VP9_COMMON *cm, vp9_writer *w) {
  int ctx;

  for (ctx = 0; ctx < SKIP_CONTEXTS; ++ctx)
    vp9_cond_prob_diff_update(w, &cm->fc.skip_probs[ctx],
                              cm->counts.skip[ctx]);
}

116
// Conditionally signal differential updates for the switchable interpolation
// filter probabilities, one tree update per filter context.
static void update_switchable_interp_probs(VP9_COMMON *cm, vp9_writer *w) {
  int ctx;

  for (ctx = 0; ctx < SWITCHABLE_FILTER_CONTEXTS; ++ctx) {
    prob_diff_update(vp9_switchable_interp_tree,
                     cm->fc.switchable_interp_prob[ctx],
                     cm->counts.switchable_interp[ctx], SWITCHABLE_FILTERS, w);
  }
}

124
125
// Write the coefficient tokens of one block into the bitstream, advancing
// *tp past the consumed tokens (including a trailing EOSB_TOKEN, if present).
static void pack_mb_tokens(vp9_writer *w,
                           TOKENEXTRA **tp, const TOKENEXTRA *stop) {
  TOKENEXTRA *p = *tp;

  while (p < stop && p->token != EOSB_TOKEN) {
    const int t = p->token;
    const struct vp9_token *const a = &vp9_coef_encodings[t];
    const vp9_extra_bit *const b = &vp9_extra_bits[t];
    int i = 0;          // current tree node index
    int v = a->value;   // token bit pattern
    int n = a->len;     // number of bits in the pattern

    /* skip one or two nodes */
    // skip_eob_node lets the encoder start partway down the coefficient
    // tree when the EOB decision is already known from context.
    if (p->skip_eob_node) {
      n -= p->skip_eob_node;
      i = 2 * p->skip_eob_node;
    }

    // TODO(jbb): expanding this can lead to big gains.  It allows
    // much better branch prediction and would enable us to avoid numerous
    // lookups and compares.

    // If we have a token that's in the constrained set, the coefficient tree
    // is split into two treed writes.  The first treed write takes care of the
    // unconstrained nodes.  The second treed write takes care of the
    // constrained nodes.
    if (t >= TWO_TOKEN && t < EOB_TOKEN) {
      int len = UNCONSTRAINED_NODES - p->skip_eob_node;
      int bits = v >> (n - len);
      vp9_write_tree(w, vp9_coef_tree, p->context_tree, bits, len, i);
      // Constrained part uses the Pareto-derived probabilities indexed by
      // the pivot-node probability of this context.
      vp9_write_tree(w, vp9_coef_con_tree,
                     vp9_pareto8_full[p->context_tree[PIVOT_NODE] - 1],
                     v, n - len, 0);
    } else {
      vp9_write_tree(w, vp9_coef_tree, p->context_tree, v, n, i);
    }

    // Tokens with a base value carry extra magnitude bits plus a sign bit.
    if (b->base_val) {
      const int e = p->extra, l = b->len;

      if (l) {
        const unsigned char *pb = b->prob;
        // Locals deliberately shadow the outer v/n/i for the extra-bit walk.
        int v = e >> 1;
        int n = l;              /* number of bits in v, assumed nonzero */
        int i = 0;

        do {
          const int bb = (v >> --n) & 1;
          vp9_write(w, bb, pb[i >> 1]);
          i = b->tree[i + bb];
        } while (n);
      }

      // Low bit of 'extra' is the sign, coded with probability 1/2.
      vp9_write_bit(w, e & 1);
    }

    ++p;
  }

  // Consume the EOSB marker if the loop stopped on one.
  // NOTE(review): if the loop exits because p == stop, this still reads
  // p->token one past the last consumed token — presumably 'stop' points at
  // a valid sentinel in the token buffer; confirm against the callers.
  *tp = p + (p->token == EOSB_TOKEN);
}

185
// Write the segment id for a block when the segmentation map is enabled and
// being updated; otherwise nothing is coded.  The segment tree has depth 3
// (8 possible segments), hence the literal 3.
static void write_segment_id(vp9_writer *w, const struct segmentation *seg,
                             int segment_id) {
  if (seg->enabled && seg->update_map)
    vp9_write_tree(w, vp9_segment_tree, seg->tree_probs, segment_id, 3, 0);
}

Paul Wilkins's avatar
Paul Wilkins committed
191
// This function encodes the reference frame
static void write_ref_frames(const VP9_COMP *cpi, vp9_writer *w) {
  const VP9_COMMON *const cm = &cpi->common;
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  const MB_MODE_INFO *const mbmi = &xd->mi_8x8[0]->mbmi;
  const int is_compound = has_second_ref(mbmi);
  const int segment_id = mbmi->segment_id;

  // If segment level coding of this signal is disabled...
  // or the segment allows multiple reference frame options
  if (vp9_segfeature_active(&cm->seg, segment_id, SEG_LVL_REF_FRAME)) {
    // Segment dictates the reference frame: nothing is coded, but the
    // encoder's choice must match the segment data.
    assert(!is_compound);
    assert(mbmi->ref_frame[0] ==
               vp9_get_segdata(&cm->seg, segment_id, SEG_LVL_REF_FRAME));
  } else {
    // does the feature use compound prediction or not
    // (if not specified at the frame/segment level)
    if (cm->reference_mode == REFERENCE_MODE_SELECT) {
      vp9_write(w, is_compound, vp9_get_reference_mode_prob(cm, xd));
    } else {
      assert(!is_compound == (cm->reference_mode == SINGLE_REFERENCE));
    }

    if (is_compound) {
      // Compound: one bit selects which fixed reference pair member varies.
      vp9_write(w, mbmi->ref_frame[0] == GOLDEN_FRAME,
                vp9_get_pred_prob_comp_ref_p(cm, xd));
    } else {
      // Single reference: up to two bits pick LAST/GOLDEN/ALTREF.
      const int bit0 = mbmi->ref_frame[0] != LAST_FRAME;
      vp9_write(w, bit0, vp9_get_pred_prob_single_ref_p1(cm, xd));
      if (bit0) {
        const int bit1 = mbmi->ref_frame[0] != GOLDEN_FRAME;
        vp9_write(w, bit1, vp9_get_pred_prob_single_ref_p2(cm, xd));
      }
    }
  }
}
John Koleszar's avatar
John Koleszar committed
227

228
229
// Write the mode info (segment id, skip flag, reference frames, prediction
// modes, interpolation filter and motion vectors) for one block of an
// inter-coded frame.
static void pack_inter_mode_mvs(VP9_COMP *cpi, const MODE_INFO *mi,
                                vp9_writer *w) {
  VP9_COMMON *const cm = &cpi->common;
  const nmv_context *nmvc = &cm->fc.nmvc;
  const MACROBLOCK *const x = &cpi->mb;
  const MACROBLOCKD *const xd = &x->e_mbd;
  const struct segmentation *const seg = &cm->seg;
  const MB_MODE_INFO *const mbmi = &mi->mbmi;
  const MB_PREDICTION_MODE mode = mbmi->mode;
  const int segment_id = mbmi->segment_id;
  const BLOCK_SIZE bsize = mbmi->sb_type;
  const int allow_hp = cm->allow_high_precision_mv;
  const int is_inter = is_inter_block(mbmi);
  const int is_compound = has_second_ref(mbmi);
  int skip, ref;

  // Segment id: either predicted temporally (one flag bit, with an explicit
  // id only on misprediction) or coded directly.
  if (seg->update_map) {
    if (seg->temporal_update) {
      const int pred_flag = mbmi->seg_id_predicted;
      vp9_prob pred_prob = vp9_get_pred_prob_seg_id(seg, xd);
      vp9_write(w, pred_flag, pred_prob);
      if (!pred_flag)
        write_segment_id(w, seg, segment_id);
    } else {
      write_segment_id(w, seg, segment_id);
    }
  }

  skip = write_skip(cpi, segment_id, mi, w);

  // Intra/inter flag, unless the segment fixes the reference frame.
  if (!vp9_segfeature_active(seg, segment_id, SEG_LVL_REF_FRAME))
    vp9_write(w, is_inter, vp9_get_intra_inter_prob(cm, xd));

  // Transform size is only coded when selectable per block and not implied
  // by a skipped inter block.
  if (bsize >= BLOCK_8X8 && cm->tx_mode == TX_MODE_SELECT &&
      !(is_inter &&
        (skip || vp9_segfeature_active(seg, segment_id, SEG_LVL_SKIP)))) {
    write_selected_tx_size(cpi, mbmi->tx_size, bsize, w);
  }

  if (!is_inter) {
    // Intra block: one luma mode, or one per 4x4 sub-block below 8x8.
    if (bsize >= BLOCK_8X8) {
      write_intra_mode(w, mode, cm->fc.y_mode_prob[size_group_lookup[bsize]]);
    } else {
      int idx, idy;
      const int num_4x4_w = num_4x4_blocks_wide_lookup[bsize];
      const int num_4x4_h = num_4x4_blocks_high_lookup[bsize];
      for (idy = 0; idy < 2; idy += num_4x4_h) {
        for (idx = 0; idx < 2; idx += num_4x4_w) {
          const MB_PREDICTION_MODE b_mode = mi->bmi[idy * 2 + idx].as_mode;
          write_intra_mode(w, b_mode, cm->fc.y_mode_prob[0]);
        }
      }
    }
    write_intra_mode(w, mbmi->uv_mode, cm->fc.uv_mode_prob[mode]);
  } else {
    // Inter block: reference frames, then mode(s), filter, and MVs.
    const int mode_ctx = mbmi->mode_context[mbmi->ref_frame[0]];
    const vp9_prob *const inter_probs = cm->fc.inter_mode_probs[mode_ctx];
    write_ref_frames(cpi, w);

    // If segment skip is not enabled code the mode.
    if (!vp9_segfeature_active(seg, segment_id, SEG_LVL_SKIP)) {
      if (bsize >= BLOCK_8X8) {
        write_inter_mode(w, mode, inter_probs);
        // Counts are accumulated here so later probability updates can use
        // this frame's statistics.
        ++cm->counts.inter_mode[mode_ctx][INTER_OFFSET(mode)];
      }
    }

    if (cm->interp_filter == SWITCHABLE) {
      const int ctx = vp9_get_pred_context_switchable_interp(xd);
      vp9_write_token(w, vp9_switchable_interp_tree,
                      cm->fc.switchable_interp_prob[ctx],
                      &switchable_interp_encodings[mbmi->interp_filter]);
    } else {
      assert(mbmi->interp_filter == cm->interp_filter);
    }

    if (bsize < BLOCK_8X8) {
      // Sub-8x8: a mode (and possibly MVs) per 4x4 sub-block.
      const int num_4x4_w = num_4x4_blocks_wide_lookup[bsize];
      const int num_4x4_h = num_4x4_blocks_high_lookup[bsize];
      int idx, idy;
      for (idy = 0; idy < 2; idy += num_4x4_h) {
        for (idx = 0; idx < 2; idx += num_4x4_w) {
          const int j = idy * 2 + idx;
          const MB_PREDICTION_MODE b_mode = mi->bmi[j].as_mode;
          write_inter_mode(w, b_mode, inter_probs);
          ++cm->counts.inter_mode[mode_ctx][INTER_OFFSET(b_mode)];
          if (b_mode == NEWMV) {
            for (ref = 0; ref < 1 + is_compound; ++ref)
              vp9_encode_mv(cpi, w, &mi->bmi[j].as_mv[ref].as_mv,
                            &mbmi->ref_mvs[mbmi->ref_frame[ref]][0].as_mv,
                            nmvc, allow_hp);
          }
        }
      }
    } else {
      // 8x8 and larger: a single MV per reference for NEWMV blocks.
      if (mode == NEWMV) {
        for (ref = 0; ref < 1 + is_compound; ++ref)
          vp9_encode_mv(cpi, w, &mbmi->mv[ref].as_mv,
                        &mbmi->ref_mvs[mbmi->ref_frame[ref]][0].as_mv, nmvc,
                        allow_hp);
      }
    }
  }
}
332

333
// Write the mode info for one block of an intra-only (key) frame: segment
// id, skip flag, transform size and intra modes coded with the fixed
// key-frame probabilities derived from the above/left neighbors.
static void write_mb_modes_kf(const VP9_COMP *cpi, MODE_INFO **mi_8x8,
                              vp9_writer *w) {
  const VP9_COMMON *const cm = &cpi->common;
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  const struct segmentation *const seg = &cm->seg;
  const MODE_INFO *const mi = mi_8x8[0];
  // Neighboring mode info used to select key-frame intra mode contexts.
  const MODE_INFO *const above_mi = mi_8x8[-xd->mode_info_stride];
  const MODE_INFO *const left_mi = xd->left_available ? mi_8x8[-1] : NULL;
  const MB_MODE_INFO *const mbmi = &mi->mbmi;
  const BLOCK_SIZE bsize = mbmi->sb_type;

  if (seg->update_map)
    write_segment_id(w, seg, mbmi->segment_id);

  write_skip(cpi, mbmi->segment_id, mi, w);

  if (bsize >= BLOCK_8X8 && cm->tx_mode == TX_MODE_SELECT)
    write_selected_tx_size(cpi, mbmi->tx_size, bsize, w);

  if (bsize >= BLOCK_8X8) {
    write_intra_mode(w, mbmi->mode, get_y_mode_probs(mi, above_mi, left_mi, 0));
  } else {
    // Sub-8x8: one luma mode per 4x4 sub-block, each with its own context.
    const int num_4x4_w = num_4x4_blocks_wide_lookup[bsize];
    const int num_4x4_h = num_4x4_blocks_high_lookup[bsize];
    int idx, idy;

    for (idy = 0; idy < 2; idy += num_4x4_h) {
      for (idx = 0; idx < 2; idx += num_4x4_w) {
        const int block = idy * 2 + idx;
        write_intra_mode(w, mi->bmi[block].as_mode,
                         get_y_mode_probs(mi, above_mi, left_mi, block));
      }
    }
  }

  write_intra_mode(w, mbmi->uv_mode, vp9_kf_uv_mode_prob[mbmi->mode]);
}

James Zern's avatar
James Zern committed
371
// Write the mode info and coefficient tokens for a single block at
// (mi_row, mi_col), dispatching to the key-frame or inter-frame mode writer.
static void write_modes_b(VP9_COMP *cpi, const TileInfo *const tile,
                          vp9_writer *w, TOKENEXTRA **tok, TOKENEXTRA *tok_end,
                          int mi_row, int mi_col) {
  VP9_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  MODE_INFO *m;

  // Point the decoder-mirror context at this block's mode info.
  xd->mi_8x8 = cm->mi_grid_visible + (mi_row * cm->mode_info_stride + mi_col);
  m = xd->mi_8x8[0];

  set_mi_row_col(xd, tile,
                 mi_row, num_8x8_blocks_high_lookup[m->mbmi.sb_type],
                 mi_col, num_8x8_blocks_wide_lookup[m->mbmi.sb_type],
                 cm->mi_rows, cm->mi_cols);
  if (frame_is_intra_only(cm)) {
    write_mb_modes_kf(cpi, xd->mi_8x8, w);
  } else {
    pack_inter_mode_mvs(cpi, m, w);
  }

  assert(*tok < tok_end);
  pack_mb_tokens(w, tok, tok_end);
}

395
396
// Signal the partition type of a block.  When the block extends past the
// frame edge some partition choices are implied, so fewer (or zero) bits
// are coded.  'hbs' is half the block size in mode-info units.
static void write_partition(VP9_COMMON *cm, MACROBLOCKD *xd,
                            int hbs, int mi_row, int mi_col,
                            PARTITION_TYPE p, BLOCK_SIZE bsize, vp9_writer *w) {
  const int ctx = partition_plane_context(xd, mi_row, mi_col, bsize);
  const vp9_prob *const probs = get_partition_probs(cm, ctx);
  const int has_rows = (mi_row + hbs) < cm->mi_rows;
  const int has_cols = (mi_col + hbs) < cm->mi_cols;

  if (has_rows && has_cols) {
    // Fully inside the frame: full partition token.
    vp9_write_token(w, vp9_partition_tree, probs, &partition_encodings[p]);
  } else if (!has_rows && has_cols) {
    // Bottom edge: only HORZ or SPLIT are legal; one bit distinguishes them.
    assert(p == PARTITION_SPLIT || p == PARTITION_HORZ);
    vp9_write(w, p == PARTITION_SPLIT, probs[1]);
  } else if (has_rows && !has_cols) {
    // Right edge: only VERT or SPLIT are legal.
    assert(p == PARTITION_SPLIT || p == PARTITION_VERT);
    vp9_write(w, p == PARTITION_SPLIT, probs[2]);
  } else {
    // Corner: SPLIT is implied, nothing is coded.
    assert(p == PARTITION_SPLIT);
  }
}

416
417
// Recursively write the partition tree and block contents of a superblock
// (or sub-block) rooted at (mi_row, mi_col) with size 'bsize'.
static void write_modes_sb(VP9_COMP *cpi,
                           const TileInfo *const tile,
                           vp9_writer *w, TOKENEXTRA **tok, TOKENEXTRA *tok_end,
                           int mi_row, int mi_col, BLOCK_SIZE bsize) {
  VP9_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->mb.e_mbd;

  const int bsl = b_width_log2(bsize);
  // Half the block size in mode-info (8x8) units.
  const int bs = (1 << bsl) / 4;
  PARTITION_TYPE partition;
  BLOCK_SIZE subsize;
  MODE_INFO *m = cm->mi_grid_visible[mi_row * cm->mode_info_stride + mi_col];

  // Blocks entirely outside the frame are not coded.
  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols)
    return;

  partition = partition_lookup[bsl][m->mbmi.sb_type];
  write_partition(cm, xd, bs, mi_row, mi_col, partition, bsize, w);
  subsize = get_subsize(bsize, partition);
  if (subsize < BLOCK_8X8) {
    // Sub-8x8 partitions are coded as a single block.
    write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
  } else {
    switch (partition) {
      case PARTITION_NONE:
        write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
        break;
      case PARTITION_HORZ:
        write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
        // The second half is only coded if it lies inside the frame.
        if (mi_row + bs < cm->mi_rows)
          write_modes_b(cpi, tile, w, tok, tok_end, mi_row + bs, mi_col);
        break;
      case PARTITION_VERT:
        write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
        if (mi_col + bs < cm->mi_cols)
          write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + bs);
        break;
      case PARTITION_SPLIT:
        // Recurse into the four quadrants.
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, subsize);
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col + bs,
                       subsize);
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row + bs, mi_col,
                       subsize);
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row + bs, mi_col + bs,
                       subsize);
        break;
      default:
        assert(0);
    }
  }

  // update partition context
  if (bsize >= BLOCK_8X8 &&
      (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT))
    update_partition_context(xd, mi_row, mi_col, subsize, bsize);
}

472
473
// Write all superblocks of one tile, row by row, resetting the left-edge
// segmentation context at the start of each superblock row.
static void write_modes(VP9_COMP *cpi,
                        const TileInfo *const tile,
                        vp9_writer *w, TOKENEXTRA **tok, TOKENEXTRA *tok_end) {
  int mi_row, mi_col;

  for (mi_row = tile->mi_row_start; mi_row < tile->mi_row_end;
       mi_row += MI_BLOCK_SIZE) {
    vp9_zero(cpi->mb.e_mbd.left_seg_context);
    for (mi_col = tile->mi_col_start; mi_col < tile->mi_col_end;
         mi_col += MI_BLOCK_SIZE)
      write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col,
                     BLOCK_64X64);
  }
}
486

487
488
489
// Convert this frame's raw coefficient token counts into per-branch counts
// and candidate model probabilities for the given transform size.  The
// results feed update_coef_probs_common().
static void build_tree_distribution(VP9_COMP *cpi, TX_SIZE tx_size) {
  vp9_coeff_probs_model *coef_probs = cpi->frame_coef_probs[tx_size];
  vp9_coeff_count *coef_counts = cpi->coef_counts[tx_size];
  unsigned int (*eob_branch_ct)[REF_TYPES][COEF_BANDS][COEFF_CONTEXTS] =
      cpi->common.counts.eob_branch[tx_size];
  vp9_coeff_stats *coef_branch_ct = cpi->frame_branch_ct[tx_size];
  int i, j, k, l, m;

  for (i = 0; i < PLANE_TYPES; ++i) {
    for (j = 0; j < REF_TYPES; ++j) {
      for (k = 0; k < COEF_BANDS; ++k) {
        for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
          vp9_tree_probs_from_distribution(vp9_coef_tree,
                                           coef_branch_ct[i][j][k][l],
                                           coef_counts[i][j][k][l]);
          // The EOB branch count is tracked separately; derive the
          // "not EOB" count from it.
          coef_branch_ct[i][j][k][l][0][1] = eob_branch_ct[i][j][k][l] -
                                             coef_branch_ct[i][j][k][l][0][0];
          // Only the unconstrained nodes get explicit model probabilities.
          for (m = 0; m < UNCONSTRAINED_NODES; ++m)
            coef_probs[i][j][k][l][m] = get_binary_prob(
                                            coef_branch_ct[i][j][k][l][m][0],
                                            coef_branch_ct[i][j][k][l][m][1]);
        }
      }
    }
  }
}

514
515
516
517
518
519
// Signal coefficient probability updates for one transform size.  Three
// strategies are selectable via sf.use_fast_coef_updates:
//   0 - exhaustive: a dry-run pass computes the total rate savings, then
//       updates are coded only if they pay for themselves;
//   1/2 - fast: skip the dry run and optionally restrict the searched
//       bands/contexts (mode 2 halves both) to save encoder time.
static void update_coef_probs_common(vp9_writer* const bc, VP9_COMP *cpi,
                                     TX_SIZE tx_size) {
  vp9_coeff_probs_model *new_frame_coef_probs = cpi->frame_coef_probs[tx_size];
  vp9_coeff_probs_model *old_frame_coef_probs =
      cpi->common.fc.coef_probs[tx_size];
  vp9_coeff_stats *frame_branch_ct = cpi->frame_branch_ct[tx_size];
  const vp9_prob upd = DIFF_UPDATE_PROB;
  const int entropy_nodes_update = UNCONSTRAINED_NODES;
  int i, j, k, l, t;
  switch (cpi->sf.use_fast_coef_updates) {
    case 0: {
      /* dry run to see if there is any update at all needed */
      int savings = 0;
      int update[2] = {0, 0};
      for (i = 0; i < PLANE_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
              for (t = 0; t < entropy_nodes_update; ++t) {
                vp9_prob newp = new_frame_coef_probs[i][j][k][l][t];
                const vp9_prob oldp = old_frame_coef_probs[i][j][k][l][t];
                int s;
                int u = 0;
                // The pivot node uses the model-based search since it also
                // determines the constrained-node probabilities.
                if (t == PIVOT_NODE)
                  s = vp9_prob_diff_update_savings_search_model(
                      frame_branch_ct[i][j][k][l][0],
                      old_frame_coef_probs[i][j][k][l], &newp, upd);
                else
                  s = vp9_prob_diff_update_savings_search(
                      frame_branch_ct[i][j][k][l][t], oldp, &newp, upd);
                if (s > 0 && newp != oldp)
                  u = 1;
                // Each node costs one "update?" flag whether or not it is
                // updated; account for that in the savings total.
                if (u)
                  savings += s - (int)(vp9_cost_zero(upd));
                else
                  savings -= (int)(vp9_cost_zero(upd));
                update[u]++;
              }
            }
          }
        }
      }

      // printf("Update %d %d, savings %d\n", update[0], update[1], savings);
      /* Is coef updated at all */
      if (update[1] == 0 || savings < 0) {
        vp9_write_bit(bc, 0);
        return;
      }
      vp9_write_bit(bc, 1);
      for (i = 0; i < PLANE_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
              // calc probs and branch cts for this frame only
              for (t = 0; t < entropy_nodes_update; ++t) {
                vp9_prob newp = new_frame_coef_probs[i][j][k][l][t];
                vp9_prob *oldp = old_frame_coef_probs[i][j][k][l] + t;
                // Shadows the outer 'upd' with the same constant value.
                const vp9_prob upd = DIFF_UPDATE_PROB;
                int s;
                int u = 0;
                if (t == PIVOT_NODE)
                  s = vp9_prob_diff_update_savings_search_model(
                      frame_branch_ct[i][j][k][l][0],
                      old_frame_coef_probs[i][j][k][l], &newp, upd);
                else
                  s = vp9_prob_diff_update_savings_search(
                      frame_branch_ct[i][j][k][l][t],
                      *oldp, &newp, upd);
                if (s > 0 && newp != *oldp)
                  u = 1;
                vp9_write(bc, u, upd);
                if (u) {
                  /* send/use new probability */
                  vp9_write_prob_diff_update(bc, newp, *oldp);
                  *oldp = newp;
                }
              }
            }
          }
        }
      }
      return;
    }

    case 1:
    case 2: {
      // Mode 2 searches only the lower half of contexts and bands.
      const int prev_coef_contexts_to_update =
          cpi->sf.use_fast_coef_updates == 2 ? COEFF_CONTEXTS >> 1
                                             : COEFF_CONTEXTS;
      const int coef_band_to_update =
          cpi->sf.use_fast_coef_updates == 2 ? COEF_BANDS >> 1
                                             : COEF_BANDS;
      int updates = 0;
      int noupdates_before_first = 0;
      for (i = 0; i < PLANE_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
              // calc probs and branch cts for this frame only
              for (t = 0; t < entropy_nodes_update; ++t) {
                vp9_prob newp = new_frame_coef_probs[i][j][k][l][t];
                vp9_prob *oldp = old_frame_coef_probs[i][j][k][l] + t;
                int s;
                int u = 0;
                if (l >= prev_coef_contexts_to_update ||
                    k >= coef_band_to_update) {
                  u = 0;
                } else {
                  if (t == PIVOT_NODE)
                    s = vp9_prob_diff_update_savings_search_model(
                        frame_branch_ct[i][j][k][l][0],
                        old_frame_coef_probs[i][j][k][l], &newp, upd);
                  else
                    s = vp9_prob_diff_update_savings_search(
                        frame_branch_ct[i][j][k][l][t],
                        *oldp, &newp, upd);
                  if (s > 0 && newp != *oldp)
                    u = 1;
                }
                updates += u;
                // Defer writing anything until the first real update; if
                // none occurs, only the single "no updates" bit is coded.
                if (u == 0 && updates == 0) {
                  noupdates_before_first++;
                  continue;
                }
                if (u == 1 && updates == 1) {
                  int v;
                  // first update
                  vp9_write_bit(bc, 1);
                  // Back-fill the deferred "no update" flags.
                  for (v = 0; v < noupdates_before_first; ++v)
                    vp9_write(bc, 0, upd);
                }
                vp9_write(bc, u, upd);
                if (u) {
                  /* send/use new probability */
                  vp9_write_prob_diff_update(bc, newp, *oldp);
                  *oldp = newp;
                }
              }
            }
          }
        }
      }
      if (updates == 0) {
        vp9_write_bit(bc, 0);  // no updates
      }
      return;
    }

    default:
      assert(0);
  }
}
John Koleszar's avatar
John Koleszar committed
667

668
// Build this frame's coefficient statistics for every transform size, then
// signal probability updates for each size allowed by the frame's tx_mode.
static void update_coef_probs(VP9_COMP *cpi, vp9_writer* w) {
  const TX_MODE tx_mode = cpi->common.tx_mode;
  const TX_SIZE max_tx_size = tx_mode_to_biggest_tx_size[tx_mode];
  TX_SIZE tx_size;
  // Reset floating-point state before the rate computations.
  vp9_clear_system_state();

  for (tx_size = TX_4X4; tx_size <= TX_32X32; ++tx_size)
    build_tree_distribution(cpi, tx_size);

  for (tx_size = TX_4X4; tx_size <= max_tx_size; ++tx_size)
    update_coef_probs_common(w, cpi, tx_size);
}
680

681
static void encode_loopfilter(struct loopfilter *lf,
682
                              struct vp9_write_bit_buffer *wb) {
683
684
  int i;

685
  // Encode the loop filter level and type
686
687
  vp9_wb_write_literal(wb, lf->filter_level, 6);
  vp9_wb_write_literal(wb, lf->sharpness_level, 3);
688

689
690
  // Write out loop filter deltas applied at the MB level based on mode or
  // ref frame (if they are enabled).
691
  vp9_wb_write_bit(wb, lf->mode_ref_delta_enabled);
692

693
694
695
  if (lf->mode_ref_delta_enabled) {
    vp9_wb_write_bit(wb, lf->mode_ref_delta_update);
    if (lf->mode_ref_delta_update) {
696
      for (i = 0; i < MAX_REF_LF_DELTAS; i++) {
697
        const int delta = lf->ref_deltas[i];
698
699
700
        const int changed = delta != lf->last_ref_deltas[i];
        vp9_wb_write_bit(wb, changed);
        if (changed) {
701
          lf->last_ref_deltas[i] = delta;
702
703
          vp9_wb_write_literal(wb, abs(delta) & 0x3F, 6);
          vp9_wb_write_bit(wb, delta < 0);
704
705
706
707
        }
      }

      for (i = 0; i < MAX_MODE_LF_DELTAS; i++) {
708
        const int delta = lf->mode_deltas[i];
709
710
711
        const int changed = delta != lf->last_mode_deltas[i];
        vp9_wb_write_bit(wb, changed);
        if (changed) {
712
          lf->last_mode_deltas[i] = delta;
713
714
          vp9_wb_write_literal(wb, abs(delta) & 0x3F, 6);
          vp9_wb_write_bit(wb, delta < 0);
715
716
717
718
719
720
        }
      }
    }
  }
}

721
// Write an optional quantizer delta: a presence bit, then (when non-zero)
// a 4-bit magnitude followed by a sign bit.
static void write_delta_q(struct vp9_write_bit_buffer *wb, int delta_q) {
  if (delta_q == 0) {
    vp9_wb_write_bit(wb, 0);  // delta not coded
    return;
  }
  vp9_wb_write_bit(wb, 1);
  vp9_wb_write_literal(wb, abs(delta_q), 4);
  vp9_wb_write_bit(wb, delta_q < 0);
}

731
732
733
734
735
736
// Write the quantization parameters into the uncompressed header:
// the frame's base quantizer index, then the three optional delta-Q
// values (luma DC, chroma DC, chroma AC), each coded by write_delta_q.
static void encode_quantization(VP9_COMMON *cm,
                                struct vp9_write_bit_buffer *wb) {
  vp9_wb_write_literal(wb, cm->base_qindex, QINDEX_BITS);
  write_delta_q(wb, cm->y_dc_delta_q);
  write_delta_q(wb, cm->uv_dc_delta_q);
  write_delta_q(wb, cm->uv_ac_delta_q);
}


740
// Write the segmentation parameters into the uncompressed header.
// Order of the emitted bits: enabled flag; segment-map update (tree probs,
// temporal-update flag and prediction probs); then the per-segment feature
// data (abs/delta flag and, per active feature, its value).
static void encode_segmentation(VP9_COMP *cpi,
                                struct vp9_write_bit_buffer *wb) {
  struct segmentation *const seg = &cpi->common.seg;
  int sid, f;

  vp9_wb_write_bit(wb, seg->enabled);
  if (!seg->enabled)
    return;

  // Segmentation map
  vp9_wb_write_bit(wb, seg->update_map);
  if (seg->update_map) {
    // Pick spatial vs. temporal map coding and fill in the probabilities.
    vp9_choose_segmap_coding_method(cpi);

    // Tree probabilities for unpredicted segment ids: each slot is either
    // "use default" (MAX_PROB) or an explicit 8-bit value.
    for (sid = 0; sid < SEG_TREE_PROBS; sid++) {
      const int p = seg->tree_probs[sid];
      if (p != MAX_PROB) {
        vp9_wb_write_bit(wb, 1);
        vp9_wb_write_literal(wb, p, 8);
      } else {
        vp9_wb_write_bit(wb, 0);
      }
    }

    // Temporal prediction of the map, plus its per-context probabilities.
    vp9_wb_write_bit(wb, seg->temporal_update);
    if (seg->temporal_update) {
      for (sid = 0; sid < PREDICTION_PROBS; sid++) {
        const int p = seg->pred_probs[sid];
        if (p != MAX_PROB) {
          vp9_wb_write_bit(wb, 1);
          vp9_wb_write_literal(wb, p, 8);
        } else {
          vp9_wb_write_bit(wb, 0);
        }
      }
    }
  }

  // Segmentation feature data
  vp9_wb_write_bit(wb, seg->update_data);
  if (!seg->update_data)
    return;

  vp9_wb_write_bit(wb, seg->abs_delta);

  for (sid = 0; sid < MAX_SEGMENTS; sid++) {
    for (f = 0; f < SEG_LVL_MAX; f++) {
      const int active = vp9_segfeature_active(seg, sid, f);
      vp9_wb_write_bit(wb, active);
      if (active) {
        const int data = vp9_get_segdata(seg, sid, f);
        const int data_max = vp9_seg_feature_data_max(f);

        if (vp9_is_segfeature_signed(f)) {
          // Signed features: magnitude then sign.
          encode_unsigned_max(wb, abs(data), data_max);
          vp9_wb_write_bit(wb, data < 0);
        } else {
          encode_unsigned_max(wb, data, data_max);
        }
      }
    }
  }
}

802

803
// Write the transform-mode selection and, when per-block size selection is
// enabled, the conditional diff updates for the tx-size probabilities.
static void encode_txfm_probs(VP9_COMMON *cm, vp9_writer *w) {
  // Mode: a 2-bit literal clamped at ALLOW_32X32, plus one extra bit to
  // distinguish ALLOW_32X32 from TX_MODE_SELECT.
  vp9_write_literal(w, MIN(cm->tx_mode, ALLOW_32X32), 2);
  if (cm->tx_mode >= ALLOW_32X32)
    vp9_write_bit(w, cm->tx_mode == TX_MODE_SELECT);

  // Probabilities: only sent when the per-block select mode is active.
  if (cm->tx_mode == TX_MODE_SELECT) {
    int ctx, b;
    unsigned int branch_8x8[TX_SIZES - 3][2];
    unsigned int branch_16x16[TX_SIZES - 2][2];
    unsigned int branch_32x32[TX_SIZES - 1][2];

    // 8x8-class probabilities, one branch per context.
    for (ctx = 0; ctx < TX_SIZE_CONTEXTS; ctx++) {
      tx_counts_to_branch_counts_8x8(cm->counts.tx.p8x8[ctx], branch_8x8);
      for (b = 0; b < TX_SIZES - 3; b++)
        vp9_cond_prob_diff_update(w, &cm->fc.tx_probs.p8x8[ctx][b],
                                  branch_8x8[b]);
    }

    // 16x16-class probabilities.
    for (ctx = 0; ctx < TX_SIZE_CONTEXTS; ctx++) {
      tx_counts_to_branch_counts_16x16(cm->counts.tx.p16x16[ctx],
                                       branch_16x16);
      for (b = 0; b < TX_SIZES - 2; b++)
        vp9_cond_prob_diff_update(w, &cm->fc.tx_probs.p16x16[ctx][b],
                                  branch_16x16[b]);
    }

    // 32x32-class probabilities.
    for (ctx = 0; ctx < TX_SIZE_CONTEXTS; ctx++) {
      tx_counts_to_branch_counts_32x32(cm->counts.tx.p32x32[ctx],
                                       branch_32x32);
      for (b = 0; b < TX_SIZES - 1; b++)
        vp9_cond_prob_diff_update(w, &cm->fc.tx_probs.p32x32[ctx][b],
                                  branch_32x32[b]);
    }
  }
}

839
840
841
// Write the frame interpolation filter: a one-bit "switchable" flag, and
// for a fixed filter a 2-bit literal remapped from the enum ordering to
// the value ordering used by the bitstream.
static void write_interp_filter(INTERP_FILTER filter,
                                struct vp9_write_bit_buffer *wb) {
  static const int filter_to_literal[] = { 1, 0, 2, 3 };

  if (filter == SWITCHABLE) {
    vp9_wb_write_bit(wb, 1);
  } else {
    vp9_wb_write_bit(wb, 0);
    vp9_wb_write_literal(wb, filter_to_literal[filter], 2);
  }
}

848
849
static void fix_interp_filter(VP9_COMMON *cm) {
  if (cm->interp_filter == SWITCHABLE) {
850
    // Check to see if only one of the filters is actually used
851
    int count[SWITCHABLE_FILTERS];
852
    int i, j, c = 0;