vp9_bitstream.c 42.6 KB
Newer Older
John Koleszar's avatar
John Koleszar committed
1
/*
2
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
John Koleszar's avatar
John Koleszar committed
3
 *
4
 *  Use of this source code is governed by a BSD-style license
5
6
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
7
 *  in the file PATENTS.  All contributing project authors may
8
 *  be found in the AUTHORS file in the root of the source tree.
John Koleszar's avatar
John Koleszar committed
9
10
 */

11
12
13
#include <assert.h>
#include <stdio.h>
#include <limits.h>
John Koleszar's avatar
John Koleszar committed
14

15
16
#include "vpx/vpx_encoder.h"
#include "vpx_mem/vpx_mem.h"
17
#include "vpx_ports/mem_ops.h"
18

19
#include "vp9/common/vp9_entropy.h"
20
#include "vp9/common/vp9_entropymode.h"
21
#include "vp9/common/vp9_entropymv.h"
22
#include "vp9/common/vp9_mvref_common.h"
23
#include "vp9/common/vp9_pragmas.h"
24
25
26
27
#include "vp9/common/vp9_pred_common.h"
#include "vp9/common/vp9_seg_common.h"
#include "vp9/common/vp9_systemdependent.h"
#include "vp9/common/vp9_tile_common.h"
28
29

#include "vp9/encoder/vp9_bitstream.h"
30
31
#include "vp9/encoder/vp9_encodemv.h"
#include "vp9/encoder/vp9_mcomp.h"
32
#include "vp9/encoder/vp9_segmentation.h"
33
#include "vp9/encoder/vp9_subexp.h"
34
#include "vp9/encoder/vp9_tokenize.h"
35
36
#include "vp9/encoder/vp9_write_bit_buffer.h"

John Koleszar's avatar
John Koleszar committed
37
38
39
40
#ifdef ENTROPY_STATS
extern unsigned int active_section;
#endif

41
42
43
44
45
46
47
48
49
50
51
52
53
54
// Token encoding tables for tree-coded symbols; filled in once at startup
// by vp9_entropy_mode_init() and read by the write_* helpers below.
static struct vp9_token intra_mode_encodings[INTRA_MODES];
static struct vp9_token switchable_interp_encodings[SWITCHABLE_FILTERS];
static struct vp9_token partition_encodings[PARTITION_TYPES];
static struct vp9_token inter_mode_encodings[INTER_MODES];

// Builds the token encoding tables from their probability trees.
// Must be called once before any frame is packed.
// Note: declared with (void) — an empty parameter list () is an obsolescent
// unprototyped declaration in C and defeats argument checking.
void vp9_entropy_mode_init(void) {
  vp9_tokens_from_tree(intra_mode_encodings, vp9_intra_mode_tree);
  vp9_tokens_from_tree(switchable_interp_encodings, vp9_switchable_interp_tree);
  vp9_tokens_from_tree(partition_encodings, vp9_partition_tree);
  vp9_tokens_from_tree(inter_mode_encodings, vp9_inter_mode_tree);
}

// Writes an intra prediction mode symbol with the supplied probability set.
static void write_intra_mode(vp9_writer *w, MB_PREDICTION_MODE mode,
                             const vp9_prob *probs) {
  const struct vp9_token *const token = &intra_mode_encodings[mode];
  vp9_write_token(w, vp9_intra_mode_tree, probs, token);
}

// Writes an inter prediction mode symbol; `mode` must be one of the
// NEARESTMV..NEWMV inter modes.
static void write_inter_mode(vp9_writer *w, MB_PREDICTION_MODE mode,
                             const vp9_prob *probs) {
  const int offset = INTER_OFFSET(mode);

  assert(is_inter_mode(mode));
  vp9_write_token(w, vp9_inter_mode_tree, probs, inter_mode_encodings + offset);
}

65
66
// Writes a non-negative value known to lie in [0, max] using the minimal
// fixed number of bits required to represent `max`.
static void encode_unsigned_max(struct vp9_write_bit_buffer *wb,
                                int data, int max) {
  // Out-of-range data would be silently truncated by the literal write and
  // corrupt the header, so catch it in debug builds.
  assert(data >= 0 && data <= max);
  vp9_wb_write_literal(wb, data, get_unsigned_bits(max));
}

70
71
72
73
74
75
// Conditionally signals updates for each internal-node probability of `tree`,
// deriving the per-branch counts from the observed symbol counts.
static void prob_diff_update(const vp9_tree_index *tree,
                             vp9_prob probs[/*n - 1*/],
                             const unsigned int counts[/*n - 1*/],
                             int n, vp9_writer *w) {
  unsigned int branch_ct[32][2];
  int node;

  // The fixed-size scratch table above limits us to 32 symbols.
  assert(n <= 32);

  vp9_tree_probs_from_distribution(tree, branch_ct, counts);
  for (node = 0; node < n - 1; ++node) {
    vp9_cond_prob_diff_update(w, &probs[node], branch_ct[node]);
  }
}

85
// Signals the transform size chosen for a block when the frame uses
// TX_MODE_SELECT.  The size is coded progressively: the first bit separates
// TX_4X4 from the rest, and further bits are only emitted while the block is
// large enough (max_tx_size) to permit bigger transforms.
static void write_selected_tx_size(const VP9_COMP *cpi,
                                   TX_SIZE tx_size, BLOCK_SIZE bsize,
                                   vp9_writer *w) {
  // Largest transform the block size allows; bounds how many bits we send.
  const TX_SIZE max_tx_size = max_txsize_lookup[bsize];
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  const vp9_prob *const tx_probs = get_tx_probs2(max_tx_size, xd,
                                                 &cpi->common.fc.tx_probs);
  vp9_write(w, tx_size != TX_4X4, tx_probs[0]);
  if (tx_size != TX_4X4 && max_tx_size >= TX_16X16) {
    vp9_write(w, tx_size != TX_8X8, tx_probs[1]);
    if (tx_size != TX_8X8 && max_tx_size >= TX_32X32)
      vp9_write(w, tx_size != TX_16X16, tx_probs[2]);
  }
}

100
101
// Codes the skip flag for a block (unless segment-level skip makes it
// implicit) and returns the skip value that was used.
static int write_skip(const VP9_COMP *cpi, int segment_id, MODE_INFO *m,
                      vp9_writer *w) {
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  int skip;

  // Segment-level skip implies skip == 1; nothing is written to the stream.
  if (vp9_segfeature_active(&cpi->common.seg, segment_id, SEG_LVL_SKIP))
    return 1;

  skip = m->mbmi.skip;
  vp9_write(w, skip, vp9_get_skip_prob(&cpi->common, xd));
  return skip;
}

112
// Signals conditional updates for the per-context skip-flag probabilities.
static void update_skip_probs(VP9_COMMON *cm, vp9_writer *w) {
  int i;

  for (i = 0; i < SKIP_CONTEXTS; ++i) {
    vp9_cond_prob_diff_update(w, &cm->fc.skip_probs[i], cm->counts.skip[i]);
  }
}

119
// Signals conditional updates for the switchable interpolation filter
// probabilities, one tree per prediction context.
static void update_switchable_interp_probs(VP9_COMMON *cm, vp9_writer *w) {
  int ctx;

  for (ctx = 0; ctx < SWITCHABLE_FILTER_CONTEXTS; ++ctx) {
    prob_diff_update(vp9_switchable_interp_tree,
                     cm->fc.switchable_interp_prob[ctx],
                     cm->counts.switchable_interp[ctx], SWITCHABLE_FILTERS, w);
  }
}

127
128
// Packs the coefficient tokens for one block into the bool coder, consuming
// tokens from *tp up to (but not including) the EOSB marker or `stop`.
// On return *tp points just past the tokens that were written.
static void pack_mb_tokens(vp9_writer *w,
                           TOKENEXTRA **tp, const TOKENEXTRA *stop) {
  TOKENEXTRA *p = *tp;

  while (p < stop && p->token != EOSB_TOKEN) {
    const int t = p->token;
    const struct vp9_token *const a = &vp9_coef_encodings[t];
    const vp9_extra_bit *const b = &vp9_extra_bits[t];
    int i = 0;          // current position in the coefficient tree
    int v = a->value;   // tree codeword for this token
    int n = a->len;     // number of bits in the codeword

    /* skip one or two nodes */
    // Tokens that cannot be EOB (or ZERO) start deeper in the tree, so the
    // leading codeword bits are dropped and the start index advanced.
    if (p->skip_eob_node) {
      n -= p->skip_eob_node;
      i = 2 * p->skip_eob_node;
    }

    // TODO(jbb): expanding this can lead to big gains.  It allows
    // much better branch prediction and would enable us to avoid numerous
    // lookups and compares.

    // If we have a token that's in the constrained set, the coefficient tree
    // is split into two treed writes.  The first treed write takes care of the
    // unconstrained nodes.  The second treed write takes care of the
    // constrained nodes.
    if (t >= TWO_TOKEN && t < EOB_TOKEN) {
      // High-order bits go through the adaptive (unconstrained) part of the
      // tree; the remainder uses the fixed Pareto model selected by the
      // pivot-node probability.
      int len = UNCONSTRAINED_NODES - p->skip_eob_node;
      int bits = v >> (n - len);
      vp9_write_tree(w, vp9_coef_tree, p->context_tree, bits, len, i);
      vp9_write_tree(w, vp9_coef_con_tree,
                     vp9_pareto8_full[p->context_tree[PIVOT_NODE] - 1],
                     v, n - len, 0);
    } else {
      vp9_write_tree(w, vp9_coef_tree, p->context_tree, v, n, i);
    }

    // Category tokens carry extra magnitude bits plus a sign bit.
    if (b->base_val) {
      const int e = p->extra, l = b->len;

      if (l) {
        // Walk the extra-bit tree MSB-first; these locals intentionally
        // shadow the outer v/n/i for the inner codeword.
        const unsigned char *pb = b->prob;
        int v = e >> 1;
        int n = l;              /* number of bits in v, assumed nonzero */
        int i = 0;

        do {
          const int bb = (v >> --n) & 1;
          vp9_write(w, bb, pb[i >> 1]);
          i = b->tree[i + bb];
        } while (n);
      }

      // Sign bit is stored in the LSB of `extra`.
      vp9_write_bit(w, e & 1);
    }

    ++p;
  }

  // Step over the EOSB marker (if that is what stopped the loop) so the
  // caller resumes at the next block's tokens.
  *tp = p + (p->token == EOSB_TOKEN);
}

188
// Explicitly codes a block's segment id, but only when the segment map is
// both enabled and being updated this frame.
static void write_segment_id(vp9_writer *w, const struct segmentation *seg,
                             int segment_id) {
  if (!seg->enabled || !seg->update_map)
    return;

  vp9_write_tree(w, vp9_segment_tree, seg->tree_probs, segment_id, 3, 0);
}

Paul Wilkins's avatar
Paul Wilkins committed
194
// This function encodes the reference frame
// selection for the current block: single vs. compound prediction, then
// which reference(s) are used.  The write order here defines bitstream
// syntax and must not be changed.
static void write_ref_frames(const VP9_COMP *cpi, vp9_writer *w) {
  const VP9_COMMON *const cm = &cpi->common;
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  const MB_MODE_INFO *const mbmi = &xd->mi_8x8[0]->mbmi;
  const int is_compound = has_second_ref(mbmi);
  const int segment_id = mbmi->segment_id;

  // If segment level coding of this signal is disabled...
  // or the segment allows multiple reference frame options
  if (vp9_segfeature_active(&cm->seg, segment_id, SEG_LVL_REF_FRAME)) {
    // The segment dictates the reference frame, so nothing is coded; just
    // sanity-check that the chosen mode agrees with the segment data.
    assert(!is_compound);
    assert(mbmi->ref_frame[0] ==
               vp9_get_segdata(&cm->seg, segment_id, SEG_LVL_REF_FRAME));
  } else {
    // does the feature use compound prediction or not
    // (if not specified at the frame/segment level)
    if (cm->reference_mode == REFERENCE_MODE_SELECT) {
      vp9_write(w, is_compound, vp9_get_reference_mode_prob(cm, xd));
    } else {
      assert(!is_compound == (cm->reference_mode == SINGLE_REFERENCE));
    }

    if (is_compound) {
      // Compound: one bit picks the variable reference (GOLDEN vs LAST).
      vp9_write(w, mbmi->ref_frame[0] == GOLDEN_FRAME,
                vp9_get_pred_prob_comp_ref_p(cm, xd));
    } else {
      // Single: first bit separates LAST from {GOLDEN, ALTREF}; a second
      // bit (only when needed) separates GOLDEN from ALTREF.
      const int bit0 = mbmi->ref_frame[0] != LAST_FRAME;
      vp9_write(w, bit0, vp9_get_pred_prob_single_ref_p1(cm, xd));
      if (bit0) {
        const int bit1 = mbmi->ref_frame[0] != GOLDEN_FRAME;
        vp9_write(w, bit1, vp9_get_pred_prob_single_ref_p2(cm, xd));
      }
    }
  }
}
John Koleszar's avatar
John Koleszar committed
230

231
// Packs the mode/motion information for one block of a non-key frame:
// segment id, skip flag, intra/inter flag, transform size, then either the
// intra modes or the reference frames, inter modes, filter and motion
// vectors.  The write order defines bitstream syntax and must not change.
static void pack_inter_mode_mvs(VP9_COMP *cpi, MODE_INFO *m, vp9_writer *bc) {
  VP9_COMMON *const cm = &cpi->common;
  const nmv_context *nmvc = &cm->fc.nmvc;
  MACROBLOCK *const x = &cpi->mb;
  MACROBLOCKD *const xd = &x->e_mbd;
  const struct segmentation *const seg = &cm->seg;
  const MB_MODE_INFO *const mi = &m->mbmi;
  const MV_REFERENCE_FRAME ref0 = mi->ref_frame[0];
  const MV_REFERENCE_FRAME ref1 = mi->ref_frame[1];
  const MB_PREDICTION_MODE mode = mi->mode;
  const int segment_id = mi->segment_id;
  const BLOCK_SIZE bsize = mi->sb_type;
  const int allow_hp = cm->allow_high_precision_mv;
  int skip;

#ifdef ENTROPY_STATS
  active_section = 9;
#endif

  // Segment id: either predicted temporally (one flag, with the explicit id
  // only on misprediction) or coded explicitly.
  if (seg->update_map) {
    if (seg->temporal_update) {
      const int pred_flag = mi->seg_id_predicted;
      vp9_prob pred_prob = vp9_get_pred_prob_seg_id(seg, xd);
      vp9_write(bc, pred_flag, pred_prob);
      if (!pred_flag)
        write_segment_id(bc, seg, segment_id);
    } else {
      write_segment_id(bc, seg, segment_id);
    }
  }

  skip = write_skip(cpi, segment_id, m, bc);

  // Intra/inter flag, unless the segment pins the reference frame.
  if (!vp9_segfeature_active(seg, segment_id, SEG_LVL_REF_FRAME))
    vp9_write(bc, ref0 != INTRA_FRAME, vp9_get_intra_inter_prob(cm, xd));

  // Transform size is only coded when selectable and not implied by a
  // skipped inter block.
  if (bsize >= BLOCK_8X8 && cm->tx_mode == TX_MODE_SELECT &&
      !(ref0 != INTRA_FRAME &&
        (skip || vp9_segfeature_active(seg, segment_id, SEG_LVL_SKIP)))) {
    write_selected_tx_size(cpi, mi->tx_size, bsize, bc);
  }

  if (ref0 == INTRA_FRAME) {
#ifdef ENTROPY_STATS
    active_section = 6;
#endif

    // Intra: one luma mode for >=8x8 blocks, otherwise one per 4x4 sub-block.
    if (bsize >= BLOCK_8X8) {
      write_intra_mode(bc, mode, cm->fc.y_mode_prob[size_group_lookup[bsize]]);
    } else {
      int idx, idy;
      const int num_4x4_blocks_wide = num_4x4_blocks_wide_lookup[bsize];
      const int num_4x4_blocks_high = num_4x4_blocks_high_lookup[bsize];
      for (idy = 0; idy < 2; idy += num_4x4_blocks_high) {
        for (idx = 0; idx < 2; idx += num_4x4_blocks_wide) {
          const MB_PREDICTION_MODE bm = m->bmi[idy * 2 + idx].as_mode;
          write_intra_mode(bc, bm, cm->fc.y_mode_prob[0]);
        }
      }
    }
    write_intra_mode(bc, mi->uv_mode, cm->fc.uv_mode_prob[mode]);
  } else {
    vp9_prob *mv_ref_p;
    write_ref_frames(cpi, bc);
    mv_ref_p = cm->fc.inter_mode_probs[mi->mode_context[ref0]];

#ifdef ENTROPY_STATS
    active_section = 3;
#endif

    // If segment skip is not enabled code the mode.
    if (!vp9_segfeature_active(seg, segment_id, SEG_LVL_SKIP)) {
      if (bsize >= BLOCK_8X8) {
        write_inter_mode(bc, mode, mv_ref_p);
        ++cm->counts.inter_mode[mi->mode_context[ref0]][INTER_OFFSET(mode)];
      }
    }

    if (cm->interp_filter == SWITCHABLE) {
      const int ctx = vp9_get_pred_context_switchable_interp(xd);
      vp9_write_token(bc, vp9_switchable_interp_tree,
                      cm->fc.switchable_interp_prob[ctx],
                      &switchable_interp_encodings[mi->interp_filter]);
    } else {
      assert(mi->interp_filter == cm->interp_filter);
    }

    if (bsize < BLOCK_8X8) {
      // Sub-8x8: a mode (and MV for NEWMV) per 4x4 sub-block.
      const int num_4x4_blocks_wide = num_4x4_blocks_wide_lookup[bsize];
      const int num_4x4_blocks_high = num_4x4_blocks_high_lookup[bsize];
      int idx, idy;
      for (idy = 0; idy < 2; idy += num_4x4_blocks_high) {
        for (idx = 0; idx < 2; idx += num_4x4_blocks_wide) {
          const int j = idy * 2 + idx;
          const MB_PREDICTION_MODE b_mode = m->bmi[j].as_mode;
          write_inter_mode(bc, b_mode, mv_ref_p);
          ++cm->counts.inter_mode[mi->mode_context[ref0]][INTER_OFFSET(b_mode)];
          if (b_mode == NEWMV) {
#ifdef ENTROPY_STATS
            active_section = 11;
#endif
            vp9_encode_mv(cpi, bc, &m->bmi[j].as_mv[0].as_mv,
                          &mi->ref_mvs[ref0][0].as_mv, nmvc, allow_hp);

            if (has_second_ref(mi))
              vp9_encode_mv(cpi, bc, &m->bmi[j].as_mv[1].as_mv,
                            &mi->ref_mvs[ref1][0].as_mv, nmvc, allow_hp);
          }
        }
      }
    } else if (mode == NEWMV) {
#ifdef ENTROPY_STATS
      active_section = 5;
#endif
      // Whole-block NEWMV: code the MV(s) relative to the best reference MV.
      vp9_encode_mv(cpi, bc, &mi->mv[0].as_mv,
                    &mi->ref_mvs[ref0][0].as_mv, nmvc, allow_hp);

      if (has_second_ref(mi))
        vp9_encode_mv(cpi, bc, &mi->mv[1].as_mv,
                      &mi->ref_mvs[ref1][0].as_mv, nmvc, allow_hp);
    }
  }
}
354

355
// Packs the mode information for one block of a key (intra-only) frame:
// segment id, skip flag, transform size, then the intra modes coded with
// the fixed key-frame probabilities conditioned on the above/left modes.
static void write_mb_modes_kf(const VP9_COMP *cpi, MODE_INFO **mi_8x8,
                              vp9_writer *bc) {
  const VP9_COMMON *const cm = &cpi->common;
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  const struct segmentation *const seg = &cm->seg;
  MODE_INFO *m = mi_8x8[0];
  const int ym = m->mbmi.mode;
  const int segment_id = m->mbmi.segment_id;
  // Neighbouring mode info used as coding context (left may be absent at a
  // tile/frame edge).
  MODE_INFO *above_mi = mi_8x8[-xd->mode_info_stride];
  MODE_INFO *left_mi = xd->left_available ? mi_8x8[-1] : NULL;

  if (seg->update_map)
    write_segment_id(bc, seg, m->mbmi.segment_id);

  write_skip(cpi, segment_id, m, bc);

  if (m->mbmi.sb_type >= BLOCK_8X8 && cm->tx_mode == TX_MODE_SELECT)
    write_selected_tx_size(cpi, m->mbmi.tx_size, m->mbmi.sb_type, bc);

  if (m->mbmi.sb_type >= BLOCK_8X8) {
    // One luma mode for the whole block, contexted on above/left modes.
    const MB_PREDICTION_MODE A = vp9_above_block_mode(m, above_mi, 0);
    const MB_PREDICTION_MODE L = vp9_left_block_mode(m, left_mi, 0);
    write_intra_mode(bc, ym, vp9_kf_y_mode_prob[A][L]);
  } else {
    // Sub-8x8: one luma mode per 4x4 sub-block.
    int idx, idy;
    const int num_4x4_blocks_wide = num_4x4_blocks_wide_lookup[m->mbmi.sb_type];
    const int num_4x4_blocks_high = num_4x4_blocks_high_lookup[m->mbmi.sb_type];
    for (idy = 0; idy < 2; idy += num_4x4_blocks_high) {
      for (idx = 0; idx < 2; idx += num_4x4_blocks_wide) {
        int i = idy * 2 + idx;
        const MB_PREDICTION_MODE A = vp9_above_block_mode(m, above_mi, i);
        const MB_PREDICTION_MODE L = vp9_left_block_mode(m, left_mi, i);
        const int bm = m->bmi[i].as_mode;
        write_intra_mode(bc, bm, vp9_kf_y_mode_prob[A][L]);
      }
    }
  }

  // Chroma mode is conditioned on the (block-level) luma mode.
  write_intra_mode(bc, m->mbmi.uv_mode, vp9_kf_uv_mode_prob[ym]);
}

James Zern's avatar
James Zern committed
396
// Packs one coding block: positions xd at (mi_row, mi_col), writes the mode
// info (key-frame or inter path), then the block's coefficient tokens.
static void write_modes_b(VP9_COMP *cpi, const TileInfo *const tile,
                          vp9_writer *w, TOKENEXTRA **tok, TOKENEXTRA *tok_end,
                          int mi_row, int mi_col) {
  VP9_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  MODE_INFO *m;

  // Point the decoder-side context at this block's mode info.
  xd->mi_8x8 = cm->mi_grid_visible + (mi_row * cm->mode_info_stride + mi_col);
  m = xd->mi_8x8[0];

  // Establish above/left availability and edge clamping for this block.
  set_mi_row_col(xd, tile,
                 mi_row, num_8x8_blocks_high_lookup[m->mbmi.sb_type],
                 mi_col, num_8x8_blocks_wide_lookup[m->mbmi.sb_type],
                 cm->mi_rows, cm->mi_cols);
  if (frame_is_intra_only(cm)) {
    write_mb_modes_kf(cpi, xd->mi_8x8, w);
#ifdef ENTROPY_STATS
    active_section = 8;
#endif
  } else {
    pack_inter_mode_mvs(cpi, m, w);
#ifdef ENTROPY_STATS
    active_section = 1;
#endif
  }

  // Every block must have at least an EOSB token remaining.
  assert(*tok < tok_end);
  pack_mb_tokens(w, tok, tok_end);
}

426
427
428
429
430
431
432
433
434
// Signals the partition type for a block.  When the block extends past the
// frame edge some partition choices are forced, so fewer (or no) bits are
// written for those cases.
static void write_partition(VP9_COMP *cpi, int hbs, int mi_row, int mi_col,
                            PARTITION_TYPE p, BLOCK_SIZE bsize, vp9_writer *w) {
  VP9_COMMON *const cm = &cpi->common;
  const int ctx = partition_plane_context(cpi->above_seg_context,
                                          cpi->left_seg_context,
                                          mi_row, mi_col, bsize);
  const vp9_prob *const probs = get_partition_probs(cm, ctx);
  // Does the bottom/right half of the block lie inside the frame?
  const int has_rows = (mi_row + hbs) < cm->mi_rows;
  const int has_cols = (mi_col + hbs) < cm->mi_cols;

  if (has_rows && has_cols) {
    // Fully inside the frame: all four partition types are possible.
    vp9_write_token(w, vp9_partition_tree, probs, &partition_encodings[p]);
  } else if (has_cols) {
    // Bottom edge: only HORZ or SPLIT remain, one bit decides.
    assert(p == PARTITION_SPLIT || p == PARTITION_HORZ);
    vp9_write(w, p == PARTITION_SPLIT, probs[1]);
  } else if (has_rows) {
    // Right edge: only VERT or SPLIT remain, one bit decides.
    assert(p == PARTITION_SPLIT || p == PARTITION_VERT);
    vp9_write(w, p == PARTITION_SPLIT, probs[2]);
  } else {
    // Corner: SPLIT is forced, nothing to write.
    assert(p == PARTITION_SPLIT);
  }
}

James Zern's avatar
James Zern committed
449
// Recursively packs a superblock: writes the partition choice at this level,
// then either the leaf block(s) or recurses into the four split quadrants.
static void write_modes_sb(VP9_COMP *cpi, const TileInfo *const tile,
                           vp9_writer *w, TOKENEXTRA **tok, TOKENEXTRA *tok_end,
                           int mi_row, int mi_col, BLOCK_SIZE bsize) {
  VP9_COMMON *const cm = &cpi->common;
  const int bsl = b_width_log2(bsize);
  const int bs = (1 << bsl) / 4;  // half the block size, in mi (8x8) units
  PARTITION_TYPE partition;
  BLOCK_SIZE subsize;
  MODE_INFO *m = cm->mi_grid_visible[mi_row * cm->mode_info_stride + mi_col];

  // Blocks entirely outside the visible frame are not coded.
  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols)
    return;

  // The partition is implied by the size actually chosen for this block.
  partition = partition_lookup[bsl][m->mbmi.sb_type];
  write_partition(cpi, bs, mi_row, mi_col, partition, bsize, w);
  subsize = get_subsize(bsize, partition);
  if (subsize < BLOCK_8X8) {
    // Sub-8x8 partitions are coded as a single leaf.
    write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
  } else {
    switch (partition) {
      case PARTITION_NONE:
        write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
        break;
      case PARTITION_HORZ:
        write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
        // The second half is only coded when it lies inside the frame.
        if (mi_row + bs < cm->mi_rows)
          write_modes_b(cpi, tile, w, tok, tok_end, mi_row + bs, mi_col);
        break;
      case PARTITION_VERT:
        write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
        if (mi_col + bs < cm->mi_cols)
          write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + bs);
        break;
      case PARTITION_SPLIT:
        // Recurse into the four quadrants in raster order.
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, subsize);
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col + bs,
                       subsize);
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row + bs, mi_col,
                       subsize);
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row + bs, mi_col + bs,
                       subsize);
        break;
      default:
        assert(0);
    }
  }

  // update partition context
  if (bsize >= BLOCK_8X8 &&
      (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT))
    update_partition_context(cpi->above_seg_context, cpi->left_seg_context,
                             mi_row, mi_col, subsize, bsize);
}

James Zern's avatar
James Zern committed
503
static void write_modes(VP9_COMP *cpi, const TileInfo *const tile,
504
                        vp9_writer *w, TOKENEXTRA **tok, TOKENEXTRA *tok_end) {
505
  int mi_row, mi_col;
506

James Zern's avatar
James Zern committed
507
  for (mi_row = tile->mi_row_start; mi_row < tile->mi_row_end;
508
509
       mi_row += MI_BLOCK_SIZE) {
      vp9_zero(cpi->left_seg_context);
James Zern's avatar
James Zern committed
510
    for (mi_col = tile->mi_col_start; mi_col < tile->mi_col_end;
511
512
         mi_col += MI_BLOCK_SIZE)
      write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, BLOCK_64X64);
John Koleszar's avatar
John Koleszar committed
513
  }
John Koleszar's avatar
John Koleszar committed
514
}
515

516
517
518
// Converts the raw per-frame coefficient token counts for one transform size
// into per-branch counts and candidate probabilities, which the coef-prob
// update pass then compares against the current frame context.
static void build_tree_distribution(VP9_COMP *cpi, TX_SIZE tx_size) {
  vp9_coeff_probs_model *coef_probs = cpi->frame_coef_probs[tx_size];
  vp9_coeff_count *coef_counts = cpi->coef_counts[tx_size];
  unsigned int (*eob_branch_ct)[REF_TYPES][COEF_BANDS][COEFF_CONTEXTS] =
      cpi->common.counts.eob_branch[tx_size];
  vp9_coeff_stats *coef_branch_ct = cpi->frame_branch_ct[tx_size];
  int i, j, k, l, m;

  // Loop over plane type / ref type / band / context.
  for (i = 0; i < PLANE_TYPES; ++i) {
    for (j = 0; j < REF_TYPES; ++j) {
      for (k = 0; k < COEF_BANDS; ++k) {
        for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
          vp9_tree_probs_from_distribution(vp9_coef_tree,
                                           coef_branch_ct[i][j][k][l],
                                           coef_counts[i][j][k][l]);
          // The EOB branch count is tracked separately; the "not EOB" side
          // of node 0 is whatever remains of the total EOB-branch visits.
          coef_branch_ct[i][j][k][l][0][1] = eob_branch_ct[i][j][k][l] -
                                             coef_branch_ct[i][j][k][l][0][0];
          // Only the unconstrained nodes get per-frame probabilities; the
          // rest follow the fixed Pareto model.
          for (m = 0; m < UNCONSTRAINED_NODES; ++m)
            coef_probs[i][j][k][l][m] = get_binary_prob(
                                            coef_branch_ct[i][j][k][l][m][0],
                                            coef_branch_ct[i][j][k][l][m][1]);
        }
      }
    }
  }
}

543
544
545
546
547
548
// Writes the forward coefficient-probability updates for one transform size.
// Depending on cpi->sf.use_fast_coef_updates this either does a full
// savings-driven search over every node (case 0: a dry run to decide whether
// updating pays at all, then a second pass that writes the updates) or a
// faster scheme that restricts which bands/contexts may be updated (1, 2).
// Fix vs. original: the second case-0 loop re-declared `const vp9_prob upd`,
// shadowing the identical function-scope constant (-Wshadow); the inner
// declaration is removed.
static void update_coef_probs_common(vp9_writer* const bc, VP9_COMP *cpi,
                                     TX_SIZE tx_size) {
  vp9_coeff_probs_model *new_frame_coef_probs = cpi->frame_coef_probs[tx_size];
  vp9_coeff_probs_model *old_frame_coef_probs =
      cpi->common.fc.coef_probs[tx_size];
  vp9_coeff_stats *frame_branch_ct = cpi->frame_branch_ct[tx_size];
  const vp9_prob upd = DIFF_UPDATE_PROB;
  const int entropy_nodes_update = UNCONSTRAINED_NODES;
  int i, j, k, l, t;
  switch (cpi->sf.use_fast_coef_updates) {
    case 0: {
      /* dry run to see if there is any update at all needed */
      int savings = 0;
      int update[2] = {0, 0};
      for (i = 0; i < PLANE_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
              for (t = 0; t < entropy_nodes_update; ++t) {
                vp9_prob newp = new_frame_coef_probs[i][j][k][l][t];
                const vp9_prob oldp = old_frame_coef_probs[i][j][k][l][t];
                int s;
                int u = 0;
                // The pivot node drives the Pareto model, so its savings
                // search considers the whole model, not just one branch.
                if (t == PIVOT_NODE)
                  s = vp9_prob_diff_update_savings_search_model(
                      frame_branch_ct[i][j][k][l][0],
                      old_frame_coef_probs[i][j][k][l], &newp, upd, i, j);
                else
                  s = vp9_prob_diff_update_savings_search(
                      frame_branch_ct[i][j][k][l][t], oldp, &newp, upd);
                if (s > 0 && newp != oldp)
                  u = 1;
                // Every node costs one "update?" flag; profitable updates
                // recoup it from their savings.
                if (u)
                  savings += s - (int)(vp9_cost_zero(upd));
                else
                  savings -= (int)(vp9_cost_zero(upd));
                update[u]++;
              }
            }
          }
        }
      }

      // printf("Update %d %d, savings %d\n", update[0], update[1], savings);
      /* Is coef updated at all */
      if (update[1] == 0 || savings < 0) {
        vp9_write_bit(bc, 0);
        return;
      }
      vp9_write_bit(bc, 1);
      for (i = 0; i < PLANE_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
              // calc probs and branch cts for this frame only
              for (t = 0; t < entropy_nodes_update; ++t) {
                vp9_prob newp = new_frame_coef_probs[i][j][k][l][t];
                vp9_prob *oldp = old_frame_coef_probs[i][j][k][l] + t;
                int s;
                int u = 0;
                if (t == PIVOT_NODE)
                  s = vp9_prob_diff_update_savings_search_model(
                      frame_branch_ct[i][j][k][l][0],
                      old_frame_coef_probs[i][j][k][l], &newp, upd, i, j);
                else
                  s = vp9_prob_diff_update_savings_search(
                      frame_branch_ct[i][j][k][l][t],
                      *oldp, &newp, upd);
                if (s > 0 && newp != *oldp)
                  u = 1;
                vp9_write(bc, u, upd);
                if (u) {
                  /* send/use new probability */
                  vp9_write_prob_diff_update(bc, newp, *oldp);
                  *oldp = newp;
                }
              }
            }
          }
        }
      }
      return;
    }

    case 1:
    case 2: {
      // The fastest mode (2) only considers the lower half of the contexts
      // and bands; mode 1 considers them all but skips the dry run.
      const int prev_coef_contexts_to_update =
          cpi->sf.use_fast_coef_updates == 2 ? COEFF_CONTEXTS >> 1
                                             : COEFF_CONTEXTS;
      const int coef_band_to_update =
          cpi->sf.use_fast_coef_updates == 2 ? COEF_BANDS >> 1
                                             : COEF_BANDS;
      int updates = 0;
      int noupdates_before_first = 0;
      for (i = 0; i < PLANE_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
              // calc probs and branch cts for this frame only
              for (t = 0; t < entropy_nodes_update; ++t) {
                vp9_prob newp = new_frame_coef_probs[i][j][k][l][t];
                vp9_prob *oldp = old_frame_coef_probs[i][j][k][l] + t;
                int s;
                int u = 0;
                if (l >= prev_coef_contexts_to_update ||
                    k >= coef_band_to_update) {
                  u = 0;
                } else {
                  if (t == PIVOT_NODE)
                    s = vp9_prob_diff_update_savings_search_model(
                        frame_branch_ct[i][j][k][l][0],
                        old_frame_coef_probs[i][j][k][l], &newp, upd, i, j);
                  else
                    s = vp9_prob_diff_update_savings_search(
                        frame_branch_ct[i][j][k][l][t],
                        *oldp, &newp, upd);
                  if (s > 0 && newp != *oldp)
                    u = 1;
                }
                updates += u;
                // Until the first update is found nothing is written; the
                // deferred "no update" flags are flushed below.
                if (u == 0 && updates == 0) {
                  noupdates_before_first++;
                  continue;
                }
                if (u == 1 && updates == 1) {
                  int v;
                  // first update
                  vp9_write_bit(bc, 1);
                  for (v = 0; v < noupdates_before_first; ++v)
                    vp9_write(bc, 0, upd);
                }
                vp9_write(bc, u, upd);
                if (u) {
                  /* send/use new probability */
                  vp9_write_prob_diff_update(bc, newp, *oldp);
                  *oldp = newp;
                }
              }
            }
          }
        }
      }
      if (updates == 0) {
        vp9_write_bit(bc, 0);  // no updates
      }
      return;
    }

    default:
      assert(0);
  }
}
John Koleszar's avatar
John Koleszar committed
696

697
// Emits the coefficient-probability updates for the frame.  Statistics are
// gathered for every transform size, but updates are written only up to the
// largest size the current tx_mode can select.
static void update_coef_probs(VP9_COMP *cpi, vp9_writer* w) {
  const TX_MODE tx_mode = cpi->common.tx_mode;
  // Largest transform size usable under tx_mode; sizes above it are never
  // coded, so they need no update in the bitstream.
  const TX_SIZE largest_tx_size = tx_mode_to_biggest_tx_size[tx_mode];
  TX_SIZE t;

  vp9_clear_system_state();

  // Build token-tree branch counts for all sizes ...
  for (t = TX_4X4; t <= TX_32X32; ++t)
    build_tree_distribution(cpi, t);

  // ... then signal probability updates only for the sizes in use.
  for (t = TX_4X4; t <= largest_tx_size; ++t)
    update_coef_probs_common(w, cpi, t);
}
709

710
static void encode_loopfilter(struct loopfilter *lf,
711
                              struct vp9_write_bit_buffer *wb) {
712
713
  int i;

714
  // Encode the loop filter level and type
715
716
  vp9_wb_write_literal(wb, lf->filter_level, 6);
  vp9_wb_write_literal(wb, lf->sharpness_level, 3);
717

718
719
  // Write out loop filter deltas applied at the MB level based on mode or
  // ref frame (if they are enabled).
720
  vp9_wb_write_bit(wb, lf->mode_ref_delta_enabled);
721

722
723
724
  if (lf->mode_ref_delta_enabled) {
    vp9_wb_write_bit(wb, lf->mode_ref_delta_update);
    if (lf->mode_ref_delta_update) {
725
      for (i = 0; i < MAX_REF_LF_DELTAS; i++) {
726
        const int delta = lf->ref_deltas[i];
727
728
729
        const int changed = delta != lf->last_ref_deltas[i];
        vp9_wb_write_bit(wb, changed);
        if (changed) {
730
          lf->last_ref_deltas[i] = delta;
731
732
          vp9_wb_write_literal(wb, abs(delta) & 0x3F, 6);
          vp9_wb_write_bit(wb, delta < 0);
733
734
735
736
        }
      }

      for (i = 0; i < MAX_MODE_LF_DELTAS; i++) {
737
        const int delta = lf->mode_deltas[i];
738
739
740
        const int changed = delta != lf->last_mode_deltas[i];
        vp9_wb_write_bit(wb, changed);
        if (changed) {
741
          lf->last_mode_deltas[i] = delta;
742
743
          vp9_wb_write_literal(wb, abs(delta) & 0x3F, 6);
          vp9_wb_write_bit(wb, delta < 0);
744
745
746
747
748
749
        }
      }
    }
  }
}

750
// Signals one quantizer delta: a presence flag, then — only when nonzero —
// a 4-bit magnitude followed by a sign bit.
static void write_delta_q(struct vp9_write_bit_buffer *wb, int delta_q) {
  if (delta_q == 0) {
    vp9_wb_write_bit(wb, 0);  // Delta absent.
    return;
  }
  vp9_wb_write_bit(wb, 1);    // Delta present.
  vp9_wb_write_literal(wb, abs(delta_q), 4);
  vp9_wb_write_bit(wb, delta_q < 0);
}

760
761
762
763
764
765
// Writes the quantization section of the uncompressed header: the base
// quantizer index, then the three per-plane delta-Q values in bitstream
// order (luma DC, chroma DC, chroma AC).  Write order is normative and
// must not change.
static void encode_quantization(VP9_COMMON *cm,
                                struct vp9_write_bit_buffer *wb) {
  vp9_wb_write_literal(wb, cm->base_qindex, QINDEX_BITS);
  write_delta_q(wb, cm->y_dc_delta_q);
  write_delta_q(wb, cm->uv_dc_delta_q);
  write_delta_q(wb, cm->uv_ac_delta_q);
}


769
static void encode_segmentation(VP9_COMP *cpi,
770
                                struct vp9_write_bit_buffer *wb) {
John Koleszar's avatar
John Koleszar committed
771
  int i, j;
772

773
  struct segmentation *seg = &cpi->common.seg;
774
775
776

  vp9_wb_write_bit(wb, seg->enabled);
  if (!seg->enabled)
777
778
779
    return;

  // Segmentation map
780
781
  vp9_wb_write_bit(wb, seg->update_map);
  if (seg->update_map) {
782
783
784
    // Select the coding strategy (temporal or spatial)
    vp9_choose_segmap_coding_method(cpi);
    // Write out probabilities used to decode unpredicted  macro-block segments
Paul Wilkins's avatar
Paul Wilkins committed
785
    for (i = 0; i < SEG_TREE_PROBS; i++) {
786
      const int prob = seg->tree_probs[i];
787
788
789
790
      const int update = prob != MAX_PROB;
      vp9_wb_write_bit(wb, update);
      if (update)
        vp9_wb_write_literal(wb, prob, 8);
791
792
793
    }

    // Write out the chosen coding method.
794
795
    vp9_wb_write_bit(wb, seg->temporal_update);
    if (seg->temporal_update) {
796
      for (i = 0; i < PREDICTION_PROBS; i++) {
797
        const int prob = seg->pred_probs[i];
798
799
800
801
        const int update = prob != MAX_PROB;
        vp9_wb_write_bit(wb, update);
        if (update)
          vp9_wb_write_literal(wb, prob, 8);
802
803
804
805
806
      }
    }
  }

  // Segmentation data
807
808
809
  vp9_wb_write_bit(wb, seg->update_data);
  if (seg->update_data) {
    vp9_wb_write_bit(wb, seg->abs_delta);
810

Paul Wilkins's avatar
Paul Wilkins committed
811
    for (i = 0; i < MAX_SEGMENTS; i++) {
812
      for (j = 0; j < SEG_LVL_MAX; j++) {
813
        const int active = vp9_segfeature_active(seg, i, j);
814
815
        vp9_wb_write_bit(wb, active);
        if (active) {
816
          const int data = vp9_get_segdata(seg, i, j);
817
          const int data_max = vp9_seg_feature_data_max(j);
818
819

          if (vp9_is_segfeature_signed(j)) {
820
            encode_unsigned_max(wb, abs(data), data_max);
821
            vp9_wb_write_bit(wb, data < 0);
822
          } else {
823
            encode_unsigned_max(wb, data, data_max);
824
825
826
827
828
829
830
          }
        }
      }
    }
  }
}

831

832
static void encode_txfm_probs(VP9_COMMON *cm, vp9_writer *w) {
833
  // Mode
834
835
836
  vp9_write_literal(w, MIN(cm->tx_mode, ALLOW_32X32), 2);
  if (cm->tx_mode >= ALLOW_32X32)
    vp9_write_bit(w, cm->tx_mode == TX_MODE_SELECT);
837
838

  // Probabilities
839
  if (cm->tx_mode == TX_MODE_SELECT) {
840
    int i, j;
841
842
843
    unsigned int ct_8x8p[TX_SIZES - 3][2];
    unsigned int ct_16x16p[TX_SIZES - 2][2];
    unsigned int ct_32x32p[TX_SIZES - 1][2];
844
845


846
    for (i = 0; i < TX_SIZE_CONTEXTS; i++) {
847
      tx_counts_to_branch_counts_8x8(cm->counts.tx.p8x8[i], ct_8x8p);
848
      for (j = 0; j < TX_SIZES - 3; j++)
849
        vp9_cond_prob_diff_update(w, &cm->fc.tx_probs.p8x8[i][j], ct_8x8p[j]);
850
    }
851

852
    for (i = 0; i < TX_SIZE_CONTEXTS; i++) {
853
      tx_counts_to_branch_counts_16x16(cm->counts.tx.p16x16[i], ct_16x16p);
854
      for (j = 0; j < TX_SIZES - 2; j++)