vp9_bitstream.c
/*
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include <assert.h>
#include <stdio.h>
#include <limits.h>

#include "vpx/vpx_encoder.h"
#include "vpx_mem/vpx_mem.h"
#include "vpx_ports/mem_ops.h"

#include "vp9/common/vp9_entropymode.h"
#include "vp9/common/vp9_entropymv.h"
#include "vp9/common/vp9_tile_common.h"
#include "vp9/common/vp9_seg_common.h"
#include "vp9/common/vp9_pred_common.h"
#include "vp9/common/vp9_entropy.h"
#include "vp9/common/vp9_mvref_common.h"
#include "vp9/common/vp9_systemdependent.h"
#include "vp9/common/vp9_pragmas.h"

#include "vp9/encoder/vp9_mcomp.h"
#include "vp9/encoder/vp9_encodemv.h"
#include "vp9/encoder/vp9_bitstream.h"
#include "vp9/encoder/vp9_segmentation.h"
#include "vp9/encoder/vp9_subexp.h"
#include "vp9/encoder/vp9_tokenize.h"
#include "vp9/encoder/vp9_write_bit_buffer.h"

#ifdef ENTROPY_STATS
extern unsigned int active_section;
#endif

static struct vp9_token intra_mode_encodings[INTRA_MODES];
static struct vp9_token switchable_interp_encodings[SWITCHABLE_FILTERS];
static struct vp9_token partition_encodings[PARTITION_TYPES];
static struct vp9_token inter_mode_encodings[INTER_MODES];

void vp9_entropy_mode_init() {
  vp9_tokens_from_tree(intra_mode_encodings, vp9_intra_mode_tree);
  vp9_tokens_from_tree(switchable_interp_encodings, vp9_switchable_interp_tree);
  vp9_tokens_from_tree(partition_encodings, vp9_partition_tree);
  vp9_tokens_from_tree(inter_mode_encodings, vp9_inter_mode_tree);
}

static void write_intra_mode(vp9_writer *w, MB_PREDICTION_MODE mode,
                             const vp9_prob *probs) {
  vp9_write_token(w, vp9_intra_mode_tree, probs, &intra_mode_encodings[mode]);
}

static void write_inter_mode(vp9_writer *w, MB_PREDICTION_MODE mode,
                             const vp9_prob *probs) {
  assert(is_inter_mode(mode));
  vp9_write_token(w, vp9_inter_mode_tree, probs,
                  &inter_mode_encodings[INTER_OFFSET(mode)]);
}

void vp9_encode_unsigned_max(struct vp9_write_bit_buffer *wb,
                             int data, int max) {
  vp9_wb_write_literal(wb, data, get_unsigned_bits(max));
}

static void prob_diff_update(const vp9_tree_index *tree,
                             vp9_prob probs[/*n - 1*/],
                             const unsigned int counts[/*n - 1*/],
                             int n, vp9_writer *w) {
  int i;
  unsigned int branch_ct[32][2];

  // Assuming max number of probabilities <= 32
  assert(n <= 32);

  vp9_tree_probs_from_distribution(tree, branch_ct, counts);
  for (i = 0; i < n - 1; ++i)
    vp9_cond_prob_diff_update(w, &probs[i], branch_ct[i]);
}

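// Writes the transform size choice for a block as up to three binary
// decisions (4x4 vs larger, 8x8 vs larger, 16x16 vs 32x32), each gated by
// the largest transform size allowed for this block size.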
static void write_selected_tx_size(const VP9_COMP *cpi, MODE_INFO *m,
                                   TX_SIZE tx_size, BLOCK_SIZE bsize,
                                   vp9_writer *w) {
  const TX_SIZE max_tx_size = max_txsize_lookup[bsize];
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  const vp9_prob *const tx_probs = get_tx_probs2(max_tx_size, xd,
                                                 &cpi->common.fc.tx_probs);
  vp9_write(w, tx_size != TX_4X4, tx_probs[0]);
  if (tx_size != TX_4X4 && max_tx_size >= TX_16X16) {
    vp9_write(w, tx_size != TX_8X8, tx_probs[1]);
    if (tx_size != TX_8X8 && max_tx_size >= TX_32X32)
      vp9_write(w, tx_size != TX_16X16, tx_probs[2]);
  }
}

static int write_skip(const VP9_COMP *cpi, int segment_id, MODE_INFO *m,
                      vp9_writer *w) {
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  if (vp9_segfeature_active(&cpi->common.seg, segment_id, SEG_LVL_SKIP)) {
    return 1;
  } else {
    const int skip = m->mbmi.skip;
    vp9_write(w, skip, vp9_get_skip_prob(&cpi->common, xd));
    return skip;
  }
}

void vp9_update_skip_probs(VP9_COMMON *cm, vp9_writer *w) {
  int k;

  for (k = 0; k < SKIP_CONTEXTS; ++k)
    vp9_cond_prob_diff_update(w, &cm->fc.skip_probs[k], cm->counts.skip[k]);
}

static void update_switchable_interp_probs(VP9_COMP *cpi, vp9_writer *w) {
  VP9_COMMON *const cm = &cpi->common;
  int j;
  for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j)
    prob_diff_update(vp9_switchable_interp_tree,
                     cm->fc.switchable_interp_prob[j],
                     cm->counts.switchable_interp[j], SWITCHABLE_FILTERS, w);
}

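// Packs the coefficient tokens for one block into the bitstream. Tokens in
// the constrained set are written as two tree walks: the unconstrained nodes
// against the per-context model, then the remaining nodes against the
// Pareto-derived probabilities selected by the pivot node.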
static void pack_mb_tokens(vp9_writer* const w,
                           TOKENEXTRA **tp,
                           const TOKENEXTRA *const stop) {
  TOKENEXTRA *p = *tp;

  while (p < stop && p->token != EOSB_TOKEN) {
    const int t = p->token;
    const struct vp9_token *const a = &vp9_coef_encodings[t];
    const vp9_extra_bit *const b = &vp9_extra_bits[t];
    int i = 0;
    int v = a->value;
    int n = a->len;

    /* skip one or two nodes */
    if (p->skip_eob_node) {
      n -= p->skip_eob_node;
      i = 2 * p->skip_eob_node;
    }

    // TODO(jbb): expanding this can lead to big gains.  It allows
    // much better branch prediction and would enable us to avoid numerous
    // lookups and compares.

    // If we have a token that's in the constrained set, the coefficient tree
    // is split into two treed writes.  The first treed write takes care of the
    // unconstrained nodes.  The second treed write takes care of the
    // constrained nodes.
    if (t >= TWO_TOKEN && t < EOB_TOKEN) {
      int len = UNCONSTRAINED_NODES - p->skip_eob_node;
      int bits = v >> (n - len);
      vp9_write_tree(w, vp9_coef_tree, p->context_tree, bits, len, i);
      vp9_write_tree(w, vp9_coef_con_tree,
                     vp9_pareto8_full[p->context_tree[PIVOT_NODE] - 1],
                     v, n - len, 0);
    } else {
      vp9_write_tree(w, vp9_coef_tree, p->context_tree, v, n, i);
    }

    if (b->base_val) {
      const int e = p->extra, l = b->len;

      if (l) {
        const unsigned char *pb = b->prob;
        int v = e >> 1;
        int n = l;              /* number of bits in v, assumed nonzero */
        int i = 0;

        do {
          const int bb = (v >> --n) & 1;
          vp9_write(w, bb, pb[i >> 1]);
          i = b->tree[i + bb];
        } while (n);
      }

      vp9_write_bit(w, e & 1);
    }

    ++p;
  }

  *tp = p + (p->token == EOSB_TOKEN);
}

static void write_segment_id(vp9_writer *w, const struct segmentation *seg,
                             int segment_id) {
  if (seg->enabled && seg->update_map)
    vp9_write_tree(w, vp9_segment_tree, seg->tree_probs, segment_id, 3, 0);
}

// This function encodes the reference frame
static void encode_ref_frame(VP9_COMP *cpi, vp9_writer *bc) {
  VP9_COMMON *const cm = &cpi->common;
  MACROBLOCK *const x = &cpi->mb;
  MACROBLOCKD *const xd = &x->e_mbd;
  MB_MODE_INFO *mi = &xd->mi_8x8[0]->mbmi;
  const int segment_id = mi->segment_id;
  int seg_ref_active = vp9_segfeature_active(&cm->seg, segment_id,
                                             SEG_LVL_REF_FRAME);
  // If segment level coding of this signal is disabled...
  // or the segment allows multiple reference frame options
  if (!seg_ref_active) {
    // does the feature use compound prediction or not
    // (if not specified at the frame/segment level)
    if (cm->reference_mode == REFERENCE_MODE_SELECT) {
      vp9_write(bc, mi->ref_frame[1] > INTRA_FRAME,
                vp9_get_reference_mode_prob(cm, xd));
    } else {
      assert((mi->ref_frame[1] <= INTRA_FRAME) ==
             (cm->reference_mode == SINGLE_REFERENCE));
    }

    if (mi->ref_frame[1] > INTRA_FRAME) {
      vp9_write(bc, mi->ref_frame[0] == GOLDEN_FRAME,
                vp9_get_pred_prob_comp_ref_p(cm, xd));
    } else {
      vp9_write(bc, mi->ref_frame[0] != LAST_FRAME,
                vp9_get_pred_prob_single_ref_p1(cm, xd));
      if (mi->ref_frame[0] != LAST_FRAME)
        vp9_write(bc, mi->ref_frame[0] != GOLDEN_FRAME,
                  vp9_get_pred_prob_single_ref_p2(cm, xd));
    }
  } else {
    assert(mi->ref_frame[1] <= INTRA_FRAME);
    assert(vp9_get_segdata(&cm->seg, segment_id, SEG_LVL_REF_FRAME) ==
           mi->ref_frame[0]);
  }

  // If using the prediction model we have nothing further to do because
  // the reference frame is fully coded by the segment.
}

static void pack_inter_mode_mvs(VP9_COMP *cpi, MODE_INFO *m, vp9_writer *bc) {
  VP9_COMMON *const cm = &cpi->common;
  const nmv_context *nmvc = &cm->fc.nmvc;
  MACROBLOCK *const x = &cpi->mb;
  MACROBLOCKD *const xd = &x->e_mbd;
  const struct segmentation *const seg = &cm->seg;
  MB_MODE_INFO *const mi = &m->mbmi;
  const MV_REFERENCE_FRAME ref0 = mi->ref_frame[0];
  const MV_REFERENCE_FRAME ref1 = mi->ref_frame[1];
  const MB_PREDICTION_MODE mode = mi->mode;
  const int segment_id = mi->segment_id;
  const BLOCK_SIZE bsize = mi->sb_type;
  const int allow_hp = cm->allow_high_precision_mv;
  int skip;

#ifdef ENTROPY_STATS
  active_section = 9;
#endif

  if (seg->update_map) {
    if (seg->temporal_update) {
      const int pred_flag = mi->seg_id_predicted;
      vp9_prob pred_prob = vp9_get_pred_prob_seg_id(seg, xd);
      vp9_write(bc, pred_flag, pred_prob);
      if (!pred_flag)
        write_segment_id(bc, seg, segment_id);
    } else {
      write_segment_id(bc, seg, segment_id);
    }
  }

  skip = write_skip(cpi, segment_id, m, bc);

  if (!vp9_segfeature_active(seg, segment_id, SEG_LVL_REF_FRAME))
    vp9_write(bc, ref0 != INTRA_FRAME, vp9_get_intra_inter_prob(cm, xd));

  if (bsize >= BLOCK_8X8 && cm->tx_mode == TX_MODE_SELECT &&
      !(ref0 != INTRA_FRAME &&
        (skip || vp9_segfeature_active(seg, segment_id, SEG_LVL_SKIP)))) {
    write_selected_tx_size(cpi, m, mi->tx_size, bsize, bc);
  }

  if (ref0 == INTRA_FRAME) {
#ifdef ENTROPY_STATS
    active_section = 6;
#endif

    if (bsize >= BLOCK_8X8) {
      write_intra_mode(bc, mode, cm->fc.y_mode_prob[size_group_lookup[bsize]]);
    } else {
      int idx, idy;
      const int num_4x4_blocks_wide = num_4x4_blocks_wide_lookup[bsize];
      const int num_4x4_blocks_high = num_4x4_blocks_high_lookup[bsize];
      for (idy = 0; idy < 2; idy += num_4x4_blocks_high) {
        for (idx = 0; idx < 2; idx += num_4x4_blocks_wide) {
          const MB_PREDICTION_MODE bm = m->bmi[idy * 2 + idx].as_mode;
          write_intra_mode(bc, bm, cm->fc.y_mode_prob[0]);
        }
      }
    }
    write_intra_mode(bc, mi->uv_mode, cm->fc.uv_mode_prob[mode]);
  } else {
    vp9_prob *mv_ref_p;
    encode_ref_frame(cpi, bc);
    mv_ref_p = cm->fc.inter_mode_probs[mi->mode_context[ref0]];

#ifdef ENTROPY_STATS
    active_section = 3;
#endif

    // If segment skip is not enabled code the mode.
    if (!vp9_segfeature_active(seg, segment_id, SEG_LVL_SKIP)) {
      if (bsize >= BLOCK_8X8) {
        write_inter_mode(bc, mode, mv_ref_p);
        ++cm->counts.inter_mode[mi->mode_context[ref0]][INTER_OFFSET(mode)];
      }
    }

    if (cm->interp_filter == SWITCHABLE) {
      const int ctx = vp9_get_pred_context_switchable_interp(xd);
      vp9_write_token(bc, vp9_switchable_interp_tree,
                      cm->fc.switchable_interp_prob[ctx],
                      &switchable_interp_encodings[mi->interp_filter]);
    } else {
      assert(mi->interp_filter == cm->interp_filter);
    }

    if (bsize < BLOCK_8X8) {
      const int num_4x4_blocks_wide = num_4x4_blocks_wide_lookup[bsize];
      const int num_4x4_blocks_high = num_4x4_blocks_high_lookup[bsize];
      int idx, idy;
      for (idy = 0; idy < 2; idy += num_4x4_blocks_high) {
        for (idx = 0; idx < 2; idx += num_4x4_blocks_wide) {
          const int j = idy * 2 + idx;
          const MB_PREDICTION_MODE b_mode = m->bmi[j].as_mode;
          write_inter_mode(bc, b_mode, mv_ref_p);
          ++cm->counts.inter_mode[mi->mode_context[ref0]][INTER_OFFSET(b_mode)];
          if (b_mode == NEWMV) {
#ifdef ENTROPY_STATS
            active_section = 11;
#endif
            vp9_encode_mv(cpi, bc, &m->bmi[j].as_mv[0].as_mv,
                          &mi->ref_mvs[ref0][0].as_mv, nmvc, allow_hp);

            if (has_second_ref(mi))
              vp9_encode_mv(cpi, bc, &m->bmi[j].as_mv[1].as_mv,
                            &mi->ref_mvs[ref1][0].as_mv, nmvc, allow_hp);
          }
        }
      }
    } else if (mode == NEWMV) {
#ifdef ENTROPY_STATS
      active_section = 5;
#endif
      vp9_encode_mv(cpi, bc, &mi->mv[0].as_mv,
                    &mi->ref_mvs[ref0][0].as_mv, nmvc, allow_hp);

      if (has_second_ref(mi))
        vp9_encode_mv(cpi, bc, &mi->mv[1].as_mv,
                      &mi->ref_mvs[ref1][0].as_mv, nmvc, allow_hp);
    }
  }
}

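// Writes the mode info for a key-frame (intra-only) block: segment id, skip
// flag, transform size, and the intra prediction modes, with the luma mode
// conditioned on the above and left neighbors.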
static void write_mb_modes_kf(const VP9_COMP *cpi, MODE_INFO **mi_8x8,
                              vp9_writer *bc) {
  const VP9_COMMON *const cm = &cpi->common;
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  const struct segmentation *const seg = &cm->seg;
  MODE_INFO *m = mi_8x8[0];
  const int ym = m->mbmi.mode;
  const int segment_id = m->mbmi.segment_id;
  MODE_INFO *above_mi = mi_8x8[-xd->mode_info_stride];
  MODE_INFO *left_mi = xd->left_available ? mi_8x8[-1] : NULL;

  if (seg->update_map)
    write_segment_id(bc, seg, m->mbmi.segment_id);

  write_skip(cpi, segment_id, m, bc);

  if (m->mbmi.sb_type >= BLOCK_8X8 && cm->tx_mode == TX_MODE_SELECT)
    write_selected_tx_size(cpi, m, m->mbmi.tx_size, m->mbmi.sb_type, bc);

  if (m->mbmi.sb_type >= BLOCK_8X8) {
    const MB_PREDICTION_MODE A = vp9_above_block_mode(m, above_mi, 0);
    const MB_PREDICTION_MODE L = vp9_left_block_mode(m, left_mi, 0);
    write_intra_mode(bc, ym, vp9_kf_y_mode_prob[A][L]);
  } else {
    int idx, idy;
    const int num_4x4_blocks_wide = num_4x4_blocks_wide_lookup[m->mbmi.sb_type];
    const int num_4x4_blocks_high = num_4x4_blocks_high_lookup[m->mbmi.sb_type];
    for (idy = 0; idy < 2; idy += num_4x4_blocks_high) {
      for (idx = 0; idx < 2; idx += num_4x4_blocks_wide) {
        int i = idy * 2 + idx;
        const MB_PREDICTION_MODE A = vp9_above_block_mode(m, above_mi, i);
        const MB_PREDICTION_MODE L = vp9_left_block_mode(m, left_mi, i);
        const int bm = m->bmi[i].as_mode;
        write_intra_mode(bc, bm, vp9_kf_y_mode_prob[A][L]);
      }
    }
  }

  write_intra_mode(bc, m->mbmi.uv_mode, vp9_kf_uv_mode_prob[ym]);
}

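// Writes mode info plus the coefficient tokens for a single block at
// (mi_row, mi_col), choosing key-frame or inter coding based on the frame
// type.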
static void write_modes_b(VP9_COMP *cpi, const TileInfo *const tile,
                          vp9_writer *w, TOKENEXTRA **tok, TOKENEXTRA *tok_end,
                          int mi_row, int mi_col) {
  VP9_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  MODE_INFO *m;

  xd->mi_8x8 = cm->mi_grid_visible + (mi_row * cm->mode_info_stride + mi_col);
  m = xd->mi_8x8[0];

  set_mi_row_col(xd, tile,
                 mi_row, num_8x8_blocks_high_lookup[m->mbmi.sb_type],
                 mi_col, num_8x8_blocks_wide_lookup[m->mbmi.sb_type],
                 cm->mi_rows, cm->mi_cols);
  if (frame_is_intra_only(cm)) {
    write_mb_modes_kf(cpi, xd->mi_8x8, w);
#ifdef ENTROPY_STATS
    active_section = 8;
#endif
  } else {
    pack_inter_mode_mvs(cpi, m, w);
#ifdef ENTROPY_STATS
    active_section = 1;
#endif
  }

  assert(*tok < tok_end);
  pack_mb_tokens(w, tok, tok_end);
}

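// Signals the partition type for a block. When the block extends past the
// right or bottom frame edge, only the choices that remain valid are coded
// (or nothing at all when the split is implied).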
static void write_partition(VP9_COMP *cpi, int hbs, int mi_row, int mi_col,
                            PARTITION_TYPE p, BLOCK_SIZE bsize, vp9_writer *w) {
  VP9_COMMON *const cm = &cpi->common;
  const int ctx = partition_plane_context(cpi->above_seg_context,
                                          cpi->left_seg_context,
                                          mi_row, mi_col, bsize);
  const vp9_prob *const probs = get_partition_probs(cm, ctx);
  const int has_rows = (mi_row + hbs) < cm->mi_rows;
  const int has_cols = (mi_col + hbs) < cm->mi_cols;

  if (has_rows && has_cols) {
    vp9_write_token(w, vp9_partition_tree, probs, &partition_encodings[p]);
  } else if (!has_rows && has_cols) {
    assert(p == PARTITION_SPLIT || p == PARTITION_HORZ);
    vp9_write(w, p == PARTITION_SPLIT, probs[1]);
  } else if (has_rows && !has_cols) {
    assert(p == PARTITION_SPLIT || p == PARTITION_VERT);
    vp9_write(w, p == PARTITION_SPLIT, probs[2]);
  } else {
    assert(p == PARTITION_SPLIT);
  }
}

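// Recursively writes a superblock: the partition type for this block, then
// either the block itself or its sub-blocks, descending on PARTITION_SPLIT
// until 8x8 (or smaller) blocks are reached.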
static void write_modes_sb(VP9_COMP *cpi, const TileInfo *const tile,
                           vp9_writer *w, TOKENEXTRA **tok, TOKENEXTRA *tok_end,
                           int mi_row, int mi_col, BLOCK_SIZE bsize) {
  VP9_COMMON *const cm = &cpi->common;
  const int bsl = b_width_log2(bsize);
  const int bs = (1 << bsl) / 4;
  PARTITION_TYPE partition;
  BLOCK_SIZE subsize;
  MODE_INFO *m = cm->mi_grid_visible[mi_row * cm->mode_info_stride + mi_col];

  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols)
    return;

  partition = partition_lookup[bsl][m->mbmi.sb_type];
  write_partition(cpi, bs, mi_row, mi_col, partition, bsize, w);
  subsize = get_subsize(bsize, partition);
  if (subsize < BLOCK_8X8) {
    write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
  } else {
    switch (partition) {
      case PARTITION_NONE:
        write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
        break;
      case PARTITION_HORZ:
        write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
        if (mi_row + bs < cm->mi_rows)
          write_modes_b(cpi, tile, w, tok, tok_end, mi_row + bs, mi_col);
        break;
      case PARTITION_VERT:
        write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
        if (mi_col + bs < cm->mi_cols)
          write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + bs);
        break;
      case PARTITION_SPLIT:
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, subsize);
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col + bs,
                       subsize);
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row + bs, mi_col,
                       subsize);
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row + bs, mi_col + bs,
                       subsize);
        break;
      default:
        assert(0);
    }
  }

  // update partition context
  if (bsize >= BLOCK_8X8 &&
      (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT))
    update_partition_context(cpi->above_seg_context, cpi->left_seg_context,
                             mi_row, mi_col, subsize, bsize);
}

static void write_modes(VP9_COMP *cpi, const TileInfo *const tile,
                        vp9_writer *w, TOKENEXTRA **tok, TOKENEXTRA *tok_end) {
  int mi_row, mi_col;

  for (mi_row = tile->mi_row_start; mi_row < tile->mi_row_end;
       mi_row += MI_BLOCK_SIZE) {
    vp9_zero(cpi->left_seg_context);
    for (mi_col = tile->mi_col_start; mi_col < tile->mi_col_end;
         mi_col += MI_BLOCK_SIZE)
      write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, BLOCK_64X64);
  }
}

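// Converts the raw coefficient token counts accumulated during encoding into
// per-node branch counts and per-node probabilities for one transform size,
// folding the end-of-block branch counts into the first tree node.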
static void build_tree_distribution(VP9_COMP *cpi, TX_SIZE tx_size) {
  vp9_coeff_probs_model *coef_probs = cpi->frame_coef_probs[tx_size];
  vp9_coeff_count *coef_counts = cpi->coef_counts[tx_size];
  unsigned int (*eob_branch_ct)[REF_TYPES][COEF_BANDS][COEFF_CONTEXTS] =
      cpi->common.counts.eob_branch[tx_size];
  vp9_coeff_stats *coef_branch_ct = cpi->frame_branch_ct[tx_size];
  int i, j, k, l, m;

  for (i = 0; i < PLANE_TYPES; ++i) {
    for (j = 0; j < REF_TYPES; ++j) {
      for (k = 0; k < COEF_BANDS; ++k) {
        for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
          vp9_tree_probs_from_distribution(vp9_coef_tree,
                                           coef_branch_ct[i][j][k][l],
                                           coef_counts[i][j][k][l]);
          coef_branch_ct[i][j][k][l][0][1] = eob_branch_ct[i][j][k][l] -
                                             coef_branch_ct[i][j][k][l][0][0];
          for (m = 0; m < UNCONSTRAINED_NODES; ++m)
            coef_probs[i][j][k][l][m] = get_binary_prob(
                                            coef_branch_ct[i][j][k][l][m][0],
                                            coef_branch_ct[i][j][k][l][m][1]);
        }
      }
    }
  }
}

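// Writes the coefficient probability updates for one transform size. In the
// exhaustive mode (use_fast_coef_updates == 0) a dry run first measures
// whether the bits spent on updates pay for themselves; the fast modes skip
// the dry run and, at the fastest setting, restrict the update search to a
// subset of bands and contexts.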
static void update_coef_probs_common(vp9_writer* const bc, VP9_COMP *cpi,
                                     TX_SIZE tx_size) {
  vp9_coeff_probs_model *new_frame_coef_probs = cpi->frame_coef_probs[tx_size];
  vp9_coeff_probs_model *old_frame_coef_probs =
      cpi->common.fc.coef_probs[tx_size];
  vp9_coeff_stats *frame_branch_ct = cpi->frame_branch_ct[tx_size];
  const vp9_prob upd = DIFF_UPDATE_PROB;
  const int entropy_nodes_update = UNCONSTRAINED_NODES;
  int i, j, k, l, t;
  switch (cpi->sf.use_fast_coef_updates) {
    case 0: {
      /* dry run to see if there is any update at all needed */
      int savings = 0;
      int update[2] = {0, 0};
      for (i = 0; i < PLANE_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
              for (t = 0; t < entropy_nodes_update; ++t) {
                vp9_prob newp = new_frame_coef_probs[i][j][k][l][t];
                const vp9_prob oldp = old_frame_coef_probs[i][j][k][l][t];
                int s;
                int u = 0;
                if (t == PIVOT_NODE)
                  s = vp9_prob_diff_update_savings_search_model(
                      frame_branch_ct[i][j][k][l][0],
                      old_frame_coef_probs[i][j][k][l], &newp, upd, i, j);
                else
                  s = vp9_prob_diff_update_savings_search(
                      frame_branch_ct[i][j][k][l][t], oldp, &newp, upd);
                if (s > 0 && newp != oldp)
                  u = 1;
                if (u)
                  savings += s - (int)(vp9_cost_zero(upd));
                else
                  savings -= (int)(vp9_cost_zero(upd));
                update[u]++;
              }
            }
          }
        }
      }

      // printf("Update %d %d, savings %d\n", update[0], update[1], savings);
      /* Is coef updated at all */
      if (update[1] == 0 || savings < 0) {
        vp9_write_bit(bc, 0);
        return;
      }
      vp9_write_bit(bc, 1);
      for (i = 0; i < PLANE_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
              // calc probs and branch cts for this frame only
              for (t = 0; t < entropy_nodes_update; ++t) {
                vp9_prob newp = new_frame_coef_probs[i][j][k][l][t];
                vp9_prob *oldp = old_frame_coef_probs[i][j][k][l] + t;
                const vp9_prob upd = DIFF_UPDATE_PROB;
                int s;
                int u = 0;
                if (t == PIVOT_NODE)
                  s = vp9_prob_diff_update_savings_search_model(
                      frame_branch_ct[i][j][k][l][0],
                      old_frame_coef_probs[i][j][k][l], &newp, upd, i, j);
                else
                  s = vp9_prob_diff_update_savings_search(
                      frame_branch_ct[i][j][k][l][t],
                      *oldp, &newp, upd);
                if (s > 0 && newp != *oldp)
                  u = 1;
                vp9_write(bc, u, upd);
                if (u) {
                  /* send/use new probability */
                  vp9_write_prob_diff_update(bc, newp, *oldp);
                  *oldp = newp;
                }
              }
            }
          }
        }
      }
      return;
    }

    case 1:
    case 2: {
      const int prev_coef_contexts_to_update =
          cpi->sf.use_fast_coef_updates == 2 ? COEFF_CONTEXTS >> 1
                                             : COEFF_CONTEXTS;
      const int coef_band_to_update =
          cpi->sf.use_fast_coef_updates == 2 ? COEF_BANDS >> 1
                                             : COEF_BANDS;
      int updates = 0;
      int noupdates_before_first = 0;
      for (i = 0; i < PLANE_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
              // calc probs and branch cts for this frame only
              for (t = 0; t < entropy_nodes_update; ++t) {
                vp9_prob newp = new_frame_coef_probs[i][j][k][l][t];
                vp9_prob *oldp = old_frame_coef_probs[i][j][k][l] + t;
                int s;
                int u = 0;
                if (l >= prev_coef_contexts_to_update ||
                    k >= coef_band_to_update) {
                  u = 0;
                } else {
                  if (t == PIVOT_NODE)
                    s = vp9_prob_diff_update_savings_search_model(
                        frame_branch_ct[i][j][k][l][0],
                        old_frame_coef_probs[i][j][k][l], &newp, upd, i, j);
                  else
                    s = vp9_prob_diff_update_savings_search(
                        frame_branch_ct[i][j][k][l][t],
                        *oldp, &newp, upd);
                  if (s > 0 && newp != *oldp)
                    u = 1;
                }
                updates += u;
                if (u == 0 && updates == 0) {
                  noupdates_before_first++;
                  continue;
                }
                if (u == 1 && updates == 1) {
                  int v;
                  // first update
                  vp9_write_bit(bc, 1);
                  for (v = 0; v < noupdates_before_first; ++v)
                    vp9_write(bc, 0, upd);
                }
                vp9_write(bc, u, upd);
                if (u) {
                  /* send/use new probability */
                  vp9_write_prob_diff_update(bc, newp, *oldp);
                  *oldp = newp;
                }
              }
            }
          }
        }
      }
      if (updates == 0) {
        vp9_write_bit(bc, 0);  // no updates
      }
      return;
    }

    default:
      assert(0);
  }
}

static void update_coef_probs(VP9_COMP* cpi, vp9_writer* w) {
  const TX_MODE tx_mode = cpi->common.tx_mode;
  const TX_SIZE max_tx_size = tx_mode_to_biggest_tx_size[tx_mode];
  TX_SIZE tx_size;
  vp9_clear_system_state();

  for (tx_size = TX_4X4; tx_size <= TX_32X32; ++tx_size)
    build_tree_distribution(cpi, tx_size);

  for (tx_size = TX_4X4; tx_size <= max_tx_size; ++tx_size)
    update_coef_probs_common(w, cpi, tx_size);
}

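// Writes the loop filter parameters: filter level, sharpness level, and,
// when delta coding is enabled, the per-reference-frame and per-mode filter
// level deltas.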
static void encode_loopfilter(struct loopfilter *lf,
                              struct vp9_write_bit_buffer *wb) {
  int i;

  // Encode the loop filter level and type
  vp9_wb_write_literal(wb, lf->filter_level, 6);
  vp9_wb_write_literal(wb, lf->sharpness_level, 3);

  // Write out loop filter deltas applied at the MB level based on mode or
  // ref frame (if they are enabled).
  vp9_wb_write_bit(wb, lf->mode_ref_delta_enabled);

  if (lf->mode_ref_delta_enabled) {
    vp9_wb_write_bit(wb, lf->mode_ref_delta_update);
    if (lf->mode_ref_delta_update) {
      for (i = 0; i < MAX_REF_LF_DELTAS; i++) {
        const int delta = lf->ref_deltas[i];
        const int changed = delta != lf->last_ref_deltas[i];
        vp9_wb_write_bit(wb, changed);
        if (changed) {
          lf->last_ref_deltas[i] = delta;
          vp9_wb_write_literal(wb, abs(delta) & 0x3F, 6);
          vp9_wb_write_bit(wb, delta < 0);
        }
      }

      for (i = 0; i < MAX_MODE_LF_DELTAS; i++) {
        const int delta = lf->mode_deltas[i];
        const int changed = delta != lf->last_mode_deltas[i];
        vp9_wb_write_bit(wb, changed);
        if (changed) {
          lf->last_mode_deltas[i] = delta;
          vp9_wb_write_literal(wb, abs(delta) & 0x3F, 6);
          vp9_wb_write_bit(wb, delta < 0);
        }
      }
    }
  }
}

static void write_delta_q(struct vp9_write_bit_buffer *wb, int delta_q) {
  if (delta_q != 0) {
    vp9_wb_write_bit(wb, 1);
    vp9_wb_write_literal(wb, abs(delta_q), 4);
    vp9_wb_write_bit(wb, delta_q < 0);
  } else {
    vp9_wb_write_bit(wb, 0);
  }
}

static void encode_quantization(VP9_COMMON *cm,
                                struct vp9_write_bit_buffer *wb) {
  vp9_wb_write_literal(wb, cm->base_qindex, QINDEX_BITS);
  write_delta_q(wb, cm->y_dc_delta_q);
  write_delta_q(wb, cm->uv_dc_delta_q);
  write_delta_q(wb, cm->uv_ac_delta_q);
}


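// Writes the segmentation syntax: the enable flag, the tree and (optional)
// temporal prediction probabilities used to code the segment map, and the
// per-segment feature data.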
static void encode_segmentation(VP9_COMP *cpi,
                                struct vp9_write_bit_buffer *wb) {
  int i, j;

  struct segmentation *seg = &cpi->common.seg;

  vp9_wb_write_bit(wb, seg->enabled);
  if (!seg->enabled)
    return;

  // Segmentation map
  vp9_wb_write_bit(wb, seg->update_map);
  if (seg->update_map) {
    // Select the coding strategy (temporal or spatial)
    vp9_choose_segmap_coding_method(cpi);
    // Write out probabilities used to decode unpredicted macro-block segments
    for (i = 0; i < SEG_TREE_PROBS; i++) {
      const int prob = seg->tree_probs[i];
      const int update = prob != MAX_PROB;
      vp9_wb_write_bit(wb, update);
      if (update)
        vp9_wb_write_literal(wb, prob, 8);
    }

    // Write out the chosen coding method.
    vp9_wb_write_bit(wb, seg->temporal_update);
    if (seg->temporal_update) {
      for (i = 0; i < PREDICTION_PROBS; i++) {
        const int prob = seg->pred_probs[i];
        const int update = prob != MAX_PROB;
        vp9_wb_write_bit(wb, update);
        if (update)
          vp9_wb_write_literal(wb, prob, 8);
      }
    }
  }

  // Segmentation data
  vp9_wb_write_bit(wb, seg->update_data);
  if (seg->update_data) {
    vp9_wb_write_bit(wb, seg->abs_delta);

    for (i = 0; i < MAX_SEGMENTS; i++) {
      for (j = 0; j < SEG_LVL_MAX; j++) {
        const int active = vp9_segfeature_active(seg, i, j);
        vp9_wb_write_bit(wb, active);
        if (active) {
          const int data = vp9_get_segdata(seg, i, j);
          const int data_max = vp9_seg_feature_data_max(j);

          if (vp9_is_segfeature_signed(j)) {
            vp9_encode_unsigned_max(wb, abs(data), data_max);
            vp9_wb_write_bit(wb, data < 0);
          } else {
            vp9_encode_unsigned_max(wb, data, data_max);
          }
        }
      }
    }
  }
}

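// Writes the frame-level transform mode; when the mode is TX_MODE_SELECT,
// the per-context transform size probabilities are also updated from the
// counted statistics.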
static void encode_txfm_probs(VP9_COMP *cpi, vp9_writer *w) {
  VP9_COMMON *const cm = &cpi->common;

  // Mode
  vp9_write_literal(w, MIN(cm->tx_mode, ALLOW_32X32), 2);
  if (cm->tx_mode >= ALLOW_32X32)
    vp9_write_bit(w, cm->tx_mode == TX_MODE_SELECT);

  // Probabilities
  if (cm->tx_mode == TX_MODE_SELECT) {
    int i, j;
    unsigned int ct_8x8p[TX_SIZES - 3][2];
    unsigned int ct_16x16p[TX_SIZES - 2][2];
    unsigned int ct_32x32p[TX_SIZES - 1][2];

    for (i = 0; i < TX_SIZE_CONTEXTS; i++) {
      tx_counts_to_branch_counts_8x8(cm->counts.tx.p8x8[