/*
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

11
12
13
#include <assert.h>
#include <stdio.h>
#include <limits.h>
John Koleszar's avatar
John Koleszar committed
14

15
16
17
#include "vpx/vpx_encoder.h"
#include "vpx_mem/vpx_mem.h"

18
#include "vp9/common/vp9_entropymode.h"
19
#include "vp9/common/vp9_entropymv.h"
20
#include "vp9/common/vp9_tile_common.h"
21
22
23
24
#include "vp9/common/vp9_seg_common.h"
#include "vp9/common/vp9_pred_common.h"
#include "vp9/common/vp9_entropy.h"
#include "vp9/common/vp9_mvref_common.h"
25
26
27
28
29
30
31
#include "vp9/common/vp9_systemdependent.h"
#include "vp9/common/vp9_pragmas.h"

#include "vp9/encoder/vp9_mcomp.h"
#include "vp9/encoder/vp9_encodemv.h"
#include "vp9/encoder/vp9_bitstream.h"
#include "vp9/encoder/vp9_segmentation.h"
32
#include "vp9/encoder/vp9_subexp.h"
33
#include "vp9/encoder/vp9_tokenize.h"
34
35
#include "vp9/encoder/vp9_write_bit_buffer.h"

Paul Wilkins's avatar
Paul Wilkins committed
36

John Koleszar's avatar
John Koleszar committed
37
38
39
40
41
#if defined(SECTIONBITS_OUTPUT)
unsigned __int64 Sectionbits[500];
#endif

#ifdef ENTROPY_STATS
42
43
44
int intra_mode_stats[INTRA_MODES]
                    [INTRA_MODES]
                    [INTRA_MODES];
45
vp9_coeff_stats tree_update_hist[TX_SIZES][PLANE_TYPES];
46

John Koleszar's avatar
John Koleszar committed
47
48
49
extern unsigned int active_section;
#endif

50
51
52
53
54
55
56
57
58
59
60
61
62
63
// Token encodings (bit pattern + length) for each symbol of the shared
// entropy trees below.  They are derived once at startup by
// vp9_entropy_mode_init() and read by the write_* helpers in this file.
static struct vp9_token intra_mode_encodings[INTRA_MODES];
static struct vp9_token switchable_interp_encodings[SWITCHABLE_FILTERS];
static struct vp9_token partition_encodings[PARTITION_TYPES];
static struct vp9_token inter_mode_encodings[INTER_MODES];

// Precompute the token tables above from their trees.  Must run once before
// any mode/partition/filter symbol is written.
void vp9_entropy_mode_init() {
  vp9_tokens_from_tree(intra_mode_encodings, vp9_intra_mode_tree);
  vp9_tokens_from_tree(switchable_interp_encodings, vp9_switchable_interp_tree);
  vp9_tokens_from_tree(partition_encodings, vp9_partition_tree);
  vp9_tokens_from_tree(inter_mode_encodings, vp9_inter_mode_tree);
}

// Emit an intra prediction mode symbol using the supplied probability set.
static void write_intra_mode(vp9_writer *w, MB_PREDICTION_MODE mode,
                             const vp9_prob *probs) {
  const struct vp9_token *const token = &intra_mode_encodings[mode];
  vp9_write_token(w, vp9_intra_mode_tree, probs, token);
}

// Emit an inter prediction mode symbol (NEARESTMV..NEWMV) using the supplied
// probability set.  Intra modes are not legal here.
static void write_inter_mode(vp9_writer *w, MB_PREDICTION_MODE mode,
                             const vp9_prob *probs) {
  const int offset = INTER_OFFSET(mode);
  assert(is_inter_mode(mode));
  vp9_write_token(w, vp9_inter_mode_tree, probs, &inter_mode_encodings[offset]);
}

74
75
76
77
78
// Store a 32-bit value at p in big-endian byte order.
// The value is treated as a raw bit pattern: converting through uint32_t
// avoids the implementation-defined result of right-shifting a negative
// signed int (C11 6.5.7).  The stored bytes are unchanged for non-negative
// inputs.
static INLINE void write_be32(uint8_t *p, int value) {
  const uint32_t v = (uint32_t)value;
  p[0] = (uint8_t)(v >> 24);
  p[1] = (uint8_t)(v >> 16);
  p[2] = (uint8_t)(v >> 8);
  p[3] = (uint8_t)v;
}

81
82
83
84
85
// Write `data` (expected in [0, max]) using exactly as many bits as are
// needed to represent `max`.
void vp9_encode_unsigned_max(struct vp9_write_bit_buffer *wb,
                             int data, int max) {
  const int nbits = get_unsigned_bits(max);
  vp9_wb_write_literal(wb, data, nbits);
}

86
87
88
89
90
91
// For each of the n-1 internal nodes of `tree`, derive branch counts from
// the frame's symbol counts and conditionally signal a probability update.
static void prob_diff_update(const vp9_tree_index *tree,
                             vp9_prob probs[/*n - 1*/],
                             const unsigned int counts[/*n - 1*/],
                             int n, vp9_writer *w) {
  unsigned int branch_ct[32][2];
  int node;

  // branch_ct[] is sized for at most 32 internal nodes.
  assert(n <= 32);

  vp9_tree_probs_from_distribution(tree, branch_ct, counts);
  for (node = 0; node < n - 1; ++node)
    vp9_cond_prob_diff_update(w, &probs[node], branch_ct[node]);
}

101
102
103
// Signal the transform size selected for this block.  The size is coded as
// a chain of "larger than X" bits, truncated once the maximum transform
// size the block allows (max_tx_size) is reached, so smaller blocks cost
// fewer bits.  NOTE(review): parameter `m` is unused here.
static void write_selected_tx_size(const VP9_COMP *cpi, MODE_INFO *m,
                                   TX_SIZE tx_size, BLOCK_SIZE bsize,
                                   vp9_writer *w) {
  const TX_SIZE max_tx_size = max_txsize_lookup[bsize];
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  // Context-dependent probabilities for the tx-size decisions.
  const vp9_prob *const tx_probs = get_tx_probs2(max_tx_size, xd,
                                                 &cpi->common.fc.tx_probs);
  vp9_write(w, tx_size != TX_4X4, tx_probs[0]);
  if (tx_size != TX_4X4 && max_tx_size >= TX_16X16) {
    vp9_write(w, tx_size != TX_8X8, tx_probs[1]);
    if (tx_size != TX_8X8 && max_tx_size >= TX_32X32)
      vp9_write(w, tx_size != TX_16X16, tx_probs[2]);
  }
}

116
117
118
// Signal the per-block skip-coefficients flag and return its effective
// value.  When the segment forces SKIP the flag is implied and nothing is
// written to the bitstream.
static int write_skip_coeff(const VP9_COMP *cpi, int segment_id, MODE_INFO *m,
                            vp9_writer *w) {
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  int skip;

  // Segment-level SKIP: the decoder infers the flag, so do not code it.
  if (vp9_segfeature_active(&cpi->common.seg, segment_id, SEG_LVL_SKIP))
    return 1;

  skip = m->mbmi.skip_coeff;
  vp9_write(w, skip, vp9_get_skip_prob(&cpi->common, xd));
  return skip;
}

128
// Conditionally signal an update to the skip-flag probability for every
// skip context, based on this frame's counts.
void vp9_update_skip_probs(VP9_COMP *cpi, vp9_writer *w) {
  VP9_COMMON *const cm = &cpi->common;
  int ctx;

  for (ctx = 0; ctx < MBSKIP_CONTEXTS; ++ctx)
    vp9_cond_prob_diff_update(w, &cm->fc.mbskip_probs[ctx],
                              cm->counts.mbskip[ctx]);
}

136
// Conditionally signal updates to the switchable interpolation-filter
// probabilities, one tree per filter context.
static void update_switchable_interp_probs(VP9_COMP *cpi, vp9_writer *w) {
  VP9_COMMON *const cm = &cpi->common;
  int ctx;

  for (ctx = 0; ctx < SWITCHABLE_FILTER_CONTEXTS; ++ctx) {
    prob_diff_update(vp9_switchable_interp_tree,
                     cm->fc.switchable_interp_prob[ctx],
                     cm->counts.switchable_interp[ctx], SWITCHABLE_FILTERS, w);
  }

#ifdef MODE_STATS
  // Only accumulate stats on the real packing pass, not the dry run.
  if (!cpi->dummy_packing)
    update_switchable_interp_stats(cm);
#endif
}

150
// Write the coefficient tokens for one macroblock, advancing *tp past the
// tokens consumed (and past the trailing EOSB_TOKEN, if present).  The exact
// emission order and tree splits here are normative bitstream syntax.
static void pack_mb_tokens(vp9_writer* const w,
                           TOKENEXTRA **tp,
                           const TOKENEXTRA *const stop) {
  TOKENEXTRA *p = *tp;

  while (p < stop && p->token != EOSB_TOKEN) {
    const int t = p->token;
    const struct vp9_token *const a = &vp9_coef_encodings[t];
    const vp9_extra_bit *const b = &vp9_extra_bits[t];
    int i = 0;         // current tree node index
    int v = a->value;  // token bit pattern
    int n = a->len;    // number of bits in the pattern

    /* skip one or two nodes */
    // skip_eob_node drops the EOB (and possibly ZERO) decision(s) that the
    // decoder can infer from context; start deeper in the tree instead.
    if (p->skip_eob_node) {
      n -= p->skip_eob_node;
      i = 2 * p->skip_eob_node;
    }

    // TODO(jbb): expanding this can lead to big gains.  It allows
    // much better branch prediction and would enable us to avoid numerous
    // lookups and compares.

    // If we have a token that's in the constrained set, the coefficient tree
    // is split into two treed writes.  The first treed write takes care of the
    // unconstrained nodes.  The second treed write takes care of the
    // constrained nodes.
    if (t >= TWO_TOKEN && t < EOB_TOKEN) {
      int len = UNCONSTRAINED_NODES - p->skip_eob_node;
      int bits = v >> (n - len);
      vp9_write_tree(w, vp9_coef_tree, p->context_tree, bits, len, i);
      // Constrained part uses Pareto-derived probabilities indexed by the
      // pivot-node probability of this context.
      vp9_write_tree(w, vp9_coef_con_tree,
                     vp9_pareto8_full[p->context_tree[PIVOT_NODE] - 1],
                     v, n - len, 0);
    } else {
      vp9_write_tree(w, vp9_coef_tree, p->context_tree, v, n, i);
    }

    // Tokens with a base value carry extra magnitude bits plus a sign bit.
    if (b->base_val) {
      const int e = p->extra, l = b->len;

      if (l) {
        const unsigned char *pb = b->prob;
        // NOTE: these deliberately shadow the outer v/n/i for the
        // extra-bits sub-tree walk.
        int v = e >> 1;         // magnitude residual (sign stripped)
        int n = l;              /* number of bits in v, assumed nonzero */
        int i = 0;

        do {
          const int bb = (v >> --n) & 1;
          vp9_write(w, bb, pb[i >> 1]);
          i = b->tree[i + bb];
        } while (n);
      }

      // Sign bit, coded with probability 1/2.
      vp9_write_bit(w, e & 1);
    }

    ++p;
  }

  // Skip the EOSB marker so the caller resumes at the next block's tokens.
  // NOTE(review): when the loop exits with p == stop this reads *stop —
  // presumably the token buffer always has a valid sentinel there; verify.
  *tp = p + (p->token == EOSB_TOKEN);
}

212
// Code segment_id with the segment tree, but only when segmentation is
// enabled and the segment map is being updated this frame; otherwise the
// id is not part of the bitstream.
static void write_segment_id(vp9_writer *w, const struct segmentation *seg,
                             int segment_id) {
  if (!seg->enabled || !seg->update_map)
    return;
  vp9_write_tree(w, vp9_segment_tree, seg->tree_probs, segment_id, 3, 0);
}

Paul Wilkins's avatar
Paul Wilkins committed
218
// This function encodes the reference frame(s) of the current block.
// The syntax depends on the frame-level reference mode and on whether the
// segment pins the reference frame; the write order below is normative.
static void encode_ref_frame(VP9_COMP *cpi, vp9_writer *bc) {
  VP9_COMMON *const cm = &cpi->common;
  MACROBLOCK *const x = &cpi->mb;
  MACROBLOCKD *const xd = &x->e_mbd;
  MB_MODE_INFO *mi = &xd->mi_8x8[0]->mbmi;
  const int segment_id = mi->segment_id;
  int seg_ref_active = vp9_segfeature_active(&cm->seg, segment_id,
                                             SEG_LVL_REF_FRAME);
  // If segment level coding of this signal is disabled...
  // or the segment allows multiple reference frame options
  if (!seg_ref_active) {
    // does the feature use compound prediction or not
    // (if not specified at the frame/segment level)
    if (cm->reference_mode == REFERENCE_MODE_SELECT) {
      // One bit: compound (two references) vs single reference.
      vp9_write(bc, mi->ref_frame[1] > INTRA_FRAME,
                vp9_get_reference_mode_prob(cm, xd));
    } else {
      // Frame-level mode fixes single/compound; the block must agree.
      assert((mi->ref_frame[1] <= INTRA_FRAME) ==
             (cm->reference_mode == SINGLE_REFERENCE));
    }

    if (mi->ref_frame[1] > INTRA_FRAME) {
      // Compound prediction: one bit selects the GOLDEN/ALTREF pairing.
      vp9_write(bc, mi->ref_frame[0] == GOLDEN_FRAME,
                vp9_get_pred_prob_comp_ref_p(cm, xd));
    } else {
      // Single prediction: up to two bits select LAST / GOLDEN / ALTREF.
      vp9_write(bc, mi->ref_frame[0] != LAST_FRAME,
                vp9_get_pred_prob_single_ref_p1(cm, xd));
      if (mi->ref_frame[0] != LAST_FRAME)
        vp9_write(bc, mi->ref_frame[0] != GOLDEN_FRAME,
                  vp9_get_pred_prob_single_ref_p2(cm, xd));
    }
  } else {
    // Segment dictates the reference frame: nothing is written, but the
    // block's choice must match the segment data.
    assert(mi->ref_frame[1] <= INTRA_FRAME);
    assert(vp9_get_segdata(&cm->seg, segment_id, SEG_LVL_REF_FRAME) ==
           mi->ref_frame[0]);
  }

  // If using the prediction model we have nothing further to do because
  // the reference frame is fully coded by the segment.
}
John Koleszar's avatar
John Koleszar committed
259

260
// Write the mode/segment/reference/motion-vector syntax for one block of a
// non-key frame.  The order of the writes below mirrors the decoder's parse
// order and must not change.
static void pack_inter_mode_mvs(VP9_COMP *cpi, MODE_INFO *m, vp9_writer *bc) {
  VP9_COMMON *const cm = &cpi->common;
  const nmv_context *nmvc = &cm->fc.nmvc;
  MACROBLOCK *const x = &cpi->mb;
  MACROBLOCKD *const xd = &x->e_mbd;
  struct segmentation *seg = &cm->seg;
  MB_MODE_INFO *const mi = &m->mbmi;
  const MV_REFERENCE_FRAME rf = mi->ref_frame[0];
  const MB_PREDICTION_MODE mode = mi->mode;
  const int segment_id = mi->segment_id;
  int skip_coeff;
  const BLOCK_SIZE bsize = mi->sb_type;
  const int allow_hp = cm->allow_high_precision_mv;

#ifdef ENTROPY_STATS
  active_section = 9;
#endif

  // Segment id: either predicted temporally (one flag, id coded only on a
  // misprediction) or coded explicitly.
  if (seg->update_map) {
    if (seg->temporal_update) {
      const int pred_flag = mi->seg_id_predicted;
      vp9_prob pred_prob = vp9_get_pred_prob_seg_id(seg, xd);
      vp9_write(bc, pred_flag, pred_prob);
      if (!pred_flag)
        write_segment_id(bc, seg, segment_id);
    } else {
      write_segment_id(bc, seg, segment_id);
    }
  }

  skip_coeff = write_skip_coeff(cpi, segment_id, m, bc);

  // Intra/inter flag, unless the segment already fixes the reference frame.
  if (!vp9_segfeature_active(seg, segment_id, SEG_LVL_REF_FRAME))
    vp9_write(bc, rf != INTRA_FRAME, vp9_get_intra_inter_prob(cm, xd));

  // Per-block transform size, only when the frame allows selection and the
  // size is not already implied by an (inter) skip.
  if (bsize >= BLOCK_8X8 && cm->tx_mode == TX_MODE_SELECT &&
      !(rf != INTRA_FRAME &&
        (skip_coeff || vp9_segfeature_active(seg, segment_id, SEG_LVL_SKIP)))) {
    write_selected_tx_size(cpi, m, mi->tx_size, bsize, bc);
  }

  if (rf == INTRA_FRAME) {
#ifdef ENTROPY_STATS
    active_section = 6;
#endif

    if (bsize >= BLOCK_8X8) {
      write_intra_mode(bc, mode, cm->fc.y_mode_prob[size_group_lookup[bsize]]);
    } else {
      // Sub-8x8: one intra mode per 4x4 sub-block of the 8x8 unit.
      int idx, idy;
      const int num_4x4_blocks_wide = num_4x4_blocks_wide_lookup[bsize];
      const int num_4x4_blocks_high = num_4x4_blocks_high_lookup[bsize];
      for (idy = 0; idy < 2; idy += num_4x4_blocks_high) {
        for (idx = 0; idx < 2; idx += num_4x4_blocks_wide) {
          const MB_PREDICTION_MODE bm = m->bmi[idy * 2 + idx].as_mode;
          write_intra_mode(bc, bm, cm->fc.y_mode_prob[0]);
        }
      }
    }
    write_intra_mode(bc, mi->uv_mode, cm->fc.uv_mode_prob[mode]);
  } else {
    vp9_prob *mv_ref_p;
    encode_ref_frame(cpi, bc);
    mv_ref_p = cpi->common.fc.inter_mode_probs[mi->mode_context[rf]];

#ifdef ENTROPY_STATS
    active_section = 3;
#endif

    // If segment skip is not enabled code the mode.
    if (!vp9_segfeature_active(seg, segment_id, SEG_LVL_SKIP)) {
      if (bsize >= BLOCK_8X8) {
        write_inter_mode(bc, mode, mv_ref_p);
        ++cm->counts.inter_mode[mi->mode_context[rf]][INTER_OFFSET(mode)];
      }
    }

    // Interpolation filter: coded per block only in SWITCHABLE mode.
    if (cm->mcomp_filter_type == SWITCHABLE) {
      const int ctx = vp9_get_pred_context_switchable_interp(xd);
      vp9_write_token(bc, vp9_switchable_interp_tree,
                      cm->fc.switchable_interp_prob[ctx],
                      &switchable_interp_encodings[mi->interp_filter]);
    } else {
      assert(mi->interp_filter == cm->mcomp_filter_type);
    }

    if (bsize < BLOCK_8X8) {
      // Sub-8x8: mode (and NEWMV motion vectors) per 4x4 sub-block.
      const int num_4x4_blocks_wide = num_4x4_blocks_wide_lookup[bsize];
      const int num_4x4_blocks_high = num_4x4_blocks_high_lookup[bsize];
      int idx, idy;
      for (idy = 0; idy < 2; idy += num_4x4_blocks_high) {
        for (idx = 0; idx < 2; idx += num_4x4_blocks_wide) {
          const int j = idy * 2 + idx;
          const MB_PREDICTION_MODE blockmode = m->bmi[j].as_mode;
          write_inter_mode(bc, blockmode, mv_ref_p);
          ++cm->counts.inter_mode[mi->mode_context[rf]]
                                 [INTER_OFFSET(blockmode)];

          if (blockmode == NEWMV) {
#ifdef ENTROPY_STATS
            active_section = 11;
#endif
            // MVs are coded as a difference from the best reference MV.
            vp9_encode_mv(cpi, bc, &m->bmi[j].as_mv[0].as_mv,
                          &mi->best_mv[0].as_mv, nmvc, allow_hp);

            if (has_second_ref(mi))
              vp9_encode_mv(cpi, bc, &m->bmi[j].as_mv[1].as_mv,
                            &mi->best_mv[1].as_mv, nmvc, allow_hp);
          }
        }
      }
    } else if (mode == NEWMV) {
#ifdef ENTROPY_STATS
      active_section = 5;
#endif
      vp9_encode_mv(cpi, bc, &mi->mv[0].as_mv,
                    &mi->best_mv[0].as_mv, nmvc, allow_hp);

      if (has_second_ref(mi))
        vp9_encode_mv(cpi, bc, &mi->mv[1].as_mv,
                      &mi->best_mv[1].as_mv, nmvc, allow_hp);
    }
  }
}
384

385
// Write the mode syntax for one block of a key (intra-only) frame: segment
// id, skip flag, transform size, then intra modes coded with probabilities
// conditioned on the above/left neighbour modes.
static void write_mb_modes_kf(const VP9_COMP *cpi, MODE_INFO **mi_8x8,
                              vp9_writer *bc) {
  const VP9_COMMON *const cm = &cpi->common;
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  const struct segmentation *const seg = &cm->seg;
  MODE_INFO *m = mi_8x8[0];
  const int ym = m->mbmi.mode;
  const int segment_id = m->mbmi.segment_id;
  // Neighbours used to contextualize the intra mode probabilities.
  MODE_INFO *above_mi = mi_8x8[-xd->mode_info_stride];
  MODE_INFO *left_mi = xd->left_available ? mi_8x8[-1] : NULL;

  if (seg->update_map)
    write_segment_id(bc, seg, m->mbmi.segment_id);

  write_skip_coeff(cpi, segment_id, m, bc);

  if (m->mbmi.sb_type >= BLOCK_8X8 && cm->tx_mode == TX_MODE_SELECT)
    write_selected_tx_size(cpi, m, m->mbmi.tx_size, m->mbmi.sb_type, bc);

  if (m->mbmi.sb_type >= BLOCK_8X8) {
    const MB_PREDICTION_MODE A = above_block_mode(m, above_mi, 0);
    const MB_PREDICTION_MODE L = left_block_mode(m, left_mi, 0);
    write_intra_mode(bc, ym, vp9_kf_y_mode_prob[A][L]);
  } else {
    // Sub-8x8: one intra mode per 4x4 sub-block, each with its own
    // neighbour context.
    int idx, idy;
    const int num_4x4_blocks_wide = num_4x4_blocks_wide_lookup[m->mbmi.sb_type];
    const int num_4x4_blocks_high = num_4x4_blocks_high_lookup[m->mbmi.sb_type];
    for (idy = 0; idy < 2; idy += num_4x4_blocks_high) {
      for (idx = 0; idx < 2; idx += num_4x4_blocks_wide) {
        int i = idy * 2 + idx;
        const MB_PREDICTION_MODE A = above_block_mode(m, above_mi, i);
        const MB_PREDICTION_MODE L = left_block_mode(m, left_mi, i);
        const int bm = m->bmi[i].as_mode;
#ifdef ENTROPY_STATS
        ++intra_mode_stats[A][L][bm];
#endif
        write_intra_mode(bc, bm, vp9_kf_y_mode_prob[A][L]);
      }
    }
  }

  // Chroma mode is conditioned on the luma mode.
  write_intra_mode(bc, m->mbmi.uv_mode, vp9_kf_uv_mode_prob[ym]);
}

James Zern's avatar
James Zern committed
429
// Write one coding block at (mi_row, mi_col): set up the macroblockd
// context, emit mode info (key-frame or inter syntax), then the block's
// coefficient tokens.
static void write_modes_b(VP9_COMP *cpi, const TileInfo *const tile,
                          vp9_writer *w, TOKENEXTRA **tok, TOKENEXTRA *tok_end,
                          int mi_row, int mi_col) {
  VP9_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  MODE_INFO *m;

  // Point xd at this block's mode info in the visible grid.
  xd->mi_8x8 = cm->mi_grid_visible + (mi_row * cm->mode_info_stride + mi_col);
  m = xd->mi_8x8[0];

  // Establish the block's position/extent so neighbour availability and
  // prediction contexts are computed correctly.
  set_mi_row_col(xd, tile,
                 mi_row, num_8x8_blocks_high_lookup[m->mbmi.sb_type],
                 mi_col, num_8x8_blocks_wide_lookup[m->mbmi.sb_type],
                 cm->mi_rows, cm->mi_cols);
  if (frame_is_intra_only(cm)) {
    write_mb_modes_kf(cpi, xd->mi_8x8, w);
#ifdef ENTROPY_STATS
    active_section = 8;
#endif
  } else {
    pack_inter_mode_mvs(cpi, m, w);
#ifdef ENTROPY_STATS
    active_section = 1;
#endif
  }

  assert(*tok < tok_end);
  pack_mb_tokens(w, tok, tok_end);
}

459
460
461
462
463
464
465
466
467
// Signal the partition type for a block of size `bsize` at (mi_row, mi_col).
// At frame edges some partition types are impossible, so the syntax shrinks
// to a single bit or to nothing at all.
static void write_partition(VP9_COMP *cpi, int hbs, int mi_row, int mi_col,
                            PARTITION_TYPE p, BLOCK_SIZE bsize, vp9_writer *w) {
  VP9_COMMON *const cm = &cpi->common;
  const int ctx = partition_plane_context(cpi->above_seg_context,
                                          cpi->left_seg_context,
                                          mi_row, mi_col, bsize);
  const vp9_prob *const probs = get_partition_probs(cm, ctx);
  const int has_rows = (mi_row + hbs) < cm->mi_rows;
  const int has_cols = (mi_col + hbs) < cm->mi_cols;

  if (has_rows) {
    if (has_cols) {
      // Fully inside the frame: code the full partition type with the tree.
      vp9_write_token(w, vp9_partition_tree, probs, &partition_encodings[p]);
    } else {
      // Right edge: only VERT or SPLIT are possible; one bit disambiguates.
      assert(p == PARTITION_SPLIT || p == PARTITION_VERT);
      vp9_write(w, p == PARTITION_SPLIT, probs[2]);
    }
  } else if (has_cols) {
    // Bottom edge: only HORZ or SPLIT are possible; one bit disambiguates.
    assert(p == PARTITION_SPLIT || p == PARTITION_HORZ);
    vp9_write(w, p == PARTITION_SPLIT, probs[1]);
  } else {
    // Bottom-right corner: SPLIT is implied, nothing is written.
    assert(p == PARTITION_SPLIT);
  }
}

James Zern's avatar
James Zern committed
482
// Recursively write a superblock: emit the partition type, then either the
// leaf block(s) or the four recursive quadrants, and finally update the
// partition context used by later blocks.
static void write_modes_sb(VP9_COMP *cpi, const TileInfo *const tile,
                           vp9_writer *w, TOKENEXTRA **tok, TOKENEXTRA *tok_end,
                           int mi_row, int mi_col, BLOCK_SIZE bsize) {
  VP9_COMMON *const cm = &cpi->common;
  const int bsl = b_width_log2(bsize);
  const int bs = (1 << bsl) / 4;  // half the block size, in mi (8x8) units
  PARTITION_TYPE partition;
  BLOCK_SIZE subsize;
  MODE_INFO *m = cm->mi_grid_visible[mi_row * cm->mode_info_stride + mi_col];

  // Blocks entirely outside the frame are neither coded nor recursed into.
  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols)
    return;

  // The partition is recovered from the stored sub-block size.
  partition = partition_lookup[bsl][m->mbmi.sb_type];
  write_partition(cpi, bs, mi_row, mi_col, partition, bsize, w);
  subsize = get_subsize(bsize, partition);
  if (subsize < BLOCK_8X8) {
    // Sub-8x8 partitions are coded inside a single 8x8 leaf.
    write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
  } else {
    switch (partition) {
      case PARTITION_NONE:
        write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
        break;
      case PARTITION_HORZ:
        write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
        // The second half is only present if it lies inside the frame.
        if (mi_row + bs < cm->mi_rows)
          write_modes_b(cpi, tile, w, tok, tok_end, mi_row + bs, mi_col);
        break;
      case PARTITION_VERT:
        write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
        if (mi_col + bs < cm->mi_cols)
          write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + bs);
        break;
      case PARTITION_SPLIT:
        // Recurse into the four quadrants in raster order.
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, subsize);
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col + bs,
                       subsize);
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row + bs, mi_col,
                       subsize);
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row + bs, mi_col + bs,
                       subsize);
        break;
      default:
        assert(0);
    }
  }

  // update partition context
  // SPLIT recursions update the context at the leaves; everything else
  // (and 8x8, which cannot recurse further) updates it here.
  if (bsize >= BLOCK_8X8 &&
      (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT))
    update_partition_context(cpi->above_seg_context, cpi->left_seg_context,
                             mi_row, mi_col, subsize, bsize);
}

James Zern's avatar
James Zern committed
536
static void write_modes(VP9_COMP *cpi, const TileInfo *const tile,
537
                        vp9_writer *w, TOKENEXTRA **tok, TOKENEXTRA *tok_end) {
538
  int mi_row, mi_col;
539

James Zern's avatar
James Zern committed
540
  for (mi_row = tile->mi_row_start; mi_row < tile->mi_row_end;
541
542
       mi_row += MI_BLOCK_SIZE) {
      vp9_zero(cpi->left_seg_context);
James Zern's avatar
James Zern committed
543
    for (mi_col = tile->mi_col_start; mi_col < tile->mi_col_end;
544
545
         mi_col += MI_BLOCK_SIZE)
      write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, BLOCK_64X64);
John Koleszar's avatar
John Koleszar committed
546
  }
John Koleszar's avatar
John Koleszar committed
547
}
548

549
550
551
// Convert the raw coefficient-token counts for one transform size into
// per-node branch counts and model probabilities, which are later used to
// decide and signal probability updates for this frame.
static void build_tree_distribution(VP9_COMP *cpi, TX_SIZE tx_size) {
  vp9_coeff_probs_model *coef_probs = cpi->frame_coef_probs[tx_size];
  vp9_coeff_count *coef_counts = cpi->coef_counts[tx_size];
  unsigned int (*eob_branch_ct)[REF_TYPES][COEF_BANDS][COEFF_CONTEXTS] =
      cpi->common.counts.eob_branch[tx_size];
  vp9_coeff_stats *coef_branch_ct = cpi->frame_branch_ct[tx_size];
  int i, j, k, l, m;

  for (i = 0; i < PLANE_TYPES; ++i) {
    for (j = 0; j < REF_TYPES; ++j) {
      for (k = 0; k < COEF_BANDS; ++k) {
        for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
          vp9_tree_probs_from_distribution(vp9_coef_tree,
                                           coef_branch_ct[i][j][k][l],
                                           coef_counts[i][j][k][l]);
          // The EOB branch was counted separately: the "not EOB" count at
          // node 0 is whatever remains after the EOB hits.
          coef_branch_ct[i][j][k][l][0][1] = eob_branch_ct[i][j][k][l] -
                                             coef_branch_ct[i][j][k][l][0][0];
          // Only the unconstrained nodes carry explicitly coded probs; the
          // rest are derived from the Pareto model.
          for (m = 0; m < UNCONSTRAINED_NODES; ++m)
            coef_probs[i][j][k][l][m] = get_binary_prob(
                                            coef_branch_ct[i][j][k][l][m][0],
                                            coef_branch_ct[i][j][k][l][m][1]);
#ifdef ENTROPY_STATS
          // Accumulate global token statistics, but only on the real
          // packing pass, not the dry run.
          if (!cpi->dummy_packing) {
            int t;
            for (t = 0; t < ENTROPY_TOKENS; ++t)
              context_counters[tx_size][i][j][k][l][t] +=
                  coef_counts[i][j][k][l][t];
            context_counters[tx_size][i][j][k][l][ENTROPY_TOKENS] +=
                eob_branch_ct[i][j][k][l];
          }
#endif
        }
      }
    }
  }
}

586
587
588
589
590
591
static void update_coef_probs_common(vp9_writer* const bc, VP9_COMP *cpi,
                                     TX_SIZE tx_size) {
  vp9_coeff_probs_model *new_frame_coef_probs = cpi->frame_coef_probs[tx_size];
  vp9_coeff_probs_model *old_frame_coef_probs =
      cpi->common.fc.coef_probs[tx_size];
  vp9_coeff_stats *frame_branch_ct = cpi->frame_branch_ct[tx_size];
592
  const vp9_prob upd = DIFF_UPDATE_PROB;
593
  const int entropy_nodes_update = UNCONSTRAINED_NODES;
594
595
596
597
598
599
  int i, j, k, l, t;
  switch (cpi->sf.use_fast_coef_updates) {
    case 0: {
      /* dry run to see if there is any udpate at all needed */
      int savings = 0;
      int update[2] = {0, 0};
600
      for (i = 0; i < PLANE_TYPES; ++i) {
601
602
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
603
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
              for (t = 0; t < entropy_nodes_update; ++t) {
                vp9_prob newp = new_frame_coef_probs[i][j][k][l][t];
                const vp9_prob oldp = old_frame_coef_probs[i][j][k][l][t];
                int s;
                int u = 0;
                if (t == PIVOT_NODE)
                  s = vp9_prob_diff_update_savings_search_model(
                      frame_branch_ct[i][j][k][l][0],
                      old_frame_coef_probs[i][j][k][l], &newp, upd, i, j);
                else
                  s = vp9_prob_diff_update_savings_search(
                      frame_branch_ct[i][j][k][l][t], oldp, &newp, upd);
                if (s > 0 && newp != oldp)
                  u = 1;
                if (u)
                  savings += s - (int)(vp9_cost_zero(upd));
                else
                  savings -= (int)(vp9_cost_zero(upd));
                update[u]++;
              }
            }
          }
        }
      }
628

629
630
631
632
633
634
635
      // printf("Update %d %d, savings %d\n", update[0], update[1], savings);
      /* Is coef updated at all */
      if (update[1] == 0 || savings < 0) {
        vp9_write_bit(bc, 0);
        return;
      }
      vp9_write_bit(bc, 1);
636
      for (i = 0; i < PLANE_TYPES; ++i) {
637
638
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
639
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
640
641
642
643
              // calc probs and branch cts for this frame only
              for (t = 0; t < entropy_nodes_update; ++t) {
                vp9_prob newp = new_frame_coef_probs[i][j][k][l][t];
                vp9_prob *oldp = old_frame_coef_probs[i][j][k][l] + t;
644
                const vp9_prob upd = DIFF_UPDATE_PROB;
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
                int s;
                int u = 0;
                if (t == PIVOT_NODE)
                  s = vp9_prob_diff_update_savings_search_model(
                      frame_branch_ct[i][j][k][l][0],
                      old_frame_coef_probs[i][j][k][l], &newp, upd, i, j);
                else
                  s = vp9_prob_diff_update_savings_search(
                      frame_branch_ct[i][j][k][l][t],
                      *oldp, &newp, upd);
                if (s > 0 && newp != *oldp)
                  u = 1;
                vp9_write(bc, u, upd);
#ifdef ENTROPY_STATS
                if (!cpi->dummy_packing)
                  ++tree_update_hist[tx_size][i][j][k][l][t][u];
#endif
                if (u) {
                  /* send/use new probability */
                  vp9_write_prob_diff_update(bc, newp, *oldp);
                  *oldp = newp;
                }
              }
            }
669
          }
Daniel Kang's avatar
Daniel Kang committed
670
671
        }
      }
672
      return;
Daniel Kang's avatar
Daniel Kang committed
673
    }
John Koleszar's avatar
John Koleszar committed
674

675
676
677
    case 1:
    case 2: {
      const int prev_coef_contexts_to_update =
678
679
          cpi->sf.use_fast_coef_updates == 2 ? COEFF_CONTEXTS >> 1
                                             : COEFF_CONTEXTS;
680
      const int coef_band_to_update =
681
682
          cpi->sf.use_fast_coef_updates == 2 ? COEF_BANDS >> 1
                                             : COEF_BANDS;
683
684
      int updates = 0;
      int noupdates_before_first = 0;
685
      for (i = 0; i < PLANE_TYPES; ++i) {
686
687
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
688
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
              // calc probs and branch cts for this frame only
              for (t = 0; t < entropy_nodes_update; ++t) {
                vp9_prob newp = new_frame_coef_probs[i][j][k][l][t];
                vp9_prob *oldp = old_frame_coef_probs[i][j][k][l] + t;
                int s;
                int u = 0;
                if (l >= prev_coef_contexts_to_update ||
                    k >= coef_band_to_update) {
                  u = 0;
                } else {
                  if (t == PIVOT_NODE)
                    s = vp9_prob_diff_update_savings_search_model(
                        frame_branch_ct[i][j][k][l][0],
                        old_frame_coef_probs[i][j][k][l], &newp, upd, i, j);
                  else
                    s = vp9_prob_diff_update_savings_search(
                        frame_branch_ct[i][j][k][l][t],
                        *oldp, &newp, upd);
                  if (s > 0 && newp != *oldp)
                    u = 1;
                }
                updates += u;
                if (u == 0 && updates == 0) {
                  noupdates_before_first++;
713
#ifdef ENTROPY_STATS
714
715
                  if (!cpi->dummy_packing)
                    ++tree_update_hist[tx_size][i][j][k][l][t][u];
716
#endif
717
718
719
720
721
722
723
724
725
726
727
728
729
730
731
732
733
734
735
736
                  continue;
                }
                if (u == 1 && updates == 1) {
                  int v;
                  // first update
                  vp9_write_bit(bc, 1);
                  for (v = 0; v < noupdates_before_first; ++v)
                    vp9_write(bc, 0, upd);
                }
                vp9_write(bc, u, upd);
#ifdef ENTROPY_STATS
                if (!cpi->dummy_packing)
                  ++tree_update_hist[tx_size][i][j][k][l][t][u];
#endif
                if (u) {
                  /* send/use new probability */
                  vp9_write_prob_diff_update(bc, newp, *oldp);
                  *oldp = newp;
                }
              }
John Koleszar's avatar
John Koleszar committed
737
            }
Daniel Kang's avatar
Daniel Kang committed
738
739
740
          }
        }
      }
741
742
743
744
      if (updates == 0) {
        vp9_write_bit(bc, 0);  // no updates
      }
      return;
Daniel Kang's avatar
Daniel Kang committed
745
    }
746
747
748

    default:
      assert(0);
John Koleszar's avatar
John Koleszar committed
749
  }
750
}
John Koleszar's avatar
John Koleszar committed
751

752
// Write updated coefficient probabilities into the bitstream. Token
// statistics are gathered for every transform size first; updates are
// then emitted only for the sizes the current tx_mode can select.
static void update_coef_probs(VP9_COMP* cpi, vp9_writer* w) {
  const TX_MODE tx_mode = cpi->common.tx_mode;
  const TX_SIZE max_tx_size = tx_mode_to_biggest_tx_size[tx_mode];
  TX_SIZE size;

  // Make sure no stale MMX/x87 state corrupts the float-free bit packing.
  vp9_clear_system_state();

  // Pass 1: accumulate per-frame branch counts for all transform sizes.
  for (size = TX_4X4; size <= TX_32X32; ++size) {
    build_tree_distribution(cpi, size);
  }

  // Pass 2: signal probability updates, capped at the largest usable size.
  for (size = TX_4X4; size <= max_tx_size; ++size) {
    update_coef_probs_common(w, cpi, size);
  }
}
764

765
static void encode_loopfilter(struct loopfilter *lf,
766
                              struct vp9_write_bit_buffer *wb) {
767
768
  int i;

769
  // Encode the loop filter level and type
770
771
  vp9_wb_write_literal(wb, lf->filter_level, 6);
  vp9_wb_write_literal(wb, lf->sharpness_level, 3);
772

773
774
  // Write out loop filter deltas applied at the MB level based on mode or
  // ref frame (if they are enabled).
775
  vp9_wb_write_bit(wb, lf->mode_ref_delta_enabled);
776

777
778
779
  if (lf->mode_ref_delta_enabled) {
    vp9_wb_write_bit(wb, lf->mode_ref_delta_update);
    if (lf->mode_ref_delta_update) {
780
      for (i = 0; i < MAX_REF_LF_DELTAS; i++) {
781
        const int delta = lf->ref_deltas[i];
782
783
784
        const int changed = delta != lf->last_ref_deltas[i];
        vp9_wb_write_bit(wb, changed);
        if (changed) {
785
          lf->last_ref_deltas[i] = delta;
786
787
          vp9_wb_write_literal(wb, abs(delta) & 0x3F, 6);
          vp9_wb_write_bit(wb, delta < 0);
788
789
790
791
        }
      }

      for (i = 0; i < MAX_MODE_LF_DELTAS; i++) {
792
        const int delta = lf->mode_deltas[i];
793
794
795
        const int changed = delta != lf->last_mode_deltas[i];
        vp9_wb_write_bit(wb, changed);
        if (changed) {
796
          lf->last_mode_deltas[i] = delta;
797
798
          vp9_wb_write_literal(wb, abs(delta) & 0x3F, 6);
          vp9_wb_write_bit(wb, delta < 0);
799
800
801
802
803
804
        }
      }
    }
  }
}

805
// Signal one quantizer delta: a presence bit, followed (when nonzero) by
// a 4-bit magnitude and a sign bit.
static void write_delta_q(struct vp9_write_bit_buffer *wb, int delta_q) {
  const int present = delta_q != 0;

  vp9_wb_write_bit(wb, present);
  if (present) {
    vp9_wb_write_literal(wb, abs(delta_q), 4);
    vp9_wb_write_bit(wb, delta_q < 0);
  }
}

815
816
817
818
819
820
// Serialize the quantization parameters: the base Q index followed by the
// luma-DC, chroma-DC and chroma-AC deltas. The emission order defines the
// bitstream layout and must not change.
static void encode_quantization(VP9_COMMON *cm,
                                struct vp9_write_bit_buffer *wb) {
  vp9_wb_write_literal(wb, cm->base_qindex, QINDEX_BITS);
  write_delta_q(wb, cm->y_dc_delta_q);
  write_delta_q(wb, cm->uv_dc_delta_q);
  write_delta_q(wb, cm->uv_ac_delta_q);
}


824
static void encode_segmentation(VP9_COMP *cpi,
825
                                struct vp9_write_bit_buffer *wb) {
John Koleszar's avatar
John Koleszar committed
826
  int i, j;
827

828
  struct segmentation *seg = &cpi->common.seg;
829
830
831

  vp9_wb_write_bit(wb, seg->enabled);
  if (!seg->enabled)
832
833
834
    return;

  // Segmentation map
835
836
  vp9_wb_write_bit(wb, seg->update_map);
  if (seg->update_map) {
837
838
839
    // Select the coding strategy (temporal or spatial)
    vp9_choose_segmap_coding_method(cpi);
    // Write out probabilities used to decode unpredicted  macro-block segments
Paul Wilkins's avatar
Paul Wilkins committed
840
    for (i = 0; i < SEG_TREE_PROBS; i++) {
841
      const int prob = seg->tree_probs[i];
842
843
844
845
      const int update = prob != MAX_PROB;
      vp9_wb_write_bit(wb, update);
      if (update)
        vp9_wb_write_literal(wb, prob, 8);
846
847
848
    }

    // Write out the chosen coding method.
849
850
    vp9_wb_write_bit(wb, seg->temporal_update);
    if (seg->temporal_update) {
851
      for (i = 0; i < PREDICTION_PROBS; i++) {
852
        const int prob = seg->pred_probs[i];
853
854
855
856
        const int update = prob != MAX_PROB;
        vp9_wb_write_bit(wb, update);
        if (update)
          vp9_wb_write_literal(wb, prob, 8);
857
858
859
860
861
      }
    }
  }

  // Segmentation data