/*
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include <assert.h>
#include <stdio.h>
#include <limits.h>

#include "vpx/vpx_encoder.h"
#include "vpx_mem/vpx_mem.h"

#include "vp9/common/vp9_entropymode.h"
#include "vp9/common/vp9_entropymv.h"
#include "vp9/common/vp9_findnearmv.h"
#include "vp9/common/vp9_tile_common.h"
#include "vp9/common/vp9_seg_common.h"
#include "vp9/common/vp9_pred_common.h"
#include "vp9/common/vp9_entropy.h"
#include "vp9/common/vp9_mvref_common.h"
#include "vp9/common/vp9_treecoder.h"
#include "vp9/common/vp9_systemdependent.h"
#include "vp9/common/vp9_pragmas.h"

#include "vp9/encoder/vp9_mcomp.h"
#include "vp9/encoder/vp9_encodemv.h"
#include "vp9/encoder/vp9_bitstream.h"
#include "vp9/encoder/vp9_segmentation.h"
#include "vp9/encoder/vp9_subexp.h"
#include "vp9/encoder/vp9_tokenize.h"
#include "vp9/encoder/vp9_write_bit_buffer.h"
#if defined(SECTIONBITS_OUTPUT)
unsigned __int64 Sectionbits[500];
#endif

#ifdef ENTROPY_STATS
int intra_mode_stats[INTRA_MODES]
                    [INTRA_MODES]
                    [INTRA_MODES];
vp9_coeff_stats tree_update_hist[TX_SIZES][PLANE_TYPES];

extern unsigned int active_section;
#endif

static struct vp9_token intra_mode_encodings[INTRA_MODES];
static struct vp9_token switchable_interp_encodings[SWITCHABLE_FILTERS];
static struct vp9_token partition_encodings[PARTITION_TYPES];
static struct vp9_token inter_mode_encodings[INTER_MODES];

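// Build the bit encodings for the fixed mode, filter and partition trees
// once, at encoder startup.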
void vp9_entropy_mode_init() {
  vp9_tokens_from_tree(intra_mode_encodings, vp9_intra_mode_tree);
  vp9_tokens_from_tree(switchable_interp_encodings, vp9_switchable_interp_tree);
  vp9_tokens_from_tree(partition_encodings, vp9_partition_tree);
  vp9_tokens_from_tree(inter_mode_encodings, vp9_inter_mode_tree);
}

static void write_intra_mode(vp9_writer *w, MB_PREDICTION_MODE mode,
                             const vp9_prob *probs) {
  vp9_write_token(w, vp9_intra_mode_tree, probs, &intra_mode_encodings[mode]);
}

static void write_inter_mode(vp9_writer *w, MB_PREDICTION_MODE mode,
                             const vp9_prob *probs) {
  assert(is_inter_mode(mode));
  vp9_write_token(w, vp9_inter_mode_tree, probs,
                  &inter_mode_encodings[INTER_OFFSET(mode)]);
}

static INLINE void write_be32(uint8_t *p, int value) {
  p[0] = value >> 24;
  p[1] = value >> 16;
  p[2] = value >> 8;
  p[3] = value;
}

void vp9_encode_unsigned_max(struct vp9_write_bit_buffer *wb,
                             int data, int max) {
  vp9_wb_write_literal(wb, data, get_unsigned_bits(max));
}

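// Writes a conditional differential update for each node probability of the
// given tree, based on the observed branch counts.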
static void prob_diff_update(const vp9_tree_index *tree,
                             vp9_prob probs[/*n - 1*/],
                             const unsigned int counts[/*n - 1*/],
                             int n, vp9_writer *w) {
  int i;
  unsigned int branch_ct[32][2];

  // Assuming max number of probabilities <= 32
  assert(n <= 32);

  vp9_tree_probs_from_distribution(tree, branch_ct, counts);
  for (i = 0; i < n - 1; ++i)
    vp9_cond_prob_diff_update(w, &probs[i], branch_ct[i]);
}

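// Signals the selected transform size, one bit per allowed step below the
// largest transform size for this block size.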
static void write_selected_tx_size(const VP9_COMP *cpi, MODE_INFO *m,
                                   TX_SIZE tx_size, BLOCK_SIZE bsize,
                                   vp9_writer *w) {
  const TX_SIZE max_tx_size = max_txsize_lookup[bsize];
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  const vp9_prob *const tx_probs = get_tx_probs2(max_tx_size, xd,
                                                 &cpi->common.fc.tx_probs);
  vp9_write(w, tx_size != TX_4X4, tx_probs[0]);
  if (tx_size != TX_4X4 && max_tx_size >= TX_16X16) {
    vp9_write(w, tx_size != TX_8X8, tx_probs[1]);
    if (tx_size != TX_8X8 && max_tx_size >= TX_32X32)
      vp9_write(w, tx_size != TX_16X16, tx_probs[2]);
  }
}

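// Returns 1 without writing anything when the segment forces skip;
// otherwise codes the block's skip flag and returns its value.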
static int write_skip_coeff(const VP9_COMP *cpi, int segment_id, MODE_INFO *m,
                            vp9_writer *w) {
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  if (vp9_segfeature_active(&cpi->common.seg, segment_id, SEG_LVL_SKIP)) {
    return 1;
  } else {
    const int skip = m->mbmi.skip_coeff;
    vp9_write(w, skip, vp9_get_skip_prob(&cpi->common, xd));
    return skip;
  }
}

void vp9_update_skip_probs(VP9_COMP *cpi, vp9_writer *w) {
  VP9_COMMON *cm = &cpi->common;
  int k;

  for (k = 0; k < MBSKIP_CONTEXTS; ++k)
    vp9_cond_prob_diff_update(w, &cm->fc.mbskip_probs[k], cm->counts.mbskip[k]);
}

static void update_switchable_interp_probs(VP9_COMP *cpi, vp9_writer *w) {
  VP9_COMMON *const cm = &cpi->common;
  int j;
  for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j)
    prob_diff_update(vp9_switchable_interp_tree,
                     cm->fc.switchable_interp_prob[j],
                     cm->counts.switchable_interp[j], SWITCHABLE_FILTERS, w);

#ifdef MODE_STATS
  if (!cpi->dummy_packing)
    update_switchable_interp_stats(cm);
#endif
}

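// Packs the coefficient tokens for one block, stopping at the
// end-of-superblock token.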
static void pack_mb_tokens(vp9_writer* const w,
                           TOKENEXTRA **tp,
                           const TOKENEXTRA *const stop) {
  TOKENEXTRA *p = *tp;

  while (p < stop && p->token != EOSB_TOKEN) {
    const int t = p->token;
    const struct vp9_token *const a = &vp9_coef_encodings[t];
    const vp9_extra_bit *const b = &vp9_extra_bits[t];
    int i = 0;
    int v = a->value;
    int n = a->len;

    /* skip one or two nodes */
    if (p->skip_eob_node) {
      n -= p->skip_eob_node;
      i = 2 * p->skip_eob_node;
    }

    // TODO(jbb): expanding this can lead to big gains.  It allows
    // much better branch prediction and would enable us to avoid numerous
    // lookups and compares.

    // If we have a token that's in the constrained set, the coefficient tree
    // is split into two treed writes.  The first treed write takes care of the
    // unconstrained nodes.  The second treed write takes care of the
    // constrained nodes.
    if (t >= TWO_TOKEN && t < EOB_TOKEN) {
      int len = UNCONSTRAINED_NODES - p->skip_eob_node;
      int bits = v >> (n - len);
      vp9_write_tree(w, vp9_coef_tree, p->context_tree, bits, len, i);
      vp9_write_tree(w, vp9_coef_con_tree,
                     vp9_pareto8_full[p->context_tree[PIVOT_NODE] - 1],
                     v, n - len, 0);
    } else {
      vp9_write_tree(w, vp9_coef_tree, p->context_tree, v, n, i);
    }

    if (b->base_val) {
      const int e = p->extra, l = b->len;

      if (l) {
        const unsigned char *pb = b->prob;
        int v = e >> 1;
        int n = l;              /* number of bits in v, assumed nonzero */
        int i = 0;

        do {
          const int bb = (v >> --n) & 1;
          vp9_write(w, bb, pb[i >> 1]);
          i = b->tree[i + bb];
        } while (n);
      }

      vp9_write_bit(w, e & 1);
    }

    ++p;
  }

  *tp = p + (p->token == EOSB_TOKEN);
}

static void write_segment_id(vp9_writer *w, const struct segmentation *seg,
                             int segment_id) {
  if (seg->enabled && seg->update_map)
    vp9_write_tree(w, vp9_segment_tree, seg->tree_probs, segment_id, 3, 0);
}

// This function encodes the reference frame
static void encode_ref_frame(VP9_COMP *cpi, vp9_writer *bc) {
  VP9_COMMON *const cm = &cpi->common;
  MACROBLOCK *const x = &cpi->mb;
  MACROBLOCKD *const xd = &x->e_mbd;
  MB_MODE_INFO *mi = &xd->mi_8x8[0]->mbmi;
  const int segment_id = mi->segment_id;
  int seg_ref_active = vp9_segfeature_active(&cm->seg, segment_id,
                                             SEG_LVL_REF_FRAME);
  // If segment level coding of this signal is disabled...
  // or the segment allows multiple reference frame options
  if (!seg_ref_active) {
    // does the feature use compound prediction or not
    // (if not specified at the frame/segment level)
    if (cm->reference_mode == REFERENCE_MODE_SELECT) {
      vp9_write(bc, mi->ref_frame[1] > INTRA_FRAME,
                vp9_get_reference_mode_prob(cm, xd));
    } else {
      assert((mi->ref_frame[1] <= INTRA_FRAME) ==
             (cm->reference_mode == SINGLE_REFERENCE));
    }

    if (mi->ref_frame[1] > INTRA_FRAME) {
      vp9_write(bc, mi->ref_frame[0] == GOLDEN_FRAME,
                vp9_get_pred_prob_comp_ref_p(cm, xd));
    } else {
      vp9_write(bc, mi->ref_frame[0] != LAST_FRAME,
                vp9_get_pred_prob_single_ref_p1(cm, xd));
      if (mi->ref_frame[0] != LAST_FRAME)
        vp9_write(bc, mi->ref_frame[0] != GOLDEN_FRAME,
                  vp9_get_pred_prob_single_ref_p2(cm, xd));
    }
  } else {
    assert(mi->ref_frame[1] <= INTRA_FRAME);
    assert(vp9_get_segdata(&cm->seg, segment_id, SEG_LVL_REF_FRAME) ==
           mi->ref_frame[0]);
  }

  // If using the prediction model we have nothing further to do because
  // the reference frame is fully coded by the segment.
}

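// Writes the mode info for a block in an inter frame: segment id, skip flag,
// reference frames, prediction modes, interpolation filter and motion vectors.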
static void pack_inter_mode_mvs(VP9_COMP *cpi, MODE_INFO *m, vp9_writer *bc) {
  VP9_COMMON *const cm = &cpi->common;
  const nmv_context *nmvc = &cm->fc.nmvc;
  MACROBLOCK *const x = &cpi->mb;
  MACROBLOCKD *const xd = &x->e_mbd;
  struct segmentation *seg = &cm->seg;
  MB_MODE_INFO *const mi = &m->mbmi;
  const MV_REFERENCE_FRAME rf = mi->ref_frame[0];
  const MB_PREDICTION_MODE mode = mi->mode;
  const int segment_id = mi->segment_id;
  int skip_coeff;
  const BLOCK_SIZE bsize = mi->sb_type;
  const int allow_hp = cm->allow_high_precision_mv;

#ifdef ENTROPY_STATS
  active_section = 9;
#endif

  if (seg->update_map) {
    if (seg->temporal_update) {
      const int pred_flag = mi->seg_id_predicted;
      vp9_prob pred_prob = vp9_get_pred_prob_seg_id(seg, xd);
      vp9_write(bc, pred_flag, pred_prob);
      if (!pred_flag)
        write_segment_id(bc, seg, segment_id);
    } else {
      write_segment_id(bc, seg, segment_id);
    }
  }

  skip_coeff = write_skip_coeff(cpi, segment_id, m, bc);

  if (!vp9_segfeature_active(seg, segment_id, SEG_LVL_REF_FRAME))
    vp9_write(bc, rf != INTRA_FRAME, vp9_get_intra_inter_prob(cm, xd));

  if (bsize >= BLOCK_8X8 && cm->tx_mode == TX_MODE_SELECT &&
      !(rf != INTRA_FRAME &&
        (skip_coeff || vp9_segfeature_active(seg, segment_id, SEG_LVL_SKIP)))) {
    write_selected_tx_size(cpi, m, mi->tx_size, bsize, bc);
  }

  if (rf == INTRA_FRAME) {
#ifdef ENTROPY_STATS
    active_section = 6;
#endif

    if (bsize >= BLOCK_8X8) {
      write_intra_mode(bc, mode, cm->fc.y_mode_prob[size_group_lookup[bsize]]);
    } else {
      int idx, idy;
      const int num_4x4_blocks_wide = num_4x4_blocks_wide_lookup[bsize];
      const int num_4x4_blocks_high = num_4x4_blocks_high_lookup[bsize];
      for (idy = 0; idy < 2; idy += num_4x4_blocks_high) {
        for (idx = 0; idx < 2; idx += num_4x4_blocks_wide) {
          const MB_PREDICTION_MODE bm = m->bmi[idy * 2 + idx].as_mode;
          write_intra_mode(bc, bm, cm->fc.y_mode_prob[0]);
        }
      }
    }
    write_intra_mode(bc, mi->uv_mode, cm->fc.uv_mode_prob[mode]);
  } else {
    vp9_prob *mv_ref_p;
    encode_ref_frame(cpi, bc);
    mv_ref_p = cpi->common.fc.inter_mode_probs[mi->mode_context[rf]];

#ifdef ENTROPY_STATS
    active_section = 3;
#endif

    // If segment skip is not enabled code the mode.
    if (!vp9_segfeature_active(seg, segment_id, SEG_LVL_SKIP)) {
      if (bsize >= BLOCK_8X8) {
        write_inter_mode(bc, mode, mv_ref_p);
        ++cm->counts.inter_mode[mi->mode_context[rf]][INTER_OFFSET(mode)];
      }
    }

    if (cm->mcomp_filter_type == SWITCHABLE) {
      const int ctx = vp9_get_pred_context_switchable_interp(xd);
      vp9_write_token(bc, vp9_switchable_interp_tree,
                      cm->fc.switchable_interp_prob[ctx],
                      &switchable_interp_encodings[mi->interp_filter]);
    } else {
      assert(mi->interp_filter == cm->mcomp_filter_type);
    }

    if (bsize < BLOCK_8X8) {
      const int num_4x4_blocks_wide = num_4x4_blocks_wide_lookup[bsize];
      const int num_4x4_blocks_high = num_4x4_blocks_high_lookup[bsize];
      int idx, idy;
      for (idy = 0; idy < 2; idy += num_4x4_blocks_high) {
        for (idx = 0; idx < 2; idx += num_4x4_blocks_wide) {
          const int j = idy * 2 + idx;
          const MB_PREDICTION_MODE blockmode = m->bmi[j].as_mode;
          write_inter_mode(bc, blockmode, mv_ref_p);
          ++cm->counts.inter_mode[mi->mode_context[rf]]
                                 [INTER_OFFSET(blockmode)];

          if (blockmode == NEWMV) {
#ifdef ENTROPY_STATS
            active_section = 11;
#endif
            vp9_encode_mv(cpi, bc, &m->bmi[j].as_mv[0].as_mv,
                          &mi->best_mv[0].as_mv, nmvc, allow_hp);

            if (has_second_ref(mi))
              vp9_encode_mv(cpi, bc, &m->bmi[j].as_mv[1].as_mv,
                            &mi->best_mv[1].as_mv, nmvc, allow_hp);
          }
        }
      }
    } else if (mode == NEWMV) {
#ifdef ENTROPY_STATS
      active_section = 5;
#endif
      vp9_encode_mv(cpi, bc, &mi->mv[0].as_mv,
                    &mi->best_mv[0].as_mv, nmvc, allow_hp);

      if (has_second_ref(mi))
        vp9_encode_mv(cpi, bc, &mi->mv[1].as_mv,
                      &mi->best_mv[1].as_mv, nmvc, allow_hp);
    }
  }
}

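// Writes the mode info for a block in an intra-only (key) frame, coding
// intra modes with probabilities conditioned on the above and left modes.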
static void write_mb_modes_kf(const VP9_COMP *cpi, MODE_INFO **mi_8x8,
                              vp9_writer *bc) {
  const VP9_COMMON *const cm = &cpi->common;
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  const struct segmentation *const seg = &cm->seg;
  MODE_INFO *m = mi_8x8[0];
  const int ym = m->mbmi.mode;
  const int segment_id = m->mbmi.segment_id;
  MODE_INFO *above_mi = mi_8x8[-xd->mode_info_stride];
  MODE_INFO *left_mi = xd->left_available ? mi_8x8[-1] : NULL;

  if (seg->update_map)
    write_segment_id(bc, seg, m->mbmi.segment_id);

  write_skip_coeff(cpi, segment_id, m, bc);

  if (m->mbmi.sb_type >= BLOCK_8X8 && cm->tx_mode == TX_MODE_SELECT)
    write_selected_tx_size(cpi, m, m->mbmi.tx_size, m->mbmi.sb_type, bc);

  if (m->mbmi.sb_type >= BLOCK_8X8) {
    const MB_PREDICTION_MODE A = above_block_mode(m, above_mi, 0);
    const MB_PREDICTION_MODE L = left_block_mode(m, left_mi, 0);
    write_intra_mode(bc, ym, vp9_kf_y_mode_prob[A][L]);
  } else {
    int idx, idy;
    const int num_4x4_blocks_wide = num_4x4_blocks_wide_lookup[m->mbmi.sb_type];
    const int num_4x4_blocks_high = num_4x4_blocks_high_lookup[m->mbmi.sb_type];
    for (idy = 0; idy < 2; idy += num_4x4_blocks_high) {
      for (idx = 0; idx < 2; idx += num_4x4_blocks_wide) {
        int i = idy * 2 + idx;
        const MB_PREDICTION_MODE A = above_block_mode(m, above_mi, i);
        const MB_PREDICTION_MODE L = left_block_mode(m, left_mi, i);
        const int bm = m->bmi[i].as_mode;
#ifdef ENTROPY_STATS
        ++intra_mode_stats[A][L][bm];
#endif
        write_intra_mode(bc, bm, vp9_kf_y_mode_prob[A][L]);
      }
    }
  }

  write_intra_mode(bc, m->mbmi.uv_mode, vp9_kf_uv_mode_prob[ym]);
}

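// Writes the mode info and then the coefficient tokens for a single block.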
static void write_modes_b(VP9_COMP *cpi, const TileInfo *const tile,
                          vp9_writer *w, TOKENEXTRA **tok, TOKENEXTRA *tok_end,
                          int mi_row, int mi_col) {
  VP9_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  MODE_INFO *m;

  xd->mi_8x8 = cm->mi_grid_visible + (mi_row * cm->mode_info_stride + mi_col);
  m = xd->mi_8x8[0];

  set_mi_row_col(xd, tile,
                 mi_row, num_8x8_blocks_high_lookup[m->mbmi.sb_type],
                 mi_col, num_8x8_blocks_wide_lookup[m->mbmi.sb_type],
                 cm->mi_rows, cm->mi_cols);
  if (frame_is_intra_only(cm)) {
    write_mb_modes_kf(cpi, xd->mi_8x8, w);
#ifdef ENTROPY_STATS
    active_section = 8;
#endif
  } else {
    pack_inter_mode_mvs(cpi, m, w);
#ifdef ENTROPY_STATS
    active_section = 1;
#endif
  }

  assert(*tok < tok_end);
  pack_mb_tokens(w, tok, tok_end);
}

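// Codes the partition type for a block; at the frame edge only the
// choices that actually fit are signalled.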
static void write_partition(VP9_COMP *cpi, int hbs, int mi_row, int mi_col,
                            PARTITION_TYPE p, BLOCK_SIZE bsize, vp9_writer *w) {
  VP9_COMMON *const cm = &cpi->common;
  const int ctx = partition_plane_context(cpi->above_seg_context,
                                          cpi->left_seg_context,
                                          mi_row, mi_col, bsize);
  const vp9_prob *const probs = get_partition_probs(cm, ctx);
  const int has_rows = (mi_row + hbs) < cm->mi_rows;
  const int has_cols = (mi_col + hbs) < cm->mi_cols;

  if (has_rows && has_cols) {
    vp9_write_token(w, vp9_partition_tree, probs, &partition_encodings[p]);
  } else if (!has_rows && has_cols) {
    assert(p == PARTITION_SPLIT || p == PARTITION_HORZ);
    vp9_write(w, p == PARTITION_SPLIT, probs[1]);
  } else if (has_rows && !has_cols) {
    assert(p == PARTITION_SPLIT || p == PARTITION_VERT);
    vp9_write(w, p == PARTITION_SPLIT, probs[2]);
  } else {
    assert(p == PARTITION_SPLIT);
  }
}

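// Recursively writes a superblock: the partition first, then each resulting
// sub-block or sub-partition.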
static void write_modes_sb(VP9_COMP *cpi, const TileInfo *const tile,
                           vp9_writer *w, TOKENEXTRA **tok, TOKENEXTRA *tok_end,
                           int mi_row, int mi_col, BLOCK_SIZE bsize) {
  VP9_COMMON *const cm = &cpi->common;
  const int bsl = b_width_log2(bsize);
  const int bs = (1 << bsl) / 4;
  PARTITION_TYPE partition;
  BLOCK_SIZE subsize;
  MODE_INFO *m = cm->mi_grid_visible[mi_row * cm->mode_info_stride + mi_col];

  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols)
    return;

  partition = partition_lookup[bsl][m->mbmi.sb_type];
  write_partition(cpi, bs, mi_row, mi_col, partition, bsize, w);
  subsize = get_subsize(bsize, partition);
  if (subsize < BLOCK_8X8) {
    write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
  } else {
    switch (partition) {
      case PARTITION_NONE:
        write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
        break;
      case PARTITION_HORZ:
        write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
        if (mi_row + bs < cm->mi_rows)
          write_modes_b(cpi, tile, w, tok, tok_end, mi_row + bs, mi_col);
        break;
      case PARTITION_VERT:
        write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
        if (mi_col + bs < cm->mi_cols)
          write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + bs);
        break;
      case PARTITION_SPLIT:
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, subsize);
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col + bs,
                       subsize);
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row + bs, mi_col,
                       subsize);
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row + bs, mi_col + bs,
                       subsize);
        break;
      default:
        assert(0);
    }
  }

  // update partition context
  if (bsize >= BLOCK_8X8 &&
      (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT))
    update_partition_context(cpi->above_seg_context, cpi->left_seg_context,
                             mi_row, mi_col, subsize, bsize);
}

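// Walks one tile in superblock order, clearing the left context at the
// start of each superblock row.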
static void write_modes(VP9_COMP *cpi, const TileInfo *const tile,
                        vp9_writer *w, TOKENEXTRA **tok, TOKENEXTRA *tok_end) {
  int mi_row, mi_col;

  for (mi_row = tile->mi_row_start; mi_row < tile->mi_row_end;
       mi_row += MI_BLOCK_SIZE) {
      vp9_zero(cpi->left_seg_context);
    for (mi_col = tile->mi_col_start; mi_col < tile->mi_col_end;
         mi_col += MI_BLOCK_SIZE)
      write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, BLOCK_64X64);
  }
}

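// Converts the accumulated token counts for one transform size into branch
// counts and candidate probabilities for this frame.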
static void build_tree_distribution(VP9_COMP *cpi, TX_SIZE tx_size) {
  vp9_coeff_probs_model *coef_probs = cpi->frame_coef_probs[tx_size];
  vp9_coeff_count *coef_counts = cpi->coef_counts[tx_size];
  unsigned int (*eob_branch_ct)[REF_TYPES][COEF_BANDS][COEFF_CONTEXTS] =
      cpi->common.counts.eob_branch[tx_size];
  vp9_coeff_stats *coef_branch_ct = cpi->frame_branch_ct[tx_size];
  int i, j, k, l, m;

  for (i = 0; i < PLANE_TYPES; ++i) {
    for (j = 0; j < REF_TYPES; ++j) {
      for (k = 0; k < COEF_BANDS; ++k) {
        for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
          vp9_tree_probs_from_distribution(vp9_coef_tree,
                                           coef_branch_ct[i][j][k][l],
                                           coef_counts[i][j][k][l]);
          coef_branch_ct[i][j][k][l][0][1] = eob_branch_ct[i][j][k][l] -
                                             coef_branch_ct[i][j][k][l][0][0];
          for (m = 0; m < UNCONSTRAINED_NODES; ++m)
            coef_probs[i][j][k][l][m] = get_binary_prob(
                                            coef_branch_ct[i][j][k][l][m][0],
                                            coef_branch_ct[i][j][k][l][m][1]);
#ifdef ENTROPY_STATS
          if (!cpi->dummy_packing) {
            int t;
            for (t = 0; t < ENTROPY_TOKENS; ++t)
              context_counters[tx_size][i][j][k][l][t] +=
                  coef_counts[i][j][k][l][t];
            context_counters[tx_size][i][j][k][l][ENTROPY_TOKENS] +=
                eob_branch_ct[i][j][k][l];
          }
#endif
        }
      }
    }
  }
}

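// Decides which coefficient probabilities are worth updating for one
// transform size and writes the differential updates; case 0 performs a
// full savings search while cases 1 and 2 restrict the search for speed.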
static void update_coef_probs_common(vp9_writer* const bc, VP9_COMP *cpi,
                                     TX_SIZE tx_size) {
  vp9_coeff_probs_model *new_frame_coef_probs = cpi->frame_coef_probs[tx_size];
  vp9_coeff_probs_model *old_frame_coef_probs =
      cpi->common.fc.coef_probs[tx_size];
  vp9_coeff_stats *frame_branch_ct = cpi->frame_branch_ct[tx_size];
  const vp9_prob upd = DIFF_UPDATE_PROB;
  const int entropy_nodes_update = UNCONSTRAINED_NODES;
  int i, j, k, l, t;
  switch (cpi->sf.use_fast_coef_updates) {
    case 0: {
      /* dry run to see if there is any update at all needed */
      int savings = 0;
      int update[2] = {0, 0};
      for (i = 0; i < PLANE_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
              for (t = 0; t < entropy_nodes_update; ++t) {
                vp9_prob newp = new_frame_coef_probs[i][j][k][l][t];
                const vp9_prob oldp = old_frame_coef_probs[i][j][k][l][t];
                int s;
                int u = 0;
                if (t == PIVOT_NODE)
                  s = vp9_prob_diff_update_savings_search_model(
                      frame_branch_ct[i][j][k][l][0],
                      old_frame_coef_probs[i][j][k][l], &newp, upd, i, j);
                else
                  s = vp9_prob_diff_update_savings_search(
                      frame_branch_ct[i][j][k][l][t], oldp, &newp, upd);
                if (s > 0 && newp != oldp)
                  u = 1;
                if (u)
                  savings += s - (int)(vp9_cost_zero(upd));
                else
                  savings -= (int)(vp9_cost_zero(upd));
                update[u]++;
              }
            }
          }
        }
      }

      // printf("Update %d %d, savings %d\n", update[0], update[1], savings);
      /* Is coef updated at all */
      if (update[1] == 0 || savings < 0) {
        vp9_write_bit(bc, 0);
        return;
      }
      vp9_write_bit(bc, 1);
      for (i = 0; i < PLANE_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
              // calc probs and branch cts for this frame only
              for (t = 0; t < entropy_nodes_update; ++t) {
                vp9_prob newp = new_frame_coef_probs[i][j][k][l][t];
                vp9_prob *oldp = old_frame_coef_probs[i][j][k][l] + t;
                const vp9_prob upd = DIFF_UPDATE_PROB;
                int s;
                int u = 0;
                if (t == PIVOT_NODE)
                  s = vp9_prob_diff_update_savings_search_model(
                      frame_branch_ct[i][j][k][l][0],
                      old_frame_coef_probs[i][j][k][l], &newp, upd, i, j);
                else
                  s = vp9_prob_diff_update_savings_search(
                      frame_branch_ct[i][j][k][l][t],
                      *oldp, &newp, upd);
                if (s > 0 && newp != *oldp)
                  u = 1;
                vp9_write(bc, u, upd);
#ifdef ENTROPY_STATS
                if (!cpi->dummy_packing)
                  ++tree_update_hist[tx_size][i][j][k][l][t][u];
#endif
                if (u) {
                  /* send/use new probability */
                  vp9_write_prob_diff_update(bc, newp, *oldp);
                  *oldp = newp;
                }
              }
            }
          }
        }
      }
      return;
    }

    case 1:
    case 2: {
      const int prev_coef_contexts_to_update =
          cpi->sf.use_fast_coef_updates == 2 ? COEFF_CONTEXTS >> 1
                                             : COEFF_CONTEXTS;
      const int coef_band_to_update =
          cpi->sf.use_fast_coef_updates == 2 ? COEF_BANDS >> 1
                                             : COEF_BANDS;
      int updates = 0;
      int noupdates_before_first = 0;
      for (i = 0; i < PLANE_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
              // calc probs and branch cts for this frame only
              for (t = 0; t < entropy_nodes_update; ++t) {
                vp9_prob newp = new_frame_coef_probs[i][j][k][l][t];
                vp9_prob *oldp = old_frame_coef_probs[i][j][k][l] + t;
                int s;
                int u = 0;
                if (l >= prev_coef_contexts_to_update ||
                    k >= coef_band_to_update) {
                  u = 0;
                } else {
                  if (t == PIVOT_NODE)
                    s = vp9_prob_diff_update_savings_search_model(
                        frame_branch_ct[i][j][k][l][0],
                        old_frame_coef_probs[i][j][k][l], &newp, upd, i, j);
                  else
                    s = vp9_prob_diff_update_savings_search(
                        frame_branch_ct[i][j][k][l][t],
                        *oldp, &newp, upd);
                  if (s > 0 && newp != *oldp)
                    u = 1;
                }
                updates += u;
                if (u == 0 && updates == 0) {
                  noupdates_before_first++;
#ifdef ENTROPY_STATS
                  if (!cpi->dummy_packing)
                    ++tree_update_hist[tx_size][i][j][k][l][t][u];
#endif
                  continue;
                }
                if (u == 1 && updates == 1) {
                  int v;
                  // first update
                  vp9_write_bit(bc, 1);
                  for (v = 0; v < noupdates_before_first; ++v)
                    vp9_write(bc, 0, upd);
                }
                vp9_write(bc, u, upd);
#ifdef ENTROPY_STATS
                if (!cpi->dummy_packing)
                  ++tree_update_hist[tx_size][i][j][k][l][t][u];
#endif
                if (u) {
                  /* send/use new probability */
                  vp9_write_prob_diff_update(bc, newp, *oldp);
                  *oldp = newp;
                }
              }
            }
          }
        }
      }
      if (updates == 0) {
        vp9_write_bit(bc, 0);  // no updates
      }
      return;
    }

    default:
      assert(0);
  }
}

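// Builds the per-frame coefficient statistics and writes probability
// updates for every transform size in use.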
static void update_coef_probs(VP9_COMP* cpi, vp9_writer* w) {
  const TX_MODE tx_mode = cpi->common.tx_mode;
  const TX_SIZE max_tx_size = tx_mode_to_biggest_tx_size[tx_mode];
  TX_SIZE tx_size;
  vp9_clear_system_state();

  for (tx_size = TX_4X4; tx_size <= TX_32X32; ++tx_size)
    build_tree_distribution(cpi, tx_size);

  for (tx_size = TX_4X4; tx_size <= max_tx_size; ++tx_size)
    update_coef_probs_common(w, cpi, tx_size);
}

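// Writes the loop filter level, sharpness and any mode/ref delta updates
// to the uncompressed header.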
static void encode_loopfilter(struct loopfilter *lf,
                              struct vp9_write_bit_buffer *wb) {
  int i;

  // Encode the loop filter level and type
  vp9_wb_write_literal(wb, lf->filter_level, 6);
  vp9_wb_write_literal(wb, lf->sharpness_level, 3);

  // Write out loop filter deltas applied at the MB level based on mode or
  // ref frame (if they are enabled).
  vp9_wb_write_bit(wb, lf->mode_ref_delta_enabled);

  if (lf->mode_ref_delta_enabled) {
    vp9_wb_write_bit(wb, lf->mode_ref_delta_update);
    if (lf->mode_ref_delta_update) {
      for (i = 0; i < MAX_REF_LF_DELTAS; i++) {
        const int delta = lf->ref_deltas[i];
        const int changed = delta != lf->last_ref_deltas[i];
        vp9_wb_write_bit(wb, changed);
        if (changed) {
          lf->last_ref_deltas[i] = delta;
          vp9_wb_write_literal(wb, abs(delta) & 0x3F, 6);
          vp9_wb_write_bit(wb, delta < 0);
        }
      }

      for (i = 0; i < MAX_MODE_LF_DELTAS; i++) {
        const int delta = lf->mode_deltas[i];
        const int changed = delta != lf->last_mode_deltas[i];
        vp9_wb_write_bit(wb, changed);
        if (changed) {
          lf->last_mode_deltas[i] = delta;
          vp9_wb_write_literal(wb, abs(delta) & 0x3F, 6);
          vp9_wb_write_bit(wb, delta < 0);
        }
      }
    }
  }
}

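// Writes a delta quantizer as a presence bit followed by magnitude and sign.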
static void write_delta_q(struct vp9_write_bit_buffer *wb, int delta_q) {
  if (delta_q != 0) {
    vp9_wb_write_bit(wb, 1);
    vp9_wb_write_literal(wb, abs(delta_q), 4);
    vp9_wb_write_bit(wb, delta_q < 0);
  } else {
    vp9_wb_write_bit(wb, 0);
  }
}

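// Writes the base q index and the per-plane delta quantizers.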
static void encode_quantization(VP9_COMMON *cm,
                                struct vp9_write_bit_buffer *wb) {
  vp9_wb_write_literal(wb, cm->base_qindex, QINDEX_BITS);
  write_delta_q(wb, cm->y_dc_delta_q);
  write_delta_q(wb, cm->uv_dc_delta_q);
  write_delta_q(wb, cm->uv_ac_delta_q);
}


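// Writes the segmentation parameters to the uncompressed header, starting
// with the probabilities used to code the segment map.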
static void encode_segmentation(VP9_COMP *cpi,
                                struct vp9_write_bit_buffer *wb) {
  int i, j;

  struct segmentation *seg = &cpi->common.seg;

  vp9_wb_write_bit(wb, seg->enabled);
  if (!seg->enabled)
    return;

  // Segmentation map
  vp9_wb_write_bit(wb, seg->update_map);
  if (seg->update_map) {
    // Select the coding strategy (temporal or spatial)
    vp9_choose_segmap_coding_method(cpi);
    // Write out probabilities used to decode unpredicted  macro-block segments
    for (i = 0; i < SEG_TREE_PROBS; i++) {
      const int prob = seg->tree_probs[i];
      const int update = prob != MAX_PROB;
      vp9_wb_write_bit(wb, update);
      if (update)
        vp9_wb_write_literal(wb, prob, 8);
    }

    // Write out the chosen coding method.
    vp9_wb_write_bit(wb, seg->temporal_update);
    if (seg->temporal_update) {
      for (i = 0; i < PREDICTION_PROBS; i++) {
        const int prob = seg->pred_probs[i];
        const int update = prob != MAX_PROB;
        vp9_wb_write_bit(wb, update);
        if (update)
          vp9_wb_write_literal(wb,