/*
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include <assert.h>
#include <stdio.h>
#include <limits.h>

#include "vpx/vpx_encoder.h"
#include "vpx_mem/vpx_mem.h"

#include "vp9/common/vp9_entropymode.h"
#include "vp9/common/vp9_entropymv.h"
#include "vp9/common/vp9_tile_common.h"
#include "vp9/common/vp9_seg_common.h"
#include "vp9/common/vp9_pred_common.h"
#include "vp9/common/vp9_entropy.h"
#include "vp9/common/vp9_mvref_common.h"
#include "vp9/common/vp9_systemdependent.h"
#include "vp9/common/vp9_pragmas.h"

#include "vp9/encoder/vp9_mcomp.h"
#include "vp9/encoder/vp9_encodemv.h"
#include "vp9/encoder/vp9_bitstream.h"
#include "vp9/encoder/vp9_segmentation.h"
#include "vp9/encoder/vp9_subexp.h"
#include "vp9/encoder/vp9_tokenize.h"
#include "vp9/encoder/vp9_write_bit_buffer.h"


#if defined(SECTIONBITS_OUTPUT)
unsigned __int64 Sectionbits[500];
#endif

#ifdef ENTROPY_STATS
vp9_coeff_stats tree_update_hist[TX_SIZES][PLANE_TYPES];
extern unsigned int active_section;
#endif

static struct vp9_token intra_mode_encodings[INTRA_MODES];
static struct vp9_token switchable_interp_encodings[SWITCHABLE_FILTERS];
static struct vp9_token partition_encodings[PARTITION_TYPES];
static struct vp9_token inter_mode_encodings[INTER_MODES];

void vp9_entropy_mode_init() {
  vp9_tokens_from_tree(intra_mode_encodings, vp9_intra_mode_tree);
  vp9_tokens_from_tree(switchable_interp_encodings, vp9_switchable_interp_tree);
  vp9_tokens_from_tree(partition_encodings, vp9_partition_tree);
  vp9_tokens_from_tree(inter_mode_encodings, vp9_inter_mode_tree);
}

static void write_intra_mode(vp9_writer *w, MB_PREDICTION_MODE mode,
                             const vp9_prob *probs) {
  vp9_write_token(w, vp9_intra_mode_tree, probs, &intra_mode_encodings[mode]);
}

static void write_inter_mode(vp9_writer *w, MB_PREDICTION_MODE mode,
                             const vp9_prob *probs) {
  assert(is_inter_mode(mode));
  vp9_write_token(w, vp9_inter_mode_tree, probs,
                  &inter_mode_encodings[INTER_OFFSET(mode)]);
}

static INLINE void write_be32(uint8_t *p, int value) {
  p[0] = value >> 24;
  p[1] = value >> 16;
  p[2] = value >> 8;
  p[3] = value;
}

void vp9_encode_unsigned_max(struct vp9_write_bit_buffer *wb,
                             int data, int max) {
  vp9_wb_write_literal(wb, data, get_unsigned_bits(max));
}

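// Builds per-node branch counts for an n-symbol tree from the raw symbol
// counts and conditionally writes a differential update for each of the
// n - 1 node probabilities.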
static void prob_diff_update(const vp9_tree_index *tree,
                             vp9_prob probs[/*n - 1*/],
                             const unsigned int counts[/*n - 1*/],
                             int n, vp9_writer *w) {
  int i;
  unsigned int branch_ct[32][2];

  // Assuming max number of probabilities <= 32
  assert(n <= 32);

  vp9_tree_probs_from_distribution(tree, branch_ct, counts);
  for (i = 0; i < n - 1; ++i)
    vp9_cond_prob_diff_update(w, &probs[i], branch_ct[i]);
}

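// Signals the transform size used by this block as a sequence of binary
// decisions, stopping early once the largest transform size allowed for
// |bsize| has been reached.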
static void write_selected_tx_size(const VP9_COMP *cpi, MODE_INFO *m,
                                   TX_SIZE tx_size, BLOCK_SIZE bsize,
                                   vp9_writer *w) {
  const TX_SIZE max_tx_size = max_txsize_lookup[bsize];
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  const vp9_prob *const tx_probs = get_tx_probs2(max_tx_size, xd,
                                                 &cpi->common.fc.tx_probs);
  vp9_write(w, tx_size != TX_4X4, tx_probs[0]);
  if (tx_size != TX_4X4 && max_tx_size >= TX_16X16) {
    vp9_write(w, tx_size != TX_8X8, tx_probs[1]);
    if (tx_size != TX_8X8 && max_tx_size >= TX_32X32)
      vp9_write(w, tx_size != TX_16X16, tx_probs[2]);
  }
}

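// Writes the skip flag for this block unless the segment forces SEG_LVL_SKIP,
// in which case nothing is coded and skip is implied. Returns the skip value.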
static int write_skip_coeff(const VP9_COMP *cpi, int segment_id, MODE_INFO *m,
                            vp9_writer *w) {
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  if (vp9_segfeature_active(&cpi->common.seg, segment_id, SEG_LVL_SKIP)) {
    return 1;
  } else {
    const int skip = m->mbmi.skip_coeff;
    vp9_write(w, skip, vp9_get_skip_prob(&cpi->common, xd));
    return skip;
  }
}

void vp9_update_skip_probs(VP9_COMMON *cm, vp9_writer *w) {
  int k;

  for (k = 0; k < SKIP_CONTEXTS; ++k)
    vp9_cond_prob_diff_update(w, &cm->fc.skip_probs[k], cm->counts.skip[k]);
}

static void update_switchable_interp_probs(VP9_COMP *cpi, vp9_writer *w) {
  VP9_COMMON *const cm = &cpi->common;
  int j;
  for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j)
    prob_diff_update(vp9_switchable_interp_tree,
                     cm->fc.switchable_interp_prob[j],
                     cm->counts.switchable_interp[j], SWITCHABLE_FILTERS, w);
}

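// Packs the coefficient tokens for one block into the bitstream, writing each
// token's extra bits and a final literal bit when the token carries a base
// value, and advances *tp past the terminating EOSB_TOKEN.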
static void pack_mb_tokens(vp9_writer* const w,
                           TOKENEXTRA **tp,
                           const TOKENEXTRA *const stop) {
  TOKENEXTRA *p = *tp;

  while (p < stop && p->token != EOSB_TOKEN) {
    const int t = p->token;
    const struct vp9_token *const a = &vp9_coef_encodings[t];
    const vp9_extra_bit *const b = &vp9_extra_bits[t];
    int i = 0;
    int v = a->value;
    int n = a->len;

    /* skip one or two nodes */
    if (p->skip_eob_node) {
      n -= p->skip_eob_node;
      i = 2 * p->skip_eob_node;
    }

    // TODO(jbb): expanding this can lead to big gains.  It allows
    // much better branch prediction and would enable us to avoid numerous
    // lookups and compares.

    // If we have a token that's in the constrained set, the coefficient tree
    // is split into two treed writes.  The first treed write takes care of the
    // unconstrained nodes.  The second treed write takes care of the
    // constrained nodes.
    if (t >= TWO_TOKEN && t < EOB_TOKEN) {
      int len = UNCONSTRAINED_NODES - p->skip_eob_node;
      int bits = v >> (n - len);
      vp9_write_tree(w, vp9_coef_tree, p->context_tree, bits, len, i);
      vp9_write_tree(w, vp9_coef_con_tree,
                     vp9_pareto8_full[p->context_tree[PIVOT_NODE] - 1],
                     v, n - len, 0);
    } else {
      vp9_write_tree(w, vp9_coef_tree, p->context_tree, v, n, i);
    }

    if (b->base_val) {
      const int e = p->extra, l = b->len;

      if (l) {
        const unsigned char *pb = b->prob;
        int v = e >> 1;
        int n = l;              /* number of bits in v, assumed nonzero */
        int i = 0;

        do {
          const int bb = (v >> --n) & 1;
          vp9_write(w, bb, pb[i >> 1]);
          i = b->tree[i + bb];
        } while (n);
      }

      vp9_write_bit(w, e & 1);
    }

    ++p;
  }

  *tp = p + (p->token == EOSB_TOKEN);
}

static void write_segment_id(vp9_writer *w, const struct segmentation *seg,
                             int segment_id) {
  if (seg->enabled && seg->update_map)
    vp9_write_tree(w, vp9_segment_tree, seg->tree_probs, segment_id, 3, 0);
}

// This function encodes the reference frame
static void encode_ref_frame(VP9_COMP *cpi, vp9_writer *bc) {
  VP9_COMMON *const cm = &cpi->common;
  MACROBLOCK *const x = &cpi->mb;
  MACROBLOCKD *const xd = &x->e_mbd;
  MB_MODE_INFO *mi = &xd->mi_8x8[0]->mbmi;
  const int segment_id = mi->segment_id;
  int seg_ref_active = vp9_segfeature_active(&cm->seg, segment_id,
                                             SEG_LVL_REF_FRAME);
  // If segment level coding of this signal is disabled...
  // or the segment allows multiple reference frame options
  if (!seg_ref_active) {
    // does the feature use compound prediction or not
    // (if not specified at the frame/segment level)
    if (cm->reference_mode == REFERENCE_MODE_SELECT) {
      vp9_write(bc, mi->ref_frame[1] > INTRA_FRAME,
                vp9_get_reference_mode_prob(cm, xd));
    } else {
      assert((mi->ref_frame[1] <= INTRA_FRAME) ==
             (cm->reference_mode == SINGLE_REFERENCE));
    }

    if (mi->ref_frame[1] > INTRA_FRAME) {
      vp9_write(bc, mi->ref_frame[0] == GOLDEN_FRAME,
                vp9_get_pred_prob_comp_ref_p(cm, xd));
    } else {
      vp9_write(bc, mi->ref_frame[0] != LAST_FRAME,
                vp9_get_pred_prob_single_ref_p1(cm, xd));
      if (mi->ref_frame[0] != LAST_FRAME)
        vp9_write(bc, mi->ref_frame[0] != GOLDEN_FRAME,
                  vp9_get_pred_prob_single_ref_p2(cm, xd));
    }
  } else {
    assert(mi->ref_frame[1] <= INTRA_FRAME);
    assert(vp9_get_segdata(&cm->seg, segment_id, SEG_LVL_REF_FRAME) ==
           mi->ref_frame[0]);
  }

  // If using the prediction model we have nothing further to do because
  // the reference frame is fully coded by the segment.
}

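// Writes the mode info for a block in a non-intra-only frame: segment id,
// skip flag, intra/inter flag and transform size, followed by either the
// intra prediction modes or the reference frame(s), inter mode, interpolation
// filter and motion vector(s).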
static void pack_inter_mode_mvs(VP9_COMP *cpi, MODE_INFO *m, vp9_writer *bc) {
  VP9_COMMON *const cm = &cpi->common;
  const nmv_context *nmvc = &cm->fc.nmvc;
  MACROBLOCK *const x = &cpi->mb;
  MACROBLOCKD *const xd = &x->e_mbd;
  struct segmentation *seg = &cm->seg;
  MB_MODE_INFO *const mi = &m->mbmi;
  const MV_REFERENCE_FRAME rf = mi->ref_frame[0];
  const MV_REFERENCE_FRAME sec_rf = mi->ref_frame[1];
  const MB_PREDICTION_MODE mode = mi->mode;
  const int segment_id = mi->segment_id;
  int skip_coeff;
  const BLOCK_SIZE bsize = mi->sb_type;
  const int allow_hp = cm->allow_high_precision_mv;

#ifdef ENTROPY_STATS
  active_section = 9;
#endif

  if (seg->update_map) {
    if (seg->temporal_update) {
      const int pred_flag = mi->seg_id_predicted;
      vp9_prob pred_prob = vp9_get_pred_prob_seg_id(seg, xd);
      vp9_write(bc, pred_flag, pred_prob);
      if (!pred_flag)
        write_segment_id(bc, seg, segment_id);
    } else {
      write_segment_id(bc, seg, segment_id);
    }
  }

  skip_coeff = write_skip_coeff(cpi, segment_id, m, bc);

  if (!vp9_segfeature_active(seg, segment_id, SEG_LVL_REF_FRAME))
    vp9_write(bc, rf != INTRA_FRAME, vp9_get_intra_inter_prob(cm, xd));

  if (bsize >= BLOCK_8X8 && cm->tx_mode == TX_MODE_SELECT &&
      !(rf != INTRA_FRAME &&
        (skip_coeff || vp9_segfeature_active(seg, segment_id, SEG_LVL_SKIP)))) {
    write_selected_tx_size(cpi, m, mi->tx_size, bsize, bc);
  }

  if (rf == INTRA_FRAME) {
#ifdef ENTROPY_STATS
    active_section = 6;
#endif

    if (bsize >= BLOCK_8X8) {
      write_intra_mode(bc, mode, cm->fc.y_mode_prob[size_group_lookup[bsize]]);
    } else {
      int idx, idy;
      const int num_4x4_blocks_wide = num_4x4_blocks_wide_lookup[bsize];
      const int num_4x4_blocks_high = num_4x4_blocks_high_lookup[bsize];
      for (idy = 0; idy < 2; idy += num_4x4_blocks_high) {
        for (idx = 0; idx < 2; idx += num_4x4_blocks_wide) {
          const MB_PREDICTION_MODE bm = m->bmi[idy * 2 + idx].as_mode;
          write_intra_mode(bc, bm, cm->fc.y_mode_prob[0]);
        }
      }
    }
    write_intra_mode(bc, mi->uv_mode, cm->fc.uv_mode_prob[mode]);
  } else {
    vp9_prob *mv_ref_p;
    encode_ref_frame(cpi, bc);
    mv_ref_p = cpi->common.fc.inter_mode_probs[mi->mode_context[rf]];

#ifdef ENTROPY_STATS
    active_section = 3;
#endif

    // If segment skip is not enabled code the mode.
    if (!vp9_segfeature_active(seg, segment_id, SEG_LVL_SKIP)) {
      if (bsize >= BLOCK_8X8) {
        write_inter_mode(bc, mode, mv_ref_p);
        ++cm->counts.inter_mode[mi->mode_context[rf]][INTER_OFFSET(mode)];
      }
    }

    if (cm->interp_filter == SWITCHABLE) {
      const int ctx = vp9_get_pred_context_switchable_interp(xd);
      vp9_write_token(bc, vp9_switchable_interp_tree,
                      cm->fc.switchable_interp_prob[ctx],
                      &switchable_interp_encodings[mi->interp_filter]);
    } else {
      assert(mi->interp_filter == cm->interp_filter);
    }

    if (bsize < BLOCK_8X8) {
      const int num_4x4_blocks_wide = num_4x4_blocks_wide_lookup[bsize];
      const int num_4x4_blocks_high = num_4x4_blocks_high_lookup[bsize];
      int idx, idy;
      for (idy = 0; idy < 2; idy += num_4x4_blocks_high) {
        for (idx = 0; idx < 2; idx += num_4x4_blocks_wide) {
          const int j = idy * 2 + idx;
          const MB_PREDICTION_MODE blockmode = m->bmi[j].as_mode;
          write_inter_mode(bc, blockmode, mv_ref_p);
          ++cm->counts.inter_mode[mi->mode_context[rf]]
                                 [INTER_OFFSET(blockmode)];

          if (blockmode == NEWMV) {
#ifdef ENTROPY_STATS
            active_section = 11;
#endif
            vp9_encode_mv(cpi, bc, &m->bmi[j].as_mv[0].as_mv,
                          &mi->ref_mvs[rf][0].as_mv, nmvc, allow_hp);

            if (has_second_ref(mi))
              vp9_encode_mv(cpi, bc, &m->bmi[j].as_mv[1].as_mv,
                            &mi->ref_mvs[sec_rf][0].as_mv, nmvc, allow_hp);
          }
        }
      }
    } else if (mode == NEWMV) {
#ifdef ENTROPY_STATS
      active_section = 5;
#endif
      vp9_encode_mv(cpi, bc, &mi->mv[0].as_mv,
                    &mi->ref_mvs[rf][0].as_mv, nmvc, allow_hp);

      if (has_second_ref(mi))
        vp9_encode_mv(cpi, bc, &mi->mv[1].as_mv,
                      &mi->ref_mvs[sec_rf][0].as_mv, nmvc, allow_hp);
    }
  }
}

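// Writes the mode info for a block in an intra-only (key) frame: segment id,
// skip flag, transform size and the intra prediction modes, which are coded
// with the key-frame probabilities conditioned on the above and left modes.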
static void write_mb_modes_kf(const VP9_COMP *cpi, MODE_INFO **mi_8x8,
                              vp9_writer *bc) {
  const VP9_COMMON *const cm = &cpi->common;
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  const struct segmentation *const seg = &cm->seg;
  MODE_INFO *m = mi_8x8[0];
  const int ym = m->mbmi.mode;
  const int segment_id = m->mbmi.segment_id;
  MODE_INFO *above_mi = mi_8x8[-xd->mode_info_stride];
  MODE_INFO *left_mi = xd->left_available ? mi_8x8[-1] : NULL;

  if (seg->update_map)
    write_segment_id(bc, seg, m->mbmi.segment_id);

  write_skip_coeff(cpi, segment_id, m, bc);

  if (m->mbmi.sb_type >= BLOCK_8X8 && cm->tx_mode == TX_MODE_SELECT)
    write_selected_tx_size(cpi, m, m->mbmi.tx_size, m->mbmi.sb_type, bc);

  if (m->mbmi.sb_type >= BLOCK_8X8) {
    const MB_PREDICTION_MODE A = above_block_mode(m, above_mi, 0);
    const MB_PREDICTION_MODE L = left_block_mode(m, left_mi, 0);
    write_intra_mode(bc, ym, vp9_kf_y_mode_prob[A][L]);
  } else {
    int idx, idy;
    const int num_4x4_blocks_wide = num_4x4_blocks_wide_lookup[m->mbmi.sb_type];
    const int num_4x4_blocks_high = num_4x4_blocks_high_lookup[m->mbmi.sb_type];
    for (idy = 0; idy < 2; idy += num_4x4_blocks_high) {
      for (idx = 0; idx < 2; idx += num_4x4_blocks_wide) {
        int i = idy * 2 + idx;
        const MB_PREDICTION_MODE A = above_block_mode(m, above_mi, i);
        const MB_PREDICTION_MODE L = left_block_mode(m, left_mi, i);
        const int bm = m->bmi[i].as_mode;
        write_intra_mode(bc, bm, vp9_kf_y_mode_prob[A][L]);
      }
    }
  }

  write_intra_mode(bc, m->mbmi.uv_mode, vp9_kf_uv_mode_prob[ym]);
}

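// Writes the mode info and the coefficient tokens for the block at
// (mi_row, mi_col), using the key-frame or inter-frame syntax as appropriate.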
static void write_modes_b(VP9_COMP *cpi, const TileInfo *const tile,
                          vp9_writer *w, TOKENEXTRA **tok, TOKENEXTRA *tok_end,
                          int mi_row, int mi_col) {
  VP9_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  MODE_INFO *m;

  xd->mi_8x8 = cm->mi_grid_visible + (mi_row * cm->mode_info_stride + mi_col);
  m = xd->mi_8x8[0];

  set_mi_row_col(xd, tile,
                 mi_row, num_8x8_blocks_high_lookup[m->mbmi.sb_type],
                 mi_col, num_8x8_blocks_wide_lookup[m->mbmi.sb_type],
                 cm->mi_rows, cm->mi_cols);
  if (frame_is_intra_only(cm)) {
    write_mb_modes_kf(cpi, xd->mi_8x8, w);
#ifdef ENTROPY_STATS
    active_section = 8;
#endif
  } else {
    pack_inter_mode_mvs(cpi, m, w);
#ifdef ENTROPY_STATS
    active_section = 1;
#endif
  }

  assert(*tok < tok_end);
  pack_mb_tokens(w, tok, tok_end);
}

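// Writes the partition type for a block. At frame edges, where some partition
// choices are impossible, only the remaining binary decision is coded, or
// nothing at all when PARTITION_SPLIT is the only legal choice.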
static void write_partition(VP9_COMP *cpi, int hbs, int mi_row, int mi_col,
                            PARTITION_TYPE p, BLOCK_SIZE bsize, vp9_writer *w) {
  VP9_COMMON *const cm = &cpi->common;
  const int ctx = partition_plane_context(cpi->above_seg_context,
                                          cpi->left_seg_context,
                                          mi_row, mi_col, bsize);
  const vp9_prob *const probs = get_partition_probs(cm, ctx);
  const int has_rows = (mi_row + hbs) < cm->mi_rows;
  const int has_cols = (mi_col + hbs) < cm->mi_cols;

  if (has_rows && has_cols) {
    vp9_write_token(w, vp9_partition_tree, probs, &partition_encodings[p]);
  } else if (!has_rows && has_cols) {
    assert(p == PARTITION_SPLIT || p == PARTITION_HORZ);
    vp9_write(w, p == PARTITION_SPLIT, probs[1]);
  } else if (has_rows && !has_cols) {
    assert(p == PARTITION_SPLIT || p == PARTITION_VERT);
    vp9_write(w, p == PARTITION_SPLIT, probs[2]);
  } else {
    assert(p == PARTITION_SPLIT);
  }
}

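// Recursively writes the partition tree rooted at (mi_row, mi_col): the
// partition type for this block, then either the block itself or its
// sub-blocks, and finally the partition context update.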
static void write_modes_sb(VP9_COMP *cpi, const TileInfo *const tile,
                           vp9_writer *w, TOKENEXTRA **tok, TOKENEXTRA *tok_end,
                           int mi_row, int mi_col, BLOCK_SIZE bsize) {
  VP9_COMMON *const cm = &cpi->common;
  const int bsl = b_width_log2(bsize);
  const int bs = (1 << bsl) / 4;
  PARTITION_TYPE partition;
  BLOCK_SIZE subsize;
  MODE_INFO *m = cm->mi_grid_visible[mi_row * cm->mode_info_stride + mi_col];

  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols)
    return;

  partition = partition_lookup[bsl][m->mbmi.sb_type];
  write_partition(cpi, bs, mi_row, mi_col, partition, bsize, w);
  subsize = get_subsize(bsize, partition);
  if (subsize < BLOCK_8X8) {
    write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
  } else {
    switch (partition) {
      case PARTITION_NONE:
        write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
        break;
      case PARTITION_HORZ:
        write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
        if (mi_row + bs < cm->mi_rows)
          write_modes_b(cpi, tile, w, tok, tok_end, mi_row + bs, mi_col);
        break;
      case PARTITION_VERT:
        write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
        if (mi_col + bs < cm->mi_cols)
          write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + bs);
        break;
      case PARTITION_SPLIT:
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, subsize);
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col + bs,
                       subsize);
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row + bs, mi_col,
                       subsize);
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row + bs, mi_col + bs,
                       subsize);
        break;
      default:
        assert(0);
    }
  }

  // update partition context
  if (bsize >= BLOCK_8X8 &&
      (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT))
    update_partition_context(cpi->above_seg_context, cpi->left_seg_context,
                             mi_row, mi_col, subsize, bsize);
}

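// Walks a tile in raster order of 64x64 superblocks and writes the mode info
// and tokens for each of them.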
static void write_modes(VP9_COMP *cpi, const TileInfo *const tile,
                        vp9_writer *w, TOKENEXTRA **tok, TOKENEXTRA *tok_end) {
  int mi_row, mi_col;

  for (mi_row = tile->mi_row_start; mi_row < tile->mi_row_end;
       mi_row += MI_BLOCK_SIZE) {
    vp9_zero(cpi->left_seg_context);
    for (mi_col = tile->mi_col_start; mi_col < tile->mi_col_end;
         mi_col += MI_BLOCK_SIZE)
      write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, BLOCK_64X64);
  }
}

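// Converts the coefficient token counts accumulated for this frame into
// branch counts and model probabilities for the given transform size; these
// feed the probability-update decisions below.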
static void build_tree_distribution(VP9_COMP *cpi, TX_SIZE tx_size) {
  vp9_coeff_probs_model *coef_probs = cpi->frame_coef_probs[tx_size];
  vp9_coeff_count *coef_counts = cpi->coef_counts[tx_size];
  unsigned int (*eob_branch_ct)[REF_TYPES][COEF_BANDS][COEFF_CONTEXTS] =
      cpi->common.counts.eob_branch[tx_size];
  vp9_coeff_stats *coef_branch_ct = cpi->frame_branch_ct[tx_size];
  int i, j, k, l, m;

  for (i = 0; i < PLANE_TYPES; ++i) {
    for (j = 0; j < REF_TYPES; ++j) {
      for (k = 0; k < COEF_BANDS; ++k) {
        for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
          vp9_tree_probs_from_distribution(vp9_coef_tree,
                                           coef_branch_ct[i][j][k][l],
                                           coef_counts[i][j][k][l]);
          coef_branch_ct[i][j][k][l][0][1] = eob_branch_ct[i][j][k][l] -
                                             coef_branch_ct[i][j][k][l][0][0];
          for (m = 0; m < UNCONSTRAINED_NODES; ++m)
            coef_probs[i][j][k][l][m] = get_binary_prob(
                                            coef_branch_ct[i][j][k][l][m][0],
                                            coef_branch_ct[i][j][k][l][m][1]);
#ifdef ENTROPY_STATS
          if (!cpi->dummy_packing) {
            int t;
            for (t = 0; t < ENTROPY_TOKENS; ++t)
              context_counters[tx_size][i][j][k][l][t] +=
                  coef_counts[i][j][k][l][t];
            context_counters[tx_size][i][j][k][l][ENTROPY_TOKENS] +=
                eob_branch_ct[i][j][k][l];
          }
#endif
        }
      }
    }
  }
}

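// Decides which coefficient probabilities to update for the given transform
// size and writes the updates. The default path (use_fast_coef_updates == 0)
// first does a dry run to estimate the total savings; the fast paths restrict
// the search to a subset of bands and contexts.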
static void update_coef_probs_common(vp9_writer* const bc, VP9_COMP *cpi,
                                     TX_SIZE tx_size) {
  vp9_coeff_probs_model *new_frame_coef_probs = cpi->frame_coef_probs[tx_size];
  vp9_coeff_probs_model *old_frame_coef_probs =
      cpi->common.fc.coef_probs[tx_size];
  vp9_coeff_stats *frame_branch_ct = cpi->frame_branch_ct[tx_size];
  const vp9_prob upd = DIFF_UPDATE_PROB;
  const int entropy_nodes_update = UNCONSTRAINED_NODES;
  int i, j, k, l, t;
  switch (cpi->sf.use_fast_coef_updates) {
    case 0: {
      /* dry run to see if there is any update at all needed */
      int savings = 0;
      int update[2] = {0, 0};
      for (i = 0; i < PLANE_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
              for (t = 0; t < entropy_nodes_update; ++t) {
                vp9_prob newp = new_frame_coef_probs[i][j][k][l][t];
                const vp9_prob oldp = old_frame_coef_probs[i][j][k][l][t];
                int s;
                int u = 0;
                if (t == PIVOT_NODE)
                  s = vp9_prob_diff_update_savings_search_model(
                      frame_branch_ct[i][j][k][l][0],
                      old_frame_coef_probs[i][j][k][l], &newp, upd, i, j);
                else
                  s = vp9_prob_diff_update_savings_search(
                      frame_branch_ct[i][j][k][l][t], oldp, &newp, upd);
                if (s > 0 && newp != oldp)
                  u = 1;
                if (u)
                  savings += s - (int)(vp9_cost_zero(upd));
                else
                  savings -= (int)(vp9_cost_zero(upd));
                update[u]++;
              }
            }
          }
        }
      }

      // printf("Update %d %d, savings %d\n", update[0], update[1], savings);
      /* Is coef updated at all */
      if (update[1] == 0 || savings < 0) {
        vp9_write_bit(bc, 0);
        return;
      }
      vp9_write_bit(bc, 1);
      for (i = 0; i < PLANE_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
              // calc probs and branch cts for this frame only
              for (t = 0; t < entropy_nodes_update; ++t) {
                vp9_prob newp = new_frame_coef_probs[i][j][k][l][t];
                vp9_prob *oldp = old_frame_coef_probs[i][j][k][l] + t;
                const vp9_prob upd = DIFF_UPDATE_PROB;
                int s;
                int u = 0;
                if (t == PIVOT_NODE)
                  s = vp9_prob_diff_update_savings_search_model(
                      frame_branch_ct[i][j][k][l][0],
                      old_frame_coef_probs[i][j][k][l], &newp, upd, i, j);
                else
                  s = vp9_prob_diff_update_savings_search(
                      frame_branch_ct[i][j][k][l][t],
                      *oldp, &newp, upd);
                if (s > 0 && newp != *oldp)
                  u = 1;
                vp9_write(bc, u, upd);
#ifdef ENTROPY_STATS
                if (!cpi->dummy_packing)
                  ++tree_update_hist[tx_size][i][j][k][l][t][u];
#endif
                if (u) {
                  /* send/use new probability */
                  vp9_write_prob_diff_update(bc, newp, *oldp);
                  *oldp = newp;
                }
              }
            }
          }
        }
      }
      return;
    }

    case 1:
    case 2: {
      const int prev_coef_contexts_to_update =
          cpi->sf.use_fast_coef_updates == 2 ? COEFF_CONTEXTS >> 1
                                             : COEFF_CONTEXTS;
      const int coef_band_to_update =
          cpi->sf.use_fast_coef_updates == 2 ? COEF_BANDS >> 1
                                             : COEF_BANDS;
      int updates = 0;
      int noupdates_before_first = 0;
      for (i = 0; i < PLANE_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
              // calc probs and branch cts for this frame only
              for (t = 0; t < entropy_nodes_update; ++t) {
                vp9_prob newp = new_frame_coef_probs[i][j][k][l][t];
                vp9_prob *oldp = old_frame_coef_probs[i][j][k][l] + t;
                int s;
                int u = 0;
                if (l >= prev_coef_contexts_to_update ||
                    k >= coef_band_to_update) {
                  u = 0;
                } else {
                  if (t == PIVOT_NODE)
                    s = vp9_prob_diff_update_savings_search_model(
                        frame_branch_ct[i][j][k][l][0],
                        old_frame_coef_probs[i][j][k][l], &newp, upd, i, j);
                  else
                    s = vp9_prob_diff_update_savings_search(
                        frame_branch_ct[i][j][k][l][t],
                        *oldp, &newp, upd);
                  if (s > 0 && newp != *oldp)
                    u = 1;
                }
                updates += u;
                if (u == 0 && updates == 0) {
                  noupdates_before_first++;
#ifdef ENTROPY_STATS
                  if (!cpi->dummy_packing)
                    ++tree_update_hist[tx_size][i][j][k][l][t][u];
#endif
                  continue;
                }
                if (u == 1 && updates == 1) {
                  int v;
                  // first update
                  vp9_write_bit(bc, 1);
                  for (v = 0; v < noupdates_before_first; ++v)
                    vp9_write(bc, 0, upd);
                }
                vp9_write(bc, u, upd);
#ifdef ENTROPY_STATS
                if (!cpi->dummy_packing)
                  ++tree_update_hist[tx_size][i][j][k][l][t][u];
#endif
                if (u) {
                  /* send/use new probability */
                  vp9_write_prob_diff_update(bc, newp, *oldp);
                  *oldp = newp;
                }
              }
            }
          }
        }
      }
      if (updates == 0) {
        vp9_write_bit(bc, 0);  // no updates
      }
      return;
    }

    default:
      assert(0);
  }
}

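// Builds the per-frame coefficient probability model for every transform size
// and writes updates for each size permitted by the current transform mode.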
static void update_coef_probs(VP9_COMP* cpi, vp9_writer* w) {
  const TX_MODE tx_mode = cpi->common.tx_mode;
  const TX_SIZE max_tx_size = tx_mode_to_biggest_tx_size[tx_mode];
  TX_SIZE tx_size;
  vp9_clear_system_state();

  for (tx_size = TX_4X4; tx_size <= TX_32X32; ++tx_size)
    build_tree_distribution(cpi, tx_size);

  for (tx_size = TX_4X4; tx_size <= max_tx_size; ++tx_size)
    update_coef_probs_common(w, cpi, tx_size);
}

static void encode_loopfilter(struct loopfilter *lf,
                              struct vp9_write_bit_buffer *wb) {
  int i;

  // Encode the loop filter level and type
  vp9_wb_write_literal(wb, lf->filter_level, 6);
  vp9_wb_write_literal(wb, lf->sharpness_level, 3);

  // Write out loop filter deltas applied at the MB level based on mode or
  // ref frame (if they are enabled).
  vp9_wb_write_bit(wb, lf->mode_ref_delta_enabled);

  if (lf->mode_ref_delta_enabled) {
    vp9_wb_write_bit(wb, lf->mode_ref_delta_update);
    if (lf->mode_ref_delta_update) {
      for (i = 0; i < MAX_REF_LF_DELTAS; i++) {
        const int delta = lf->ref_deltas[i];
        const int changed = delta != lf->last_ref_deltas[i];
        vp9_wb_write_bit(wb, changed);
        if (changed) {
          lf->last_ref_deltas[i] = delta;
          vp9_wb_write_literal(wb, abs(delta) & 0x3F, 6);
          vp9_wb_write_bit(wb, delta < 0);
        }
      }

      for (i = 0; i < MAX_MODE_LF_DELTAS; i++) {
        const int delta = lf->mode_deltas[i];
        const int changed = delta != lf->last_mode_deltas[i];
        vp9_wb_write_bit(wb, changed);
        if (changed) {
          lf->last_mode_deltas[i] = delta;
          vp9_wb_write_literal(wb, abs(delta) & 0x3F, 6);
          vp9_wb_write_bit(wb, delta < 0);
        }
      }
    }
  }
}

static void write_delta_q(struct vp9_write_bit_buffer *wb, int delta_q) {
  if (delta_q != 0) {
    vp9_wb_write_bit(wb, 1);
    vp9_wb_write_literal(wb, abs(delta_q), 4);
    vp9_wb_write_bit(wb, delta_q < 0);
  } else {
    vp9_wb_write_bit(wb, 0);
  }
}

static void encode_quantization(VP9_COMMON *cm,
                                struct vp9_write_bit_buffer *wb) {
  vp9_wb_write_literal(wb, cm->base_qindex, QINDEX_BITS);
  write_delta_q(wb, cm->y_dc_delta_q);
  write_delta_q(wb, cm->uv_dc_delta_q);
  write_delta_q(wb, cm->uv_ac_delta_q);
}


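// Writes the segmentation parameters: the enabled flag, the segment map
// tree and prediction probabilities when the map is updated, and the
// per-segment feature data when requested.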
static void encode_segmentation(VP9_COMP *cpi,
                                struct vp9_write_bit_buffer *wb) {
  int i, j;

  struct segmentation *seg = &cpi->common.seg;

  vp9_wb_write_bit(wb, seg->enabled);
  if (!seg->enabled)
    return;

  // Segmentation map
  vp9_wb_write_bit(wb, seg->update_map);
  if (seg->update_map) {
    // Select the coding strategy (temporal or spatial)
    vp9_choose_segmap_coding_method(cpi);
    // Write out probabilities used to decode unpredicted macro-block segments
    for (i = 0; i < SEG_TREE_PROBS; i++) {
      const int prob = seg->tree_probs[i];
      const int update = prob != MAX_PROB;
      vp9_wb_write_bit(wb, update);
      if (update)
        vp9_wb_write_literal(wb, prob, 8);
    }

    // Write out the chosen coding method.
    vp9_wb_write_bit(wb, seg->temporal_update);
    if (seg->temporal_update) {
      for (i = 0; i < PREDICTION_PROBS; i++) {
        const int prob = seg->pred_probs[i];
        const int update = prob != MAX_PROB;
        vp9_wb_write_bit(wb, update);
        if (update)
          vp9_wb_write_literal(wb, prob, 8);
      }
    }
  }

  // Segmentation data
  vp9_wb_write_bit(wb, seg->update_data);
  if (seg->update_data) {
    vp9_wb_write_bit(wb, seg->abs_delta);

    for (i = 0; i < MAX_SEGMENTS; i++) {
      for (j = 0; j < SEG_LVL_MAX; j++) {