vp9_bitstream.c 45.1 KB
Newer Older
John Koleszar's avatar
John Koleszar committed
1
/*
2
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
John Koleszar's avatar
John Koleszar committed
3
 *
4
 *  Use of this source code is governed by a BSD-style license
5
6
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
7
 *  in the file PATENTS.  All contributing project authors may
8
 *  be found in the AUTHORS file in the root of the source tree.
John Koleszar's avatar
John Koleszar committed
9
10
 */

11
12
13
#include <assert.h>
#include <stdio.h>
#include <limits.h>
John Koleszar's avatar
John Koleszar committed
14

15
16
17
#include "vpx/vpx_encoder.h"
#include "vpx_mem/vpx_mem.h"

18
#include "vp9/common/vp9_entropymode.h"
19
#include "vp9/common/vp9_entropymv.h"
20
#include "vp9/common/vp9_findnearmv.h"
21
#include "vp9/common/vp9_tile_common.h"
22
23
24
25
#include "vp9/common/vp9_seg_common.h"
#include "vp9/common/vp9_pred_common.h"
#include "vp9/common/vp9_entropy.h"
#include "vp9/common/vp9_mvref_common.h"
26
#include "vp9/common/vp9_treecoder.h"
27
28
29
30
31
32
33
#include "vp9/common/vp9_systemdependent.h"
#include "vp9/common/vp9_pragmas.h"

#include "vp9/encoder/vp9_mcomp.h"
#include "vp9/encoder/vp9_encodemv.h"
#include "vp9/encoder/vp9_bitstream.h"
#include "vp9/encoder/vp9_segmentation.h"
34
#include "vp9/encoder/vp9_subexp.h"
35
#include "vp9/encoder/vp9_tokenize.h"
36
37
#include "vp9/encoder/vp9_write_bit_buffer.h"

Paul Wilkins's avatar
Paul Wilkins committed
38

John Koleszar's avatar
John Koleszar committed
39
40
41
42
43
#if defined(SECTIONBITS_OUTPUT)
unsigned __int64 Sectionbits[500];
#endif

#ifdef ENTROPY_STATS
44
45
46
int intra_mode_stats[INTRA_MODES]
                    [INTRA_MODES]
                    [INTRA_MODES];
47
vp9_coeff_stats tree_update_hist[TX_SIZES][BLOCK_TYPES];
48

John Koleszar's avatar
John Koleszar committed
49
50
51
extern unsigned int active_section;
#endif

52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
// Token-tree encodings for the symbols written by this file.  They are
// filled in once by vp9_entropy_mode_init() and then only read by the
// write_* helpers below.
static struct vp9_token intra_mode_encodings[INTRA_MODES];
static struct vp9_token switchable_interp_encodings[SWITCHABLE_FILTERS];
static struct vp9_token partition_encodings[PARTITION_TYPES];
static struct vp9_token inter_mode_encodings[INTER_MODES];

// Build the token encodings from their probability trees.  Must run before
// any symbol is packed.  Declared with "(void)": the original empty "()"
// parameter list is the obsolescent unspecified-arguments form in C and
// provides no type checking at call sites.
void vp9_entropy_mode_init(void) {
  vp9_tokens_from_tree(intra_mode_encodings, vp9_intra_mode_tree);
  vp9_tokens_from_tree(switchable_interp_encodings, vp9_switchable_interp_tree);
  vp9_tokens_from_tree(partition_encodings, vp9_partition_tree);
  vp9_tokens_from_tree(inter_mode_encodings, vp9_inter_mode_tree);
}

// Pack an intra prediction mode using the intra-mode token tree with the
// supplied context-dependent probabilities.
static void write_intra_mode(vp9_writer *w, MB_PREDICTION_MODE mode,
                             const vp9_prob *probs) {
  write_token(w, vp9_intra_mode_tree, probs, &intra_mode_encodings[mode]);
}

// Pack an inter prediction mode.  Inter modes occupy a sub-range of the
// mode enum, so INTER_OFFSET() rebases the mode to index the encoding
// table; the assert guards against being handed an intra mode.
static void write_inter_mode(vp9_writer *w, MB_PREDICTION_MODE mode,
                             const vp9_prob *probs) {
  assert(is_inter_mode(mode));
  write_token(w, vp9_inter_mode_tree, probs,
              &inter_mode_encodings[INTER_OFFSET(mode)]);
}

76
77
78
79
80
// Store 'value' at p[0..3] in big-endian byte order.
// The shifts are performed on an unsigned copy: right-shifting a negative
// signed int has an implementation-defined result in C, whereas unsigned
// shifts are fully defined and yield the same bytes on all platforms.
static INLINE void write_be32(uint8_t *p, int value) {
  const uint32_t v = (uint32_t)value;
  p[0] = (uint8_t)(v >> 24);
  p[1] = (uint8_t)(v >> 16);
  p[2] = (uint8_t)(v >> 8);
  p[3] = (uint8_t)v;
}

83
84
85
86
87
// Write 'data' as a fixed-width literal, using just enough bits to
// represent any value in [0, max] (width from get_unsigned_bits(max)).
// Caller must ensure 0 <= data <= max.
void vp9_encode_unsigned_max(struct vp9_write_bit_buffer *wb,
                             int data, int max) {
  vp9_wb_write_literal(wb, data, get_unsigned_bits(max));
}

88
89
90
91
92
93
// Conditionally signal updates for the n-1 internal node probabilities of
// a token tree.  Branch counts are first derived from the observed symbol
// counts, then each probability is diff-updated only if worthwhile
// (decided inside vp9_cond_prob_diff_update).
static void prob_diff_update(const vp9_tree_index *tree,
                             vp9_prob probs[/*n - 1*/],
                             const unsigned int counts[/*n - 1*/],
                             int n, vp9_writer *w) {
  int i;
  unsigned int branch_ct[32][2];  // fixed-size scratch; bounds-checked below

  // Assuming max number of probabilities <= 32
  assert(n <= 32);

  vp9_tree_probs_from_distribution(tree, branch_ct, counts);
  for (i = 0; i < n - 1; ++i)
    vp9_cond_prob_diff_update(w, &probs[i], branch_ct[i]);
}

103
104
105
// Signal the transform size chosen for this block as a sequence of up to
// three binary decisions (TX_4X4 / TX_8X8 / TX_16X16 / TX_32X32), each
// gated by the largest transform the block size permits.
// NOTE(review): parameter 'm' is unused in this body — presumably kept for
// signature symmetry with the other write_* helpers; confirm before removal.
static void write_selected_tx_size(const VP9_COMP *cpi, MODE_INFO *m,
                                   TX_SIZE tx_size, BLOCK_SIZE bsize,
                                   vp9_writer *w) {
  const TX_SIZE max_tx_size = max_txsize_lookup[bsize];
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  // Context-selected probabilities for the tx-size decisions.
  const vp9_prob *const tx_probs = get_tx_probs2(max_tx_size, xd,
                                                 &cpi->common.fc.tx_probs);
  vp9_write(w, tx_size != TX_4X4, tx_probs[0]);
  if (tx_size != TX_4X4 && max_tx_size >= TX_16X16) {
    vp9_write(w, tx_size != TX_8X8, tx_probs[1]);
    if (tx_size != TX_8X8 && max_tx_size >= TX_32X32)
      vp9_write(w, tx_size != TX_16X16, tx_probs[2]);
  }
}

118
119
120
// Emit the "skip coefficients" flag for this block, unless the segment's
// SEG_LVL_SKIP feature already forces it — in that case nothing is coded
// and the flag is implicitly 1.
// Returns the value of the (possibly implicit) skip flag.
static int write_skip_coeff(const VP9_COMP *cpi, int segment_id, MODE_INFO *m,
                            vp9_writer *w) {
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;

  // Segment-mandated skip: implied by the segment map, not coded.
  if (vp9_segfeature_active(&cpi->common.seg, segment_id, SEG_LVL_SKIP))
    return 1;

  {
    const int skip_coeff = m->mbmi.skip_coeff;
    vp9_write(w, skip_coeff, vp9_get_pred_prob_mbskip(&cpi->common, xd));
    return skip_coeff;
  }
}

130
// Conditionally signal updates to the per-context mbskip probabilities,
// driven by the skip counts gathered while encoding the current frame.
void vp9_update_skip_probs(VP9_COMP *cpi, vp9_writer *w) {
  VP9_COMMON *const cm = &cpi->common;
  int ctx;

  for (ctx = 0; ctx < MBSKIP_CONTEXTS; ++ctx)
    vp9_cond_prob_diff_update(w, &cm->fc.mbskip_probs[ctx],
                              cm->counts.mbskip[ctx]);
}

138
// Conditionally signal updates to the switchable interpolation filter
// probabilities, one token tree per prediction context.
static void update_switchable_interp_probs(VP9_COMP *cpi, vp9_writer *w) {
  VP9_COMMON *const cm = &cpi->common;
  int j;
  for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j)
    prob_diff_update(vp9_switchable_interp_tree,
                     cm->fc.switchable_interp_prob[j],
                     cm->counts.switchable_interp[j], SWITCHABLE_FILTERS, w);

#ifdef MODE_STATS
  // Only accumulate stats on the real packing pass, not the dry run.
  if (!cpi->dummy_packing)
    update_switchable_interp_stats(cm);
#endif
}

152
// Entropy-code the coefficient tokens for one macroblock, consuming *tp up
// to (but not past) 'stop' or the EOSB_TOKEN sentinel.  On return *tp
// points just past the tokens that were written (past the sentinel, if
// one terminated the run).
static void pack_mb_tokens(vp9_writer* const w,
                           TOKENEXTRA **tp,
                           const TOKENEXTRA *const stop) {
  TOKENEXTRA *p = *tp;

  while (p < stop && p->token != EOSB_TOKEN) {
    const int t = p->token;
    const struct vp9_token *const a = &vp9_coef_encodings[t];
    const vp9_extra_bit *const b = &vp9_extra_bits[t];
    int i = 0;          // current node index in the coefficient tree
    int v = a->value;   // tree path bits for this token
    int n = a->len;     // number of path bits

    /* skip one or two nodes */
    // Tokens flagged with skip_eob_node start deeper in the tree because
    // the EOB decision was already implied by context.
    if (p->skip_eob_node) {
      n -= p->skip_eob_node;
      i = 2 * p->skip_eob_node;
    }

    // TODO(jbb): expanding this can lead to big gains.  It allows
    // much better branch prediction and would enable us to avoid numerous
    // lookups and compares.

    // If we have a token that's in the constrained set, the coefficient tree
    // is split into two treed writes.  The first treed write takes care of the
    // unconstrained nodes.  The second treed write takes care of the
    // constrained nodes.
    if (t >= TWO_TOKEN && t < DCT_EOB_TOKEN) {
      int len = UNCONSTRAINED_NODES - p->skip_eob_node;
      int bits = v >> (n - len);  // high-order path bits: unconstrained part
      treed_write(w, vp9_coef_tree, p->context_tree, bits, len, i);
      // Remaining bits use probabilities derived from the pivot node via
      // the pareto table.
      treed_write(w, vp9_coef_con_tree,
                  vp9_pareto8_full[p->context_tree[PIVOT_NODE] - 1], v, n - len,
                  0);
    } else {
      treed_write(w, vp9_coef_tree, p->context_tree, v, n, i);
    }

    // Tokens with a base value carry extra magnitude bits plus a sign bit.
    if (b->base_val) {
      const int e = p->extra, l = b->len;

      if (l) {
        const unsigned char *pb = b->prob;
        int v = e >> 1;         // magnitude residual (sign stripped)
        int n = l;              /* number of bits in v, assumed nonzero */
        int i = 0;

        // Walk the extra-bits tree MSB first.
        do {
          const int bb = (v >> --n) & 1;
          vp9_write(w, bb, pb[i >> 1]);
          i = b->tree[i + bb];
        } while (n);
      }

      // Sign bit, coded with probability 1/2.
      vp9_write_bit(w, e & 1);
    }

    ++p;
  }

  // Step past the sentinel when one ended the loop.
  // NOTE(review): when p == stop this reads p->token one past the consumed
  // range — presumably 'stop' is always a readable sentinel slot; confirm.
  *tp = p + (p->token == EOSB_TOKEN);
}

214
// Code the block's segment id with the segment token tree (3 levels for
// the 8 segment ids), but only when the segment map is enabled and being
// updated this frame; otherwise the id is not transmitted.
static void write_segment_id(vp9_writer *w, const struct segmentation *seg,
                             int segment_id) {
  if (seg->enabled && seg->update_map)
    treed_write(w, vp9_segment_tree, seg->tree_probs, segment_id, 3, 0);
}

Paul Wilkins's avatar
Paul Wilkins committed
220
// This function encodes the reference frame
Ronald S. Bultje's avatar
Ronald S. Bultje committed
221
// This function encodes the reference frame
// Syntax (when not dictated by the segment): an optional compound-vs-single
// flag, then either the compound reference choice or up to two bits
// selecting among LAST/GOLDEN/ALTREF.
static void encode_ref_frame(VP9_COMP *cpi, vp9_writer *bc) {
  VP9_COMMON *const cm = &cpi->common;
  MACROBLOCK *const x = &cpi->mb;
  MACROBLOCKD *const xd = &x->e_mbd;
  MB_MODE_INFO *mi = &xd->mi_8x8[0]->mbmi;
  const int segment_id = mi->segment_id;
  int seg_ref_active = vp9_segfeature_active(&cm->seg, segment_id,
                                             SEG_LVL_REF_FRAME);
  // If segment level coding of this signal is disabled...
  // or the segment allows multiple reference frame options
  if (!seg_ref_active) {
    // does the feature use compound prediction or not
    // (if not specified at the frame/segment level)
    if (cm->comp_pred_mode == REFERENCE_MODE_SELECT) {
      // ref_frame[1] > INTRA_FRAME means a second reference is in use,
      // i.e. compound prediction.
      vp9_write(bc, mi->ref_frame[1] > INTRA_FRAME,
                vp9_get_pred_prob_comp_inter_inter(cm, xd));
    } else {
      // Frame-level mode fixed: verify the block agrees with it.
      assert((mi->ref_frame[1] <= INTRA_FRAME) ==
                 (cm->comp_pred_mode == SINGLE_REFERENCE));
    }

    if (mi->ref_frame[1] > INTRA_FRAME) {
      // Compound: one bit picks the fixed reference pair.
      vp9_write(bc, mi->ref_frame[0] == GOLDEN_FRAME,
                vp9_get_pred_prob_comp_ref_p(cm, xd));
    } else {
      // Single reference: up to two bits select LAST / GOLDEN / ALTREF.
      vp9_write(bc, mi->ref_frame[0] != LAST_FRAME,
                vp9_get_pred_prob_single_ref_p1(cm, xd));
      if (mi->ref_frame[0] != LAST_FRAME)
        vp9_write(bc, mi->ref_frame[0] != GOLDEN_FRAME,
                  vp9_get_pred_prob_single_ref_p2(cm, xd));
    }
  } else {
    // Segment dictates the reference frame; nothing is written, but the
    // block must match what the segment data specifies.
    assert(mi->ref_frame[1] <= INTRA_FRAME);
    assert(vp9_get_segdata(&cm->seg, segment_id, SEG_LVL_REF_FRAME) ==
           mi->ref_frame[0]);
  }

  // If using the prediction model we have nothing further to do because
  // the reference frame is fully coded by the segment.
}
John Koleszar's avatar
John Koleszar committed
261

262
// Pack the mode/partition/motion information for one non-key-frame block:
// segment id, skip flag, intra/inter flag, tx size, then either the intra
// modes or the reference frame, inter mode, interpolation filter and
// motion vectors.  The order of writes here defines the bitstream and
// must not be changed.
static void pack_inter_mode_mvs(VP9_COMP *cpi, MODE_INFO *m, vp9_writer *bc) {
  VP9_COMMON *const cm = &cpi->common;
  const nmv_context *nmvc = &cm->fc.nmvc;
  MACROBLOCK *const x = &cpi->mb;
  MACROBLOCKD *const xd = &x->e_mbd;
  struct segmentation *seg = &cm->seg;
  MB_MODE_INFO *const mi = &m->mbmi;
  const MV_REFERENCE_FRAME rf = mi->ref_frame[0];
  const MB_PREDICTION_MODE mode = mi->mode;
  const int segment_id = mi->segment_id;
  int skip_coeff;
  const BLOCK_SIZE bsize = mi->sb_type;
  const int allow_hp = cm->allow_high_precision_mv;

#ifdef ENTROPY_STATS
  active_section = 9;
#endif

  if (seg->update_map) {
    if (seg->temporal_update) {
      // Temporal update: try to predict the segment id from the previous
      // frame; code the id explicitly only when prediction fails.
      const int pred_flag = mi->seg_id_predicted;
      vp9_prob pred_prob = vp9_get_pred_prob_seg_id(seg, xd);
      vp9_write(bc, pred_flag, pred_prob);
      if (!pred_flag)
        write_segment_id(bc, seg, segment_id);
    } else {
      write_segment_id(bc, seg, segment_id);
    }
  }

  skip_coeff = write_skip_coeff(cpi, segment_id, m, bc);

  // Intra/inter flag, unless the segment fixes the reference frame.
  if (!vp9_segfeature_active(seg, segment_id, SEG_LVL_REF_FRAME))
    vp9_write(bc, rf != INTRA_FRAME,
              vp9_get_pred_prob_intra_inter(cm, xd));

  // Per-block tx size is only coded when the frame allows selection and
  // the size is not already implied by an inter block that skips.
  if (bsize >= BLOCK_8X8 && cm->tx_mode == TX_MODE_SELECT &&
      !(rf != INTRA_FRAME &&
        (skip_coeff || vp9_segfeature_active(seg, segment_id, SEG_LVL_SKIP)))) {
    write_selected_tx_size(cpi, m, mi->tx_size, bsize, bc);
  }

  if (rf == INTRA_FRAME) {
#ifdef ENTROPY_STATS
    active_section = 6;
#endif

    if (bsize >= BLOCK_8X8) {
      write_intra_mode(bc, mode, cm->fc.y_mode_prob[size_group_lookup[bsize]]);
    } else {
      // Sub-8x8: one intra mode per 4x4 sub-block.
      int idx, idy;
      const int num_4x4_blocks_wide = num_4x4_blocks_wide_lookup[bsize];
      const int num_4x4_blocks_high = num_4x4_blocks_high_lookup[bsize];
      for (idy = 0; idy < 2; idy += num_4x4_blocks_high) {
        for (idx = 0; idx < 2; idx += num_4x4_blocks_wide) {
          const MB_PREDICTION_MODE bm = m->bmi[idy * 2 + idx].as_mode;
          write_intra_mode(bc, bm, cm->fc.y_mode_prob[0]);
        }
      }
    }
    write_intra_mode(bc, mi->uv_mode, cm->fc.uv_mode_prob[mode]);
  } else {
    vp9_prob *mv_ref_p;
    encode_ref_frame(cpi, bc);
    mv_ref_p = cpi->common.fc.inter_mode_probs[mi->mode_context[rf]];

#ifdef ENTROPY_STATS
    active_section = 3;
#endif

    // If segment skip is not enabled code the mode.
    if (!vp9_segfeature_active(seg, segment_id, SEG_LVL_SKIP)) {
      if (bsize >= BLOCK_8X8) {
        write_inter_mode(bc, mode, mv_ref_p);
        // Counts accumulated here feed the probability updates later in
        // the packing pass.
        ++cm->counts.inter_mode[mi->mode_context[rf]]
                               [INTER_OFFSET(mode)];
      }
    }

    if (cm->mcomp_filter_type == SWITCHABLE) {
      const int ctx = vp9_get_pred_context_switchable_interp(xd);
      write_token(bc, vp9_switchable_interp_tree,
                  cm->fc.switchable_interp_prob[ctx],
                  &switchable_interp_encodings[mi->interp_filter]);
    } else {
      // Filter fixed at frame level; nothing to code.
      assert(mi->interp_filter == cm->mcomp_filter_type);
    }

    if (bsize < BLOCK_8X8) {
      // Sub-8x8: mode (and possibly MVs) per 4x4 sub-block.
      const int num_4x4_blocks_wide = num_4x4_blocks_wide_lookup[bsize];
      const int num_4x4_blocks_high = num_4x4_blocks_high_lookup[bsize];
      int idx, idy;
      for (idy = 0; idy < 2; idy += num_4x4_blocks_high) {
        for (idx = 0; idx < 2; idx += num_4x4_blocks_wide) {
          const int j = idy * 2 + idx;
          const MB_PREDICTION_MODE blockmode = m->bmi[j].as_mode;
          write_inter_mode(bc, blockmode, mv_ref_p);
          ++cm->counts.inter_mode[mi->mode_context[rf]]
                                 [INTER_OFFSET(blockmode)];

          if (blockmode == NEWMV) {
#ifdef ENTROPY_STATS
            active_section = 11;
#endif
            // New MV: code the residual against the best reference MV.
            vp9_encode_mv(cpi, bc, &m->bmi[j].as_mv[0].as_mv,
                          &mi->best_mv[0].as_mv, nmvc, allow_hp);

            if (has_second_ref(mi))
              vp9_encode_mv(cpi, bc, &m->bmi[j].as_mv[1].as_mv,
                            &mi->best_mv[1].as_mv, nmvc, allow_hp);
          }
        }
      }
    } else if (mode == NEWMV) {
#ifdef ENTROPY_STATS
      active_section = 5;
#endif
      vp9_encode_mv(cpi, bc, &mi->mv[0].as_mv,
                    &mi->best_mv[0].as_mv, nmvc, allow_hp);

      if (has_second_ref(mi))
        vp9_encode_mv(cpi, bc, &mi->mv[1].as_mv,
                      &mi->best_mv[1].as_mv, nmvc, allow_hp);
    }
  }
}
388

389
// Pack the mode info for one key-frame (intra-only) block: segment id,
// skip flag, tx size, then the luma intra mode(s) — coded with
// above/left-context key-frame probabilities — and the chroma mode.
static void write_mb_modes_kf(const VP9_COMP *cpi, MODE_INFO **mi_8x8,
                              vp9_writer *bc) {
  const VP9_COMMON *const cm = &cpi->common;
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  const struct segmentation *const seg = &cm->seg;
  MODE_INFO *m = mi_8x8[0];
  const int ym = m->mbmi.mode;
  const int segment_id = m->mbmi.segment_id;
  // Neighbors used as coding context for the intra mode.
  MODE_INFO *above_mi = mi_8x8[-xd->mode_info_stride];
  MODE_INFO *left_mi = xd->left_available ? mi_8x8[-1] : NULL;

  if (seg->update_map)
    write_segment_id(bc, seg, m->mbmi.segment_id);

  write_skip_coeff(cpi, segment_id, m, bc);

  if (m->mbmi.sb_type >= BLOCK_8X8 && cm->tx_mode == TX_MODE_SELECT)
    write_selected_tx_size(cpi, m, m->mbmi.tx_size, m->mbmi.sb_type, bc);

  if (m->mbmi.sb_type >= BLOCK_8X8) {
    const MB_PREDICTION_MODE A = above_block_mode(m, above_mi, 0);
    const MB_PREDICTION_MODE L = left_block_mode(m, left_mi, 0);
    write_intra_mode(bc, ym, vp9_kf_y_mode_prob[A][L]);
  } else {
    // Sub-8x8: one mode per 4x4 sub-block, each with its own A/L context.
    int idx, idy;
    const int num_4x4_blocks_wide = num_4x4_blocks_wide_lookup[m->mbmi.sb_type];
    const int num_4x4_blocks_high = num_4x4_blocks_high_lookup[m->mbmi.sb_type];
    for (idy = 0; idy < 2; idy += num_4x4_blocks_high) {
      for (idx = 0; idx < 2; idx += num_4x4_blocks_wide) {
        int i = idy * 2 + idx;
        const MB_PREDICTION_MODE A = above_block_mode(m, above_mi, i);
        const MB_PREDICTION_MODE L = left_block_mode(m, left_mi, i);
        const int bm = m->bmi[i].as_mode;
#ifdef ENTROPY_STATS
        ++intra_mode_stats[A][L][bm];
#endif
        write_intra_mode(bc, bm, vp9_kf_y_mode_prob[A][L]);
      }
    }
  }

  // Chroma mode, conditioned on the (block-level) luma mode.
  write_intra_mode(bc, m->mbmi.uv_mode, vp9_kf_uv_mode_prob[ym]);
}

James Zern's avatar
James Zern committed
433
// Pack one coding block at (mi_row, mi_col): set up the macroblockd
// context, write mode info (key-frame or inter path), then the block's
// coefficient tokens.
static void write_modes_b(VP9_COMP *cpi, const TileInfo *const tile,
                          vp9_writer *w, TOKENEXTRA **tok, TOKENEXTRA *tok_end,
                          int mi_row, int mi_col) {
  VP9_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  MODE_INFO *m;

  // Point xd at this block's slot in the mode-info grid.
  xd->mi_8x8 = cm->mi_grid_visible + (mi_row * cm->mode_info_stride + mi_col);
  m = xd->mi_8x8[0];

  set_mi_row_col(xd, tile,
                 mi_row, num_8x8_blocks_high_lookup[m->mbmi.sb_type],
                 mi_col, num_8x8_blocks_wide_lookup[m->mbmi.sb_type],
                 cm->mi_rows, cm->mi_cols);
  if (frame_is_intra_only(cm)) {
    write_mb_modes_kf(cpi, xd->mi_8x8, w);
#ifdef ENTROPY_STATS
    active_section = 8;
#endif
  } else {
    pack_inter_mode_mvs(cpi, m, w);
#ifdef ENTROPY_STATS
    active_section = 1;
#endif
  }

  // There must be tokens left for this block.
  assert(*tok < tok_end);
  pack_mb_tokens(w, tok, tok_end);
}

463
464
465
466
467
468
469
470
471
// Signal the partition type of a block.  When the block extends past the
// frame edge some partition choices are forced, so only the remaining
// binary decision (or nothing at all) is coded.
static void write_partition(VP9_COMP *cpi, int hbs, int mi_row, int mi_col,
                            PARTITION_TYPE p, BLOCK_SIZE bsize, vp9_writer *w) {
  VP9_COMMON *const cm = &cpi->common;
  const int ctx = partition_plane_context(cpi->above_seg_context,
                                          cpi->left_seg_context,
                                          mi_row, mi_col, bsize);
  const vp9_prob *const probs = get_partition_probs(cm, ctx);
  // hbs is half the block size in mi units; these tell us whether the
  // bottom/right halves are inside the frame.
  const int has_rows = (mi_row + hbs) < cm->mi_rows;
  const int has_cols = (mi_col + hbs) < cm->mi_cols;

  if (has_rows && has_cols) {
    // Fully inside: the full partition token is coded.
    write_token(w, vp9_partition_tree, probs, &partition_encodings[p]);
  } else if (!has_rows && has_cols) {
    // Bottom half off-frame: only HORZ or SPLIT are legal; one bit.
    assert(p == PARTITION_SPLIT || p == PARTITION_HORZ);
    vp9_write(w, p == PARTITION_SPLIT, probs[1]);
  } else if (has_rows && !has_cols) {
    // Right half off-frame: only VERT or SPLIT are legal; one bit.
    assert(p == PARTITION_SPLIT || p == PARTITION_VERT);
    vp9_write(w, p == PARTITION_SPLIT, probs[2]);
  } else {
    // Both halves off-frame: SPLIT is forced, nothing to code.
    assert(p == PARTITION_SPLIT);
  }
}

James Zern's avatar
James Zern committed
486
// Recursively pack a superblock: write its partition choice, then either
// the leaf block(s) or the four sub-quadrants, and finally update the
// partition context used by subsequent blocks.
static void write_modes_sb(VP9_COMP *cpi, const TileInfo *const tile,
                           vp9_writer *w, TOKENEXTRA **tok, TOKENEXTRA *tok_end,
                           int mi_row, int mi_col, BLOCK_SIZE bsize) {
  VP9_COMMON *const cm = &cpi->common;
  const int bsl = b_width_log2(bsize);
  const int bs = (1 << bsl) / 4;  // half the block size, in mi units
  PARTITION_TYPE partition;
  BLOCK_SIZE subsize;
  MODE_INFO *m = cm->mi_grid_visible[mi_row * cm->mode_info_stride + mi_col];

  // Blocks entirely outside the frame are not coded.
  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols)
    return;

  // The partition is implied by the size stored at the top-left mi.
  partition = partition_lookup[bsl][m->mbmi.sb_type];
  write_partition(cpi, bs, mi_row, mi_col, partition, bsize, w);
  subsize = get_subsize(bsize, partition);
  if (subsize < BLOCK_8X8) {
    // Sub-8x8 partitions are handled inside the single leaf write.
    write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
  } else {
    switch (partition) {
      case PARTITION_NONE:
        write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
        break;
      case PARTITION_HORZ:
        write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
        // The second half is only coded if it lies inside the frame.
        if (mi_row + bs < cm->mi_rows)
          write_modes_b(cpi, tile, w, tok, tok_end, mi_row + bs, mi_col);
        break;
      case PARTITION_VERT:
        write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
        if (mi_col + bs < cm->mi_cols)
          write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + bs);
        break;
      case PARTITION_SPLIT:
        // Recurse into the four quadrants in raster order.
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, subsize);
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col + bs,
                       subsize);
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row + bs, mi_col,
                       subsize);
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row + bs, mi_col + bs,
                       subsize);
        break;
      default:
        assert(0);
    }
  }

  // update partition context
  if (bsize >= BLOCK_8X8 &&
      (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT))
    update_partition_context(cpi->above_seg_context, cpi->left_seg_context,
                             mi_row, mi_col, subsize, bsize);
}

James Zern's avatar
James Zern committed
540
// Pack all superblocks of one tile, row by row.  The left partition
// context is reset at the start of each superblock row.
static void write_modes(VP9_COMP *cpi, const TileInfo *const tile,
                        vp9_writer *w, TOKENEXTRA **tok, TOKENEXTRA *tok_end) {
  int mi_row, mi_col;

  for (mi_row = tile->mi_row_start; mi_row < tile->mi_row_end;
       mi_row += MI_BLOCK_SIZE) {
      vp9_zero(cpi->left_seg_context);
    for (mi_col = tile->mi_col_start; mi_col < tile->mi_col_end;
         mi_col += MI_BLOCK_SIZE)
      write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, BLOCK_64X64);
  }
}
552

553
554
555
// Derive, for one transform size, the per-frame coefficient branch counts
// and model probabilities from the token counts gathered during encoding.
// The EOB branch count is patched in separately because end-of-block is
// tracked in its own counter array.
static void build_tree_distribution(VP9_COMP *cpi, TX_SIZE tx_size) {
  vp9_coeff_probs_model *coef_probs = cpi->frame_coef_probs[tx_size];
  vp9_coeff_count *coef_counts = cpi->coef_counts[tx_size];
  unsigned int (*eob_branch_ct)[REF_TYPES][COEF_BANDS][PREV_COEF_CONTEXTS] =
      cpi->common.counts.eob_branch[tx_size];
  vp9_coeff_stats *coef_branch_ct = cpi->frame_branch_ct[tx_size];
  int i, j, k, l, m;

  // Loop over block type / reference type / coefficient band / context.
  for (i = 0; i < BLOCK_TYPES; ++i) {
    for (j = 0; j < REF_TYPES; ++j) {
      for (k = 0; k < COEF_BANDS; ++k) {
        for (l = 0; l < PREV_COEF_CONTEXTS; ++l) {
          // Contexts >= 3 do not exist for band 0; skip them.
          if (l >= 3 && k == 0)
            continue;
          vp9_tree_probs_from_distribution(vp9_coef_tree,
                                           coef_branch_ct[i][j][k][l],
                                           coef_counts[i][j][k][l]);
          // Node 0 is the EOB decision; its "not EOB" count is the total
          // EOB-branch visits minus the EOB hits.
          coef_branch_ct[i][j][k][l][0][1] = eob_branch_ct[i][j][k][l] -
                                             coef_branch_ct[i][j][k][l][0][0];
          // Only the unconstrained nodes get explicit model probabilities.
          for (m = 0; m < UNCONSTRAINED_NODES; ++m)
            coef_probs[i][j][k][l][m] = get_binary_prob(
                                            coef_branch_ct[i][j][k][l][m][0],
                                            coef_branch_ct[i][j][k][l][m][1]);
#ifdef ENTROPY_STATS
          // Accumulate global stats only on the real packing pass.
          if (!cpi->dummy_packing) {
            int t;
            for (t = 0; t < MAX_ENTROPY_TOKENS; ++t)
              context_counters[tx_size][i][j][k][l][t] +=
                  coef_counts[i][j][k][l][t];
            context_counters[tx_size][i][j][k][l][MAX_ENTROPY_TOKENS] +=
                eob_branch_ct[i][j][k][l];
          }
#endif
        }
      }
    }
  }
}

592
593
594
595
596
597
static void update_coef_probs_common(vp9_writer* const bc, VP9_COMP *cpi,
                                     TX_SIZE tx_size) {
  vp9_coeff_probs_model *new_frame_coef_probs = cpi->frame_coef_probs[tx_size];
  vp9_coeff_probs_model *old_frame_coef_probs =
      cpi->common.fc.coef_probs[tx_size];
  vp9_coeff_stats *frame_branch_ct = cpi->frame_branch_ct[tx_size];
598
  const vp9_prob upd = DIFF_UPDATE_PROB;
599
  const int entropy_nodes_update = UNCONSTRAINED_NODES;
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
  int i, j, k, l, t;
  switch (cpi->sf.use_fast_coef_updates) {
    case 0: {
      /* dry run to see if there is any udpate at all needed */
      int savings = 0;
      int update[2] = {0, 0};
      for (i = 0; i < BLOCK_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < PREV_COEF_CONTEXTS; ++l) {
              for (t = 0; t < entropy_nodes_update; ++t) {
                vp9_prob newp = new_frame_coef_probs[i][j][k][l][t];
                const vp9_prob oldp = old_frame_coef_probs[i][j][k][l][t];
                int s;
                int u = 0;

                if (l >= 3 && k == 0)
                  continue;
                if (t == PIVOT_NODE)
                  s = vp9_prob_diff_update_savings_search_model(
                      frame_branch_ct[i][j][k][l][0],
                      old_frame_coef_probs[i][j][k][l], &newp, upd, i, j);
                else
                  s = vp9_prob_diff_update_savings_search(
                      frame_branch_ct[i][j][k][l][t], oldp, &newp, upd);
                if (s > 0 && newp != oldp)
                  u = 1;
                if (u)
                  savings += s - (int)(vp9_cost_zero(upd));
                else
                  savings -= (int)(vp9_cost_zero(upd));
                update[u]++;
              }
            }
          }
        }
      }
637

638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
      // printf("Update %d %d, savings %d\n", update[0], update[1], savings);
      /* Is coef updated at all */
      if (update[1] == 0 || savings < 0) {
        vp9_write_bit(bc, 0);
        return;
      }
      vp9_write_bit(bc, 1);
      for (i = 0; i < BLOCK_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < PREV_COEF_CONTEXTS; ++l) {
              // calc probs and branch cts for this frame only
              for (t = 0; t < entropy_nodes_update; ++t) {
                vp9_prob newp = new_frame_coef_probs[i][j][k][l][t];
                vp9_prob *oldp = old_frame_coef_probs[i][j][k][l] + t;
653
                const vp9_prob upd = DIFF_UPDATE_PROB;
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
                int s;
                int u = 0;
                if (l >= 3 && k == 0)
                  continue;
                if (t == PIVOT_NODE)
                  s = vp9_prob_diff_update_savings_search_model(
                      frame_branch_ct[i][j][k][l][0],
                      old_frame_coef_probs[i][j][k][l], &newp, upd, i, j);
                else
                  s = vp9_prob_diff_update_savings_search(
                      frame_branch_ct[i][j][k][l][t],
                      *oldp, &newp, upd);
                if (s > 0 && newp != *oldp)
                  u = 1;
                vp9_write(bc, u, upd);
#ifdef ENTROPY_STATS
                if (!cpi->dummy_packing)
                  ++tree_update_hist[tx_size][i][j][k][l][t][u];
#endif
                if (u) {
                  /* send/use new probability */
                  vp9_write_prob_diff_update(bc, newp, *oldp);
                  *oldp = newp;
                }
              }
            }
          }
        }
      }
      return;
    }
    case 1:
    case 2: {
      const int prev_coef_contexts_to_update =
          (cpi->sf.use_fast_coef_updates == 2 ?
           PREV_COEF_CONTEXTS >> 1 : PREV_COEF_CONTEXTS);
      const int coef_band_to_update =
          (cpi->sf.use_fast_coef_updates == 2 ?
           COEF_BANDS >> 1 : COEF_BANDS);
      int updates = 0;
      int noupdates_before_first = 0;
      for (i = 0; i < BLOCK_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < PREV_COEF_CONTEXTS; ++l) {
              // calc probs and branch cts for this frame only
              for (t = 0; t < entropy_nodes_update; ++t) {
                vp9_prob newp = new_frame_coef_probs[i][j][k][l][t];
                vp9_prob *oldp = old_frame_coef_probs[i][j][k][l] + t;
                int s;
                int u = 0;
                if (l >= 3 && k == 0)
                  continue;
                if (l >= prev_coef_contexts_to_update ||
                    k >= coef_band_to_update) {
                  u = 0;
                } else {
                  if (t == PIVOT_NODE)
                    s = vp9_prob_diff_update_savings_search_model(
                        frame_branch_ct[i][j][k][l][0],
                        old_frame_coef_probs[i][j][k][l], &newp, upd, i, j);
                  else
                    s = vp9_prob_diff_update_savings_search(
                        frame_branch_ct[i][j][k][l][t],
                        *oldp, &newp, upd);
                  if (s > 0 && newp != *oldp)
                    u = 1;
                }
                updates += u;
                if (u == 0 && updates == 0) {
                  noupdates_before_first++;
#ifdef ENTROPY_STATS
                  if (!cpi->dummy_packing)
                    ++tree_update_hist[tx_size][i][j][k][l][t][u];
#endif
                  continue;
                }
                if (u == 1 && updates == 1) {
                  int v;
                  // first update
                  vp9_write_bit(bc, 1);
                  for (v = 0; v < noupdates_before_first; ++v)
                    vp9_write(bc, 0, upd);
                }
                vp9_write(bc, u, upd);
#ifdef ENTROPY_STATS
                if (!cpi->dummy_packing)
                  ++tree_update_hist[tx_size][i][j][k][l][t][u];
#endif
                if (u) {
                  /* send/use new probability */
                  vp9_write_prob_diff_update(bc, newp, *oldp);
                  *oldp = newp;
                }
              }
            }
          }
        }
      }
      if (updates == 0) {
        vp9_write_bit(bc, 0);  // no updates
      }
      return;
    }

    default:
      assert(0);
  }
}

// Writes the coefficient probability updates for the frame.  Token tree
// distributions are built for every transform size, then the per-tx-size
// update syntax is emitted only for the sizes the current tx_mode allows.
static void update_coef_probs(VP9_COMP* cpi, vp9_writer* w) {
  const TX_MODE tx_mode = cpi->common.tx_mode;
  const TX_SIZE max_tx_size = tx_mode_to_biggest_tx_size[tx_mode];
  TX_SIZE tx_size;

  // Reset machine-specific state (presumably x87/MMX) before the
  // cost/probability computations below.
  vp9_clear_system_state();

  // Distributions are accumulated for all transform sizes...
  for (tx_size = TX_4X4; tx_size <= TX_32X32; ++tx_size)
    build_tree_distribution(cpi, tx_size);

  // ...but updates are only written up to the largest size in use.
  for (tx_size = TX_4X4; tx_size <= max_tx_size; ++tx_size)
    update_coef_probs_common(w, cpi, tx_size);
}

static void encode_loopfilter(struct loopfilter *lf,
779
                              struct vp9_write_bit_buffer *wb) {
780
781
  int i;

782
  // Encode the loop filter level and type
783
784
  vp9_wb_write_literal(wb, lf->filter_level, 6);
  vp9_wb_write_literal(wb, lf->sharpness_level, 3);
785

786
787
  // Write out loop filter deltas applied at the MB level based on mode or
  // ref frame (if they are enabled).
788
  vp9_wb_write_bit(wb, lf->mode_ref_delta_enabled);
789

790
  if (lf->mode_ref_delta_enabled) {
791
    // Do the deltas need to be updated
792
793
    vp9_wb_write_bit(wb, lf->mode_ref_delta_update);
    if (lf->mode_ref_delta_update) {
794
795
      // Send update
      for (i = 0; i < MAX_REF_LF_DELTAS; i++) {
796
        const int delta = lf->ref_deltas[i];
797
798

        // Frame level data
799
800
        if (delta != lf->last_ref_deltas[i]) {
          lf->last_ref_deltas[i] = delta;
801
          vp9_wb_write_bit(wb, 1);
802

803
804
805
          assert(delta != 0);
          vp9_wb_write_literal(wb, abs(delta) & 0x3F, 6);
          vp9_wb_write_bit(wb, delta < 0);
806
        } else {
807
          vp9_wb_write_bit(wb, 0);
808
809
810
811
812
        }
      }

      // Send update
      for (i = 0; i < MAX_MODE_LF_DELTAS; i++) {
813
814
815
        const int delta = lf->mode_deltas[i];
        if (delta != lf->last_mode_deltas[i]) {
          lf->last_mode_deltas[i] = delta;
816
          vp9_wb_write_bit(wb, 1);
817

818
819
820
          assert(delta != 0);
          vp9_wb_write_literal(wb, abs(delta) & 0x3F, 6);
          vp9_wb_write_bit(wb, delta < 0);
821
        } else {
822
          vp9_wb_write_bit(wb, 0);
823
824
825
826
827
828
        }
      }
    }
  }
}

829
// Writes an optional quantizer delta: a presence bit, followed by the
// 4-bit magnitude and a sign bit when the delta is non-zero.
static void write_delta_q(struct vp9_write_bit_buffer *wb, int delta_q) {
  if (delta_q != 0) {
    vp9_wb_write_bit(wb, 1);
    vp9_wb_write_literal(wb, abs(delta_q), 4);
    vp9_wb_write_bit(wb, delta_q < 0);
  } else {
    vp9_wb_write_bit(wb, 0);
  }
}

// Writes the quantization parameters to the uncompressed frame header:
// base Q index, then the luma-DC and chroma-DC/AC delta Q values.
static void encode_quantization(VP9_COMMON *cm,
                                struct vp9_write_bit_buffer *wb) {
  vp9_wb_write_literal(wb, cm->base_qindex, QINDEX_BITS);
  write_delta_q(wb, cm->y_dc_delta_q);
  write_delta_q(wb, cm->uv_dc_delta_q);
  write_delta_q(wb, cm->uv_ac_delta_q);
}

static void encode_segmentation(VP9_COMP *cpi,
849
                                struct vp9_write_bit_buffer *wb) {
  int i, j;

  struct segmentation *seg = &cpi->common.seg;

  vp9_wb_write_bit(wb, seg->enabled);
  if (!seg->enabled)
    return;

  // Segmentation map
  vp9_wb_write_bit(wb, seg->update_map);
  if (seg->update_map) {
    // Select the coding strategy (temporal or spatial)
    vp9_choose_segmap_coding_method(cpi);
    // Write out probabilities used to decode unpredicted  macro-block segments
    for (i = 0; i < SEG_TREE_PROBS; i++) {