/*
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include <assert.h>
#include <stdio.h>
#include <limits.h>

#include "vpx/vpx_encoder.h"
#include "vpx_mem/vpx_mem.h"

#include "vp9/common/vp9_entropymode.h"
#include "vp9/common/vp9_entropymv.h"
#include "vp9/common/vp9_findnearmv.h"
#include "vp9/common/vp9_tile_common.h"
#include "vp9/common/vp9_seg_common.h"
#include "vp9/common/vp9_pred_common.h"
#include "vp9/common/vp9_entropy.h"
#include "vp9/common/vp9_mvref_common.h"
#include "vp9/common/vp9_treecoder.h"
#include "vp9/common/vp9_systemdependent.h"
#include "vp9/common/vp9_pragmas.h"

#include "vp9/encoder/vp9_mcomp.h"
#include "vp9/encoder/vp9_encodemv.h"
#include "vp9/encoder/vp9_bitstream.h"
#include "vp9/encoder/vp9_segmentation.h"
#include "vp9/encoder/vp9_subexp.h"
#include "vp9/encoder/vp9_tokenize.h"
#include "vp9/encoder/vp9_write_bit_buffer.h"

#if defined(SECTIONBITS_OUTPUT)
unsigned __int64 Sectionbits[500];
#endif

#ifdef ENTROPY_STATS
int intra_mode_stats[INTRA_MODES]
                    [INTRA_MODES]
                    [INTRA_MODES];
vp9_coeff_stats tree_update_hist[TX_SIZES][BLOCK_TYPES];

extern unsigned int active_section;
#endif

#ifdef MODE_STATS
int64_t tx_count_32x32p_stats[TX_SIZE_CONTEXTS][TX_SIZES];
int64_t tx_count_16x16p_stats[TX_SIZE_CONTEXTS][TX_SIZES - 1];
int64_t tx_count_8x8p_stats[TX_SIZE_CONTEXTS][TX_SIZES - 2];
int64_t switchable_interp_stats[SWITCHABLE_FILTER_CONTEXTS][SWITCHABLE_FILTERS];

void init_tx_count_stats() {
  vp9_zero(tx_count_32x32p_stats);
  vp9_zero(tx_count_16x16p_stats);
  vp9_zero(tx_count_8x8p_stats);
}

void init_switchable_interp_stats() {
  vp9_zero(switchable_interp_stats);
}

static void update_tx_count_stats(VP9_COMMON *cm) {
  int i, j;
  for (i = 0; i < TX_SIZE_CONTEXTS; i++) {
    for (j = 0; j < TX_SIZES; j++) {
      tx_count_32x32p_stats[i][j] += cm->fc.tx_count_32x32p[i][j];
    }
  }
  for (i = 0; i < TX_SIZE_CONTEXTS; i++) {
    for (j = 0; j < TX_SIZES - 1; j++) {
      tx_count_16x16p_stats[i][j] += cm->fc.tx_count_16x16p[i][j];
    }
  }
  for (i = 0; i < TX_SIZE_CONTEXTS; i++) {
    for (j = 0; j < TX_SIZES - 2; j++) {
      tx_count_8x8p_stats[i][j] += cm->fc.tx_count_8x8p[i][j];
    }
  }
}

static void update_switchable_interp_stats(VP9_COMMON *cm) {
  int i, j;
  for (i = 0; i < SWITCHABLE_FILTER_CONTEXTS; ++i)
    for (j = 0; j < SWITCHABLE_FILTERS; ++j)
      switchable_interp_stats[i][j] += cm->fc.switchable_interp_count[i][j];
}

void write_tx_count_stats() {
  int i, j;
  FILE *fp = fopen("tx_count.bin", "wb");
  fwrite(tx_count_32x32p_stats, sizeof(tx_count_32x32p_stats), 1, fp);
  fwrite(tx_count_16x16p_stats, sizeof(tx_count_16x16p_stats), 1, fp);
  fwrite(tx_count_8x8p_stats, sizeof(tx_count_8x8p_stats), 1, fp);
  fclose(fp);

  printf(
      "vp9_default_tx_count_32x32p[TX_SIZE_CONTEXTS][TX_SIZES] = {\n");
  for (i = 0; i < TX_SIZE_CONTEXTS; i++) {
    printf("  { ");
    for (j = 0; j < TX_SIZES; j++) {
      printf("%"PRId64", ", tx_count_32x32p_stats[i][j]);
    }
    printf("},\n");
  }
  printf("};\n");
  printf(
      "vp9_default_tx_count_16x16p[TX_SIZE_CONTEXTS][TX_SIZES-1] = {\n");
  for (i = 0; i < TX_SIZE_CONTEXTS; i++) {
    printf("  { ");
    for (j = 0; j < TX_SIZES - 1; j++) {
      printf("%"PRId64", ", tx_count_16x16p_stats[i][j]);
    }
    printf("},\n");
  }
  printf("};\n");
  printf(
      "vp9_default_tx_count_8x8p[TX_SIZE_CONTEXTS][TX_SIZES-2] = {\n");
  for (i = 0; i < TX_SIZE_CONTEXTS; i++) {
    printf("  { ");
    for (j = 0; j < TX_SIZES - 2; j++) {
      printf("%"PRId64", ", tx_count_8x8p_stats[i][j]);
    }
    printf("},\n");
  }
  printf("};\n");
}

void write_switchable_interp_stats() {
  int i, j;
  FILE *fp = fopen("switchable_interp.bin", "wb");
  fwrite(switchable_interp_stats, sizeof(switchable_interp_stats), 1, fp);
  fclose(fp);

  printf(
      "vp9_default_switchable_filter_count[SWITCHABLE_FILTER_CONTEXTS]"
      "[SWITCHABLE_FILTERS] = {\n");
  for (i = 0; i < SWITCHABLE_FILTER_CONTEXTS; i++) {
    printf("  { ");
    for (j = 0; j < SWITCHABLE_FILTERS; j++) {
      printf("%"PRId64", ", switchable_interp_stats[i][j]);
    }
    printf("},\n");
  }
  printf("};\n");
}
#endif

static struct vp9_token intra_mode_encodings[INTRA_MODES];
static struct vp9_token switchable_interp_encodings[SWITCHABLE_FILTERS];
static struct vp9_token partition_encodings[PARTITION_TYPES];
static struct vp9_token inter_mode_encodings[INTER_MODES];

void vp9_entropy_mode_init() {
  vp9_tokens_from_tree(intra_mode_encodings, vp9_intra_mode_tree);
  vp9_tokens_from_tree(switchable_interp_encodings, vp9_switchable_interp_tree);
  vp9_tokens_from_tree(partition_encodings, vp9_partition_tree);
  vp9_tokens_from_tree(inter_mode_encodings, vp9_inter_mode_tree);
}

static void write_intra_mode(vp9_writer *w, MB_PREDICTION_MODE mode,
                             const vp9_prob *probs) {
  write_token(w, vp9_intra_mode_tree, probs, &intra_mode_encodings[mode]);
}

static void write_inter_mode(vp9_writer *w, MB_PREDICTION_MODE mode,
                             const vp9_prob *probs) {
  assert(is_inter_mode(mode));
  write_token(w, vp9_inter_mode_tree, probs,
              &inter_mode_encodings[INTER_OFFSET(mode)]);
}

static INLINE void write_be32(uint8_t *p, int value) {
  p[0] = value >> 24;
  p[1] = value >> 16;
  p[2] = value >> 8;
  p[3] = value;
}

void vp9_encode_unsigned_max(struct vp9_write_bit_buffer *wb,
                             int data, int max) {
  vp9_wb_write_literal(wb, data, get_unsigned_bits(max));
}

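// Computes the branch counts for each node of the given tree from the raw
// symbol counts, then conditionally signals an update for each of the n - 1
// node probabilities.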
static void prob_diff_update(const vp9_tree_index *tree,
                             vp9_prob probs[/*n - 1*/],
                             const unsigned int counts[/*n - 1*/],
                             int n, vp9_writer *w) {
  int i;
  unsigned int branch_ct[32][2];

  // Assuming max number of probabilities <= 32
  assert(n <= 32);

  vp9_tree_probs_from_distribution(tree, branch_ct, counts);
  for (i = 0; i < n - 1; ++i)
    vp9_cond_prob_diff_update(w, &probs[i], branch_ct[i]);
}

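// Codes the transform size for a block as a series of binary decisions,
// stopping early once the largest transform allowed for this block size
// (max_tx_size) has been reached.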
static void write_selected_tx_size(const VP9_COMP *cpi, MODE_INFO *m,
                                   TX_SIZE tx_size, BLOCK_SIZE bsize,
                                   vp9_writer *w) {
  const TX_SIZE max_tx_size = max_txsize_lookup[bsize];
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  const vp9_prob *const tx_probs = get_tx_probs2(max_tx_size, xd,
                                                 &cpi->common.fc.tx_probs);
  vp9_write(w, tx_size != TX_4X4, tx_probs[0]);
  if (tx_size != TX_4X4 && max_tx_size >= TX_16X16) {
    vp9_write(w, tx_size != TX_8X8, tx_probs[1]);
    if (tx_size != TX_8X8 && max_tx_size >= TX_32X32)
      vp9_write(w, tx_size != TX_16X16, tx_probs[2]);
  }
}

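// Returns 1 without writing anything when the segment forces skip; otherwise
// writes the block's skip flag with the contextual mbskip probability and
// returns its value.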
static int write_skip_coeff(const VP9_COMP *cpi, int segment_id, MODE_INFO *m,
                            vp9_writer *w) {
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  if (vp9_segfeature_active(&cpi->common.seg, segment_id, SEG_LVL_SKIP)) {
    return 1;
  } else {
    const int skip_coeff = m->mbmi.skip_coeff;
    vp9_write(w, skip_coeff, vp9_get_pred_prob_mbskip(&cpi->common, xd));
    return skip_coeff;
  }
}

void vp9_update_skip_probs(VP9_COMP *cpi, vp9_writer *w) {
  VP9_COMMON *cm = &cpi->common;
  int k;

  for (k = 0; k < MBSKIP_CONTEXTS; ++k)
    vp9_cond_prob_diff_update(w, &cm->fc.mbskip_probs[k], cm->counts.mbskip[k]);
}

static void update_switchable_interp_probs(VP9_COMP *cpi, vp9_writer *w) {
  VP9_COMMON *const cm = &cpi->common;
  int j;
  for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j)
    prob_diff_update(vp9_switchable_interp_tree,
                     cm->fc.switchable_interp_prob[j],
                     cm->counts.switchable_interp[j], SWITCHABLE_FILTERS, w);

#ifdef MODE_STATS
  if (!cpi->dummy_packing)
    update_switchable_interp_stats(cm);
#endif
}

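// Writes the coefficient tokens for one block, up to (and consuming) the
// EOSB_TOKEN sentinel: each token is coded with the coefficient tree, followed
// by its extra magnitude bits and sign where applicable.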
static void pack_mb_tokens(vp9_writer* const w,
                           TOKENEXTRA **tp,
                           const TOKENEXTRA *const stop) {
  TOKENEXTRA *p = *tp;

  while (p < stop && p->token != EOSB_TOKEN) {
    const int t = p->token;
    const struct vp9_token *const a = &vp9_coef_encodings[t];
    const vp9_extra_bit *const b = &vp9_extra_bits[t];
    int i = 0;
    int v = a->value;
    int n = a->len;

    /* skip one or two nodes */
    if (p->skip_eob_node) {
      n -= p->skip_eob_node;
      i = 2 * p->skip_eob_node;
    }

    // TODO(jbb): expanding this can lead to big gains.  It allows
    // much better branch prediction and would enable us to avoid numerous
    // lookups and compares.

    // If we have a token that's in the constrained set, the coefficient tree
    // is split into two treed writes.  The first treed write takes care of the
    // unconstrained nodes.  The second treed write takes care of the
    // constrained nodes.
    if (t >= TWO_TOKEN && t < DCT_EOB_TOKEN) {
      int len = UNCONSTRAINED_NODES - p->skip_eob_node;
      int bits = v >> (n - len);
      treed_write(w, vp9_coef_tree, p->context_tree, bits, len, i);
      treed_write(w, vp9_coef_con_tree,
                  vp9_pareto8_full[p->context_tree[PIVOT_NODE] - 1], v, n - len,
                  0);
    } else {
      treed_write(w, vp9_coef_tree, p->context_tree, v, n, i);
    }

    if (b->base_val) {
      const int e = p->extra, l = b->len;

      if (l) {
        const unsigned char *pb = b->prob;
        int v = e >> 1;
        int n = l;              /* number of bits in v, assumed nonzero */
        int i = 0;

        do {
          const int bb = (v >> --n) & 1;
          vp9_write(w, bb, pb[i >> 1]);
          i = b->tree[i + bb];
        } while (n);
      }

      vp9_write_bit(w, e & 1);
    }

    ++p;
  }

  *tp = p + (p->token == EOSB_TOKEN);
}

static void write_segment_id(vp9_writer *w, const struct segmentation *seg,
                             int segment_id) {
  if (seg->enabled && seg->update_map)
    treed_write(w, vp9_segment_tree, seg->tree_probs, segment_id, 3, 0);
}

// This function encodes the reference frame
static void encode_ref_frame(VP9_COMP *cpi, vp9_writer *bc) {
  VP9_COMMON *const cm = &cpi->common;
  MACROBLOCK *const x = &cpi->mb;
  MACROBLOCKD *const xd = &x->e_mbd;
  MB_MODE_INFO *mi = &xd->mi_8x8[0]->mbmi;
  const int segment_id = mi->segment_id;
  int seg_ref_active = vp9_segfeature_active(&cm->seg, segment_id,
                                             SEG_LVL_REF_FRAME);

  // If segment level coding of this signal is disabled...
  // or the segment allows multiple reference frame options
  if (!seg_ref_active) {
    // does the feature use compound prediction or not
    // (if not specified at the frame/segment level)
    if (cm->comp_pred_mode == REFERENCE_MODE_SELECT) {
      vp9_write(bc, mi->ref_frame[1] > INTRA_FRAME,
                vp9_get_pred_prob_comp_inter_inter(cm, xd));
    } else {
      assert((mi->ref_frame[1] <= INTRA_FRAME) ==
                 (cm->comp_pred_mode == SINGLE_REFERENCE));
    }

    if (mi->ref_frame[1] > INTRA_FRAME) {
      vp9_write(bc, mi->ref_frame[0] == GOLDEN_FRAME,
                vp9_get_pred_prob_comp_ref_p(cm, xd));
    } else {
      vp9_write(bc, mi->ref_frame[0] != LAST_FRAME,
                vp9_get_pred_prob_single_ref_p1(cm, xd));
      if (mi->ref_frame[0] != LAST_FRAME)
        vp9_write(bc, mi->ref_frame[0] != GOLDEN_FRAME,
                  vp9_get_pred_prob_single_ref_p2(cm, xd));
    }
  } else {
    assert(mi->ref_frame[1] <= INTRA_FRAME);
    assert(vp9_get_segdata(&cm->seg, segment_id, SEG_LVL_REF_FRAME) ==
           mi->ref_frame[0]);
  }

  // If using the prediction model we have nothing further to do because
  // the reference frame is fully coded by the segment.
}

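// Writes the mode info for a non-key-frame block: segment id, skip flag,
// intra/inter flag, transform size, then either the intra modes or the
// reference frames, inter mode, interpolation filter and motion vectors.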
static void pack_inter_mode_mvs(VP9_COMP *cpi, MODE_INFO *m, vp9_writer *bc) {
  VP9_COMMON *const cm = &cpi->common;
  const nmv_context *nmvc = &cm->fc.nmvc;
  MACROBLOCK *const x = &cpi->mb;
  MACROBLOCKD *const xd = &x->e_mbd;
  struct segmentation *seg = &cm->seg;
  MB_MODE_INFO *const mi = &m->mbmi;
  const MV_REFERENCE_FRAME rf = mi->ref_frame[0];
  const MB_PREDICTION_MODE mode = mi->mode;
  const int segment_id = mi->segment_id;
  int skip_coeff;
  const BLOCK_SIZE bsize = mi->sb_type;
  const int allow_hp = cm->allow_high_precision_mv;

#ifdef ENTROPY_STATS
  active_section = 9;
#endif

  if (seg->update_map) {
    if (seg->temporal_update) {
      const int pred_flag = mi->seg_id_predicted;
      vp9_prob pred_prob = vp9_get_pred_prob_seg_id(seg, xd);
      vp9_write(bc, pred_flag, pred_prob);
      if (!pred_flag)
        write_segment_id(bc, seg, segment_id);
    } else {
      write_segment_id(bc, seg, segment_id);
    }
  }

  skip_coeff = write_skip_coeff(cpi, segment_id, m, bc);

  if (!vp9_segfeature_active(seg, segment_id, SEG_LVL_REF_FRAME))
    vp9_write(bc, rf != INTRA_FRAME,
              vp9_get_pred_prob_intra_inter(cm, xd));

  if (bsize >= BLOCK_8X8 && cm->tx_mode == TX_MODE_SELECT &&
      !(rf != INTRA_FRAME &&
        (skip_coeff || vp9_segfeature_active(seg, segment_id, SEG_LVL_SKIP)))) {
    write_selected_tx_size(cpi, m, mi->tx_size, bsize, bc);
  }

  if (rf == INTRA_FRAME) {
#ifdef ENTROPY_STATS
    active_section = 6;
#endif

    if (bsize >= BLOCK_8X8) {
      write_intra_mode(bc, mode, cm->fc.y_mode_prob[size_group_lookup[bsize]]);
    } else {
      int idx, idy;
      const int num_4x4_blocks_wide = num_4x4_blocks_wide_lookup[bsize];
      const int num_4x4_blocks_high = num_4x4_blocks_high_lookup[bsize];
      for (idy = 0; idy < 2; idy += num_4x4_blocks_high) {
        for (idx = 0; idx < 2; idx += num_4x4_blocks_wide) {
          const MB_PREDICTION_MODE bm = m->bmi[idy * 2 + idx].as_mode;
          write_intra_mode(bc, bm, cm->fc.y_mode_prob[0]);
        }
      }
    }
    write_intra_mode(bc, mi->uv_mode, cm->fc.uv_mode_prob[mode]);
  } else {
    vp9_prob *mv_ref_p;
    encode_ref_frame(cpi, bc);
    mv_ref_p = cpi->common.fc.inter_mode_probs[mi->mode_context[rf]];

#ifdef ENTROPY_STATS
    active_section = 3;
#endif

    // If segment skip is not enabled code the mode.
    if (!vp9_segfeature_active(seg, segment_id, SEG_LVL_SKIP)) {
      if (bsize >= BLOCK_8X8) {
        write_inter_mode(bc, mode, mv_ref_p);
        ++cm->counts.inter_mode[mi->mode_context[rf]]
                               [INTER_OFFSET(mode)];
      }
    }

    if (cm->mcomp_filter_type == SWITCHABLE) {
      const int ctx = vp9_get_pred_context_switchable_interp(xd);
      write_token(bc, vp9_switchable_interp_tree,
                  cm->fc.switchable_interp_prob[ctx],
                  &switchable_interp_encodings[mi->interp_filter]);
    } else {
      assert(mi->interp_filter == cm->mcomp_filter_type);
    }

    if (bsize < BLOCK_8X8) {
      const int num_4x4_blocks_wide = num_4x4_blocks_wide_lookup[bsize];
      const int num_4x4_blocks_high = num_4x4_blocks_high_lookup[bsize];
      int idx, idy;
      for (idy = 0; idy < 2; idy += num_4x4_blocks_high) {
        for (idx = 0; idx < 2; idx += num_4x4_blocks_wide) {
          const int j = idy * 2 + idx;
          const MB_PREDICTION_MODE blockmode = m->bmi[j].as_mode;
          write_inter_mode(bc, blockmode, mv_ref_p);
          ++cm->counts.inter_mode[mi->mode_context[rf]]
                                 [INTER_OFFSET(blockmode)];

          if (blockmode == NEWMV) {
#ifdef ENTROPY_STATS
            active_section = 11;
#endif
            vp9_encode_mv(cpi, bc, &m->bmi[j].as_mv[0].as_mv,
                          &mi->best_mv[0].as_mv, nmvc, allow_hp);

            if (has_second_ref(mi))
              vp9_encode_mv(cpi, bc, &m->bmi[j].as_mv[1].as_mv,
                            &mi->best_mv[1].as_mv, nmvc, allow_hp);
          }
        }
      }
    } else if (mode == NEWMV) {
#ifdef ENTROPY_STATS
      active_section = 5;
#endif
      vp9_encode_mv(cpi, bc, &mi->mv[0].as_mv,
                    &mi->best_mv[0].as_mv, nmvc, allow_hp);

      if (has_second_ref(mi))
        vp9_encode_mv(cpi, bc, &mi->mv[1].as_mv,
                      &mi->best_mv[1].as_mv, nmvc, allow_hp);
    }
  }
}

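// Writes the mode info for a key-frame block: segment id, skip flag, transform
// size, then the intra modes using probabilities conditioned on the above and
// left block modes.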
static void write_mb_modes_kf(const VP9_COMP *cpi, MODE_INFO **mi_8x8,
                              vp9_writer *bc) {
  const VP9_COMMON *const cm = &cpi->common;
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  const struct segmentation *const seg = &cm->seg;
  MODE_INFO *m = mi_8x8[0];
  const int ym = m->mbmi.mode;
  const int segment_id = m->mbmi.segment_id;
  MODE_INFO *above_mi = mi_8x8[-xd->mode_info_stride];
  MODE_INFO *left_mi = xd->left_available ? mi_8x8[-1] : NULL;

  if (seg->update_map)
    write_segment_id(bc, seg, m->mbmi.segment_id);

  write_skip_coeff(cpi, segment_id, m, bc);

  if (m->mbmi.sb_type >= BLOCK_8X8 && cm->tx_mode == TX_MODE_SELECT)
    write_selected_tx_size(cpi, m, m->mbmi.tx_size, m->mbmi.sb_type, bc);

  if (m->mbmi.sb_type >= BLOCK_8X8) {
    const MB_PREDICTION_MODE A = above_block_mode(m, above_mi, 0);
    const MB_PREDICTION_MODE L = left_block_mode(m, left_mi, 0);
    write_intra_mode(bc, ym, vp9_kf_y_mode_prob[A][L]);
  } else {
    int idx, idy;
    const int num_4x4_blocks_wide = num_4x4_blocks_wide_lookup[m->mbmi.sb_type];
    const int num_4x4_blocks_high = num_4x4_blocks_high_lookup[m->mbmi.sb_type];
    for (idy = 0; idy < 2; idy += num_4x4_blocks_high) {
      for (idx = 0; idx < 2; idx += num_4x4_blocks_wide) {
        int i = idy * 2 + idx;
        const MB_PREDICTION_MODE A = above_block_mode(m, above_mi, i);
        const MB_PREDICTION_MODE L = left_block_mode(m, left_mi, i);
        const int bm = m->bmi[i].as_mode;
#ifdef ENTROPY_STATS
        ++intra_mode_stats[A][L][bm];
#endif
        write_intra_mode(bc, bm, vp9_kf_y_mode_prob[A][L]);
      }
    }
  }

  write_intra_mode(bc, m->mbmi.uv_mode, vp9_kf_uv_mode_prob[ym]);
}

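// Writes a single block: its mode info (key-frame or inter path) followed by
// its coefficient tokens.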
static void write_modes_b(VP9_COMP *cpi, const TileInfo *const tile,
                          vp9_writer *w, TOKENEXTRA **tok, TOKENEXTRA *tok_end,
                          int mi_row, int mi_col) {
  VP9_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  MODE_INFO *m;

  xd->mi_8x8 = cm->mi_grid_visible + (mi_row * cm->mode_info_stride + mi_col);
  m = xd->mi_8x8[0];

  set_mi_row_col(xd, tile,
                 mi_row, num_8x8_blocks_high_lookup[m->mbmi.sb_type],
                 mi_col, num_8x8_blocks_wide_lookup[m->mbmi.sb_type],
                 cm->mi_rows, cm->mi_cols);
  if (frame_is_intra_only(cm)) {
    write_mb_modes_kf(cpi, xd->mi_8x8, w);
#ifdef ENTROPY_STATS
    active_section = 8;
#endif
  } else {
    pack_inter_mode_mvs(cpi, m, w);
#ifdef ENTROPY_STATS
    active_section = 1;
#endif
  }

  assert(*tok < tok_end);
  pack_mb_tokens(w, tok, tok_end);
}

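// Writes the partition type for a block. At the right and bottom frame edges
// only the partitions that fit are possible, so a single bit (split or not)
// is coded, and when neither direction fits the split is implicit and nothing
// is written.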
static void write_partition(VP9_COMP *cpi, int hbs, int mi_row, int mi_col,
                            PARTITION_TYPE p, BLOCK_SIZE bsize, vp9_writer *w) {
  VP9_COMMON *const cm = &cpi->common;
  const int ctx = partition_plane_context(cpi->above_seg_context,
                                          cpi->left_seg_context,
                                          mi_row, mi_col, bsize);
  const vp9_prob *const probs = get_partition_probs(cm, ctx);
  const int has_rows = (mi_row + hbs) < cm->mi_rows;
  const int has_cols = (mi_col + hbs) < cm->mi_cols;

  if (has_rows && has_cols) {
    write_token(w, vp9_partition_tree, probs, &partition_encodings[p]);
  } else if (!has_rows && has_cols) {
    assert(p == PARTITION_SPLIT || p == PARTITION_HORZ);
    vp9_write(w, p == PARTITION_SPLIT, probs[1]);
  } else if (has_rows && !has_cols) {
    assert(p == PARTITION_SPLIT || p == PARTITION_VERT);
    vp9_write(w, p == PARTITION_SPLIT, probs[2]);
  } else {
    assert(p == PARTITION_SPLIT);
  }
}

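// Recursively writes a superblock: the partition type at this level, then
// either the single block or the sub-blocks it splits into.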
static void write_modes_sb(VP9_COMP *cpi, const TileInfo *const tile,
                           vp9_writer *w, TOKENEXTRA **tok, TOKENEXTRA *tok_end,
                           int mi_row, int mi_col, BLOCK_SIZE bsize) {
  VP9_COMMON *const cm = &cpi->common;
  const int bsl = b_width_log2(bsize);
  const int bs = (1 << bsl) / 4;
  PARTITION_TYPE partition;
  BLOCK_SIZE subsize;
  MODE_INFO *m = cm->mi_grid_visible[mi_row * cm->mode_info_stride + mi_col];

  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols)
    return;

  partition = partition_lookup[bsl][m->mbmi.sb_type];
  write_partition(cpi, bs, mi_row, mi_col, partition, bsize, w);
  subsize = get_subsize(bsize, partition);
  if (subsize < BLOCK_8X8) {
    write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
  } else {
    switch (partition) {
      case PARTITION_NONE:
        write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
        break;
      case PARTITION_HORZ:
        write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
        if (mi_row + bs < cm->mi_rows)
          write_modes_b(cpi, tile, w, tok, tok_end, mi_row + bs, mi_col);
        break;
      case PARTITION_VERT:
        write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
        if (mi_col + bs < cm->mi_cols)
          write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + bs);
        break;
      case PARTITION_SPLIT:
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, subsize);
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col + bs,
                       subsize);
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row + bs, mi_col,
                       subsize);
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row + bs, mi_col + bs,
                       subsize);
        break;
      default:
        assert(0);
    }
  }

  // update partition context
  if (bsize >= BLOCK_8X8 &&
      (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT))
    update_partition_context(cpi->above_seg_context, cpi->left_seg_context,
                             mi_row, mi_col, subsize, bsize);
}

static void write_modes(VP9_COMP *cpi, const TileInfo *const tile,
                        vp9_writer *w, TOKENEXTRA **tok, TOKENEXTRA *tok_end) {
  int mi_row, mi_col;

  for (mi_row = tile->mi_row_start; mi_row < tile->mi_row_end;
       mi_row += MI_BLOCK_SIZE) {
    vp9_zero(cpi->left_seg_context);
    for (mi_col = tile->mi_col_start; mi_col < tile->mi_col_end;
         mi_col += MI_BLOCK_SIZE)
      write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, BLOCK_64X64);
  }
}

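// Converts the accumulated token counts for this transform size into per-node
// branch counts and the corresponding candidate probabilities for the
// unconstrained nodes of the coefficient tree.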
static void build_tree_distribution(VP9_COMP *cpi, TX_SIZE tx_size) {
  vp9_coeff_probs_model *coef_probs = cpi->frame_coef_probs[tx_size];
  vp9_coeff_count *coef_counts = cpi->coef_counts[tx_size];
  unsigned int (*eob_branch_ct)[REF_TYPES][COEF_BANDS][PREV_COEF_CONTEXTS] =
      cpi->common.counts.eob_branch[tx_size];
  vp9_coeff_stats *coef_branch_ct = cpi->frame_branch_ct[tx_size];
  int i, j, k, l, m;

  for (i = 0; i < BLOCK_TYPES; ++i) {
    for (j = 0; j < REF_TYPES; ++j) {
      for (k = 0; k < COEF_BANDS; ++k) {
        for (l = 0; l < PREV_COEF_CONTEXTS; ++l) {
          if (l >= 3 && k == 0)
            continue;
          vp9_tree_probs_from_distribution(vp9_coef_tree,
                                           coef_branch_ct[i][j][k][l],
                                           coef_counts[i][j][k][l]);
          coef_branch_ct[i][j][k][l][0][1] = eob_branch_ct[i][j][k][l] -
                                             coef_branch_ct[i][j][k][l][0][0];
          for (m = 0; m < UNCONSTRAINED_NODES; ++m)
            coef_probs[i][j][k][l][m] = get_binary_prob(
                                            coef_branch_ct[i][j][k][l][m][0],
                                            coef_branch_ct[i][j][k][l][m][1]);
#ifdef ENTROPY_STATS
          if (!cpi->dummy_packing) {
            int t;
            for (t = 0; t < MAX_ENTROPY_TOKENS; ++t)
              context_counters[tx_size][i][j][k][l][t] +=
                  coef_counts[i][j][k][l][t];
            context_counters[tx_size][i][j][k][l][MAX_ENTROPY_TOKENS] +=
                eob_branch_ct[i][j][k][l];
          }
#endif
        }
      }
    }
  }
}

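// Decides which coefficient probability updates to signal for this transform
// size and writes them. Mode 0 does a dry-run savings search before
// committing; modes 1 and 2 skip the dry run, with mode 2 also restricting
// which bands and contexts are searched.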
static void update_coef_probs_common(vp9_writer* const bc, VP9_COMP *cpi,
                                     TX_SIZE tx_size) {
  vp9_coeff_probs_model *new_frame_coef_probs = cpi->frame_coef_probs[tx_size];
  vp9_coeff_probs_model *old_frame_coef_probs =
      cpi->common.fc.coef_probs[tx_size];
  vp9_coeff_stats *frame_branch_ct = cpi->frame_branch_ct[tx_size];
  const vp9_prob upd = DIFF_UPDATE_PROB;
  const int entropy_nodes_update = UNCONSTRAINED_NODES;
  int i, j, k, l, t;
  switch (cpi->sf.use_fast_coef_updates) {
    case 0: {
      /* dry run to see if there is any update at all needed */
      int savings = 0;
      int update[2] = {0, 0};
      for (i = 0; i < BLOCK_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < PREV_COEF_CONTEXTS; ++l) {
              for (t = 0; t < entropy_nodes_update; ++t) {
                vp9_prob newp = new_frame_coef_probs[i][j][k][l][t];
                const vp9_prob oldp = old_frame_coef_probs[i][j][k][l][t];
                int s;
                int u = 0;

                if (l >= 3 && k == 0)
                  continue;
                if (t == PIVOT_NODE)
                  s = vp9_prob_diff_update_savings_search_model(
                      frame_branch_ct[i][j][k][l][0],
                      old_frame_coef_probs[i][j][k][l], &newp, upd, i, j);
                else
                  s = vp9_prob_diff_update_savings_search(
                      frame_branch_ct[i][j][k][l][t], oldp, &newp, upd);
                if (s > 0 && newp != oldp)
                  u = 1;
                if (u)
                  savings += s - (int)(vp9_cost_zero(upd));
                else
                  savings -= (int)(vp9_cost_zero(upd));
                update[u]++;
              }
            }
          }
        }
      }
      // printf("Update %d %d, savings %d\n", update[0], update[1], savings);
      /* Is coef updated at all */
      if (update[1] == 0 || savings < 0) {
        vp9_write_bit(bc, 0);
        return;
      }
      vp9_write_bit(bc, 1);
      for (i = 0; i < BLOCK_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < PREV_COEF_CONTEXTS; ++l) {
              // calc probs and branch cts for this frame only
              for (t = 0; t < entropy_nodes_update; ++t) {
                vp9_prob newp = new_frame_coef_probs[i][j][k][l][t];
                vp9_prob *oldp = old_frame_coef_probs[i][j][k][l] + t;
                const vp9_prob upd = DIFF_UPDATE_PROB;
                int s;
                int u = 0;
                if (l >= 3 && k == 0)
                  continue;
                if (t == PIVOT_NODE)
                  s = vp9_prob_diff_update_savings_search_model(
                      frame_branch_ct[i][j][k][l][0],
                      old_frame_coef_probs[i][j][k][l], &newp, upd, i, j);
                else
                  s = vp9_prob_diff_update_savings_search(
                      frame_branch_ct[i][j][k][l][t],
                      *oldp, &newp, upd);
                if (s > 0 && newp != *oldp)
                  u = 1;
                vp9_write(bc, u, upd);
#ifdef ENTROPY_STATS
                if (!cpi->dummy_packing)
                  ++tree_update_hist[tx_size][i][j][k][l][t][u];
#endif
                if (u) {
                  /* send/use new probability */
                  vp9_write_prob_diff_update(bc, newp, *oldp);
                  *oldp = newp;
                }
              }
            }
          }
        }
      }
      return;
    }
    case 1:
    case 2: {
      const int prev_coef_contexts_to_update =
          (cpi->sf.use_fast_coef_updates == 2 ?
           PREV_COEF_CONTEXTS >> 1 : PREV_COEF_CONTEXTS);
      const int coef_band_to_update =
          (cpi->sf.use_fast_coef_updates == 2 ?
           COEF_BANDS >> 1 : COEF_BANDS);
      int updates = 0;
      int noupdates_before_first = 0;
      for (i = 0; i < BLOCK_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < PREV_COEF_CONTEXTS; ++l) {
              // calc probs and branch cts for this frame only
              for (t = 0; t < entropy_nodes_update; ++t) {
                vp9_prob newp = new_frame_coef_probs[i][j][k][l][t];
                vp9_prob *oldp = old_frame_coef_probs[i][j][k][l] + t;
                int s;
                int u = 0;
                if (l >= 3 && k == 0)
                  continue;
                if (l >= prev_coef_contexts_to_update ||
                    k >= coef_band_to_update) {
                  u = 0;
                } else {
                  if (t == PIVOT_NODE)
                    s = vp9_prob_diff_update_savings_search_model(
                        frame_branch_ct[i][j][k][l][0],
                        old_frame_coef_probs[i][j][k][l], &newp, upd, i, j);
                  else
                    s = vp9_prob_diff_update_savings_search(
                        frame_branch_ct[i][j][k][l][t],
                        *oldp, &newp, upd);
                  if (s > 0 && newp != *oldp)
                    u = 1;
                }
                updates += u;
                if (u == 0 && updates == 0) {
                  noupdates_before_first++;
#ifdef ENTROPY_STATS
                  if (!cpi->dummy_packing)
                    ++tree_update_hist[tx_size][i][j][k][l][t][u];
#endif
                  continue;
                }
                if (u == 1 && updates == 1) {
                  int v;
                  // first update
                  vp9_write_bit(bc, 1);
                  for (v = 0; v < noupdates_before_first; ++v)
                    vp9_write(bc, 0, upd);
                }
                vp9_write(bc, u, upd);
#ifdef ENTROPY_STATS
                if (!cpi->dummy_packing)
                  ++tree_update_hist[tx_size][i][j][k][l][t][u];
#endif
                if (u) {
                  /* send/use new probability */
                  vp9_write_prob_diff_update(bc, newp, *oldp);
                  *oldp = newp;
                }
              }
            }
          }
        }
      }
      if (updates == 0) {
        vp9_write_bit(bc, 0);  // no updates
      }
      return;
    }

    default:
      assert(0);
  }
}

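// Builds the per-frame coefficient probability candidates for every transform
// size, then writes updates for each size up to the largest allowed by the
// current tx_mode.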
static void update_coef_probs(VP9_COMP* cpi, vp9_writer* w) {
  const TX_MODE tx_mode = cpi->common.tx_mode;
  const TX_SIZE max_tx_size = tx_mode_to_biggest_tx_size[tx_mode];
  TX_SIZE tx_size;
  vp9_clear_system_state();

  for (tx_size = TX_4X4; tx_size <= TX_32X32; ++tx_size)
    build_tree_distribution(cpi, tx_size);

  for (tx_size = TX_4X4; tx_size <= max_tx_size; ++tx_size)
    update_coef_probs_common(w, cpi, tx_size);
}

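// Writes the loop filter level, sharpness, and (optionally) the per-reference
// and per-mode delta values.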
static void encode_loopfilter(struct loopfilter *lf,
                              struct vp9_write_bit_buffer *wb) {
  int i;

  // Encode the loop filter level and type
  vp9_wb_write_literal(wb, lf->filter_level, 6);
  vp9_wb_write_literal(wb, lf->sharpness_level, 3);

  // Write out loop filter deltas applied at the MB level based on mode or
  // ref frame (if they are enabled).
  vp9_wb_write_bit(wb, lf->mode_ref_delta_enabled);

  if (lf->mode_ref_delta_enabled) {
    // Do the deltas need to be updated
    vp9_wb_write_bit(wb, lf->mode_ref_delta_update);
    if (lf->mode_ref_delta_update) {
      // Send update
      for (i = 0; i < MAX_REF_LF_DELTAS; i++) {
        const int delta = lf->ref_deltas[i];