/*
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include <assert.h>
#include <stdio.h>
#include <limits.h>

#include "vpx/vpx_encoder.h"
#include "vpx_mem/vpx_mem.h"

#include "vp9/common/vp9_entropymode.h"
#include "vp9/common/vp9_entropymv.h"
#include "vp9/common/vp9_findnearmv.h"
#include "vp9/common/vp9_tile_common.h"
#include "vp9/common/vp9_seg_common.h"
#include "vp9/common/vp9_pred_common.h"
#include "vp9/common/vp9_entropy.h"
#include "vp9/common/vp9_mvref_common.h"
#include "vp9/common/vp9_treecoder.h"
#include "vp9/common/vp9_systemdependent.h"
#include "vp9/common/vp9_pragmas.h"

#include "vp9/encoder/vp9_mcomp.h"
#include "vp9/encoder/vp9_encodemv.h"
#include "vp9/encoder/vp9_bitstream.h"
#include "vp9/encoder/vp9_segmentation.h"
#include "vp9/encoder/vp9_subexp.h"
#include "vp9/encoder/vp9_write_bit_buffer.h"

#if defined(SECTIONBITS_OUTPUT)
unsigned __int64 Sectionbits[500];
#endif

#ifdef ENTROPY_STATS
int intra_mode_stats[INTRA_MODES]
                    [INTRA_MODES]
                    [INTRA_MODES];
vp9_coeff_stats tree_update_hist[TX_SIZES][BLOCK_TYPES];

extern unsigned int active_section;
#endif

#ifdef MODE_STATS
int64_t tx_count_32x32p_stats[TX_SIZE_CONTEXTS][TX_SIZES];
int64_t tx_count_16x16p_stats[TX_SIZE_CONTEXTS][TX_SIZES - 1];
int64_t tx_count_8x8p_stats[TX_SIZE_CONTEXTS][TX_SIZES - 2];
int64_t switchable_interp_stats[SWITCHABLE_FILTER_CONTEXTS][SWITCHABLE_FILTERS];

void init_tx_count_stats() {
  vp9_zero(tx_count_32x32p_stats);
  vp9_zero(tx_count_16x16p_stats);
  vp9_zero(tx_count_8x8p_stats);
}

void init_switchable_interp_stats() {
  vp9_zero(switchable_interp_stats);
}

static void update_tx_count_stats(VP9_COMMON *cm) {
  int i, j;
  for (i = 0; i < TX_SIZE_CONTEXTS; i++) {
    for (j = 0; j < TX_SIZES; j++) {
      tx_count_32x32p_stats[i][j] += cm->fc.tx_count_32x32p[i][j];
    }
  }
  for (i = 0; i < TX_SIZE_CONTEXTS; i++) {
    for (j = 0; j < TX_SIZES - 1; j++) {
      tx_count_16x16p_stats[i][j] += cm->fc.tx_count_16x16p[i][j];
    }
  }
  for (i = 0; i < TX_SIZE_CONTEXTS; i++) {
    for (j = 0; j < TX_SIZES - 2; j++) {
      tx_count_8x8p_stats[i][j] += cm->fc.tx_count_8x8p[i][j];
    }
  }
}

static void update_switchable_interp_stats(VP9_COMMON *cm) {
  int i, j;
  for (i = 0; i < SWITCHABLE_FILTER_CONTEXTS; ++i)
    for (j = 0; j < SWITCHABLE_FILTERS; ++j)
      switchable_interp_stats[i][j] += cm->fc.switchable_interp_count[i][j];
}

void write_tx_count_stats() {
  int i, j;
  FILE *fp = fopen("tx_count.bin", "wb");
  fwrite(tx_count_32x32p_stats, sizeof(tx_count_32x32p_stats), 1, fp);
  fwrite(tx_count_16x16p_stats, sizeof(tx_count_16x16p_stats), 1, fp);
  fwrite(tx_count_8x8p_stats, sizeof(tx_count_8x8p_stats), 1, fp);
  fclose(fp);

  printf(
      "vp9_default_tx_count_32x32p[TX_SIZE_CONTEXTS][TX_SIZES] = {\n");
  for (i = 0; i < TX_SIZE_CONTEXTS; i++) {
    printf("  { ");
    for (j = 0; j < TX_SIZES; j++) {
      printf("%"PRId64", ", tx_count_32x32p_stats[i][j]);
    }
    printf("},\n");
  }
  printf("};\n");
  printf(
      "vp9_default_tx_count_16x16p[TX_SIZE_CONTEXTS][TX_SIZES-1] = {\n");
  for (i = 0; i < TX_SIZE_CONTEXTS; i++) {
    printf("  { ");
    for (j = 0; j < TX_SIZES - 1; j++) {
      printf("%"PRId64", ", tx_count_16x16p_stats[i][j]);
    }
    printf("},\n");
  }
  printf("};\n");
  printf(
      "vp9_default_tx_count_8x8p[TX_SIZE_CONTEXTS][TX_SIZES-2] = {\n");
  for (i = 0; i < TX_SIZE_CONTEXTS; i++) {
    printf("  { ");
    for (j = 0; j < TX_SIZES - 2; j++) {
      printf("%"PRId64", ", tx_count_8x8p_stats[i][j]);
    }
    printf("},\n");
  }
  printf("};\n");
}

void write_switchable_interp_stats() {
  int i, j;
  FILE *fp = fopen("switchable_interp.bin", "wb");
  fwrite(switchable_interp_stats, sizeof(switchable_interp_stats), 1, fp);
  fclose(fp);

  printf(
      "vp9_default_switchable_filter_count[SWITCHABLE_FILTER_CONTEXTS]"
      "[SWITCHABLE_FILTERS] = {\n");
  for (i = 0; i < SWITCHABLE_FILTER_CONTEXTS; i++) {
    printf("  { ");
    for (j = 0; j < SWITCHABLE_FILTERS; j++) {
      printf("%"PRId64", ", switchable_interp_stats[i][j]);
    }
    printf("},\n");
  }
  printf("};\n");
}
#endif

static INLINE void write_be32(uint8_t *p, int value) {
  p[0] = value >> 24;
  p[1] = value >> 16;
  p[2] = value >> 8;
  p[3] = value;
}

void vp9_encode_unsigned_max(struct vp9_write_bit_buffer *wb,
                             int data, int max) {
  vp9_wb_write_literal(wb, data, get_unsigned_bits(max));
}

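// Writes conditional probability updates for an n-ary coding tree: branch
// counts are derived from the symbol counts and each of the n - 1 node
// probabilities is diff-updated against its previous value when worthwhile.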
static void prob_diff_update(const vp9_tree_index *tree,
                             vp9_prob probs[/*n - 1*/],
                             const unsigned int counts[/*n - 1*/],
                             int n, vp9_writer *w) {
  int i;
  unsigned int branch_ct[32][2];
  assert(n <= 32);

  vp9_tree_probs_from_distribution(tree, branch_ct, counts);
  for (i = 0; i < n - 1; ++i)
    vp9_cond_prob_diff_update(w, &probs[i], branch_ct[i]);
}

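// Signals the transform size chosen for a block as a sequence of up to three
// binary decisions, stopping early once the largest transform size allowed
// for this block size has been reached.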
static void write_selected_tx_size(const VP9_COMP *cpi, MODE_INFO *m,
                                   TX_SIZE tx_size, BLOCK_SIZE bsize,
                                   vp9_writer *w) {
  const TX_SIZE max_tx_size = max_txsize_lookup[bsize];
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  const vp9_prob *const tx_probs = get_tx_probs2(max_tx_size, xd,
                                                 &cpi->common.fc.tx_probs);
  vp9_write(w, tx_size != TX_4X4, tx_probs[0]);
  if (tx_size != TX_4X4 && max_tx_size >= TX_16X16) {
    vp9_write(w, tx_size != TX_8X8, tx_probs[1]);
    if (tx_size != TX_8X8 && max_tx_size >= TX_32X32)
      vp9_write(w, tx_size != TX_16X16, tx_probs[2]);
  }
}

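// Writes the skip flag for a block unless the segment forces skip, in which
// case nothing is coded; returns the effective skip value.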
static int write_skip_coeff(const VP9_COMP *cpi, int segment_id, MODE_INFO *m,
                            vp9_writer *w) {
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  if (vp9_segfeature_active(&cpi->common.seg, segment_id, SEG_LVL_SKIP)) {
    return 1;
  } else {
    const int skip_coeff = m->mbmi.skip_coeff;
    vp9_write(w, skip_coeff, vp9_get_pred_prob_mbskip(&cpi->common, xd));
    return skip_coeff;
  }
}

void vp9_update_skip_probs(VP9_COMP *cpi, vp9_writer *w) {
  VP9_COMMON *cm = &cpi->common;
  int k;

  for (k = 0; k < MBSKIP_CONTEXTS; ++k)
    vp9_cond_prob_diff_update(w, &cm->fc.mbskip_probs[k], cm->counts.mbskip[k]);
}

static void write_intra_mode(vp9_writer *bc, int m, const vp9_prob *p) {
  write_token(bc, vp9_intra_mode_tree, p, vp9_intra_mode_encodings + m);
}

static void update_switchable_interp_probs(VP9_COMP *cpi, vp9_writer *w) {
  VP9_COMMON *const cm = &cpi->common;
  int j;
  for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j)
    prob_diff_update(vp9_switchable_interp_tree,
                     cm->fc.switchable_interp_prob[j],
                     cm->counts.switchable_interp[j], SWITCHABLE_FILTERS, w);

#ifdef MODE_STATS
  if (!cpi->dummy_packing)
    update_switchable_interp_stats(cm);
#endif
}

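// Writes the coefficient tokens for one block, advancing *tp past the
// EOSB_TOKEN marker. Tokens in the constrained set are split across two tree
// writes; tokens with a base value are followed by their extra bits and sign.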
static void pack_mb_tokens(vp9_writer* const w,
                           TOKENEXTRA **tp,
                           const TOKENEXTRA *const stop) {
  TOKENEXTRA *p = *tp;

  while (p < stop && p->token != EOSB_TOKEN) {
    const int t = p->token;
    const struct vp9_token *const a = &vp9_coef_encodings[t];
    const vp9_extra_bit *const b = &vp9_extra_bits[t];
    int i = 0;
    int v = a->value;
    int n = a->len;

    /* skip one or two nodes */
    if (p->skip_eob_node) {
      n -= p->skip_eob_node;
      i = 2 * p->skip_eob_node;
    }

    // TODO(jbb): expanding this can lead to big gains.  It allows
    // much better branch prediction and would enable us to avoid numerous
    // lookups and compares.

    // If we have a token that's in the constrained set, the coefficient tree
    // is split into two treed writes.  The first treed write takes care of the
    // unconstrained nodes.  The second treed write takes care of the
    // constrained nodes.
    if (t >= TWO_TOKEN && t < DCT_EOB_TOKEN) {
      int len = UNCONSTRAINED_NODES - p->skip_eob_node;
      int bits = v >> (n - len);
      treed_write(w, vp9_coef_tree, p->context_tree, bits, len, i);
      treed_write(w, vp9_coef_con_tree,
                  vp9_pareto8_full[p->context_tree[PIVOT_NODE] - 1], v, n - len,
                  0);
    } else {
      treed_write(w, vp9_coef_tree, p->context_tree, v, n, i);
    }

    if (b->base_val) {
      const int e = p->extra, l = b->len;

      if (l) {
        const unsigned char *pb = b->prob;
        int v = e >> 1;
        int n = l;              /* number of bits in v, assumed nonzero */
        int i = 0;

        do {
          const int bb = (v >> --n) & 1;
          vp9_write(w, bb, pb[i >> 1]);
          i = b->tree[i + bb];
        } while (n);
      }

      vp9_write_bit(w, e & 1);
    }

    ++p;
  }

  *tp = p + (p->token == EOSB_TOKEN);
}

static void write_sb_mv_ref(vp9_writer *w, MB_PREDICTION_MODE mode,
                            const vp9_prob *p) {
  assert(is_inter_mode(mode));
  write_token(w, vp9_inter_mode_tree, p,
              &vp9_inter_mode_encodings[INTER_OFFSET(mode)]);
}

static void write_segment_id(vp9_writer *w, const struct segmentation *seg,
                             int segment_id) {
  if (seg->enabled && seg->update_map)
    treed_write(w, vp9_segment_tree, seg->tree_probs, segment_id, 3, 0);
}

// This function encodes the reference frame
static void encode_ref_frame(VP9_COMP *cpi, vp9_writer *bc) {
  VP9_COMMON *const cm = &cpi->common;
  MACROBLOCK *const x = &cpi->mb;
  MACROBLOCKD *const xd = &x->e_mbd;
  MB_MODE_INFO *mi = &xd->mi_8x8[0]->mbmi;
  const int segment_id = mi->segment_id;
  int seg_ref_active = vp9_segfeature_active(&cm->seg, segment_id,
                                             SEG_LVL_REF_FRAME);
  // If segment level coding of this signal is disabled...
  // or the segment allows multiple reference frame options
  if (!seg_ref_active) {
    // does the feature use compound prediction or not
    // (if not specified at the frame/segment level)
    if (cm->comp_pred_mode == REFERENCE_MODE_SELECT) {
      vp9_write(bc, mi->ref_frame[1] > INTRA_FRAME,
                vp9_get_pred_prob_comp_inter_inter(cm, xd));
    } else {
      assert((mi->ref_frame[1] <= INTRA_FRAME) ==
                 (cm->comp_pred_mode == SINGLE_REFERENCE));
    }

    if (mi->ref_frame[1] > INTRA_FRAME) {
      vp9_write(bc, mi->ref_frame[0] == GOLDEN_FRAME,
                vp9_get_pred_prob_comp_ref_p(cm, xd));
    } else {
      vp9_write(bc, mi->ref_frame[0] != LAST_FRAME,
                vp9_get_pred_prob_single_ref_p1(cm, xd));
      if (mi->ref_frame[0] != LAST_FRAME)
        vp9_write(bc, mi->ref_frame[0] != GOLDEN_FRAME,
                  vp9_get_pred_prob_single_ref_p2(cm, xd));
    }
  } else {
    assert(mi->ref_frame[1] <= INTRA_FRAME);
    assert(vp9_get_segdata(&cm->seg, segment_id, SEG_LVL_REF_FRAME) ==
           mi->ref_frame[0]);
  }

  // If using the prediction model we have nothing further to do because
  // the reference frame is fully coded by the segment.
}

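// Packs all mode info for one inter-frame block: segment id, skip flag,
// reference frame, transform size, prediction mode(s), interpolation filter
// and, for NEWMV modes, the motion vector differences.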
static void pack_inter_mode_mvs(VP9_COMP *cpi, MODE_INFO *m, vp9_writer *bc) {
  VP9_COMMON *const cm = &cpi->common;
  const nmv_context *nmvc = &cm->fc.nmvc;
  MACROBLOCK *const x = &cpi->mb;
  MACROBLOCKD *const xd = &x->e_mbd;
  struct segmentation *seg = &cm->seg;
  MB_MODE_INFO *const mi = &m->mbmi;
  const MV_REFERENCE_FRAME rf = mi->ref_frame[0];
  const MB_PREDICTION_MODE mode = mi->mode;
  const int segment_id = mi->segment_id;
  int skip_coeff;
  const BLOCK_SIZE bsize = mi->sb_type;
  const int allow_hp = cm->allow_high_precision_mv;

#ifdef ENTROPY_STATS
  active_section = 9;
#endif

  if (seg->update_map) {
    if (seg->temporal_update) {
      const int pred_flag = mi->seg_id_predicted;
      vp9_prob pred_prob = vp9_get_pred_prob_seg_id(seg, xd);
      vp9_write(bc, pred_flag, pred_prob);
      if (!pred_flag)
        write_segment_id(bc, seg, segment_id);
    } else {
      write_segment_id(bc, seg, segment_id);
    }
  }

  skip_coeff = write_skip_coeff(cpi, segment_id, m, bc);

  if (!vp9_segfeature_active(seg, segment_id, SEG_LVL_REF_FRAME))
    vp9_write(bc, rf != INTRA_FRAME,
              vp9_get_pred_prob_intra_inter(cm, xd));

  if (bsize >= BLOCK_8X8 && cm->tx_mode == TX_MODE_SELECT &&
      !(rf != INTRA_FRAME &&
        (skip_coeff || vp9_segfeature_active(seg, segment_id, SEG_LVL_SKIP)))) {
    write_selected_tx_size(cpi, m, mi->tx_size, bsize, bc);
  }

  if (rf == INTRA_FRAME) {
#ifdef ENTROPY_STATS
    active_section = 6;
#endif

    if (bsize >= BLOCK_8X8) {
      write_intra_mode(bc, mode, cm->fc.y_mode_prob[size_group_lookup[bsize]]);
    } else {
      int idx, idy;
      const int num_4x4_blocks_wide = num_4x4_blocks_wide_lookup[bsize];
      const int num_4x4_blocks_high = num_4x4_blocks_high_lookup[bsize];
      for (idy = 0; idy < 2; idy += num_4x4_blocks_high) {
        for (idx = 0; idx < 2; idx += num_4x4_blocks_wide) {
          const MB_PREDICTION_MODE bm = m->bmi[idy * 2 + idx].as_mode;
          write_intra_mode(bc, bm, cm->fc.y_mode_prob[0]);
        }
      }
    }
    write_intra_mode(bc, mi->uv_mode, cm->fc.uv_mode_prob[mode]);
  } else {
    vp9_prob *mv_ref_p;
    encode_ref_frame(cpi, bc);
    mv_ref_p = cpi->common.fc.inter_mode_probs[mi->mode_context[rf]];

#ifdef ENTROPY_STATS
    active_section = 3;
#endif

    // If segment skip is not enabled code the mode.
    if (!vp9_segfeature_active(seg, segment_id, SEG_LVL_SKIP)) {
      if (bsize >= BLOCK_8X8) {
        write_sb_mv_ref(bc, mode, mv_ref_p);
        ++cm->counts.inter_mode[mi->mode_context[rf]]
                               [INTER_OFFSET(mode)];
      }
    }

    if (cm->mcomp_filter_type == SWITCHABLE) {
      const int ctx = vp9_get_pred_context_switchable_interp(xd);
      write_token(bc, vp9_switchable_interp_tree,
                  cm->fc.switchable_interp_prob[ctx],
                  &vp9_switchable_interp_encodings[mi->interp_filter]);
    } else {
      assert(mi->interp_filter == cm->mcomp_filter_type);
    }

    if (bsize < BLOCK_8X8) {
      const int num_4x4_blocks_wide = num_4x4_blocks_wide_lookup[bsize];
      const int num_4x4_blocks_high = num_4x4_blocks_high_lookup[bsize];
      int idx, idy;
      for (idy = 0; idy < 2; idy += num_4x4_blocks_high) {
        for (idx = 0; idx < 2; idx += num_4x4_blocks_wide) {
          const int j = idy * 2 + idx;
          const MB_PREDICTION_MODE blockmode = m->bmi[j].as_mode;
          write_sb_mv_ref(bc, blockmode, mv_ref_p);
          ++cm->counts.inter_mode[mi->mode_context[rf]]
                                 [INTER_OFFSET(blockmode)];

          if (blockmode == NEWMV) {
#ifdef ENTROPY_STATS
            active_section = 11;
#endif
            vp9_encode_mv(cpi, bc, &m->bmi[j].as_mv[0].as_mv,
                          &mi->best_mv[0].as_mv, nmvc, allow_hp);

            if (has_second_ref(mi))
              vp9_encode_mv(cpi, bc, &m->bmi[j].as_mv[1].as_mv,
                            &mi->best_mv[1].as_mv, nmvc, allow_hp);
          }
        }
      }
    } else if (mode == NEWMV) {
#ifdef ENTROPY_STATS
      active_section = 5;
#endif
      vp9_encode_mv(cpi, bc, &mi->mv[0].as_mv,
                    &mi->best_mv[0].as_mv, nmvc, allow_hp);

      if (has_second_ref(mi))
        vp9_encode_mv(cpi, bc, &mi->mv[1].as_mv,
                      &mi->best_mv[1].as_mv, nmvc, allow_hp);
    }
  }
}

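// Key-frame counterpart of pack_inter_mode_mvs: writes segment id, skip flag,
// transform size and intra prediction modes, with the y modes coded from the
// above/left mode context.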
static void write_mb_modes_kf(const VP9_COMP *cpi, MODE_INFO **mi_8x8,
                              vp9_writer *bc) {
  const VP9_COMMON *const cm = &cpi->common;
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  const struct segmentation *const seg = &cm->seg;
  MODE_INFO *m = mi_8x8[0];
  const int ym = m->mbmi.mode;
  const int segment_id = m->mbmi.segment_id;
  MODE_INFO *above_mi = mi_8x8[-xd->mode_info_stride];
  MODE_INFO *left_mi = xd->left_available ? mi_8x8[-1] : NULL;

  if (seg->update_map)
    write_segment_id(bc, seg, m->mbmi.segment_id);

  write_skip_coeff(cpi, segment_id, m, bc);

  if (m->mbmi.sb_type >= BLOCK_8X8 && cm->tx_mode == TX_MODE_SELECT)
    write_selected_tx_size(cpi, m, m->mbmi.tx_size, m->mbmi.sb_type, bc);

  if (m->mbmi.sb_type >= BLOCK_8X8) {
    const MB_PREDICTION_MODE A = above_block_mode(m, above_mi, 0);
    const MB_PREDICTION_MODE L = left_block_mode(m, left_mi, 0);
    write_intra_mode(bc, ym, vp9_kf_y_mode_prob[A][L]);
  } else {
    int idx, idy;
    const int num_4x4_blocks_wide = num_4x4_blocks_wide_lookup[m->mbmi.sb_type];
    const int num_4x4_blocks_high = num_4x4_blocks_high_lookup[m->mbmi.sb_type];
    for (idy = 0; idy < 2; idy += num_4x4_blocks_high) {
      for (idx = 0; idx < 2; idx += num_4x4_blocks_wide) {
        int i = idy * 2 + idx;
        const MB_PREDICTION_MODE A = above_block_mode(m, above_mi, i);
        const MB_PREDICTION_MODE L = left_block_mode(m, left_mi, i);
        const int bm = m->bmi[i].as_mode;
#ifdef ENTROPY_STATS
        ++intra_mode_stats[A][L][bm];
#endif
        write_intra_mode(bc, bm, vp9_kf_y_mode_prob[A][L]);
      }
    }
  }

  write_intra_mode(bc, m->mbmi.uv_mode, vp9_kf_uv_mode_prob[ym]);
}

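// Writes the mode info and then the coefficient tokens for a single block,
// dispatching to the key-frame or inter-frame mode writer as appropriate.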
static void write_modes_b(VP9_COMP *cpi, const TileInfo *const tile,
                          vp9_writer *w, TOKENEXTRA **tok, TOKENEXTRA *tok_end,
                          int mi_row, int mi_col) {
  VP9_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  MODE_INFO *m;

  xd->mi_8x8 = cm->mi_grid_visible + (mi_row * cm->mode_info_stride + mi_col);
  m = xd->mi_8x8[0];

  set_mi_row_col(xd, tile,
                 mi_row, num_8x8_blocks_high_lookup[m->mbmi.sb_type],
                 mi_col, num_8x8_blocks_wide_lookup[m->mbmi.sb_type],
                 cm->mi_rows, cm->mi_cols);
  if (frame_is_intra_only(cm)) {
    write_mb_modes_kf(cpi, xd->mi_8x8, w);
#ifdef ENTROPY_STATS
    active_section = 8;
#endif
  } else {
    pack_inter_mode_mvs(cpi, m, w);
#ifdef ENTROPY_STATS
    active_section = 1;
#endif
  }

  assert(*tok < tok_end);
  pack_mb_tokens(w, tok, tok_end);
}

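// Codes the partition type for a block. At the right and bottom frame edges
// only the partitions that fit are legal, so only the remaining ambiguity
// (if any) is written.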
static void write_partition(VP9_COMP *cpi, int hbs, int mi_row, int mi_col,
                            PARTITION_TYPE p, BLOCK_SIZE bsize, vp9_writer *w) {
  VP9_COMMON *const cm = &cpi->common;
  const int ctx = partition_plane_context(cpi->above_seg_context,
                                          cpi->left_seg_context,
                                          mi_row, mi_col, bsize);
  const vp9_prob *const probs = get_partition_probs(cm, ctx);
  const int has_rows = (mi_row + hbs) < cm->mi_rows;
  const int has_cols = (mi_col + hbs) < cm->mi_cols;

  if (has_rows && has_cols) {
    write_token(w, vp9_partition_tree, probs, &vp9_partition_encodings[p]);
  } else if (!has_rows && has_cols) {
    assert(p == PARTITION_SPLIT || p == PARTITION_HORZ);
    vp9_write(w, p == PARTITION_SPLIT, probs[1]);
  } else if (has_rows && !has_cols) {
    assert(p == PARTITION_SPLIT || p == PARTITION_VERT);
    vp9_write(w, p == PARTITION_SPLIT, probs[2]);
  } else {
    assert(p == PARTITION_SPLIT);
  }
}

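// Recursively writes one superblock: the partition choice first, then either
// the single block or each sub-partition, finally updating the partition
// context used by later blocks.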
static void write_modes_sb(VP9_COMP *cpi, const TileInfo *const tile,
                           vp9_writer *w, TOKENEXTRA **tok, TOKENEXTRA *tok_end,
                           int mi_row, int mi_col, BLOCK_SIZE bsize) {
  VP9_COMMON *const cm = &cpi->common;
  const int bsl = b_width_log2(bsize);
  const int bs = (1 << bsl) / 4;
  PARTITION_TYPE partition;
  BLOCK_SIZE subsize;
  MODE_INFO *m = cm->mi_grid_visible[mi_row * cm->mode_info_stride + mi_col];

  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols)
    return;

  partition = partition_lookup[bsl][m->mbmi.sb_type];
  write_partition(cpi, bs, mi_row, mi_col, partition, bsize, w);
  subsize = get_subsize(bsize, partition);
  if (subsize < BLOCK_8X8) {
    write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
  } else {
    switch (partition) {
      case PARTITION_NONE:
        write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
        break;
      case PARTITION_HORZ:
        write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
        if (mi_row + bs < cm->mi_rows)
          write_modes_b(cpi, tile, w, tok, tok_end, mi_row + bs, mi_col);
        break;
      case PARTITION_VERT:
        write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
        if (mi_col + bs < cm->mi_cols)
          write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + bs);
        break;
      case PARTITION_SPLIT:
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, subsize);
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col + bs,
                       subsize);
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row + bs, mi_col,
                       subsize);
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row + bs, mi_col + bs,
                       subsize);
        break;
      default:
        assert(0);
    }
  }

  // update partition context
  if (bsize >= BLOCK_8X8 &&
      (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT))
    update_partition_context(cpi->above_seg_context, cpi->left_seg_context,
                             mi_row, mi_col, subsize, bsize);
}

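// Walks a tile in raster order of 64x64 superblocks, resetting the left
// context at the start of each superblock row.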
static void write_modes(VP9_COMP *cpi, const TileInfo *const tile,
                        vp9_writer *w, TOKENEXTRA **tok, TOKENEXTRA *tok_end) {
  int mi_row, mi_col;

  for (mi_row = tile->mi_row_start; mi_row < tile->mi_row_end;
       mi_row += MI_BLOCK_SIZE) {
    vp9_zero(cpi->left_seg_context);
    for (mi_col = tile->mi_col_start; mi_col < tile->mi_col_end;
         mi_col += MI_BLOCK_SIZE)
      write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, BLOCK_64X64);
  }
}

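// Converts the coefficient token counts accumulated for this frame into
// per-branch counts and model probabilities for the given transform size.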
static void build_tree_distribution(VP9_COMP *cpi, TX_SIZE tx_size) {
  vp9_coeff_probs_model *coef_probs = cpi->frame_coef_probs[tx_size];
  vp9_coeff_count *coef_counts = cpi->coef_counts[tx_size];
  unsigned int (*eob_branch_ct)[REF_TYPES][COEF_BANDS][PREV_COEF_CONTEXTS] =
      cpi->common.counts.eob_branch[tx_size];
  vp9_coeff_stats *coef_branch_ct = cpi->frame_branch_ct[tx_size];
  int i, j, k, l, m;

  for (i = 0; i < BLOCK_TYPES; ++i) {
    for (j = 0; j < REF_TYPES; ++j) {
      for (k = 0; k < COEF_BANDS; ++k) {
        for (l = 0; l < PREV_COEF_CONTEXTS; ++l) {
          if (l >= 3 && k == 0)
            continue;
          vp9_tree_probs_from_distribution(vp9_coef_tree,
                                           coef_branch_ct[i][j][k][l],
                                           coef_counts[i][j][k][l]);
          coef_branch_ct[i][j][k][l][0][1] = eob_branch_ct[i][j][k][l] -
                                             coef_branch_ct[i][j][k][l][0][0];
          for (m = 0; m < UNCONSTRAINED_NODES; ++m)
            coef_probs[i][j][k][l][m] = get_binary_prob(
                                            coef_branch_ct[i][j][k][l][m][0],
                                            coef_branch_ct[i][j][k][l][m][1]);
#ifdef ENTROPY_STATS
          if (!cpi->dummy_packing) {
            int t;
            for (t = 0; t < MAX_ENTROPY_TOKENS; ++t)
              context_counters[tx_size][i][j][k][l][t] +=
                  coef_counts[i][j][k][l][t];
            context_counters[tx_size][i][j][k][l][MAX_ENTROPY_TOKENS] +=
                eob_branch_ct[i][j][k][l];
          }
#endif
        }
      }
    }
  }
}

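// Decides which coefficient probabilities to update for one transform size
// and writes the updates. use_fast_coef_updates == 0 does a full savings
// search; the faster modes restrict which bands and contexts are considered.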
static void update_coef_probs_common(vp9_writer* const bc, VP9_COMP *cpi,
                                     TX_SIZE tx_size) {
  vp9_coeff_probs_model *new_frame_coef_probs = cpi->frame_coef_probs[tx_size];
  vp9_coeff_probs_model *old_frame_coef_probs =
      cpi->common.fc.coef_probs[tx_size];
  vp9_coeff_stats *frame_branch_ct = cpi->frame_branch_ct[tx_size];
  const vp9_prob upd = DIFF_UPDATE_PROB;
  const int entropy_nodes_update = UNCONSTRAINED_NODES;
  int i, j, k, l, t;
  switch (cpi->sf.use_fast_coef_updates) {
    case 0: {
      /* dry run to see if there is any update at all needed */
      int savings = 0;
      int update[2] = {0, 0};
      for (i = 0; i < BLOCK_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < PREV_COEF_CONTEXTS; ++l) {
              for (t = 0; t < entropy_nodes_update; ++t) {
                vp9_prob newp = new_frame_coef_probs[i][j][k][l][t];
                const vp9_prob oldp = old_frame_coef_probs[i][j][k][l][t];
                int s;
                int u = 0;

                if (l >= 3 && k == 0)
                  continue;
                if (t == PIVOT_NODE)
                  s = vp9_prob_diff_update_savings_search_model(
                      frame_branch_ct[i][j][k][l][0],
                      old_frame_coef_probs[i][j][k][l], &newp, upd, i, j);
                else
                  s = vp9_prob_diff_update_savings_search(
                      frame_branch_ct[i][j][k][l][t], oldp, &newp, upd);
                if (s > 0 && newp != oldp)
                  u = 1;
                if (u)
                  savings += s - (int)(vp9_cost_zero(upd));
                else
                  savings -= (int)(vp9_cost_zero(upd));
                update[u]++;
              }
            }
          }
        }
      }

      // printf("Update %d %d, savings %d\n", update[0], update[1], savings);
      /* Is coef updated at all */
      if (update[1] == 0 || savings < 0) {
        vp9_write_bit(bc, 0);
        return;
      }
      vp9_write_bit(bc, 1);
      for (i = 0; i < BLOCK_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < PREV_COEF_CONTEXTS; ++l) {
              // calc probs and branch cts for this frame only
              for (t = 0; t < entropy_nodes_update; ++t) {
                vp9_prob newp = new_frame_coef_probs[i][j][k][l][t];
                vp9_prob *oldp = old_frame_coef_probs[i][j][k][l] + t;
                const vp9_prob upd = DIFF_UPDATE_PROB;
                int s;
                int u = 0;
                if (l >= 3 && k == 0)
                  continue;
                if (t == PIVOT_NODE)
                  s = vp9_prob_diff_update_savings_search_model(
                      frame_branch_ct[i][j][k][l][0],
                      old_frame_coef_probs[i][j][k][l], &newp, upd, i, j);
                else
                  s = vp9_prob_diff_update_savings_search(
                      frame_branch_ct[i][j][k][l][t],
                      *oldp, &newp, upd);
                if (s > 0 && newp != *oldp)
                  u = 1;
                vp9_write(bc, u, upd);
#ifdef ENTROPY_STATS
                if (!cpi->dummy_packing)
                  ++tree_update_hist[tx_size][i][j][k][l][t][u];
#endif
                if (u) {
                  /* send/use new probability */
                  vp9_write_prob_diff_update(bc, newp, *oldp);
                  *oldp = newp;
                }
              }
            }
          }
        }
      }
      return;
    }

    case 1:
    case 2: {
      const int prev_coef_contexts_to_update =
          (cpi->sf.use_fast_coef_updates == 2 ?
           PREV_COEF_CONTEXTS >> 1 : PREV_COEF_CONTEXTS);
      const int coef_band_to_update =
          (cpi->sf.use_fast_coef_updates == 2 ?
           COEF_BANDS >> 1 : COEF_BANDS);
      int updates = 0;
      int noupdates_before_first = 0;
      for (i = 0; i < BLOCK_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < PREV_COEF_CONTEXTS; ++l) {
              // calc probs and branch cts for this frame only
              for (t = 0; t < entropy_nodes_update; ++t) {
                vp9_prob newp = new_frame_coef_probs[i][j][k][l][t];
                vp9_prob *oldp = old_frame_coef_probs[i][j][k][l] + t;
                int s;
                int u = 0;
                if (l >= 3 && k == 0)
                  continue;
                if (l >= prev_coef_contexts_to_update ||
                    k >= coef_band_to_update) {
                  u = 0;
                } else {
                  if (t == PIVOT_NODE)
                    s = vp9_prob_diff_update_savings_search_model(
                        frame_branch_ct[i][j][k][l][0],
                        old_frame_coef_probs[i][j][k][l], &newp, upd, i, j);
                  else
                    s = vp9_prob_diff_update_savings_search(
                        frame_branch_ct[i][j][k][l][t],
                        *oldp, &newp, upd);
                  if (s > 0 && newp != *oldp)
                    u = 1;
                }
                updates += u;
                if (u == 0 && updates == 0) {
                  noupdates_before_first++;
#ifdef ENTROPY_STATS
                  if (!cpi->dummy_packing)
                    ++tree_update_hist[tx_size][i][j][k][l][t][u];
#endif
                  continue;
                }
                if (u == 1 && updates == 1) {
                  int v;
                  // first update
                  vp9_write_bit(bc, 1);
                  for (v = 0; v < noupdates_before_first; ++v)
                    vp9_write(bc, 0, upd);
                }
                vp9_write(bc, u, upd);
#ifdef ENTROPY_STATS
                if (!cpi->dummy_packing)
                  ++tree_update_hist[tx_size][i][j][k][l][t][u];
#endif
                if (u) {
                  /* send/use new probability */
                  vp9_write_prob_diff_update(bc, newp, *oldp);
                  *oldp = newp;
                }
              }
            }
          }
        }
      }
      if (updates == 0) {
        vp9_write_bit(bc, 0);  // no updates
      }
      return;
    }

    default:
      assert(0);
  }
}

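// Builds the frame token distributions for every transform size, then writes
// coefficient probability updates for the sizes permitted by the frame's
// transform mode.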
static void update_coef_probs(VP9_COMP* cpi, vp9_writer* w) {
  const TX_MODE tx_mode = cpi->common.tx_mode;
  const TX_SIZE max_tx_size = tx_mode_to_biggest_tx_size[tx_mode];
  TX_SIZE tx_size;
  vp9_clear_system_state();

  for (tx_size = TX_4X4; tx_size <= TX_32X32; ++tx_size)
    build_tree_distribution(cpi, tx_size);

  for (tx_size = TX_4X4; tx_size <= max_tx_size; ++tx_size)
    update_coef_probs_common(w, cpi, tx_size);
}