/*
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */

#include <assert.h>
#include <stdio.h>
#include <limits.h>

#include "vpx/vpx_encoder.h"
#include "vpx_mem/vpx_mem.h"

#include "vp9/common/vp9_entropymode.h"
#include "vp9/common/vp9_entropymv.h"
#include "vp9/common/vp9_findnearmv.h"
#include "vp9/common/vp9_tile_common.h"
#include "vp9/common/vp9_seg_common.h"
#include "vp9/common/vp9_pred_common.h"
#include "vp9/common/vp9_entropy.h"
#include "vp9/common/vp9_mvref_common.h"
#include "vp9/common/vp9_treecoder.h"
#include "vp9/common/vp9_systemdependent.h"
#include "vp9/common/vp9_pragmas.h"

#include "vp9/encoder/vp9_mcomp.h"
#include "vp9/encoder/vp9_encodemv.h"
#include "vp9/encoder/vp9_bitstream.h"
#include "vp9/encoder/vp9_segmentation.h"
#include "vp9/encoder/vp9_subexp.h"
#include "vp9/encoder/vp9_write_bit_buffer.h"

#if defined(SECTIONBITS_OUTPUT)
unsigned __int64 Sectionbits[500];
#endif

#ifdef ENTROPY_STATS
int intra_mode_stats[INTRA_MODES]
                    [INTRA_MODES]
                    [INTRA_MODES];
vp9_coeff_stats tree_update_hist[TX_SIZES][BLOCK_TYPES];

extern unsigned int active_section;
#endif

#ifdef MODE_STATS
int64_t tx_count_32x32p_stats[TX_SIZE_CONTEXTS][TX_SIZES];
int64_t tx_count_16x16p_stats[TX_SIZE_CONTEXTS][TX_SIZES - 1];
int64_t tx_count_8x8p_stats[TX_SIZE_CONTEXTS][TX_SIZES - 2];
int64_t switchable_interp_stats[SWITCHABLE_FILTERS+1]
                               [SWITCHABLE_FILTERS];

void init_tx_count_stats() {
  vp9_zero(tx_count_32x32p_stats);
  vp9_zero(tx_count_16x16p_stats);
  vp9_zero(tx_count_8x8p_stats);
}

void init_switchable_interp_stats() {
  vp9_zero(switchable_interp_stats);
}

static void update_tx_count_stats(VP9_COMMON *cm) {
  int i, j;
  for (i = 0; i < TX_SIZE_CONTEXTS; i++) {
    for (j = 0; j < TX_SIZES; j++) {
      tx_count_32x32p_stats[i][j] += cm->fc.tx_count_32x32p[i][j];
    }
  }
  for (i = 0; i < TX_SIZE_CONTEXTS; i++) {
    for (j = 0; j < TX_SIZES - 1; j++) {
      tx_count_16x16p_stats[i][j] += cm->fc.tx_count_16x16p[i][j];
    }
  }
  for (i = 0; i < TX_SIZE_CONTEXTS; i++) {
    for (j = 0; j < TX_SIZES - 2; j++) {
      tx_count_8x8p_stats[i][j] += cm->fc.tx_count_8x8p[i][j];
    }
  }
}

static void update_switchable_interp_stats(VP9_COMMON *cm) {
  int i, j;
  for (i = 0; i < SWITCHABLE_FILTERS+1; ++i)
    for (j = 0; j < SWITCHABLE_FILTERS; ++j) {
      switchable_interp_stats[i][j] += cm->fc.switchable_interp_count[i][j];
    }
}

void write_tx_count_stats() {
  int i, j;
  FILE *fp = fopen("tx_count.bin", "wb");
  fwrite(tx_count_32x32p_stats, sizeof(tx_count_32x32p_stats), 1, fp);
  fwrite(tx_count_16x16p_stats, sizeof(tx_count_16x16p_stats), 1, fp);
  fwrite(tx_count_8x8p_stats, sizeof(tx_count_8x8p_stats), 1, fp);
  fclose(fp);

  printf(
      "vp9_default_tx_count_32x32p[TX_SIZE_CONTEXTS][TX_SIZES] = {\n");
  for (i = 0; i < TX_SIZE_CONTEXTS; i++) {
    printf("  { ");
    for (j = 0; j < TX_SIZES; j++) {
      printf("%"PRId64", ", tx_count_32x32p_stats[i][j]);
    }
    printf("},\n");
  }
  printf("};\n");
  printf(
      "vp9_default_tx_count_16x16p[TX_SIZE_CONTEXTS][TX_SIZES-1] = {\n");
  for (i = 0; i < TX_SIZE_CONTEXTS; i++) {
    printf("  { ");
    for (j = 0; j < TX_SIZES - 1; j++) {
      printf("%"PRId64", ", tx_count_16x16p_stats[i][j]);
    }
    printf("},\n");
  }
  printf("};\n");
  printf(
      "vp9_default_tx_count_8x8p[TX_SIZE_CONTEXTS][TX_SIZES-2] = {\n");
  for (i = 0; i < TX_SIZE_CONTEXTS; i++) {
    printf("  { ");
    for (j = 0; j < TX_SIZES - 2; j++) {
      printf("%"PRId64", ", tx_count_8x8p_stats[i][j]);
    }
    printf("},\n");
  }
  printf("};\n");
}

void write_switchable_interp_stats() {
  int i, j;
  FILE *fp = fopen("switchable_interp.bin", "wb");
  fwrite(switchable_interp_stats, sizeof(switchable_interp_stats), 1, fp);
  fclose(fp);

  printf(
      "vp9_default_switchable_filter_count[SWITCHABLE_FILTERS+1]"
      "[SWITCHABLE_FILTERS] = {\n");
  for (i = 0; i < SWITCHABLE_FILTERS+1; i++) {
    printf("  { ");
    for (j = 0; j < SWITCHABLE_FILTERS; j++) {
      printf("%"PRId64", ", switchable_interp_stats[i][j]);
    }
    printf("},\n");
  }
  printf("};\n");
}
#endif

static INLINE void write_be32(uint8_t *p, int value) {
  p[0] = value >> 24;
  p[1] = value >> 16;
  p[2] = value >> 8;
  p[3] = value;
}

void vp9_encode_unsigned_max(struct vp9_write_bit_buffer *wb,
                             int data, int max) {
  vp9_wb_write_literal(wb, data, get_unsigned_bits(max));
}

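// Computes updated tree probabilities from the event counts and conditionally
// codes the per-node differences against the current probabilities.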
static void update_mode(
  vp9_writer *w,
  int n,
  vp9_tree tree,
  vp9_prob Pnew[/* n-1 */],
  vp9_prob Pcur[/* n-1 */],
  unsigned int bct[/* n-1 */] [2],
  const unsigned int num_events[/* n */]
) {
  int i = 0;

  vp9_tree_probs_from_distribution(tree, Pnew, bct, num_events, 0);
  n--;

  for (i = 0; i < n; ++i)
    vp9_cond_prob_diff_update(w, &Pcur[i], bct[i]);
}

static void update_mbintra_mode_probs(VP9_COMP* const cpi,
                                      vp9_writer* const bc) {
  VP9_COMMON *const cm = &cpi->common;
  int j;
  vp9_prob pnew[INTRA_MODES - 1];
  unsigned int bct[INTRA_MODES - 1][2];

  for (j = 0; j < BLOCK_SIZE_GROUPS; j++)
    update_mode(bc, INTRA_MODES, vp9_intra_mode_tree, pnew,
                cm->fc.y_mode_prob[j], bct,
                (unsigned int *)cpi->y_mode_count[j]);
}

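// Signals the transform size chosen for the block with up to three binary
// decisions, gated by the block size.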
static void write_selected_tx_size(const VP9_COMP *cpi, MODE_INFO *m,
                                   TX_SIZE tx_size, BLOCK_SIZE bsize,
                                   vp9_writer *w) {
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  const vp9_prob *tx_probs = get_tx_probs2(xd, &cpi->common.fc.tx_probs, m);
  vp9_write(w, tx_size != TX_4X4, tx_probs[0]);
  if (bsize >= BLOCK_16X16 && tx_size != TX_4X4) {
    vp9_write(w, tx_size != TX_8X8, tx_probs[1]);
    if (bsize >= BLOCK_32X32 && tx_size != TX_8X8)
      vp9_write(w, tx_size != TX_16X16, tx_probs[2]);
  }
}

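// Writes the skip flag for the block unless the segment forces skip, and
// returns the value that was (implicitly or explicitly) coded.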
static int write_skip_coeff(const VP9_COMP *cpi, int segment_id, MODE_INFO *m,
                            vp9_writer *w) {
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  if (vp9_segfeature_active(&cpi->common.seg, segment_id, SEG_LVL_SKIP)) {
    return 1;
  } else {
    const int skip_coeff = m->mbmi.skip_coeff;
    vp9_write(w, skip_coeff, vp9_get_pred_prob_mbskip(&cpi->common, xd));
    return skip_coeff;
  }
}

void vp9_update_skip_probs(VP9_COMP *cpi, vp9_writer *w) {
  VP9_COMMON *cm = &cpi->common;
  int k;

  for (k = 0; k < MBSKIP_CONTEXTS; ++k)
    vp9_cond_prob_diff_update(w, &cm->fc.mbskip_probs[k], cm->counts.mbskip[k]);
}

static void write_intra_mode(vp9_writer *bc, int m, const vp9_prob *p) {
  write_token(bc, vp9_intra_mode_tree, p, vp9_intra_mode_encodings + m);
}

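// Builds per-context branch counts for the switchable interpolation filter
// tree and conditionally codes probability updates.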
static void update_switchable_interp_probs(VP9_COMP *const cpi,
                                           vp9_writer* const bc) {
  VP9_COMMON *const cm = &cpi->common;
  unsigned int branch_ct[SWITCHABLE_FILTERS + 1]
                        [SWITCHABLE_FILTERS - 1][2];
  vp9_prob new_prob[SWITCHABLE_FILTERS + 1][SWITCHABLE_FILTERS - 1];
  int i, j;
  for (j = 0; j <= SWITCHABLE_FILTERS; ++j) {
    vp9_tree_probs_from_distribution(
        vp9_switchable_interp_tree,
        new_prob[j], branch_ct[j],
        cm->counts.switchable_interp[j], 0);
  }
  for (j = 0; j <= SWITCHABLE_FILTERS; ++j) {
    for (i = 0; i < SWITCHABLE_FILTERS - 1; ++i) {
      vp9_cond_prob_diff_update(bc, &cm->fc.switchable_interp_prob[j][i],
                                branch_ct[j][i]);
    }
  }
#ifdef MODE_STATS
  if (!cpi->dummy_packing)
    update_switchable_interp_stats(cm);
#endif
}

static void update_inter_mode_probs(VP9_COMMON *cm, vp9_writer* const bc) {
  int i, j;

  for (i = 0; i < INTER_MODE_CONTEXTS; ++i) {
    unsigned int branch_ct[INTER_MODES - 1][2];
    vp9_prob new_prob[INTER_MODES - 1];

    vp9_tree_probs_from_distribution(vp9_inter_mode_tree,
                                     new_prob, branch_ct,
                                     cm->counts.inter_mode[i], NEARESTMV);

    for (j = 0; j < INTER_MODES - 1; ++j)
      vp9_cond_prob_diff_update(bc, &cm->fc.inter_mode_probs[i][j],
                                branch_ct[j]);
  }
}

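// Packs the coefficient tokens for one block: each token is written along its
// coefficient-tree path, followed by any extra bits and the sign bit.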
static void pack_mb_tokens(vp9_writer* const bc,
                           TOKENEXTRA **tp,
                           const TOKENEXTRA *const stop) {
  TOKENEXTRA *p = *tp;

  while (p < stop && p->token != EOSB_TOKEN) {
    const int t = p->token;
    const struct vp9_token *const a = vp9_coef_encodings + t;
    const vp9_extra_bit *const b = vp9_extra_bits + t;
    int i = 0;
    const vp9_prob *pp;
    int v = a->value;
    int n = a->len;
    vp9_prob probs[ENTROPY_NODES];

    if (t >= TWO_TOKEN) {
      vp9_model_to_full_probs(p->context_tree, probs);
      pp = probs;
    } else {
      pp = p->context_tree;
    }
    assert(pp != 0);

    /* skip one or two nodes */
    if (p->skip_eob_node) {
      n -= p->skip_eob_node;
      i = 2 * p->skip_eob_node;
    }

    do {
      const int bb = (v >> --n) & 1;
      vp9_write(bc, bb, pp[i >> 1]);
      i = vp9_coef_tree[i + bb];
    } while (n);

    if (b->base_val) {
      const int e = p->extra, l = b->len;

      if (l) {
        const unsigned char *pb = b->prob;
        int v = e >> 1;
        int n = l;              /* number of bits in v, assumed nonzero */
        int i = 0;

        do {
          const int bb = (v >> --n) & 1;
          vp9_write(bc, bb, pb[i >> 1]);
          i = b->tree[i + bb];
        } while (n);
      }

      vp9_write_bit(bc, e & 1);
    }

    ++p;
  }

  *tp = p + (p->token == EOSB_TOKEN);
}

static void write_sb_mv_ref(vp9_writer *w, MB_PREDICTION_MODE mode,
                            const vp9_prob *p) {
  assert(is_inter_mode(mode));
  write_token(w, vp9_inter_mode_tree, p,
              &vp9_inter_mode_encodings[inter_mode_offset(mode)]);
}

static void write_segment_id(vp9_writer *w, const struct segmentation *seg,
                             int segment_id) {
  if (seg->enabled && seg->update_map)
    treed_write(w, vp9_segment_tree, seg->tree_probs, segment_id, 3);
}

// This function encodes the reference frame
static void encode_ref_frame(VP9_COMP *cpi, vp9_writer *bc) {
  VP9_COMMON *const cm = &cpi->common;
  MACROBLOCK *const x = &cpi->mb;
  MACROBLOCKD *const xd = &x->e_mbd;
  MB_MODE_INFO *mi = &xd->mi_8x8[0]->mbmi;
  const int segment_id = mi->segment_id;
  int seg_ref_active = vp9_segfeature_active(&cm->seg, segment_id,
                                             SEG_LVL_REF_FRAME);
  // If segment level coding of this signal is disabled,
  // or the segment allows multiple reference frame options
  if (!seg_ref_active) {
    // Does this block use compound prediction or not
    // (if not specified at the frame/segment level)?
    if (cm->comp_pred_mode == HYBRID_PREDICTION) {
      vp9_write(bc, mi->ref_frame[1] > INTRA_FRAME,
                vp9_get_pred_prob_comp_inter_inter(cm, xd));
    } else {
      assert((mi->ref_frame[1] <= INTRA_FRAME) ==
                 (cm->comp_pred_mode == SINGLE_PREDICTION_ONLY));
    }

    if (mi->ref_frame[1] > INTRA_FRAME) {
      vp9_write(bc, mi->ref_frame[0] == GOLDEN_FRAME,
                vp9_get_pred_prob_comp_ref_p(cm, xd));
    } else {
      vp9_write(bc, mi->ref_frame[0] != LAST_FRAME,
                vp9_get_pred_prob_single_ref_p1(cm, xd));
      if (mi->ref_frame[0] != LAST_FRAME)
        vp9_write(bc, mi->ref_frame[0] != GOLDEN_FRAME,
                  vp9_get_pred_prob_single_ref_p2(cm, xd));
    }
  } else {
    assert(mi->ref_frame[1] <= INTRA_FRAME);
    assert(vp9_get_segdata(&cm->seg, segment_id, SEG_LVL_REF_FRAME) ==
           mi->ref_frame[0]);
  }

  // If using the prediction model we have nothing further to do because
  // the reference frame is fully coded by the segment.
}

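// Writes all mode info for a non-key-frame block: segment id, skip flag,
// reference frame(s), intra or inter prediction mode, interpolation filter
// and, for NEWMV, the motion vector(s).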
static void pack_inter_mode_mvs(VP9_COMP *cpi, MODE_INFO *m, vp9_writer *bc) {
  VP9_COMMON *const cm = &cpi->common;
  const nmv_context *nmvc = &cm->fc.nmvc;
  MACROBLOCK *const x = &cpi->mb;
  MACROBLOCKD *const xd = &x->e_mbd;
  struct segmentation *seg = &cm->seg;
  MB_MODE_INFO *const mi = &m->mbmi;
  const MV_REFERENCE_FRAME rf = mi->ref_frame[0];
  const MB_PREDICTION_MODE mode = mi->mode;
  const int segment_id = mi->segment_id;
  int skip_coeff;
  const BLOCK_SIZE bsize = mi->sb_type;
  const int allow_hp = xd->allow_high_precision_mv;

#ifdef ENTROPY_STATS
  active_section = 9;
#endif

  if (seg->update_map) {
    if (seg->temporal_update) {
      const int pred_flag = mi->seg_id_predicted;
      vp9_prob pred_prob = vp9_get_pred_prob_seg_id(seg, xd);
      vp9_write(bc, pred_flag, pred_prob);
      if (!pred_flag)
        write_segment_id(bc, seg, segment_id);
    } else {
      write_segment_id(bc, seg, segment_id);
    }
  }

  skip_coeff = write_skip_coeff(cpi, segment_id, m, bc);

  if (!vp9_segfeature_active(seg, segment_id, SEG_LVL_REF_FRAME))
    vp9_write(bc, rf != INTRA_FRAME,
              vp9_get_pred_prob_intra_inter(cm, xd));

  if (bsize >= BLOCK_8X8 && cm->tx_mode == TX_MODE_SELECT &&
      !(rf != INTRA_FRAME &&
        (skip_coeff || vp9_segfeature_active(seg, segment_id, SEG_LVL_SKIP)))) {
    write_selected_tx_size(cpi, m, mi->tx_size, bsize, bc);
  }

  if (rf == INTRA_FRAME) {
#ifdef ENTROPY_STATS
    active_section = 6;
#endif

    if (bsize >= BLOCK_8X8) {
      write_intra_mode(bc, mode, cm->fc.y_mode_prob[size_group_lookup[bsize]]);
    } else {
      int idx, idy;
      const int num_4x4_blocks_wide = num_4x4_blocks_wide_lookup[bsize];
      const int num_4x4_blocks_high = num_4x4_blocks_high_lookup[bsize];
      for (idy = 0; idy < 2; idy += num_4x4_blocks_high) {
        for (idx = 0; idx < 2; idx += num_4x4_blocks_wide) {
          const MB_PREDICTION_MODE bm = m->bmi[idy * 2 + idx].as_mode;
          write_intra_mode(bc, bm, cm->fc.y_mode_prob[0]);
        }
      }
    }
    write_intra_mode(bc, mi->uv_mode, cm->fc.uv_mode_prob[mode]);
  } else {
    vp9_prob *mv_ref_p;
    encode_ref_frame(cpi, bc);
    mv_ref_p = cpi->common.fc.inter_mode_probs[mi->mode_context[rf]];

#ifdef ENTROPY_STATS
    active_section = 3;
#endif

    // If segment skip is not enabled, code the mode.
    if (!vp9_segfeature_active(seg, segment_id, SEG_LVL_SKIP)) {
      if (bsize >= BLOCK_8X8) {
        write_sb_mv_ref(bc, mode, mv_ref_p);
        ++cm->counts.inter_mode[mi->mode_context[rf]]
                               [inter_mode_offset(mode)];
      }
    }

    if (cm->mcomp_filter_type == SWITCHABLE) {
      const int ctx = vp9_get_pred_context_switchable_interp(xd);
      write_token(bc, vp9_switchable_interp_tree,
                  cm->fc.switchable_interp_prob[ctx],
                  &vp9_switchable_interp_encodings[mi->interp_filter]);
    } else {
      assert(mi->interp_filter == cm->mcomp_filter_type);
    }

    if (bsize < BLOCK_8X8) {
      const int num_4x4_blocks_wide = num_4x4_blocks_wide_lookup[bsize];
      const int num_4x4_blocks_high = num_4x4_blocks_high_lookup[bsize];
      int idx, idy;
      for (idy = 0; idy < 2; idy += num_4x4_blocks_high) {
        for (idx = 0; idx < 2; idx += num_4x4_blocks_wide) {
          const int j = idy * 2 + idx;
          const MB_PREDICTION_MODE blockmode = m->bmi[j].as_mode;
          write_sb_mv_ref(bc, blockmode, mv_ref_p);
          ++cm->counts.inter_mode[mi->mode_context[rf]]
                                 [inter_mode_offset(blockmode)];

          if (blockmode == NEWMV) {
#ifdef ENTROPY_STATS
            active_section = 11;
#endif
            vp9_encode_mv(cpi, bc, &m->bmi[j].as_mv[0].as_mv,
                          &mi->best_mv[0].as_mv, nmvc, allow_hp);

            if (has_second_ref(mi))
              vp9_encode_mv(cpi, bc, &m->bmi[j].as_mv[1].as_mv,
                            &mi->best_mv[1].as_mv, nmvc, allow_hp);
          }
        }
      }
    } else if (mode == NEWMV) {
#ifdef ENTROPY_STATS
      active_section = 5;
#endif
      vp9_encode_mv(cpi, bc, &mi->mv[0].as_mv,
                    &mi->best_mv[0].as_mv, nmvc, allow_hp);

      if (has_second_ref(mi))
        vp9_encode_mv(cpi, bc, &mi->mv[1].as_mv,
                      &mi->best_mv[1].as_mv, nmvc, allow_hp);
    }
  }
}

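// Writes mode info for a block in an intra-only (key) frame, using the
// above/left block modes as context for the intra mode probabilities.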
static void write_mb_modes_kf(const VP9_COMP *cpi, MODE_INFO **mi_8x8,
                              vp9_writer *bc) {
  const VP9_COMMON *const cm = &cpi->common;
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  const struct segmentation *const seg = &cm->seg;
  MODE_INFO *m = mi_8x8[0];
  const int ym = m->mbmi.mode;
  const int segment_id = m->mbmi.segment_id;
  MODE_INFO *above_mi = mi_8x8[-xd->mode_info_stride];
  MODE_INFO *left_mi = mi_8x8[-1];

  if (seg->update_map)
    write_segment_id(bc, seg, m->mbmi.segment_id);

  write_skip_coeff(cpi, segment_id, m, bc);

  if (m->mbmi.sb_type >= BLOCK_8X8 && cm->tx_mode == TX_MODE_SELECT)
    write_selected_tx_size(cpi, m, m->mbmi.tx_size, m->mbmi.sb_type, bc);

  if (m->mbmi.sb_type >= BLOCK_8X8) {
    const MB_PREDICTION_MODE A = above_block_mode(m, above_mi, 0);
    const MB_PREDICTION_MODE L = xd->left_available ?
                                 left_block_mode(m, left_mi, 0) : DC_PRED;
    write_intra_mode(bc, ym, vp9_kf_y_mode_prob[A][L]);
  } else {
    int idx, idy;
    const int num_4x4_blocks_wide = num_4x4_blocks_wide_lookup[m->mbmi.sb_type];
    const int num_4x4_blocks_high = num_4x4_blocks_high_lookup[m->mbmi.sb_type];
    for (idy = 0; idy < 2; idy += num_4x4_blocks_high) {
      for (idx = 0; idx < 2; idx += num_4x4_blocks_wide) {
        int i = idy * 2 + idx;
        const MB_PREDICTION_MODE A = above_block_mode(m, above_mi, i);
        const MB_PREDICTION_MODE L = (xd->left_available || idx) ?
                                     left_block_mode(m, left_mi, i) : DC_PRED;
        const int bm = m->bmi[i].as_mode;
#ifdef ENTROPY_STATS
        ++intra_mode_stats[A][L][bm];
#endif
        write_intra_mode(bc, bm, vp9_kf_y_mode_prob[A][L]);
      }
    }
  }

  write_intra_mode(bc, m->mbmi.uv_mode, vp9_kf_uv_mode_prob[ym]);
}

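// Writes the mode info and then the coefficient tokens for a single block.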
static void write_modes_b(VP9_COMP *cpi, MODE_INFO **mi_8x8, vp9_writer *bc,
                          TOKENEXTRA **tok, TOKENEXTRA *tok_end,
                          int mi_row, int mi_col, int index) {
  VP9_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  MODE_INFO *m = mi_8x8[0];

  if (m->mbmi.sb_type < BLOCK_8X8)
    if (index > 0)
      return;

  xd->mi_8x8 = mi_8x8;

  set_mi_row_col(&cpi->common, xd,
                 mi_row, num_8x8_blocks_high_lookup[m->mbmi.sb_type],
                 mi_col, num_8x8_blocks_wide_lookup[m->mbmi.sb_type]);
  if (frame_is_intra_only(cm)) {
    write_mb_modes_kf(cpi, mi_8x8, bc);
#ifdef ENTROPY_STATS
    active_section = 8;
#endif
  } else {
    pack_inter_mode_mvs(cpi, m, bc);
#ifdef ENTROPY_STATS
    active_section = 1;
#endif
  }

  assert(*tok < tok_end);
  pack_mb_tokens(bc, tok, tok_end);
}

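// Recursively writes the partition type and the contained blocks for a
// superblock quadrant of the given size.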
static void write_modes_sb(VP9_COMP *cpi, MODE_INFO **mi_8x8, vp9_writer *bc,
                           TOKENEXTRA **tok, TOKENEXTRA *tok_end,
                           int mi_row, int mi_col, BLOCK_SIZE bsize,
                           int index) {
  VP9_COMMON *const cm = &cpi->common;
  MACROBLOCKD *xd = &cpi->mb.e_mbd;
  const int mis = cm->mode_info_stride;
  int bsl = b_width_log2(bsize);
  int bs = (1 << bsl) / 4;  // mode_info step for subsize
  int n;
  PARTITION_TYPE partition = PARTITION_NONE;
  BLOCK_SIZE subsize;
  MODE_INFO *m = mi_8x8[0];

  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols)
    return;

  partition = partition_lookup[bsl][m->mbmi.sb_type];

  if (bsize < BLOCK_8X8) {
    if (index > 0)
      return;
  } else {
    int pl;
    const int idx = check_bsize_coverage(bs, cm->mi_rows, cm->mi_cols,
                                         mi_row, mi_col);
    set_partition_seg_context(cm, xd, mi_row, mi_col);
    pl = partition_plane_context(xd, bsize);
    // encode the partition information
    if (idx == 0)
      write_token(bc, vp9_partition_tree,
                  cm->fc.partition_prob[cm->frame_type][pl],
                  vp9_partition_encodings + partition);
    else if (idx > 0)
      vp9_write(bc, partition == PARTITION_SPLIT,
                cm->fc.partition_prob[cm->frame_type][pl][idx]);
  }

  subsize = get_subsize(bsize, partition);

  switch (partition) {
    case PARTITION_NONE:
      write_modes_b(cpi, mi_8x8, bc, tok, tok_end, mi_row, mi_col, 0);
      break;
    case PARTITION_HORZ:
      write_modes_b(cpi, mi_8x8, bc, tok, tok_end, mi_row, mi_col, 0);
      if ((mi_row + bs) < cm->mi_rows)
        write_modes_b(cpi, mi_8x8 + bs * mis, bc, tok, tok_end, mi_row + bs,
                      mi_col, 1);
      break;
    case PARTITION_VERT:
      write_modes_b(cpi, mi_8x8, bc, tok, tok_end, mi_row, mi_col, 0);
      if ((mi_col + bs) < cm->mi_cols)
        write_modes_b(cpi, mi_8x8 + bs, bc, tok, tok_end, mi_row, mi_col + bs,
                      1);
      break;
    case PARTITION_SPLIT:
      for (n = 0; n < 4; n++) {
        const int j = n >> 1, i = n & 1;
        write_modes_sb(cpi, mi_8x8 + j * bs * mis + i * bs, bc, tok, tok_end,
                       mi_row + j * bs, mi_col + i * bs, subsize, n);
      }
      break;
    default:
      assert(0);
  }

  // update partition context
  if (bsize >= BLOCK_8X8 &&
      (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT)) {
    set_partition_seg_context(cm, xd, mi_row, mi_col);
    update_partition_context(xd, subsize, bsize);
  }
}

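// Walks the current tile in superblock order and writes modes and tokens for
// every block.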
static void write_modes(VP9_COMP *cpi, vp9_writer* const bc,
                        TOKENEXTRA **tok, TOKENEXTRA *tok_end) {
  VP9_COMMON *const cm = &cpi->common;
  const int mis = cm->mode_info_stride;
  int mi_row, mi_col;
  MODE_INFO **mi_8x8 = cm->mi_grid_visible;
  MODE_INFO **m_8x8;

  mi_8x8 += cm->cur_tile_mi_col_start + cm->cur_tile_mi_row_start * mis;

  for (mi_row = cm->cur_tile_mi_row_start; mi_row < cm->cur_tile_mi_row_end;
       mi_row += 8, mi_8x8 += 8 * mis) {
    m_8x8 = mi_8x8;
    vp9_zero(cm->left_seg_context);
    for (mi_col = cm->cur_tile_mi_col_start; mi_col < cm->cur_tile_mi_col_end;
         mi_col += MI_BLOCK_SIZE, m_8x8 += MI_BLOCK_SIZE) {
      write_modes_sb(cpi, m_8x8, bc, tok, tok_end, mi_row, mi_col,
                     BLOCK_64X64, 0);
    }
  }
}

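// Converts the accumulated coefficient token counts for one transform size
// into model probabilities and branch counts used by the update search.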
static void build_tree_distribution(VP9_COMP *cpi, TX_SIZE tx_size) {
  vp9_coeff_probs_model *coef_probs = cpi->frame_coef_probs[tx_size];
  vp9_coeff_count *coef_counts = cpi->coef_counts[tx_size];
  unsigned int (*eob_branch_ct)[REF_TYPES][COEF_BANDS][PREV_COEF_CONTEXTS] =
      cpi->common.counts.eob_branch[tx_size];
  vp9_coeff_stats *coef_branch_ct = cpi->frame_branch_ct[tx_size];
  vp9_prob full_probs[ENTROPY_NODES];
  int i, j, k, l;

  for (i = 0; i < BLOCK_TYPES; ++i) {
    for (j = 0; j < REF_TYPES; ++j) {
      for (k = 0; k < COEF_BANDS; ++k) {
        for (l = 0; l < PREV_COEF_CONTEXTS; ++l) {
          if (l >= 3 && k == 0)
            continue;
          vp9_tree_probs_from_distribution(vp9_coef_tree,
                                           full_probs,
                                           coef_branch_ct[i][j][k][l],
                                           coef_counts[i][j][k][l], 0);
          vpx_memcpy(coef_probs[i][j][k][l], full_probs,
                     sizeof(vp9_prob) * UNCONSTRAINED_NODES);
          coef_branch_ct[i][j][k][l][0][1] = eob_branch_ct[i][j][k][l] -
                                             coef_branch_ct[i][j][k][l][0][0];
          coef_probs[i][j][k][l][0] =
              get_binary_prob(coef_branch_ct[i][j][k][l][0][0],
                              coef_branch_ct[i][j][k][l][0][1]);
#ifdef ENTROPY_STATS
          if (!cpi->dummy_packing) {
            int t;
            for (t = 0; t < MAX_ENTROPY_TOKENS; ++t)
              context_counters[tx_size][i][j][k][l][t] +=
                  coef_counts[i][j][k][l][t];
            context_counters[tx_size][i][j][k][l][MAX_ENTROPY_TOKENS] +=
                eob_branch_ct[i][j][k][l];
          }
#endif
        }
      }
    }
  }
}

static void build_coeff_contexts(VP9_COMP *cpi) {
  TX_SIZE t;
  for (t = TX_4X4; t <= TX_32X32; t++)
    build_tree_distribution(cpi, t);
}

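// Decides whether updating the coefficient probabilities for this transform
// size is worth the bit cost and, if so, codes the per-node updates.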
static void update_coef_probs_common(vp9_writer* const bc, VP9_COMP *cpi,
                                     TX_SIZE tx_size) {
  vp9_coeff_probs_model *new_frame_coef_probs = cpi->frame_coef_probs[tx_size];
  vp9_coeff_probs_model *old_frame_coef_probs =
      cpi->common.fc.coef_probs[tx_size];
  vp9_coeff_stats *frame_branch_ct = cpi->frame_branch_ct[tx_size];
  const vp9_prob upd = DIFF_UPDATE_PROB;
  const int entropy_nodes_update = UNCONSTRAINED_NODES;
  int i, j, k, l, t;
  switch (cpi->sf.use_fast_coef_updates) {
    case 0: {
      /* dry run to see if there is any update at all needed */
      int savings = 0;
      int update[2] = {0, 0};
      for (i = 0; i < BLOCK_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < PREV_COEF_CONTEXTS; ++l) {
              for (t = 0; t < entropy_nodes_update; ++t) {
                vp9_prob newp = new_frame_coef_probs[i][j][k][l][t];
                const vp9_prob oldp = old_frame_coef_probs[i][j][k][l][t];
                int s;
                int u = 0;

                if (l >= 3 && k == 0)
                  continue;
                if (t == PIVOT_NODE)
                  s = vp9_prob_diff_update_savings_search_model(
                      frame_branch_ct[i][j][k][l][0],
                      old_frame_coef_probs[i][j][k][l], &newp, upd, i, j);
                else
                  s = vp9_prob_diff_update_savings_search(
                      frame_branch_ct[i][j][k][l][t], oldp, &newp, upd);
                if (s > 0 && newp != oldp)
                  u = 1;
                if (u)
                  savings += s - (int)(vp9_cost_zero(upd));
                else
                  savings -= (int)(vp9_cost_zero(upd));
                update[u]++;
              }
            }
          }
        }
      }

      // printf("Update %d %d, savings %d\n", update[0], update[1], savings);
      /* Is coef updated at all */
      if (update[1] == 0 || savings < 0) {
        vp9_write_bit(bc, 0);
        return;
      }
      vp9_write_bit(bc, 1);
      for (i = 0; i < BLOCK_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < PREV_COEF_CONTEXTS; ++l) {
              // calc probs and branch cts for this frame only
              for (t = 0; t < entropy_nodes_update; ++t) {
                vp9_prob newp = new_frame_coef_probs[i][j][k][l][t];
                vp9_prob *oldp = old_frame_coef_probs[i][j][k][l] + t;
                const vp9_prob upd = DIFF_UPDATE_PROB;
                int s;
                int u = 0;
                if (l >= 3 && k == 0)
                  continue;
                if (t == PIVOT_NODE)
                  s = vp9_prob_diff_update_savings_search_model(
                      frame_branch_ct[i][j][k][l][0],
                      old_frame_coef_probs[i][j][k][l], &newp, upd, i, j);
                else
                  s = vp9_prob_diff_update_savings_search(
                      frame_branch_ct[i][j][k][l][t],
                      *oldp, &newp, upd);
                if (s > 0 && newp != *oldp)
                  u = 1;
                vp9_write(bc, u, upd);
#ifdef ENTROPY_STATS
                if (!cpi->dummy_packing)
                  ++tree_update_hist[tx_size][i][j][k][l][t][u];
#endif
                if (u) {
                  /* send/use new probability */
                  vp9_write_prob_diff_update(bc, newp, *oldp);
                  *oldp = newp;
                }
              }
            }
          }