vp9_bitstream.c 41.7 KB
Newer Older
John Koleszar's avatar
John Koleszar committed
1
/*
2
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
John Koleszar's avatar
John Koleszar committed
3
 *
4
 *  Use of this source code is governed by a BSD-style license
5
6
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
7
 *  in the file PATENTS.  All contributing project authors may
8
 *  be found in the AUTHORS file in the root of the source tree.
John Koleszar's avatar
John Koleszar committed
9
10
 */

11
12
13
#include <assert.h>
#include <stdio.h>
#include <limits.h>
John Koleszar's avatar
John Koleszar committed
14

15
16
#include "vpx/vpx_encoder.h"
#include "vpx_mem/vpx_mem.h"
17
#include "vpx_ports/mem_ops.h"
18

19
#include "vp9/common/vp9_entropy.h"
20
#include "vp9/common/vp9_entropymode.h"
21
#include "vp9/common/vp9_entropymv.h"
22
#include "vp9/common/vp9_mvref_common.h"
23
#include "vp9/common/vp9_pragmas.h"
24
25
26
27
#include "vp9/common/vp9_pred_common.h"
#include "vp9/common/vp9_seg_common.h"
#include "vp9/common/vp9_systemdependent.h"
#include "vp9/common/vp9_tile_common.h"
28

Dmitry Kovalev's avatar
Dmitry Kovalev committed
29
#include "vp9/encoder/vp9_cost.h"
30
#include "vp9/encoder/vp9_bitstream.h"
31
32
#include "vp9/encoder/vp9_encodemv.h"
#include "vp9/encoder/vp9_mcomp.h"
33
#include "vp9/encoder/vp9_segmentation.h"
34
#include "vp9/encoder/vp9_subexp.h"
35
#include "vp9/encoder/vp9_tokenize.h"
36
37
#include "vp9/encoder/vp9_write_bit_buffer.h"

38
39
40
41
42
43
44
45
46
47
48
49
50
51
static struct vp9_token intra_mode_encodings[INTRA_MODES];
static struct vp9_token switchable_interp_encodings[SWITCHABLE_FILTERS];
static struct vp9_token partition_encodings[PARTITION_TYPES];
static struct vp9_token inter_mode_encodings[INTER_MODES];

// Build the per-tree token tables used by the write_* helpers below.
// Must run once before any symbol is emitted; the four inits are independent.
void vp9_entropy_mode_init() {
  vp9_tokens_from_tree(inter_mode_encodings, vp9_inter_mode_tree);
  vp9_tokens_from_tree(partition_encodings, vp9_partition_tree);
  vp9_tokens_from_tree(switchable_interp_encodings, vp9_switchable_interp_tree);
  vp9_tokens_from_tree(intra_mode_encodings, vp9_intra_mode_tree);
}

// Emit one intra prediction mode as a token from the intra-mode tree.
static void write_intra_mode(vp9_writer *w, MB_PREDICTION_MODE mode,
                             const vp9_prob *probs) {
  const struct vp9_token *const token = &intra_mode_encodings[mode];
  vp9_write_token(w, vp9_intra_mode_tree, probs, token);
}

// Emit one inter prediction mode; only the INTER_MODES subset is legal here.
static void write_inter_mode(vp9_writer *w, MB_PREDICTION_MODE mode,
                             const vp9_prob *probs) {
  assert(is_inter_mode(mode));
  vp9_write_token(w, vp9_inter_mode_tree, probs,
                  inter_mode_encodings + INTER_OFFSET(mode));
}

62
63
// Write |data| (expected in [0, max]) using the minimum bit width that can
// represent |max|.
static void encode_unsigned_max(struct vp9_write_bit_buffer *wb,
                                int data, int max) {
  const int nbits = get_unsigned_bits(max);
  vp9_wb_write_literal(wb, data, nbits);
}

67
68
69
70
71
72
// Derive branch counts from per-symbol |counts| and conditionally signal a
// differential update for each of the tree's n - 1 internal probabilities.
static void prob_diff_update(const vp9_tree_index *tree,
                             vp9_prob probs[/*n - 1*/],
                             const unsigned int counts[/*n - 1*/],
                             int n, vp9_writer *w) {
  unsigned int branch_ct[32][2];
  int node;

  // The fixed-size scratch array caps the supported alphabet at 32 symbols.
  assert(n <= 32);

  vp9_tree_probs_from_distribution(tree, branch_ct, counts);
  for (node = 0; node < n - 1; ++node)
    vp9_cond_prob_diff_update(w, probs + node, branch_ct[node]);
}

82
static void write_selected_tx_size(const VP9_COMP *cpi,
83
84
                                   TX_SIZE tx_size, BLOCK_SIZE bsize,
                                   vp9_writer *w) {
85
  const TX_SIZE max_tx_size = max_txsize_lookup[bsize];
86
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
87
88
  const vp9_prob *const tx_probs = get_tx_probs2(max_tx_size, xd,
                                                 &cpi->common.fc.tx_probs);
89
  vp9_write(w, tx_size != TX_4X4, tx_probs[0]);
90
  if (tx_size != TX_4X4 && max_tx_size >= TX_16X16) {
91
    vp9_write(w, tx_size != TX_8X8, tx_probs[1]);
92
    if (tx_size != TX_8X8 && max_tx_size >= TX_32X32)
93
94
95
96
      vp9_write(w, tx_size != TX_16X16, tx_probs[2]);
  }
}

97
static int write_skip(const VP9_COMP *cpi, int segment_id, const MODE_INFO *mi,
98
                      vp9_writer *w) {
99
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
100
  if (vp9_segfeature_active(&cpi->common.seg, segment_id, SEG_LVL_SKIP)) {
101
102
    return 1;
  } else {
103
    const int skip = mi->mbmi.skip;
104
105
    vp9_write(w, skip, vp9_get_skip_prob(&cpi->common, xd));
    return skip;
106
107
108
  }
}

109
// Conditionally signal skip-probability updates for every skip context.
static void update_skip_probs(VP9_COMMON *cm, vp9_writer *w) {
  int ctx;

  for (ctx = 0; ctx < SKIP_CONTEXTS; ++ctx)
    vp9_cond_prob_diff_update(w, cm->fc.skip_probs + ctx,
                              cm->counts.skip[ctx]);
}

116
// Signal switchable-filter probability updates, one tree per filter context.
static void update_switchable_interp_probs(VP9_COMMON *cm, vp9_writer *w) {
  int ctx;

  for (ctx = 0; ctx < SWITCHABLE_FILTER_CONTEXTS; ++ctx)
    prob_diff_update(vp9_switchable_interp_tree,
                     cm->fc.switchable_interp_prob[ctx],
                     cm->counts.switchable_interp[ctx], SWITCHABLE_FILTERS, w);
}

124
125
// Pack the coefficient tokens in [*tp, stop) into the bit writer, advancing
// *tp past the terminating EOSB_TOKEN when one is present.
//
// Fix vs. original: the extra-bits branch re-declared `v`, `n` and `i`,
// shadowing the token-tree locals of the same names (-Wshadow).  The inner
// variables are renamed; the emitted bitstream is unchanged.
static void pack_mb_tokens(vp9_writer *w,
                           TOKENEXTRA **tp, const TOKENEXTRA *stop) {
  TOKENEXTRA *p = *tp;

  while (p < stop && p->token != EOSB_TOKEN) {
    const int t = p->token;
    const struct vp9_token *const a = &vp9_coef_encodings[t];
    const vp9_extra_bit *const b = &vp9_extra_bits[t];
    int i = 0;            // start index into the coefficient tree
    int v = a->value;     // token's tree code
    int n = a->len;       // number of tree bits to write

    /* skip one or two nodes */
    if (p->skip_eob_node) {
      n -= p->skip_eob_node;
      i = 2 * p->skip_eob_node;
    }

    // TODO(jbb): expanding this can lead to big gains.  It allows
    // much better branch prediction and would enable us to avoid numerous
    // lookups and compares.

    // If we have a token that's in the constrained set, the coefficient tree
    // is split into two treed writes.  The first treed write takes care of the
    // unconstrained nodes.  The second treed write takes care of the
    // constrained nodes.
    if (t >= TWO_TOKEN && t < EOB_TOKEN) {
      int len = UNCONSTRAINED_NODES - p->skip_eob_node;
      int bits = v >> (n - len);
      vp9_write_tree(w, vp9_coef_tree, p->context_tree, bits, len, i);
      vp9_write_tree(w, vp9_coef_con_tree,
                     vp9_pareto8_full[p->context_tree[PIVOT_NODE] - 1],
                     v, n - len, 0);
    } else {
      vp9_write_tree(w, vp9_coef_tree, p->context_tree, v, n, i);
    }

    if (b->base_val) {
      const int e = p->extra, l = b->len;

      if (l) {
        // Write the magnitude of the extra value, MSB first, walking the
        // token's extra-bits tree.  (Renamed from v/n/i to avoid shadowing.)
        const unsigned char *pb = b->prob;
        int extra_v = e >> 1;   // magnitude bits, assumed nonzero length
        int extra_n = l;        // number of bits left to write
        int extra_i = 0;        // position in b->tree

        do {
          const int bb = (extra_v >> --extra_n) & 1;
          vp9_write(w, bb, pb[extra_i >> 1]);
          extra_i = b->tree[extra_i + bb];
        } while (extra_n);
      }

      vp9_write_bit(w, e & 1);  // sign bit
    }

    ++p;
  }

  // NOTE(review): when the loop exits with p == stop this reads p->token one
  // past the range; assumes the caller guarantees a readable sentinel there —
  // TODO confirm against the tokenizer's buffer layout.
  *tp = p + (p->token == EOSB_TOKEN);
}

185
// Code the segment id with the segment tree; nothing is written unless
// segmentation is enabled and the map is being updated this frame.
static void write_segment_id(vp9_writer *w, const struct segmentation *seg,
                             int segment_id) {
  const int coded = seg->enabled && seg->update_map;

  if (coded)
    vp9_write_tree(w, vp9_segment_tree, seg->tree_probs, segment_id, 3, 0);
}

Paul Wilkins's avatar
Paul Wilkins committed
191
// This function encodes the reference frame
192
193
194
195
196
197
198
// This function encodes the reference frame
static void write_ref_frames(const VP9_COMP *cpi, vp9_writer *w) {
  const VP9_COMMON *const cm = &cpi->common;
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  const MB_MODE_INFO *const mbmi = &xd->mi_8x8[0]->mbmi;
  const int is_compound = has_second_ref(mbmi);
  const int segment_id = mbmi->segment_id;

  // When the segment dictates the reference frame nothing is coded; the
  // encoder's choice must already agree with the segment data.
  if (vp9_segfeature_active(&cm->seg, segment_id, SEG_LVL_REF_FRAME)) {
    assert(!is_compound);
    assert(mbmi->ref_frame[0] ==
               vp9_get_segdata(&cm->seg, segment_id, SEG_LVL_REF_FRAME));
    return;
  }

  // The compound/single choice is only coded when the frame leaves it open.
  if (cm->reference_mode == REFERENCE_MODE_SELECT)
    vp9_write(w, is_compound, vp9_get_reference_mode_prob(cm, xd));
  else
    assert(!is_compound == (cm->reference_mode == SINGLE_REFERENCE));

  if (is_compound) {
    // Compound: a single bit picks GOLDEN vs LAST as the first reference.
    vp9_write(w, mbmi->ref_frame[0] == GOLDEN_FRAME,
              vp9_get_pred_prob_comp_ref_p(cm, xd));
  } else {
    // Single: first bit separates LAST from the rest, second bit (when
    // needed) separates GOLDEN from ALTREF.
    const int not_last = mbmi->ref_frame[0] != LAST_FRAME;
    vp9_write(w, not_last, vp9_get_pred_prob_single_ref_p1(cm, xd));
    if (not_last)
      vp9_write(w, mbmi->ref_frame[0] != GOLDEN_FRAME,
                vp9_get_pred_prob_single_ref_p2(cm, xd));
  }
}
John Koleszar's avatar
John Koleszar committed
227

228
229
// Write the mode side-information for one non-key-frame block: segment id,
// skip flag, intra/inter flag, transform size, prediction mode(s), reference
// frames, interpolation filter and (for NEWMV) motion vectors.  The emission
// order here must match the decoder's parse order exactly.
static void pack_inter_mode_mvs(VP9_COMP *cpi, const MODE_INFO *mi,
                                vp9_writer *w) {
  VP9_COMMON *const cm = &cpi->common;
  const nmv_context *nmvc = &cm->fc.nmvc;
  const MACROBLOCK *const x = &cpi->mb;
  const MACROBLOCKD *const xd = &x->e_mbd;
  const struct segmentation *const seg = &cm->seg;
  const MB_MODE_INFO *const mbmi = &mi->mbmi;
  const MB_PREDICTION_MODE mode = mbmi->mode;
  const int segment_id = mbmi->segment_id;
  const BLOCK_SIZE bsize = mbmi->sb_type;
  const int allow_hp = cm->allow_high_precision_mv;
  const int is_inter = is_inter_block(mbmi);
  const int is_compound = has_second_ref(mbmi);
  int skip, ref;

  // Segment id: either predicted temporally (one flag, id coded only on a
  // prediction miss) or coded explicitly.
  if (seg->update_map) {
    if (seg->temporal_update) {
      const int pred_flag = mbmi->seg_id_predicted;
      vp9_prob pred_prob = vp9_get_pred_prob_seg_id(seg, xd);
      vp9_write(w, pred_flag, pred_prob);
      if (!pred_flag)
        write_segment_id(w, seg, segment_id);
    } else {
      write_segment_id(w, seg, segment_id);
    }
  }

  skip = write_skip(cpi, segment_id, mi, w);

  // Intra/inter flag is implied when the segment fixes the reference frame.
  if (!vp9_segfeature_active(seg, segment_id, SEG_LVL_REF_FRAME))
    vp9_write(w, is_inter, vp9_get_intra_inter_prob(cm, xd));

  // Transform size is coded only when per-block selection is on and the
  // block actually codes coefficients (not skipped / segment-skipped inter).
  if (bsize >= BLOCK_8X8 && cm->tx_mode == TX_MODE_SELECT &&
      !(is_inter &&
        (skip || vp9_segfeature_active(seg, segment_id, SEG_LVL_SKIP)))) {
    write_selected_tx_size(cpi, mbmi->tx_size, bsize, w);
  }

  if (!is_inter) {
    if (bsize >= BLOCK_8X8) {
      write_intra_mode(w, mode, cm->fc.y_mode_prob[size_group_lookup[bsize]]);
    } else {
      // Sub-8x8: one intra mode per 4x4 sub-block, raster order.
      int idx, idy;
      const int num_4x4_w = num_4x4_blocks_wide_lookup[bsize];
      const int num_4x4_h = num_4x4_blocks_high_lookup[bsize];
      for (idy = 0; idy < 2; idy += num_4x4_h) {
        for (idx = 0; idx < 2; idx += num_4x4_w) {
          const MB_PREDICTION_MODE b_mode = mi->bmi[idy * 2 + idx].as_mode;
          write_intra_mode(w, b_mode, cm->fc.y_mode_prob[0]);
        }
      }
    }
    write_intra_mode(w, mbmi->uv_mode, cm->fc.uv_mode_prob[mode]);
  } else {
    const int mode_ctx = mbmi->mode_context[mbmi->ref_frame[0]];
    const vp9_prob *const inter_probs = cm->fc.inter_mode_probs[mode_ctx];
    write_ref_frames(cpi, w);

    // If segment skip is not enabled code the mode.
    if (!vp9_segfeature_active(seg, segment_id, SEG_LVL_SKIP)) {
      if (bsize >= BLOCK_8X8) {
        write_inter_mode(w, mode, inter_probs);
        // Counts feed the backward probability adaptation for this frame.
        ++cm->counts.inter_mode[mode_ctx][INTER_OFFSET(mode)];
      }
    }

    if (cm->interp_filter == SWITCHABLE) {
      const int ctx = vp9_get_pred_context_switchable_interp(xd);
      vp9_write_token(w, vp9_switchable_interp_tree,
                      cm->fc.switchable_interp_prob[ctx],
                      &switchable_interp_encodings[mbmi->interp_filter]);
    } else {
      assert(mbmi->interp_filter == cm->interp_filter);
    }

    if (bsize < BLOCK_8X8) {
      // Sub-8x8 inter: per-sub-block mode, and MVs for every NEWMV sub-block
      // (one MV per reference when compound).
      const int num_4x4_w = num_4x4_blocks_wide_lookup[bsize];
      const int num_4x4_h = num_4x4_blocks_high_lookup[bsize];
      int idx, idy;
      for (idy = 0; idy < 2; idy += num_4x4_h) {
        for (idx = 0; idx < 2; idx += num_4x4_w) {
          const int j = idy * 2 + idx;
          const MB_PREDICTION_MODE b_mode = mi->bmi[j].as_mode;
          write_inter_mode(w, b_mode, inter_probs);
          ++cm->counts.inter_mode[mode_ctx][INTER_OFFSET(b_mode)];
          if (b_mode == NEWMV) {
            for (ref = 0; ref < 1 + is_compound; ++ref)
              vp9_encode_mv(cpi, w, &mi->bmi[j].as_mv[ref].as_mv,
                            &mbmi->ref_mvs[mbmi->ref_frame[ref]][0].as_mv,
                            nmvc, allow_hp);
          }
        }
      }
    } else {
      // >= 8x8: a single MV per reference, only for NEWMV.
      if (mode == NEWMV) {
        for (ref = 0; ref < 1 + is_compound; ++ref)
          vp9_encode_mv(cpi, w, &mbmi->mv[ref].as_mv,
                        &mbmi->ref_mvs[mbmi->ref_frame[ref]][0].as_mv, nmvc,
                        allow_hp);
      }
    }
  }
}
332

333
static void write_mb_modes_kf(const VP9_COMP *cpi, MODE_INFO **mi_8x8,
334
                              vp9_writer *w) {
335
  const VP9_COMMON *const cm = &cpi->common;
Ronald S. Bultje's avatar
Ronald S. Bultje committed
336
  const MACROBLOCKD *const xd = &cpi->mb.e_mbd;
337
  const struct segmentation *const seg = &cm->seg;
338
339
340
341
342
  const MODE_INFO *const mi = mi_8x8[0];
  const MODE_INFO *const above_mi = mi_8x8[-xd->mode_info_stride];
  const MODE_INFO *const left_mi = xd->left_available ? mi_8x8[-1] : NULL;
  const MB_MODE_INFO *const mbmi = &mi->mbmi;
  const BLOCK_SIZE bsize = mbmi->sb_type;
343

344
  if (seg->update_map)
345
    write_segment_id(w, seg, mbmi->segment_id);
346

347
  write_skip(cpi, mbmi->segment_id, mi, w);
348

349
350
  if (bsize >= BLOCK_8X8 && cm->tx_mode == TX_MODE_SELECT)
    write_selected_tx_size(cpi, mbmi->tx_size, bsize, w);
351

352
353
  if (bsize >= BLOCK_8X8) {
    write_intra_mode(w, mbmi->mode, get_y_mode_probs(mi, above_mi, left_mi, 0));
354
  } else {
355
356
    const int num_4x4_w = num_4x4_blocks_wide_lookup[bsize];
    const int num_4x4_h = num_4x4_blocks_high_lookup[bsize];
357
    int idx, idy;
358
359
360
361
362
363

    for (idy = 0; idy < 2; idy += num_4x4_h) {
      for (idx = 0; idx < 2; idx += num_4x4_w) {
        const int block = idy * 2 + idx;
        write_intra_mode(w, mi->bmi[block].as_mode,
                         get_y_mode_probs(mi, above_mi, left_mi, block));
364
365
      }
    }
366
367
  }

368
  write_intra_mode(w, mbmi->uv_mode, vp9_kf_uv_mode_prob[mbmi->mode]);
369
370
}

James Zern's avatar
James Zern committed
371
static void write_modes_b(VP9_COMP *cpi, const TileInfo *const tile,
372
373
                          vp9_writer *w, TOKENEXTRA **tok, TOKENEXTRA *tok_end,
                          int mi_row, int mi_col) {
374
  VP9_COMMON *const cm = &cpi->common;
Ronald S. Bultje's avatar
Ronald S. Bultje committed
375
  MACROBLOCKD *const xd = &cpi->mb.e_mbd;
376
  MODE_INFO *m;
Dmitry Kovalev's avatar
Dmitry Kovalev committed
377

378
379
  xd->mi_8x8 = cm->mi_grid_visible + (mi_row * cm->mode_info_stride + mi_col);
  m = xd->mi_8x8[0];
380

James Zern's avatar
James Zern committed
381
  set_mi_row_col(xd, tile,
Dmitry Kovalev's avatar
Dmitry Kovalev committed
382
                 mi_row, num_8x8_blocks_high_lookup[m->mbmi.sb_type],
James Zern's avatar
James Zern committed
383
384
                 mi_col, num_8x8_blocks_wide_lookup[m->mbmi.sb_type],
                 cm->mi_rows, cm->mi_cols);
385
  if (frame_is_intra_only(cm)) {
386
    write_mb_modes_kf(cpi, xd->mi_8x8, w);
Ronald S. Bultje's avatar
Ronald S. Bultje committed
387
  } else {
388
    pack_inter_mode_mvs(cpi, m, w);
Ronald S. Bultje's avatar
Ronald S. Bultje committed
389
390
391
  }

  assert(*tok < tok_end);
392
  pack_mb_tokens(w, tok, tok_end);
Ronald S. Bultje's avatar
Ronald S. Bultje committed
393
394
}

395
396
static void write_partition(VP9_COMMON *cm, MACROBLOCKD *xd,
                            int hbs, int mi_row, int mi_col,
397
                            PARTITION_TYPE p, BLOCK_SIZE bsize, vp9_writer *w) {
398
399
  const int ctx = partition_plane_context(xd->above_seg_context,
                                          xd->left_seg_context,
400
401
402
403
                                          mi_row, mi_col, bsize);
  const vp9_prob *const probs = get_partition_probs(cm, ctx);
  const int has_rows = (mi_row + hbs) < cm->mi_rows;
  const int has_cols = (mi_col + hbs) < cm->mi_cols;
404
405

  if (has_rows && has_cols) {
406
    vp9_write_token(w, vp9_partition_tree, probs, &partition_encodings[p]);
407
  } else if (!has_rows && has_cols) {
408
409
    assert(p == PARTITION_SPLIT || p == PARTITION_HORZ);
    vp9_write(w, p == PARTITION_SPLIT, probs[1]);
410
  } else if (has_rows && !has_cols) {
411
412
    assert(p == PARTITION_SPLIT || p == PARTITION_VERT);
    vp9_write(w, p == PARTITION_SPLIT, probs[2]);
413
  } else {
414
    assert(p == PARTITION_SPLIT);
415
416
417
  }
}

418
419
// Recursively write a superblock: emit the partition decision for |bsize|,
// descend into the resulting sub-blocks, and finally update the partition
// context.  The recursion mirrors the decoder's partition parse exactly.
static void write_modes_sb(VP9_COMP *cpi,
                           const TileInfo *const tile,
                           vp9_writer *w, TOKENEXTRA **tok, TOKENEXTRA *tok_end,
                           int mi_row, int mi_col, BLOCK_SIZE bsize) {
  VP9_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->mb.e_mbd;

  const int bsl = b_width_log2(bsize);
  const int bs = (1 << bsl) / 4;  // half the block size in mode-info units
  PARTITION_TYPE partition;
  BLOCK_SIZE subsize;
  MODE_INFO *m = cm->mi_grid_visible[mi_row * cm->mode_info_stride + mi_col];

  // Blocks starting outside the frame are never coded.
  if (mi_row >= cm->mi_rows || mi_col >= cm->mi_cols)
    return;

  // The partition is recovered from the stored sub-block size.
  partition = partition_lookup[bsl][m->mbmi.sb_type];
  write_partition(cm, xd, bs, mi_row, mi_col, partition, bsize, w);
  subsize = get_subsize(bsize, partition);
  if (subsize < BLOCK_8X8) {
    // Sub-8x8 leaves carry all their info in a single block write.
    write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
  } else {
    switch (partition) {
      case PARTITION_NONE:
        write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
        break;
      case PARTITION_HORZ:
        // Top half always; bottom half only if it lies inside the frame.
        write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
        if (mi_row + bs < cm->mi_rows)
          write_modes_b(cpi, tile, w, tok, tok_end, mi_row + bs, mi_col);
        break;
      case PARTITION_VERT:
        // Left half always; right half only if it lies inside the frame.
        write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col);
        if (mi_col + bs < cm->mi_cols)
          write_modes_b(cpi, tile, w, tok, tok_end, mi_row, mi_col + bs);
        break;
      case PARTITION_SPLIT:
        // Recurse into the four quadrants in raster order.
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col, subsize);
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col + bs,
                       subsize);
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row + bs, mi_col,
                       subsize);
        write_modes_sb(cpi, tile, w, tok, tok_end, mi_row + bs, mi_col + bs,
                       subsize);
        break;
      default:
        assert(0);
    }
  }

  // update partition context
  if (bsize >= BLOCK_8X8 &&
      (bsize == BLOCK_8X8 || partition != PARTITION_SPLIT))
    update_partition_context(xd->above_seg_context, xd->left_seg_context,
                             mi_row, mi_col, subsize, bsize);
}

475
476
static void write_modes(VP9_COMP *cpi,
                        const TileInfo *const tile,
477
                        vp9_writer *w, TOKENEXTRA **tok, TOKENEXTRA *tok_end) {
478
  int mi_row, mi_col;
479

James Zern's avatar
James Zern committed
480
  for (mi_row = tile->mi_row_start; mi_row < tile->mi_row_end;
481
       mi_row += MI_BLOCK_SIZE) {
482
    vp9_zero(cpi->mb.e_mbd.left_seg_context);
James Zern's avatar
James Zern committed
483
    for (mi_col = tile->mi_col_start; mi_col < tile->mi_col_end;
484
         mi_col += MI_BLOCK_SIZE)
485
486
      write_modes_sb(cpi, tile, w, tok, tok_end, mi_row, mi_col,
                     BLOCK_64X64);
John Koleszar's avatar
John Koleszar committed
487
  }
John Koleszar's avatar
John Koleszar committed
488
}
489

490
491
492
// Turn this frame's raw token counts for |tx_size| into branch counts and
// model probabilities; EOB counts are folded into node 0's "taken" branch.
static void build_tree_distribution(VP9_COMP *cpi, TX_SIZE tx_size) {
  vp9_coeff_probs_model *coef_probs = cpi->frame_coef_probs[tx_size];
  vp9_coeff_count *coef_counts = cpi->coef_counts[tx_size];
  unsigned int (*eob_branch_ct)[REF_TYPES][COEF_BANDS][COEFF_CONTEXTS] =
      cpi->common.counts.eob_branch[tx_size];
  vp9_coeff_stats *coef_branch_ct = cpi->frame_branch_ct[tx_size];
  int plane, ref, band, ctx, node;

  for (plane = 0; plane < PLANE_TYPES; ++plane) {
    for (ref = 0; ref < REF_TYPES; ++ref) {
      for (band = 0; band < COEF_BANDS; ++band) {
        for (ctx = 0; ctx < BAND_COEFF_CONTEXTS(band); ++ctx) {
          vp9_tree_probs_from_distribution(
              vp9_coef_tree,
              coef_branch_ct[plane][ref][band][ctx],
              coef_counts[plane][ref][band][ctx]);
          // The EOB branch count was tracked separately by the tokenizer.
          coef_branch_ct[plane][ref][band][ctx][0][1] =
              eob_branch_ct[plane][ref][band][ctx] -
              coef_branch_ct[plane][ref][band][ctx][0][0];
          for (node = 0; node < UNCONSTRAINED_NODES; ++node)
            coef_probs[plane][ref][band][ctx][node] = get_binary_prob(
                coef_branch_ct[plane][ref][band][ctx][node][0],
                coef_branch_ct[plane][ref][band][ctx][node][1]);
        }
      }
    }
  }
}

517
518
519
520
521
522
// Signal coefficient-probability updates for one transform size.  Three
// strategies, chosen by the speed feature use_fast_coef_updates:
//   0 - full two-pass search: dry run to measure total savings, then only
//       write updates if they pay for themselves;
//   1/2 - single greedy pass, with case 2 additionally restricting the
//       bands/contexts considered to halve the search work.
static void update_coef_probs_common(vp9_writer* const bc, VP9_COMP *cpi,
                                     TX_SIZE tx_size) {
  vp9_coeff_probs_model *new_frame_coef_probs = cpi->frame_coef_probs[tx_size];
  vp9_coeff_probs_model *old_frame_coef_probs =
      cpi->common.fc.coef_probs[tx_size];
  vp9_coeff_stats *frame_branch_ct = cpi->frame_branch_ct[tx_size];
  const vp9_prob upd = DIFF_UPDATE_PROB;
  // Only the unconstrained (non-Pareto) nodes are explicitly updatable.
  const int entropy_nodes_update = UNCONSTRAINED_NODES;
  int i, j, k, l, t;
  switch (cpi->sf.use_fast_coef_updates) {
    case 0: {
      /* dry run to see if there is any update at all needed */
      int savings = 0;
      int update[2] = {0, 0};
      for (i = 0; i < PLANE_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
              for (t = 0; t < entropy_nodes_update; ++t) {
                vp9_prob newp = new_frame_coef_probs[i][j][k][l][t];
                const vp9_prob oldp = old_frame_coef_probs[i][j][k][l][t];
                int s;
                int u = 0;
                // The pivot node's update is searched jointly with the model
                // it parameterizes; other nodes are searched independently.
                if (t == PIVOT_NODE)
                  s = vp9_prob_diff_update_savings_search_model(
                      frame_branch_ct[i][j][k][l][0],
                      old_frame_coef_probs[i][j][k][l], &newp, upd);
                else
                  s = vp9_prob_diff_update_savings_search(
                      frame_branch_ct[i][j][k][l][t], oldp, &newp, upd);
                if (s > 0 && newp != oldp)
                  u = 1;
                // Every node costs the "update?" flag; only real updates
                // recover bits on top of that.
                if (u)
                  savings += s - (int)(vp9_cost_zero(upd));
                else
                  savings -= (int)(vp9_cost_zero(upd));
                update[u]++;
              }
            }
          }
        }
      }

      // printf("Update %d %d, savings %d\n", update[0], update[1], savings);
      /* Is coef updated at all */
      if (update[1] == 0 || savings < 0) {
        vp9_write_bit(bc, 0);
        return;
      }
      vp9_write_bit(bc, 1);
      // Second pass: redo the search and actually emit the updates.
      for (i = 0; i < PLANE_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
              // calc probs and branch cts for this frame only
              for (t = 0; t < entropy_nodes_update; ++t) {
                vp9_prob newp = new_frame_coef_probs[i][j][k][l][t];
                vp9_prob *oldp = old_frame_coef_probs[i][j][k][l] + t;
                const vp9_prob upd = DIFF_UPDATE_PROB;
                int s;
                int u = 0;
                if (t == PIVOT_NODE)
                  s = vp9_prob_diff_update_savings_search_model(
                      frame_branch_ct[i][j][k][l][0],
                      old_frame_coef_probs[i][j][k][l], &newp, upd);
                else
                  s = vp9_prob_diff_update_savings_search(
                      frame_branch_ct[i][j][k][l][t],
                      *oldp, &newp, upd);
                if (s > 0 && newp != *oldp)
                  u = 1;
                vp9_write(bc, u, upd);
                if (u) {
                  /* send/use new probability */
                  vp9_write_prob_diff_update(bc, newp, *oldp);
                  *oldp = newp;
                }
              }
            }
          }
        }
      }
      return;
    }

    case 1:
    case 2: {
      // Fast modes halve the searched contexts/bands at speed setting 2.
      const int prev_coef_contexts_to_update =
          cpi->sf.use_fast_coef_updates == 2 ? COEFF_CONTEXTS >> 1
                                             : COEFF_CONTEXTS;
      const int coef_band_to_update =
          cpi->sf.use_fast_coef_updates == 2 ? COEF_BANDS >> 1
                                             : COEF_BANDS;
      int updates = 0;
      // "update = 0" flags are deferred until the first real update so that
      // a frame with no updates at all costs just one bit.
      int noupdates_before_first = 0;
      for (i = 0; i < PLANE_TYPES; ++i) {
        for (j = 0; j < REF_TYPES; ++j) {
          for (k = 0; k < COEF_BANDS; ++k) {
            for (l = 0; l < BAND_COEFF_CONTEXTS(k); ++l) {
              // calc probs and branch cts for this frame only
              for (t = 0; t < entropy_nodes_update; ++t) {
                vp9_prob newp = new_frame_coef_probs[i][j][k][l][t];
                vp9_prob *oldp = old_frame_coef_probs[i][j][k][l] + t;
                int s;
                int u = 0;
                if (l >= prev_coef_contexts_to_update ||
                    k >= coef_band_to_update) {
                  u = 0;
                } else {
                  if (t == PIVOT_NODE)
                    s = vp9_prob_diff_update_savings_search_model(
                        frame_branch_ct[i][j][k][l][0],
                        old_frame_coef_probs[i][j][k][l], &newp, upd);
                  else
                    s = vp9_prob_diff_update_savings_search(
                        frame_branch_ct[i][j][k][l][t],
                        *oldp, &newp, upd);
                  if (s > 0 && newp != *oldp)
                    u = 1;
                }
                updates += u;
                if (u == 0 && updates == 0) {
                  noupdates_before_first++;
                  continue;
                }
                if (u == 1 && updates == 1) {
                  int v;
                  // first update
                  vp9_write_bit(bc, 1);
                  // Flush the deferred "no update" flags now that we know
                  // the update pass is non-empty.
                  for (v = 0; v < noupdates_before_first; ++v)
                    vp9_write(bc, 0, upd);
                }
                vp9_write(bc, u, upd);
                if (u) {
                  /* send/use new probability */
                  vp9_write_prob_diff_update(bc, newp, *oldp);
                  *oldp = newp;
                }
              }
            }
          }
        }
      }
      if (updates == 0) {
        vp9_write_bit(bc, 0);  // no updates
      }
      return;
    }

    default:
      assert(0);
  }
}
John Koleszar's avatar
John Koleszar committed
670

671
// Build coefficient statistics for all transform sizes, then signal
// probability updates only for the sizes this frame's tx_mode can use.
static void update_coef_probs(VP9_COMP *cpi, vp9_writer* w) {
  const TX_MODE tx_mode = cpi->common.tx_mode;
  const TX_SIZE max_tx_size = tx_mode_to_biggest_tx_size[tx_mode];
  TX_SIZE tx_size;

  vp9_clear_system_state();

  for (tx_size = TX_4X4; tx_size <= TX_32X32; ++tx_size)
    build_tree_distribution(cpi, tx_size);

  for (tx_size = TX_4X4; tx_size <= max_tx_size; ++tx_size)
    update_coef_probs_common(w, cpi, tx_size);
}
683

684
static void encode_loopfilter(struct loopfilter *lf,
685
                              struct vp9_write_bit_buffer *wb) {
686
687
  int i;

688
  // Encode the loop filter level and type
689
690
  vp9_wb_write_literal(wb, lf->filter_level, 6);
  vp9_wb_write_literal(wb, lf->sharpness_level, 3);
691

692
693
  // Write out loop filter deltas applied at the MB level based on mode or
  // ref frame (if they are enabled).
694
  vp9_wb_write_bit(wb, lf->mode_ref_delta_enabled);
695

696
697
698
  if (lf->mode_ref_delta_enabled) {
    vp9_wb_write_bit(wb, lf->mode_ref_delta_update);
    if (lf->mode_ref_delta_update) {
699
      for (i = 0; i < MAX_REF_LF_DELTAS; i++) {
700
        const int delta = lf->ref_deltas[i];
701
702
703
        const int changed = delta != lf->last_ref_deltas[i];
        vp9_wb_write_bit(wb, changed);
        if (changed) {
704
          lf->last_ref_deltas[i] = delta;
705
706
          vp9_wb_write_literal(wb, abs(delta) & 0x3F, 6);
          vp9_wb_write_bit(wb, delta < 0);
707
708
709
710
        }
      }

      for (i = 0; i < MAX_MODE_LF_DELTAS; i++) {
711
        const int delta = lf->mode_deltas[i];
712
713
714
        const int changed = delta != lf->last_mode_deltas[i];
        vp9_wb_write_bit(wb, changed);
        if (changed) {
715
          lf->last_mode_deltas[i] = delta;
716
717
          vp9_wb_write_literal(wb, abs(delta) & 0x3F, 6);
          vp9_wb_write_bit(wb, delta < 0);
718
719
720
721
722
723
        }
      }
    }
  }
}

724
// Emit an optional quantizer delta: a one-bit presence flag followed,
// when the delta is nonzero, by a 4-bit magnitude and a sign bit.
static void write_delta_q(struct vp9_write_bit_buffer *wb, int delta_q) {
  const int present = delta_q != 0;

  vp9_wb_write_bit(wb, present);
  if (present) {
    vp9_wb_write_literal(wb, abs(delta_q), 4);
    vp9_wb_write_bit(wb, delta_q < 0);
  }
}

734
735
736
737
738
739
// Write the quantization parameters for the frame: the base Q index
// followed by three optional delta-Q values. Call order defines the
// bitstream layout and must not change.
static void encode_quantization(VP9_COMMON *cm,
                                struct vp9_write_bit_buffer *wb) {
  vp9_wb_write_literal(wb, cm->base_qindex, QINDEX_BITS);
  write_delta_q(wb, cm->y_dc_delta_q);   // luma DC delta
  write_delta_q(wb, cm->uv_dc_delta_q);  // chroma DC delta
  write_delta_q(wb, cm->uv_ac_delta_q);  // chroma AC delta
}


743
// Write the segmentation portion of the uncompressed frame header: the
// enable flag, the optional segment-map coding probabilities, and the
// optional per-segment feature data.
static void encode_segmentation(VP9_COMP *cpi,
                                struct vp9_write_bit_buffer *wb) {
  struct segmentation *seg = &cpi->common.seg;
  int sid, feature;

  vp9_wb_write_bit(wb, seg->enabled);
  if (!seg->enabled)
    return;

  // Segment map.
  vp9_wb_write_bit(wb, seg->update_map);
  if (seg->update_map) {
    // Choose between temporal and spatial coding of the map.
    vp9_choose_segmap_coding_method(cpi);

    // Tree probabilities for unpredicted segments; only values that
    // differ from MAX_PROB are transmitted explicitly.
    for (sid = 0; sid < SEG_TREE_PROBS; ++sid) {
      const int p = seg->tree_probs[sid];
      if (p != MAX_PROB) {
        vp9_wb_write_bit(wb, 1);
        vp9_wb_write_literal(wb, p, 8);
      } else {
        vp9_wb_write_bit(wb, 0);
      }
    }

    // Chosen coding method, plus prediction probabilities if temporal.
    vp9_wb_write_bit(wb, seg->temporal_update);
    if (seg->temporal_update) {
      for (sid = 0; sid < PREDICTION_PROBS; ++sid) {
        const int p = seg->pred_probs[sid];
        if (p != MAX_PROB) {
          vp9_wb_write_bit(wb, 1);
          vp9_wb_write_literal(wb, p, 8);
        } else {
          vp9_wb_write_bit(wb, 0);
        }
      }
    }
  }

  // Per-segment feature data.
  vp9_wb_write_bit(wb, seg->update_data);
  if (!seg->update_data)
    return;

  vp9_wb_write_bit(wb, seg->abs_delta);

  for (sid = 0; sid < MAX_SEGMENTS; ++sid) {
    for (feature = 0; feature < SEG_LVL_MAX; ++feature) {
      const int active = vp9_segfeature_active(seg, sid, feature);
      vp9_wb_write_bit(wb, active);
      if (active) {
        const int data = vp9_get_segdata(seg, sid, feature);
        const int data_max = vp9_seg_feature_data_max(feature);

        if (vp9_is_segfeature_signed(feature)) {
          // Signed feature: magnitude then sign bit.
          encode_unsigned_max(wb, abs(data), data_max);
          vp9_wb_write_bit(wb, data < 0);
        } else {
          encode_unsigned_max(wb, data, data_max);
        }
      }
    }
  }
}

805

806
// Write the frame's transform-size mode and, when per-block selection is
// in use (TX_MODE_SELECT), the conditional updates to the transform-size
// probabilities for every context.
static void encode_txfm_probs(VP9_COMMON *cm, vp9_writer *w) {
  int ctx, b;
  unsigned int branch_ct_8x8p[TX_SIZES - 3][2];
  unsigned int branch_ct_16x16p[TX_SIZES - 2][2];
  unsigned int branch_ct_32x32p[TX_SIZES - 1][2];

  // Mode: a 2-bit literal, with one extra bit distinguishing
  // ALLOW_32X32 from TX_MODE_SELECT.
  vp9_write_literal(w, MIN(cm->tx_mode, ALLOW_32X32), 2);
  if (cm->tx_mode >= ALLOW_32X32)
    vp9_write_bit(w, cm->tx_mode == TX_MODE_SELECT);

  // Probability updates are only sent when sizes are chosen per block.
  if (cm->tx_mode != TX_MODE_SELECT)
    return;

  for (ctx = 0; ctx < TX_SIZE_CONTEXTS; ++ctx) {
    tx_counts_to_branch_counts_8x8(cm->counts.tx.p8x8[ctx], branch_ct_8x8p);
    for (b = 0; b < TX_SIZES - 3; ++b)
      vp9_cond_prob_diff_update(w, &cm->fc.tx_probs.p8x8[ctx][b],
                                branch_ct_8x8p[b]);
  }

  for (ctx = 0; ctx < TX_SIZE_CONTEXTS; ++ctx) {
    tx_counts_to_branch_counts_16x16(cm->counts.tx.p16x16[ctx],
                                     branch_ct_16x16p);
    for (b = 0; b < TX_SIZES - 2; ++b)
      vp9_cond_prob_diff_update(w, &cm->fc.tx_probs.p16x16[ctx][b],
                                branch_ct_16x16p[b]);
  }

  for (ctx = 0; ctx < TX_SIZE_CONTEXTS; ++ctx) {
    tx_counts_to_branch_counts_32x32(cm->counts.tx.p32x32[ctx],
                                     branch_ct_32x32p);
    for (b = 0; b < TX_SIZES - 1; ++b)
      vp9_cond_prob_diff_update(w, &cm->fc.tx_probs.p32x32[ctx][b],
                                branch_ct_32x32p[b]);
  }
}

842
843
844
// Signal the frame interpolation filter: a single bit for SWITCHABLE,
// otherwise a 2-bit literal remapping the internal enum value to its
// bitstream code.
static void write_interp_filter(INTERP_FILTER filter,
                                struct vp9_write_bit_buffer *wb) {
  // Internal filter order differs from the bitstream coding order.
  static const int filter_to_literal[] = { 1, 0, 2, 3 };
  const int is_switchable = filter == SWITCHABLE;

  vp9_wb_write_bit(wb, is_switchable);
  if (!is_switchable)
    vp9_wb_write_literal(wb, filter_to_literal[filter], 2);
}

851
852
static void fix_interp_filter(VP9_COMMON *cm) {
  if (cm->interp_filter == SWITCHABLE) {
853
    // Check to see if only one of the filters is actually used