/*
 *  Copyright (c) 2010 The WebM project authors. All Rights Reserved.
 *
 *  Use of this source code is governed by a BSD-style license
 *  that can be found in the LICENSE file in the root of the source
 *  tree. An additional intellectual property rights grant can be found
 *  in the file PATENTS.  All contributing project authors may
 *  be found in the AUTHORS file in the root of the source tree.
 */


#include <math.h>
#include <stdio.h>
#include <string.h>
#include <assert.h>
#include "vp9/encoder/vp9_onyx_int.h"
#include "vp9/encoder/vp9_tokenize.h"
#include "vpx_mem/vpx_mem.h"

#include "vp9/common/vp9_pred_common.h"
#include "vp9/common/vp9_seg_common.h"
#include "vp9/common/vp9_entropy.h"

static TOKENVALUE dct_value_tokens[DCT_MAX_VALUE * 2];
const TOKENVALUE *vp9_dct_value_tokens_ptr;
static int dct_value_cost[DCT_MAX_VALUE * 2];
const int *vp9_dct_value_cost_ptr;

// Array indices are identical to previously-existing CONTEXT_NODE indices
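// Entries come in pairs (the two branches of a node): a positive entry is the
// offset of the next node's pair within the array, while a zero or negative
// entry is a leaf holding the negated token value.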
const vp9_tree_index vp9_coef_tree[TREE_SIZE(ENTROPY_TOKENS)] = {
  -EOB_TOKEN, 2,                       // 0  = EOB
  -ZERO_TOKEN, 4,                      // 1  = ZERO
  -ONE_TOKEN, 6,                       // 2  = ONE
  8, 12,                               // 3  = LOW_VAL
  -TWO_TOKEN, 10,                      // 4  = TWO
  -THREE_TOKEN, -FOUR_TOKEN,           // 5  = THREE
  14, 16,                              // 6  = HIGH_LOW
  -CATEGORY1_TOKEN, -CATEGORY2_TOKEN,  // 7  = CAT_ONE
  18, 20,                              // 8  = CAT_THREEFOUR
  -CATEGORY3_TOKEN, -CATEGORY4_TOKEN,  // 9  = CAT_THREE
  -CATEGORY5_TOKEN, -CATEGORY6_TOKEN   // 10 = CAT_FIVE
};

// Unconstrained Node Tree
const vp9_tree_index vp9_coef_con_tree[TREE_SIZE(ENTROPY_TOKENS)] = {
  2, 6,                                // 0 = LOW_VAL
  -TWO_TOKEN, 4,                       // 1 = TWO
  -THREE_TOKEN, -FOUR_TOKEN,           // 2 = THREE
  8, 10,                               // 3 = HIGH_LOW
  -CATEGORY1_TOKEN, -CATEGORY2_TOKEN,  // 4 = CAT_ONE
  12, 14,                              // 5 = CAT_THREEFOUR
  -CATEGORY3_TOKEN, -CATEGORY4_TOKEN,  // 6 = CAT_THREE
  -CATEGORY5_TOKEN, -CATEGORY6_TOKEN   // 7 = CAT_FIVE
};

static const vp9_prob Pcat1[] = { 159};
static const vp9_prob Pcat2[] = { 165, 145};
static const vp9_prob Pcat3[] = { 173, 148, 140};
static const vp9_prob Pcat4[] = { 176, 155, 140, 135};
static const vp9_prob Pcat5[] = { 180, 157, 141, 134, 130};
static const vp9_prob Pcat6[] = {
  254, 254, 254, 252, 249, 243, 230, 196, 177, 153, 140, 133, 130, 129
};

static vp9_tree_index cat1[2], cat2[4], cat3[6], cat4[8], cat5[10], cat6[28];

// Build a chain tree of n nodes in which both branches of a node lead to the
// next node; it is used to code the extra bits of a category token, one
// probability per bit.
static void init_bit_tree(vp9_tree_index *p, int n) {
  int i = 0;

  while (++i < n) {
    p[0] = p[1] = i << 1;
    p += 2;
  }

  p[0] = p[1] = 0;
}

static void init_bit_trees() {
  init_bit_tree(cat1, 1);
  init_bit_tree(cat2, 2);
  init_bit_tree(cat3, 3);
  init_bit_tree(cat4, 4);
  init_bit_tree(cat5, 5);
  init_bit_tree(cat6, 14);
}

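// Per-token extra-bit descriptors: {tree, probabilities, number of extra bits,
// base value of the category}.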
const vp9_extra_bit vp9_extra_bits[ENTROPY_TOKENS] = {
  {0, 0, 0, 0},           // ZERO_TOKEN
  {0, 0, 0, 1},           // ONE_TOKEN
  {0, 0, 0, 2},           // TWO_TOKEN
  {0, 0, 0, 3},           // THREE_TOKEN
  {0, 0, 0, 4},           // FOUR_TOKEN
  {cat1, Pcat1, 1, 5},    // CATEGORY1_TOKEN
  {cat2, Pcat2, 2, 7},    // CATEGORY2_TOKEN
  {cat3, Pcat3, 3, 11},   // CATEGORY3_TOKEN
  {cat4, Pcat4, 4, 19},   // CATEGORY4_TOKEN
  {cat5, Pcat5, 5, 35},   // CATEGORY5_TOKEN
  {cat6, Pcat6, 14, 67},  // CATEGORY6_TOKEN
  {0, 0, 0, 0}            // EOB_TOKEN
};

struct vp9_token vp9_coef_encodings[ENTROPY_TOKENS];

void vp9_coef_tree_initialize() {
  init_bit_trees();
  vp9_tokens_from_tree(vp9_coef_encodings, vp9_coef_tree);
}

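// Precompute, for every possible quantized coefficient value, its token, its
// sign/extra bits, and the cost of coding those extra bits.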
static void fill_value_tokens() {
  TOKENVALUE *const t = dct_value_tokens + DCT_MAX_VALUE;
  const vp9_extra_bit *const e = vp9_extra_bits;

  int i = -DCT_MAX_VALUE;
  int sign = 1;

  do {
    if (!i)
      sign = 0;

    {
      const int a = sign ? -i : i;
      int eb = sign;

      if (a > 4) {
        int j = 4;

        while (++j < 11 && e[j].base_val <= a) {}

        t[i].token = --j;
        eb |= (a - e[j].base_val) << 1;
      } else {
        t[i].token = a;
      }
      t[i].extra = eb;
    }

    // Initialize the extra-bit cost for each possible coefficient value.
    {
      int cost = 0;
      const vp9_extra_bit *p = &vp9_extra_bits[t[i].token];

      if (p->base_val) {
        const int extra = t[i].extra;
        const int length = p->len;

        if (length)
          cost += treed_cost(p->tree, p->prob, extra >> 1, length);

        cost += vp9_cost_bit(vp9_prob_half, extra & 1); /* sign */
        dct_value_cost[i + DCT_MAX_VALUE] = cost;
      }
    }
  } while (++i < DCT_MAX_VALUE);

  vp9_dct_value_tokens_ptr = dct_value_tokens + DCT_MAX_VALUE;
  vp9_dct_value_cost_ptr = dct_value_cost + DCT_MAX_VALUE;
}

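// Arguments shared by the per-transform-block callbacks below.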
struct tokenize_b_args {
  VP9_COMP *cpi;
  MACROBLOCKD *xd;
  TOKENEXTRA **tp;
  TX_SIZE tx_size;
  uint8_t *token_cache;
};

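// Dry-run callback: update the entropy contexts for a block as if it had been
// tokenized, without emitting any tokens.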
static void set_entropy_context_b(int plane, int block, BLOCK_SIZE plane_bsize,
                                  TX_SIZE tx_size, void *arg) {
  struct tokenize_b_args* const args = arg;
  MACROBLOCKD *const xd = args->xd;
  struct macroblock_plane *p = &args->cpi->mb.plane[plane];
  struct macroblockd_plane *pd = &xd->plane[plane];
  int aoff, loff;
  txfrm_block_to_raster_xy(plane_bsize, tx_size, block, &aoff, &loff);
  set_contexts(xd, pd, plane_bsize, tx_size, p->eobs[block] > 0, aoff, loff);
}

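// Convert the quantized coefficients of one transform block into a stream of
// TOKENEXTRA entries, updating the token counts and entropy contexts as it
// goes.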
static void tokenize_b(int plane, int block, BLOCK_SIZE plane_bsize,
                       TX_SIZE tx_size, void *arg) {
  struct tokenize_b_args* const args = arg;
  VP9_COMP *cpi = args->cpi;
  MACROBLOCKD *xd = args->xd;
  TOKENEXTRA **tp = args->tp;
  uint8_t *token_cache = args->token_cache;
  struct macroblock_plane *p = &cpi->mb.plane[plane];
  struct macroblockd_plane *pd = &xd->plane[plane];
  MB_MODE_INFO *mbmi = &xd->mi_8x8[0]->mbmi;
  int pt; /* near block/prev token context index */
  int c = 0, rc = 0;
  TOKENEXTRA *t = *tp;        /* store tokens starting here */
  const int eob = p->eobs[block];
  const PLANE_TYPE type = pd->plane_type;
  const int16_t *qcoeff_ptr = BLOCK_OFFSET(p->qcoeff, block);
  const int segment_id = mbmi->segment_id;
  const int16_t *scan, *nb;
  const scan_order *so;
  vp9_coeff_count *const counts = cpi->coef_counts[tx_size];
  vp9_coeff_probs_model *const coef_probs = cpi->common.fc.coef_probs[tx_size];
  const int ref = is_inter_block(mbmi);
  const uint8_t *const band_translate = get_band_translate(tx_size);
  const int seg_eob = get_tx_eob(&cpi->common.seg, segment_id, tx_size);

  int aoff, loff;
  txfrm_block_to_raster_xy(plane_bsize, tx_size, block, &aoff, &loff);

  assert((!type && !plane) || (type && plane));

  pt = get_entropy_context(tx_size, pd->above_context + aoff,
                                    pd->left_context + loff);
  so = get_scan(xd, tx_size, type, block);
  scan = so->scan;
  nb = so->neighbors;

  c = 0;
  do {
    const int band = band_translate[c];
    int token;
    int v = 0;
    rc = scan[c];
    if (c)
      pt = get_coef_context(nb, token_cache, c);
    if (c < eob) {
      v = qcoeff_ptr[rc];
      assert(-DCT_MAX_VALUE <= v && v < DCT_MAX_VALUE);

      t->extra = vp9_dct_value_tokens_ptr[v].extra;
      token    = vp9_dct_value_tokens_ptr[v].token;
    } else {
      token = EOB_TOKEN;
    }

    t->token = token;
    t->context_tree = coef_probs[type][ref][band][pt];
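    // The EOB node of the token tree is skipped when the previously coded
    // coefficient in scan order was zero.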
    t->skip_eob_node = (c > 0) && (token_cache[scan[c - 1]] == 0);

    assert(vp9_coef_encodings[t->token].len - t->skip_eob_node > 0);

    ++counts[type][ref][band][pt][token];
    if (!t->skip_eob_node)
      ++cpi->common.counts.eob_branch[tx_size][type][ref][band][pt];

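    // Record the energy class of this token so that later coefficients can
    // derive their context from it.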
    token_cache[rc] = vp9_pt_energy_class[token];
    ++t;
  } while (c < eob && ++c < seg_eob);

  *tp = t;

  set_contexts(xd, pd, plane_bsize, tx_size, c > 0, aoff, loff);
}

struct is_skippable_args {
  MACROBLOCK *x;
  int *skippable;
};

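// foreach_transformed_block callback: clears *skippable as soon as any block
// in the walk has a nonzero eob.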
static void is_skippable(int plane, int block,
                         BLOCK_SIZE plane_bsize, TX_SIZE tx_size,
                         void *argv) {
  struct is_skippable_args *args = argv;
  args->skippable[0] &= (!args->x->plane[plane].eobs[block]);
}

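// A superblock is skippable only if every transform block in every plane has
// an eob of zero.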
static int sb_is_skippable(MACROBLOCK *x, BLOCK_SIZE bsize) {
  int result = 1;
  struct is_skippable_args args = {x, &result};
  foreach_transformed_block(&x->e_mbd, bsize, is_skippable, &args);
  return result;
}

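// Same check as above, restricted to a single plane.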
int vp9_is_skippable_in_plane(MACROBLOCK *x, BLOCK_SIZE bsize, int plane) {
  int result = 1;
  struct is_skippable_args args = {x, &result};
  foreach_transformed_block_in_plane(&x->e_mbd, bsize, plane, is_skippable,
                                     &args);
  return result;
}

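// Tokenize an entire superblock, or record a skip and reset the contexts when
// every block is skippable. On a dry run only the entropy contexts are
// updated and the token pointer is restored.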
void vp9_tokenize_sb(VP9_COMP *cpi, TOKENEXTRA **t, int dry_run,
                     BLOCK_SIZE bsize) {
  VP9_COMMON *const cm = &cpi->common;
  MACROBLOCKD *const xd = &cpi->mb.e_mbd;
  MB_MODE_INFO *const mbmi = &xd->mi_8x8[0]->mbmi;
  TOKENEXTRA *t_backup = *t;
  const int mb_skip_context = vp9_get_pred_context_mbskip(xd);
  const int skip_inc = !vp9_segfeature_active(&cm->seg, mbmi->segment_id,
                                              SEG_LVL_SKIP);
  struct tokenize_b_args arg = {cpi, xd, t, mbmi->tx_size, cpi->mb.token_cache};

  mbmi->skip_coeff = sb_is_skippable(&cpi->mb, bsize);
  if (mbmi->skip_coeff) {
    if (!dry_run)
      cm->counts.mbskip[mb_skip_context][1] += skip_inc;
    reset_skip_context(xd, bsize);
    if (dry_run)
      *t = t_backup;
    return;
  }

  if (!dry_run) {
    cm->counts.mbskip[mb_skip_context][0] += skip_inc;
    foreach_transformed_block(xd, bsize, tokenize_b, &arg);
  } else {
    foreach_transformed_block(xd, bsize, set_entropy_context_b, &arg);
    *t = t_backup;
  }
}

void vp9_tokenize_initialize() {
  fill_value_tokens();
}