diff --git a/vp9/common/vp9_blockd.h b/vp9/common/vp9_blockd.h
index d0d485272ef1cb01118b99f70536d3aa912d4d23..f52adfc972dae2f193914053fa5ada6bf51fc5ba 100644
--- a/vp9/common/vp9_blockd.h
+++ b/vp9/common/vp9_blockd.h
@@ -82,9 +82,8 @@ static INLINE int is_inter_mode(MB_PREDICTION_MODE mode) {
 
 #define INTER_MODES (1 + NEWMV - NEARESTMV)
 
-static INLINE int inter_mode_offset(MB_PREDICTION_MODE mode) {
-  return (mode - NEARESTMV);
-}
+#define INTER_OFFSET(mode) ((mode) - NEARESTMV)
+
 /* For keyframes, intra block modes are predicted by the (already decoded)
    modes for the Y blocks to the left and above us; for interframes, there
diff --git a/vp9/common/vp9_entropy.c b/vp9/common/vp9_entropy.c
index d3a867c3f403587bea371861591643e8fb33cc27..2676762be59036128e1d72133bab307faa56d9f2 100644
--- a/vp9/common/vp9_entropy.c
+++ b/vp9/common/vp9_entropy.c
@@ -332,7 +332,7 @@ static void adapt_coef_probs(VP9_COMMON *cm, TX_SIZE tx_size,
           if (l >= 3 && k == 0)
             continue;
           vp9_tree_probs_from_distribution(vp9_coefmodel_tree, branch_ct,
-                                           coef_counts[i][j][k][l], 0);
+                                           coef_counts[i][j][k][l]);
           branch_ct[0][1] = eob_branch_count[i][j][k][l] - branch_ct[0][0];
           for (m = 0; m < UNCONSTRAINED_NODES; ++m)
             dst_coef_probs[i][j][k][l][m] = merge_probs(
diff --git a/vp9/common/vp9_entropymode.c b/vp9/common/vp9_entropymode.c
index a963d55e6b82f2d677cf7e6c5f989d0765e7b456..3b2510dcd476833d6225267378dd7dbd7c880095 100644
--- a/vp9/common/vp9_entropymode.c
+++ b/vp9/common/vp9_entropymode.c
@@ -235,9 +235,9 @@ const vp9_tree_index vp9_intra_mode_tree[TREE_SIZE(INTRA_MODES)] = {
 struct vp9_token vp9_intra_mode_encodings[INTRA_MODES];
 
 const vp9_tree_index vp9_inter_mode_tree[TREE_SIZE(INTER_MODES)] = {
-  -ZEROMV, 2,
-  -NEARESTMV, 4,
-  -NEARMV, -NEWMV
+  -INTER_OFFSET(ZEROMV), 2,
+  -INTER_OFFSET(NEARESTMV), 4,
+  -INTER_OFFSET(NEARMV), -INTER_OFFSET(NEWMV)
 };
 struct vp9_token vp9_inter_mode_encodings[INTER_MODES];
 
@@ -343,8 +343,7 @@ void vp9_entropy_mode_init() {
   vp9_tokens_from_tree(vp9_switchable_interp_encodings,
                        vp9_switchable_interp_tree);
   vp9_tokens_from_tree(vp9_partition_encodings, vp9_partition_tree);
-  vp9_tokens_from_tree_offset(vp9_inter_mode_encodings,
-                              vp9_inter_mode_tree, NEARESTMV);
+  vp9_tokens_from_tree(vp9_inter_mode_encodings, vp9_inter_mode_tree);
 }
 
 #define COUNT_SAT 20
@@ -356,9 +355,9 @@ static int adapt_prob(vp9_prob pre_prob, const unsigned int ct[2]) {
 
 static void adapt_probs(const vp9_tree_index *tree, const vp9_prob *pre_probs,
                         const unsigned int *counts,
-                        unsigned int offset, vp9_prob *probs) {
-  tree_merge_probs(tree, pre_probs, counts, offset,
-                   COUNT_SAT, MAX_UPDATE_FACTOR, probs);
+                        vp9_prob *probs) {
+  tree_merge_probs(tree, pre_probs, counts, COUNT_SAT, MAX_UPDATE_FACTOR,
+                   probs);
 }
 
 void vp9_adapt_mode_probs(VP9_COMMON *cm) {
@@ -383,25 +382,24 @@ void vp9_adapt_mode_probs(VP9_COMMON *cm) {
 
   for (i = 0; i < INTER_MODE_CONTEXTS; i++)
     adapt_probs(vp9_inter_mode_tree, pre_fc->inter_mode_probs[i],
-                counts->inter_mode[i], NEARESTMV, fc->inter_mode_probs[i]);
+                counts->inter_mode[i], fc->inter_mode_probs[i]);
 
   for (i = 0; i < BLOCK_SIZE_GROUPS; i++)
     adapt_probs(vp9_intra_mode_tree, pre_fc->y_mode_prob[i],
-                counts->y_mode[i], 0, fc->y_mode_prob[i]);
+                counts->y_mode[i], fc->y_mode_prob[i]);
 
   for (i = 0; i < INTRA_MODES; ++i)
     adapt_probs(vp9_intra_mode_tree, pre_fc->uv_mode_prob[i],
-                counts->uv_mode[i], 0, fc->uv_mode_prob[i]);
+                counts->uv_mode[i], fc->uv_mode_prob[i]);
 
   for (i = 0; i < PARTITION_CONTEXTS; i++)
     adapt_probs(vp9_partition_tree, pre_fc->partition_prob[i],
-                counts->partition[i], 0, fc->partition_prob[i]);
+                counts->partition[i], fc->partition_prob[i]);
 
   if (cm->mcomp_filter_type == SWITCHABLE) {
     for (i = 0; i < SWITCHABLE_FILTER_CONTEXTS; i++)
       adapt_probs(vp9_switchable_interp_tree, pre_fc->switchable_interp_prob[i],
-                  counts->switchable_interp[i], 0,
-                  fc->switchable_interp_prob[i]);
+                  counts->switchable_interp[i], fc->switchable_interp_prob[i]);
   }
 
   if (cm->tx_mode == TX_MODE_SELECT) {
diff --git a/vp9/common/vp9_entropymv.c b/vp9/common/vp9_entropymv.c
index b061cdb382125681bfbf1f39dd5fed9feb65f02e..290dcdd172781cd195fe005320e4bde18ae17690 100644
--- a/vp9/common/vp9_entropymv.c
+++ b/vp9/common/vp9_entropymv.c
@@ -196,8 +196,8 @@ static vp9_prob adapt_prob(vp9_prob prep, const unsigned int ct[2]) {
 
 static void adapt_probs(const vp9_tree_index *tree, const vp9_prob *pre_probs,
                         const unsigned int *counts, vp9_prob *probs) {
-  tree_merge_probs(tree, pre_probs, counts, 0,
-                   MV_COUNT_SAT, MV_MAX_UPDATE_FACTOR, probs);
+  tree_merge_probs(tree, pre_probs, counts, MV_COUNT_SAT, MV_MAX_UPDATE_FACTOR,
+                   probs);
 }
 
 void vp9_adapt_mv_probs(VP9_COMMON *cm, int allow_hp) {
@@ -207,8 +207,7 @@ void vp9_adapt_mv_probs(VP9_COMMON *cm, int allow_hp) {
   const nmv_context *pre_fc = &cm->frame_contexts[cm->frame_context_idx].nmvc;
   const nmv_context_counts *counts = &cm->counts.mv;
 
-  adapt_probs(vp9_mv_joint_tree, pre_fc->joints, counts->joints,
-              fc->joints);
+  adapt_probs(vp9_mv_joint_tree, pre_fc->joints, counts->joints, fc->joints);
 
   for (i = 0; i < 2; ++i) {
     nmv_component *comp = &fc->comps[i];
diff --git a/vp9/common/vp9_treecoder.c b/vp9/common/vp9_treecoder.c
index 1805fb4d867bcb846dba9312815400ad91e72c24..e2a5b9faaff444afffc420bdd135687419df1d35 100644
--- a/vp9/common/vp9_treecoder.c
+++ b/vp9/common/vp9_treecoder.c
@@ -35,28 +35,20 @@ void vp9_tokens_from_tree(struct vp9_token *p, vp9_tree t) {
   tree2tok(p, t, 0, 0, 0);
 }
 
-void vp9_tokens_from_tree_offset(struct vp9_token *p, vp9_tree t,
-                                 int offset) {
-  tree2tok(p - offset, t, 0, 0, 0);
-}
-
 static unsigned int convert_distribution(unsigned int i, vp9_tree tree,
                                          unsigned int branch_ct[][2],
-                                         const unsigned int num_events[],
-                                         unsigned int tok0_offset) {
+                                         const unsigned int num_events[]) {
   unsigned int left, right;
 
-  if (tree[i] <= 0) {
-    left = num_events[-tree[i] - tok0_offset];
-  } else {
-    left = convert_distribution(tree[i], tree, branch_ct, num_events,
-                                tok0_offset);
-  }
+  if (tree[i] <= 0)
+    left = num_events[-tree[i]];
+  else
+    left = convert_distribution(tree[i], tree, branch_ct, num_events);
+
   if (tree[i + 1] <= 0)
-    right = num_events[-tree[i + 1] - tok0_offset];
+    right = num_events[-tree[i + 1]];
   else
-    right = convert_distribution(tree[i + 1], tree, branch_ct, num_events,
-                                 tok0_offset);
+    right = convert_distribution(tree[i + 1], tree, branch_ct, num_events);
   branch_ct[i >> 1][0] = left;
   branch_ct[i >> 1][1] = right;
@@ -65,9 +57,8 @@ static unsigned int convert_distribution(unsigned int i, vp9_tree tree,
 
 void vp9_tree_probs_from_distribution(vp9_tree tree,
                                       unsigned int branch_ct[/* n-1 */][2],
-                                      const unsigned int num_events[/* n */],
-                                      unsigned int tok0_offset) {
-  convert_distribution(0, tree, branch_ct, num_events, tok0_offset);
+                                      const unsigned int num_events[/* n */]) {
+  convert_distribution(0, tree, branch_ct, num_events);
 }
 
diff --git a/vp9/common/vp9_treecoder.h b/vp9/common/vp9_treecoder.h
index 9c776d61c07bcd746741ddc6318e24c047e08b11..a79b1564a1c495a26abadabd249a20c2b57106d4 100644
--- a/vp9/common/vp9_treecoder.h
+++ b/vp9/common/vp9_treecoder.h
@@ -42,7 +42,6 @@ struct vp9_token {
 
 /* Construct encoding array from tree. */
 void vp9_tokens_from_tree(struct vp9_token*, vp9_tree);
-void vp9_tokens_from_tree_offset(struct vp9_token*, vp9_tree, int offset);
 
 /* Convert array of token occurrence counts into a table of probabilities
    for the associated binary encoding tree. Also writes count of branches
@@ -51,8 +50,7 @@ void vp9_tokens_from_tree_offset(struct vp9_token*, vp9_tree, int offset);
 
 void vp9_tree_probs_from_distribution(vp9_tree tree,
                                       unsigned int branch_ct[ /* n - 1 */ ][2],
-                                      const unsigned int num_events[ /* n */ ],
-                                      unsigned int tok0_offset);
+                                      const unsigned int num_events[ /* n */ ]);
 
 
 static INLINE vp9_prob clip_prob(int p) {
@@ -116,10 +114,10 @@ static unsigned int tree_merge_probs_impl(unsigned int i,
 
 static void tree_merge_probs(const vp9_tree_index *tree,
                              const vp9_prob *pre_probs,
-                             const unsigned int *counts, int offset,
+                             const unsigned int *counts,
                              unsigned int count_sat,
                              unsigned int max_update_factor, vp9_prob *probs) {
-  tree_merge_probs_impl(0, tree, pre_probs, &counts[-offset],
+  tree_merge_probs_impl(0, tree, pre_probs, counts,
                         count_sat, max_update_factor, probs);
 }
 
diff --git a/vp9/decoder/vp9_decodemv.c b/vp9/decoder/vp9_decodemv.c
index 1ca5786216bf0d1aaf4aff7eae6465f2685cac8f..e54bd79463d649664656ac3833670bfd7a77f37a 100644
--- a/vp9/decoder/vp9_decodemv.c
+++ b/vp9/decoder/vp9_decodemv.c
@@ -48,12 +48,13 @@ static MB_PREDICTION_MODE read_intra_mode_uv(VP9_COMMON *cm, vp9_reader *r,
 }
 
 static MB_PREDICTION_MODE read_inter_mode(VP9_COMMON *cm, vp9_reader *r,
-                                          uint8_t context) {
-  const MB_PREDICTION_MODE mode = treed_read(r, vp9_inter_mode_tree,
-                                             cm->fc.inter_mode_probs[context]);
+                                          int ctx) {
+  const int mode = treed_read(r, vp9_inter_mode_tree,
+                              cm->fc.inter_mode_probs[ctx]);
   if (!cm->frame_parallel_decoding_mode)
-    ++cm->counts.inter_mode[context][inter_mode_offset(mode)];
-  return mode;
+    ++cm->counts.inter_mode[ctx][mode];
+
+  return NEARESTMV + mode;
 }
 
 static int read_segment_id(vp9_reader *r, const struct segmentation *seg) {
diff --git a/vp9/encoder/vp9_bitstream.c b/vp9/encoder/vp9_bitstream.c
index 87bd36c2bfa70c2feadff7217ca9fe772e4ffcec..53ff702fd823813b8f945a567f51aa23e14c45e9 100644
--- a/vp9/encoder/vp9_bitstream.c
+++ b/vp9/encoder/vp9_bitstream.c
@@ -169,10 +169,8 @@ static void update_mode(vp9_writer *w, int n, vp9_tree tree,
                         const unsigned int num_events[/* n */]) {
   int i = 0;
 
-  vp9_tree_probs_from_distribution(tree, bct, num_events, 0);
-  n--;
-
-  for (i = 0; i < n; ++i)
+  vp9_tree_probs_from_distribution(tree, bct, num_events);
+  for (i = 0; i < n - 1; ++i)
     vp9_cond_prob_diff_update(w, &Pcur[i], bct[i]);
 }
 
@@ -231,7 +229,7 @@ static void update_switchable_interp_probs(VP9_COMP *cpi, vp9_writer *w) {
   int i, j;
   for (j = 0; j < SWITCHABLE_FILTER_CONTEXTS; ++j) {
     vp9_tree_probs_from_distribution(vp9_switchable_interp_tree, branch_ct,
-                                     cm->counts.switchable_interp[j], 0);
+                                     cm->counts.switchable_interp[j]);
 
     for (i = 0; i < SWITCHABLE_FILTERS - 1; ++i)
       vp9_cond_prob_diff_update(w, &cm->fc.switchable_interp_prob[j][i],
@@ -250,7 +248,7 @@ static void update_inter_mode_probs(VP9_COMMON *cm, vp9_writer *w) {
   for (i = 0; i < INTER_MODE_CONTEXTS; ++i) {
     unsigned int branch_ct[INTER_MODES - 1][2];
     vp9_tree_probs_from_distribution(vp9_inter_mode_tree, branch_ct,
-                                     cm->counts.inter_mode[i], NEARESTMV);
+                                     cm->counts.inter_mode[i]);
 
     for (j = 0; j < INTER_MODES - 1; ++j)
       vp9_cond_prob_diff_update(w, &cm->fc.inter_mode_probs[i][j],
@@ -321,7 +319,7 @@ static void write_sb_mv_ref(vp9_writer *w, MB_PREDICTION_MODE mode,
                             const vp9_prob *p) {
   assert(is_inter_mode(mode));
   write_token(w, vp9_inter_mode_tree, p,
-              &vp9_inter_mode_encodings[inter_mode_offset(mode)]);
+              &vp9_inter_mode_encodings[INTER_OFFSET(mode)]);
 }
 
@@ -448,7 +446,7 @@ static void pack_inter_mode_mvs(VP9_COMP *cpi, MODE_INFO *m, vp9_writer *bc) {
       if (bsize >= BLOCK_8X8) {
        write_sb_mv_ref(bc, mode, mv_ref_p);
        ++cm->counts.inter_mode[mi->mode_context[rf]]
-                              [inter_mode_offset(mode)];
+                              [INTER_OFFSET(mode)];
      }
    }
 
@@ -471,7 +469,7 @@ static void pack_inter_mode_mvs(VP9_COMP *cpi, MODE_INFO *m, vp9_writer *bc) {
           const MB_PREDICTION_MODE blockmode = m->bmi[j].as_mode;
           write_sb_mv_ref(bc, blockmode, mv_ref_p);
           ++cm->counts.inter_mode[mi->mode_context[rf]]
-                                 [inter_mode_offset(blockmode)];
+                                 [INTER_OFFSET(blockmode)];
 
           if (blockmode == NEWMV) {
 #ifdef ENTROPY_STATS
@@ -703,7 +701,7 @@ static void build_tree_distribution(VP9_COMP *cpi, TX_SIZE tx_size) {
             continue;
           vp9_tree_probs_from_distribution(vp9_coef_tree,
                                            coef_branch_ct[i][j][k][l],
-                                           coef_counts[i][j][k][l], 0);
+                                           coef_counts[i][j][k][l]);
           coef_branch_ct[i][j][k][l][0][1] = eob_branch_ct[i][j][k][l] -
                                              coef_branch_ct[i][j][k][l][0][0];
           for (m = 0; m < UNCONSTRAINED_NODES; ++m)
diff --git a/vp9/encoder/vp9_encodemv.c b/vp9/encoder/vp9_encodemv.c
index e2c6c4c0cf1e23122f51a89de5b34a4d7f652256..030ca64134ec64c7d8c747cd511b6ee5f4b0db3a 100644
--- a/vp9/encoder/vp9_encodemv.c
+++ b/vp9/encoder/vp9_encodemv.c
@@ -155,9 +155,8 @@ static void counts_to_nmv_context(
     unsigned int (*branch_ct_class0_hp)[2],
     unsigned int (*branch_ct_hp)[2]) {
   int i, j, k;
-  vp9_tree_probs_from_distribution(vp9_mv_joint_tree,
-                                   branch_ct_joint,
-                                   nmv_count->joints, 0);
+  vp9_tree_probs_from_distribution(vp9_mv_joint_tree, branch_ct_joint,
+                                   nmv_count->joints);
   for (i = 0; i < 2; ++i) {
     const uint32_t s0 = nmv_count->comps[i].sign[0];
     const uint32_t s1 = nmv_count->comps[i].sign[1];
@@ -166,10 +165,10 @@ static void counts_to_nmv_context(
     branch_ct_sign[i][1] = s1;
     vp9_tree_probs_from_distribution(vp9_mv_class_tree,
                                      branch_ct_classes[i],
-                                     nmv_count->comps[i].classes, 0);
+                                     nmv_count->comps[i].classes);
     vp9_tree_probs_from_distribution(vp9_mv_class0_tree,
                                      branch_ct_class0[i],
-                                     nmv_count->comps[i].class0, 0);
+                                     nmv_count->comps[i].class0);
     for (j = 0; j < MV_OFFSET_BITS; ++j) {
       const uint32_t b0 = nmv_count->comps[i].bits[j][0];
       const uint32_t b1 = nmv_count->comps[i].bits[j][1];
@@ -182,11 +181,11 @@ static void counts_to_nmv_context(
     for (k = 0; k < CLASS0_SIZE; ++k) {
       vp9_tree_probs_from_distribution(vp9_mv_fp_tree,
                                        branch_ct_class0_fp[i][k],
-                                       nmv_count->comps[i].class0_fp[k], 0);
+                                       nmv_count->comps[i].class0_fp[k]);
     }
     vp9_tree_probs_from_distribution(vp9_mv_fp_tree,
                                      branch_ct_fp[i],
-                                     nmv_count->comps[i].fp, 0);
+                                     nmv_count->comps[i].fp);
   }
   if (usehp) {
     for (i = 0; i < 2; ++i) {
diff --git a/vp9/encoder/vp9_rdopt.c b/vp9/encoder/vp9_rdopt.c
index 993919e5bd905e6dbcddfe75165fdfb35befc1b2..76237ba24d6c975e996d4ccf89415c50bc01865c 100644
--- a/vp9/encoder/vp9_rdopt.c
+++ b/vp9/encoder/vp9_rdopt.c
@@ -268,10 +268,10 @@ void vp9_initialize_rd_consts(VP9_COMP *cpi) {
       MB_PREDICTION_MODE m;
 
       for (m = NEARESTMV; m < MB_MODE_COUNT; m++)
-        cpi->mb.inter_mode_cost[i][inter_mode_offset(m)] =
+        cpi->mb.inter_mode_cost[i][INTER_OFFSET(m)] =
            cost_token(vp9_inter_mode_tree,
                       cm->fc.inter_mode_probs[i],
-                      &vp9_inter_mode_encodings[inter_mode_offset(m)]);
+                      &vp9_inter_mode_encodings[INTER_OFFSET(m)]);
     }
   }
 }
@@ -1416,7 +1416,7 @@ static int cost_mv_ref(VP9_COMP *cpi, MB_PREDICTION_MODE mode,
   // Don't account for mode here if segment skip is enabled.
   if (!vp9_segfeature_active(&cpi->common.seg, segment_id, SEG_LVL_SKIP)) {
     assert(is_inter_mode(mode));
-    return x->inter_mode_cost[mode_context][inter_mode_offset(mode)];
+    return x->inter_mode_cost[mode_context][INTER_OFFSET(mode)];
   } else {
     return 0;
   }
@@ -1707,7 +1707,7 @@ static void rd_check_segment_txsize(VP9_COMP *cpi, MACROBLOCK *x,
       const struct buf_2d orig_src = x->plane[0].src;
       struct buf_2d orig_pre[2];
 
-      mode_idx = inter_mode_offset(this_mode);
+      mode_idx = INTER_OFFSET(this_mode);
       bsi->rdstat[i][mode_idx].brdcost = INT64_MAX;
 
       // if we're near/nearest and mv == 0,0, compare to zeromv
@@ -2002,7 +2002,7 @@ static void rd_check_segment_txsize(VP9_COMP *cpi, MACROBLOCK *x,
        return;
      }
 
-      mode_idx = inter_mode_offset(mode_selected);
+      mode_idx = INTER_OFFSET(mode_selected);
      vpx_memcpy(t_above, bsi->rdstat[i][mode_idx].ta, sizeof(t_above));
      vpx_memcpy(t_left, bsi->rdstat[i][mode_idx].tl, sizeof(t_left));
@@ -2078,7 +2078,7 @@ static int64_t rd_pick_best_mbsegmentation(VP9_COMP *cpi, MACROBLOCK *x,
     return INT64_MAX;
   /* set it to the best */
   for (i = 0; i < 4; i++) {
-    mode_idx = inter_mode_offset(bsi->modes[i]);
+    mode_idx = INTER_OFFSET(bsi->modes[i]);
     mi->bmi[i].as_mv[0].as_int = bsi->rdstat[i][mode_idx].mvs[0].as_int;
     if (has_second_ref(mbmi))
       mi->bmi[i].as_mv[1].as_int = bsi->rdstat[i][mode_idx].mvs[1].as_int;
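Note (reviewer sketch, not part of the patch): a minimal example of the caller-side convention after this change. The offset handling moves out of the tree walkers: the inter-mode tree stores INTER_OFFSET()-adjusted leaf indices, callers index their count arrays with INTER_OFFSET(mode), and vp9_tree_probs_from_distribution() / tree_merge_probs() no longer take an offset argument. The include list and the example_* helper names below are illustrative assumptions, not code from the patch.

#include "vp9/common/vp9_blockd.h"       /* MB_PREDICTION_MODE, INTER_MODES, INTER_OFFSET (assumed location) */
#include "vp9/common/vp9_entropymode.h"  /* vp9_inter_mode_tree (assumed location) */
#include "vp9/common/vp9_treecoder.h"    /* vp9_tree_probs_from_distribution */

/* Tally one coded inter mode the way read_inter_mode() and
 * pack_inter_mode_mvs() do above: counts are indexed by INTER_OFFSET(mode). */
static void example_count_inter_mode(unsigned int counts[INTER_MODES],
                                     MB_PREDICTION_MODE mode) {
  ++counts[INTER_OFFSET(mode)];  /* mode is one of NEARESTMV..NEWMV */
}

/* Turn the tallied counts into per-branch counts. The trailing offset
 * argument is gone because the tree leaves already carry the offsets
 * (-INTER_OFFSET(ZEROMV), -INTER_OFFSET(NEARESTMV), ...). */
static void example_inter_mode_branch_ct(
    const unsigned int counts[INTER_MODES],
    unsigned int branch_ct[INTER_MODES - 1][2]) {
  vp9_tree_probs_from_distribution(vp9_inter_mode_tree, branch_ct, counts);
}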