Commit 1b6ccfcf authored by James Zern

Revert "Fix separation between ext-inter group of expts."

This reverts commit 5ca25dfc.

It breaks high-bitdepth builds with:
--enable-aom-highbitdepth --enable-experimental --enable-dual-filter
--enable-entropy --enable-ext-inter --enable-ext-intra
--enable-ext-partition-types --enable-ext-refs --enable-ext-tx
--enable-filter-intra --enable-loop-restoration

or more simply:
--enable-aom-highbitdepth --enable-experimental --enable-ext-inter

BUG=aomedia:463

Change-Id: If814131e634e4411569859f7426efafe158b83fd
parent 4a2e3b2d
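For context (not part of the revert itself): the hunks below toggle between guarding the compound-type tables with the combined flags and guarding them with the individual CONFIG_WEDGE / CONFIG_COMPOUND_SEGMENT flags. The sketch below is a minimal, hedged illustration of that dependency, not code from the tree. It assumes the usual convention that COMPOUND_TYPES is a trailing enumerator counting the entries (as the [COMPOUND_TYPES - 1] table sizes in the hunks suggest), and it assumes that --enable-ext-inter alone leaves both sub-experiment macros at 0; neither assumption is confirmed by this page.

/* Illustration only -- a hedged sketch, not part of the reverted patch.
 * Enum fragment mirrors the typedef enum hunk further down. */
typedef enum {
  COMPOUND_AVERAGE = 0,
#if CONFIG_WEDGE
  COMPOUND_WEDGE,
#endif
#if CONFIG_COMPOUND_SEGMENT
  COMPOUND_SEG,
#endif
  COMPOUND_TYPES  /* assumption: trailing count enumerator */
} COMPOUND_TYPE;

/* If both CONFIG_WEDGE and CONFIG_COMPOUND_SEGMENT are 0 (assumed for a
 * bare --enable-ext-inter build), COMPOUND_TYPES is 1, so arrays declared
 * as [COMPOUND_TYPES - 1] -- e.g. default_compound_type_probs in the hunks
 * below -- have zero length. That appears to be the corner case the
 * reverted #else branches were added to cover. */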
......@@ -203,24 +203,15 @@ static INLINE int have_newmv_in_inter_mode(PREDICTION_MODE mode) {
}
static INLINE int use_masked_motion_search(COMPOUND_TYPE type) {
#if CONFIG_WEDGE
return (type == COMPOUND_WEDGE);
#else
(void)type;
return 0;
#endif
}
static INLINE int is_masked_compound_type(COMPOUND_TYPE type) {
#if CONFIG_COMPOUND_SEGMENT && CONFIG_WEDGE
#if CONFIG_COMPOUND_SEGMENT
return (type == COMPOUND_WEDGE || type == COMPOUND_SEG);
#elif !CONFIG_COMPOUND_SEGMENT && CONFIG_WEDGE
#else
return (type == COMPOUND_WEDGE);
#elif CONFIG_COMPOUND_SEGMENT && !CONFIG_WEDGE
return (type == COMPOUND_SEG);
#endif // CONFIG_COMPOUND_SEGMENT
(void)type;
return 0;
}
#else
......
......@@ -772,7 +772,7 @@ static const aom_prob default_inter_compound_mode_probs
{ 25, 29, 50, 192, 64, 192, 128, 180, 180 }, // 6 = two intra neighbours
};
#if CONFIG_COMPOUND_SEGMENT && CONFIG_WEDGE
#if CONFIG_COMPOUND_SEGMENT
static const aom_prob
default_compound_type_probs[BLOCK_SIZES][COMPOUND_TYPES - 1] = {
#if CONFIG_CB4X4
......@@ -785,7 +785,7 @@ static const aom_prob
{ 255, 200 }, { 255, 200 }, { 255, 200 },
#endif // CONFIG_EXT_PARTITION
};
#elif !CONFIG_COMPOUND_SEGMENT && CONFIG_WEDGE
#else // !CONFIG_COMPOUND_SEGMENT
static const aom_prob
default_compound_type_probs[BLOCK_SIZES][COMPOUND_TYPES - 1] = {
#if CONFIG_CB4X4
......@@ -797,22 +797,7 @@ static const aom_prob
{ 255 }, { 255 }, { 255 },
#endif // CONFIG_EXT_PARTITION
};
#elif CONFIG_COMPOUND_SEGMENT && !CONFIG_WEDGE
static const aom_prob
default_compound_type_probs[BLOCK_SIZES][COMPOUND_TYPES - 1] = {
#if CONFIG_CB4X4
{ 208 }, { 208 }, { 208 },
#endif
{ 208 }, { 208 }, { 208 }, { 208 }, { 208 }, { 208 }, { 216 },
{ 216 }, { 216 }, { 224 }, { 224 }, { 240 }, { 240 },
#if CONFIG_EXT_PARTITION
{ 255 }, { 255 }, { 255 },
#endif // CONFIG_EXT_PARTITION
};
#else
static const aom_prob default_compound_type_probs[BLOCK_SIZES]
[COMPOUND_TYPES - 1];
#endif // CONFIG_COMPOUND_SEGMENT && CONFIG_WEDGE
#endif // CONFIG_COMPOUND_SEGMENT
static const aom_prob default_interintra_prob[BLOCK_SIZE_GROUPS] = {
208, 208, 208, 208,
......@@ -992,21 +977,15 @@ const aom_tree_index av1_inter_compound_mode_tree
-INTER_COMPOUND_OFFSET(NEAR_NEWMV), -INTER_COMPOUND_OFFSET(NEW_NEARMV)
};
#if CONFIG_COMPOUND_SEGMENT && CONFIG_WEDGE
#if CONFIG_COMPOUND_SEGMENT
const aom_tree_index av1_compound_type_tree[TREE_SIZE(COMPOUND_TYPES)] = {
-COMPOUND_AVERAGE, 2, -COMPOUND_WEDGE, -COMPOUND_SEG
};
#elif !CONFIG_COMPOUND_SEGMENT && CONFIG_WEDGE
#else // !CONFIG_COMPOUND_SEGMENT
const aom_tree_index av1_compound_type_tree[TREE_SIZE(COMPOUND_TYPES)] = {
-COMPOUND_AVERAGE, -COMPOUND_WEDGE
};
#elif CONFIG_COMPOUND_SEGMENT && !CONFIG_WEDGE
const aom_tree_index av1_compound_type_tree[TREE_SIZE(COMPOUND_TYPES)] = {
-COMPOUND_AVERAGE, -COMPOUND_SEG
};
#else
const aom_tree_index av1_compound_type_tree[TREE_SIZE(COMPOUND_TYPES)] = {};
#endif // CONFIG_COMPOUND_SEGMENT && CONFIG_WEDGE
#endif // CONFIG_COMPOUND_SEGMENT
/* clang-format on */
#endif // CONFIG_EXT_INTER
......@@ -3506,13 +3485,11 @@ void av1_adapt_inter_frame_probs(AV1_COMMON *cm) {
pre_fc->wedge_interintra_prob[i], counts->wedge_interintra[i]);
}
#if CONFIG_COMPOUND_SEGMENT || CONFIG_WEDGE
for (i = 0; i < BLOCK_SIZES; ++i) {
aom_tree_merge_probs(av1_compound_type_tree, pre_fc->compound_type_prob[i],
counts->compound_interinter[i],
fc->compound_type_prob[i]);
}
#endif // CONFIG_COMPOUND_SEGMENT || CONFIG_WEDGE
#endif // CONFIG_EXT_INTER
for (i = 0; i < BLOCK_SIZE_GROUPS; i++)
......
......@@ -362,9 +362,7 @@ typedef enum {
typedef enum {
COMPOUND_AVERAGE = 0,
#if CONFIG_WEDGE
COMPOUND_WEDGE,
#endif // CONFIG_WEDGE
#if CONFIG_COMPOUND_SEGMENT
COMPOUND_SEG,
#endif // CONFIG_COMPOUND_SEGMENT
......
......@@ -355,13 +355,10 @@ const uint8_t *av1_get_compound_type_mask_inverse(
#endif
BLOCK_SIZE sb_type) {
assert(is_masked_compound_type(comp_data->type));
(void)sb_type;
switch (comp_data->type) {
#if CONFIG_WEDGE
case COMPOUND_WEDGE:
return av1_get_contiguous_soft_mask(comp_data->wedge_index,
!comp_data->wedge_sign, sb_type);
#endif // CONFIG_WEDGE
#if CONFIG_COMPOUND_SEGMENT
case COMPOUND_SEG:
return invert_mask(mask_buffer, comp_data->seg_mask, h, w, stride);
......@@ -373,13 +370,10 @@ const uint8_t *av1_get_compound_type_mask_inverse(
const uint8_t *av1_get_compound_type_mask(
const INTERINTER_COMPOUND_DATA *const comp_data, BLOCK_SIZE sb_type) {
assert(is_masked_compound_type(comp_data->type));
(void)sb_type;
switch (comp_data->type) {
#if CONFIG_WEDGE
case COMPOUND_WEDGE:
return av1_get_contiguous_soft_mask(comp_data->wedge_index,
comp_data->wedge_sign, sb_type);
#endif // CONFIG_WEDGE
#if CONFIG_COMPOUND_SEGMENT
case COMPOUND_SEG: return comp_data->seg_mask;
#endif // CONFIG_COMPOUND_SEGMENT
......
......@@ -205,12 +205,9 @@ extern const wedge_params_type wedge_params_lookup[BLOCK_SIZES];
static INLINE int is_interinter_compound_used(COMPOUND_TYPE type,
BLOCK_SIZE sb_type) {
(void)sb_type;
switch (type) {
case COMPOUND_AVERAGE: return 1;
#if CONFIG_WEDGE
case COMPOUND_AVERAGE: (void)sb_type; return 1;
case COMPOUND_WEDGE: return wedge_params_lookup[sb_type].bits > 0;
#endif // CONFIG_WEDGE
#if CONFIG_COMPOUND_SEGMENT
case COMPOUND_SEG: return sb_type >= BLOCK_8X8;
#endif // CONFIG_COMPOUND_SEGMENT
......
......@@ -4698,7 +4698,6 @@ static int read_compressed_header(AV1Decoder *pbi, const uint8_t *data,
}
}
}
#if CONFIG_COMPOUND_SEGMENT || CONFIG_WEDGE
if (cm->reference_mode != SINGLE_REFERENCE) {
for (i = 0; i < BLOCK_SIZES; i++) {
for (j = 0; j < COMPOUND_TYPES - 1; j++) {
......@@ -4706,7 +4705,6 @@ static int read_compressed_header(AV1Decoder *pbi, const uint8_t *data,
}
}
}
#endif // CONFIG_COMPOUND_SEGMENT || CONFIG_WEDGE
#endif // CONFIG_EXT_INTER
#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
......
......@@ -2220,20 +2220,16 @@ static void read_inter_block_mode_info(AV1Decoder *const pbi,
#endif // CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
) {
if (is_any_masked_compound_used(bsize)) {
#if CONFIG_COMPOUND_SEGMENT || CONFIG_WEDGE
mbmi->interinter_compound_data.type =
aom_read_tree(r, av1_compound_type_tree,
cm->fc->compound_type_prob[bsize], ACCT_STR);
#endif // CONFIG_COMPOUND_SEGMENT || CONFIG_WEDGE
#if CONFIG_WEDGE
if (mbmi->interinter_compound_data.type == COMPOUND_WEDGE) {
mbmi->interinter_compound_data.wedge_index =
aom_read_literal(r, get_wedge_bits_lookup(bsize), ACCT_STR);
mbmi->interinter_compound_data.wedge_sign = aom_read_bit(r, ACCT_STR);
}
#endif // CONFIG_WEDGE
#if CONFIG_COMPOUND_SEGMENT
if (mbmi->interinter_compound_data.type == COMPOUND_SEG) {
else if (mbmi->interinter_compound_data.type == COMPOUND_SEG) {
mbmi->interinter_compound_data.mask_type =
aom_read_literal(r, MAX_SEG_MASK_BITS, ACCT_STR);
}
......
......@@ -2069,20 +2069,16 @@ static void pack_inter_mode_mvs(AV1_COMP *cpi, const int mi_row,
&& mbmi->motion_mode == SIMPLE_TRANSLATION
#endif // CONFIG_MOTION_VAR
&& is_any_masked_compound_used(bsize)) {
#if CONFIG_COMPOUND_SEGMENT || CONFIG_WEDGE
av1_write_token(
w, av1_compound_type_tree, cm->fc->compound_type_prob[bsize],
&compound_type_encodings[mbmi->interinter_compound_data.type]);
#endif // CONFIG_COMPOUND_SEGMENT || CONFIG_WEDGE
#if CONFIG_WEDGE
if (mbmi->interinter_compound_data.type == COMPOUND_WEDGE) {
aom_write_literal(w, mbmi->interinter_compound_data.wedge_index,
get_wedge_bits_lookup(bsize));
aom_write_bit(w, mbmi->interinter_compound_data.wedge_sign);
}
#endif // CONFIG_WEDGE
#if CONFIG_COMPOUND_SEGMENT
if (mbmi->interinter_compound_data.type == COMPOUND_SEG) {
else if (mbmi->interinter_compound_data.type == COMPOUND_SEG) {
aom_write_literal(w, mbmi->interinter_compound_data.mask_type,
MAX_SEG_MASK_BITS);
}
......@@ -5033,14 +5029,12 @@ static uint32_t write_compressed_header(AV1_COMP *cpi, uint8_t *data) {
cm->counts.wedge_interintra[i], probwt);
}
}
#if CONFIG_COMPOUND_SEGMENT || CONFIG_WEDGE
if (cm->reference_mode != SINGLE_REFERENCE) {
for (i = 0; i < BLOCK_SIZES; i++)
prob_diff_update(av1_compound_type_tree, fc->compound_type_prob[i],
cm->counts.compound_interinter[i], COMPOUND_TYPES,
probwt, header_bc);
}
#endif // CONFIG_COMPOUND_SEGMENT || CONFIG_WEDGE
#endif // CONFIG_EXT_INTER
#if CONFIG_MOTION_VAR || CONFIG_WARPED_MOTION
......
......@@ -4957,12 +4957,9 @@ static int cost_mv_ref(const AV1_COMP *const cpi, PREDICTION_MODE mode,
#if CONFIG_EXT_INTER
static int get_interinter_compound_type_bits(BLOCK_SIZE bsize,
COMPOUND_TYPE comp_type) {
(void)bsize;
switch (comp_type) {
case COMPOUND_AVERAGE: return 0;
#if CONFIG_WEDGE
case COMPOUND_WEDGE: return get_interinter_wedge_bits(bsize);
#endif // CONFIG_WEDGE
#if CONFIG_COMPOUND_SEGMENT
case COMPOUND_SEG: return 1;
#endif // CONFIG_COMPOUND_SEGMENT
......@@ -7111,7 +7108,6 @@ static INLINE void restore_dst_buf(MACROBLOCKD *xd, BUFFER_SET dst) {
}
#if CONFIG_EXT_INTER
#if CONFIG_COMPOUND_SEGMENT || CONFIG_WEDGE
static void do_masked_motion_search(const AV1_COMP *const cpi, MACROBLOCK *x,
const uint8_t *mask, int mask_stride,
BLOCK_SIZE bsize, int mi_row, int mi_col,
......@@ -7265,7 +7261,6 @@ static void do_masked_motion_search_indexed(
&tmp_mv[1], &rate_mv[1], 1);
}
}
#endif // CONFIG_COMPOUND_SEGMENT || CONFIG_WEDGE
#endif // CONFIG_EXT_INTER
// In some situations we want to discount the apparent cost of a new motion
......@@ -7299,7 +7294,6 @@ static INLINE void clamp_mv2(MV *mv, const MACROBLOCKD *xd) {
}
#if CONFIG_EXT_INTER
#if CONFIG_WEDGE
static int estimate_wedge_sign(const AV1_COMP *cpi, const MACROBLOCK *x,
const BLOCK_SIZE bsize, const uint8_t *pred0,
int stride0, const uint8_t *pred1, int stride1) {
......@@ -7343,7 +7337,6 @@ static int estimate_wedge_sign(const AV1_COMP *cpi, const MACROBLOCK *x,
(int64_t)(esq[0][3] + esq[0][1] + esq[0][2]);
return (tl + br > 0);
}
#endif // CONFIG_WEDGE
#endif // CONFIG_EXT_INTER
#if !CONFIG_DUAL_FILTER
......@@ -7449,7 +7442,6 @@ static InterpFilter predict_interp_filter(
#if CONFIG_EXT_INTER
// Choose the best wedge index and sign
#if CONFIG_WEDGE
static int64_t pick_wedge(const AV1_COMP *const cpi, const MACROBLOCK *const x,
const BLOCK_SIZE bsize, const uint8_t *const p0,
const uint8_t *const p1, int *const best_wedge_sign,
......@@ -7621,7 +7613,6 @@ static int64_t pick_interinter_wedge(const AV1_COMP *const cpi,
mbmi->interinter_compound_data.wedge_index = wedge_index;
return rd;
}
#endif // CONFIG_WEDGE
#if CONFIG_COMPOUND_SEGMENT
static int64_t pick_interinter_seg_mask(const AV1_COMP *const cpi,
......@@ -7711,7 +7702,6 @@ static int64_t pick_interinter_seg_mask(const AV1_COMP *const cpi,
}
#endif // CONFIG_COMPOUND_SEGMENT
#if CONFIG_WEDGE && CONFIG_INTERINTRA
static int64_t pick_interintra_wedge(const AV1_COMP *const cpi,
const MACROBLOCK *const x,
const BLOCK_SIZE bsize,
......@@ -7731,9 +7721,7 @@ static int64_t pick_interintra_wedge(const AV1_COMP *const cpi,
mbmi->interintra_wedge_index = wedge_index;
return rd;
}
#endif // CONFIG_WEDGE && CONFIG_INTERINTRA
#if CONFIG_COMPOUND_SEGMENT || CONFIG_WEDGE
static int interinter_compound_motion_search(const AV1_COMP *const cpi,
MACROBLOCK *x,
const BLOCK_SIZE bsize,
......@@ -7762,7 +7750,6 @@ static int interinter_compound_motion_search(const AV1_COMP *const cpi,
}
return tmp_rate_mv;
}
#endif // CONFIG_COMPOUND_SEGMENT || CONFIG_WEDGE
#if CONFIG_COMPOUND_SEGMENT
// TODO(sarahparker) this and build_and_cost_compound_wedge can probably
......@@ -7829,7 +7816,6 @@ static int64_t build_and_cost_compound_seg(
}
#endif // CONFIG_COMPOUND_SEGMENT
#if CONFIG_WEDGE
static int64_t build_and_cost_compound_wedge(
const AV1_COMP *const cpi, MACROBLOCK *x, const int_mv *const cur_mv,
const BLOCK_SIZE bsize, const int this_mode, int rs2, int rate_mv,
......@@ -7888,7 +7874,6 @@ static int64_t build_and_cost_compound_wedge(
}
return best_rd_cur;
}
#endif // CONFIG_WEDGE
#endif // CONFIG_EXT_INTER
typedef struct {
......@@ -8533,10 +8518,8 @@ static int64_t handle_inter_mode(
int pred_exists = 1;
const int bw = block_size_wide[bsize];
int_mv single_newmv[TOTAL_REFS_PER_FRAME];
#if CONFIG_INTERINTRA
const unsigned int *const interintra_mode_cost =
cpi->interintra_mode_cost[size_group_lookup[bsize]];
#endif // CONFIG_INTERINTRA
const int is_comp_interintra_pred = (mbmi->ref_frame[1] == INTRA_FRAME);
#if CONFIG_REF_MV
uint8_t ref_frame_type = av1_ref_frame_type(mbmi->ref_frame);
......@@ -8829,7 +8812,6 @@ static int64_t handle_inter_mode(
RDCOST(x->rdmult, x->rddiv, rs2 + rate_mv + rate_sum, dist_sum);
best_rd_compound = best_rd_cur;
break;
#if CONFIG_WEDGE
case COMPOUND_WEDGE:
if (x->source_variance > cpi->sf.disable_wedge_search_var_thresh &&
best_rd_compound / 3 < ref_best_rd) {
......@@ -8838,7 +8820,6 @@ static int64_t handle_inter_mode(
&tmp_rate_mv, preds0, preds1, strides, mi_row, mi_col);
}
break;
#endif // CONFIG_WEDGE
#if CONFIG_COMPOUND_SEGMENT
case COMPOUND_SEG:
if (x->source_variance > cpi->sf.disable_wedge_search_var_thresh &&
......@@ -8899,13 +8880,16 @@ static int64_t handle_inter_mode(
: 0);
}
#if CONFIG_INTERINTRA
if (is_comp_interintra_pred) {
INTERINTRA_MODE best_interintra_mode = II_DC_PRED;
int64_t best_interintra_rd = INT64_MAX;
int rmode, rate_sum;
int64_t dist_sum;
int j;
int64_t best_interintra_rd_nowedge = INT64_MAX;
int64_t best_interintra_rd_wedge = INT64_MAX;
int rwedge;
int_mv tmp_mv;
int tmp_rate_mv = 0;
int tmp_skip_txfm_sb;
int64_t tmp_skip_sse_sb;
......@@ -8959,12 +8943,8 @@ static int64_t handle_inter_mode(
// Don't need to call restore_dst_buf here
return INT64_MAX;
}
#if CONFIG_WEDGE
if (is_interintra_wedge_used(bsize)) {
int64_t best_interintra_rd_nowedge = INT64_MAX;
int64_t best_interintra_rd_wedge = INT64_MAX;
int_mv tmp_mv;
int rwedge = av1_cost_bit(cm->fc->wedge_interintra_prob[bsize], 0);
rwedge = av1_cost_bit(cm->fc->wedge_interintra_prob[bsize], 0);
if (rd != INT64_MAX)
rd = RDCOST(x->rdmult, x->rddiv, rmode + rate_mv + rwedge + rate_sum,
dist_sum);
......@@ -9031,7 +9011,6 @@ static int64_t handle_inter_mode(
best_interintra_rd = best_interintra_rd_nowedge;
}
}
#endif // CONFIG_WEDGE
pred_exists = 0;
*args->compmode_interintra_cost =
......@@ -9050,7 +9029,6 @@ static int64_t handle_inter_mode(
*args->compmode_interintra_cost =
av1_cost_bit(cm->fc->interintra_prob[size_group_lookup[bsize]], 0);
}
#endif // CONFIG_INTERINTRA
if (pred_exists == 0) {
int tmp_rate;
......