Commit 329e340d authored by hui su

Add a speed feature to skip transform type selection

Setting FIXED_TX_TYPE to 1 makes the encoder skip the tx_type search;
encoding is then roughly twice as fast.

This speed feature is off by default; we can turn it on when we
want to quickly test new ideas.

Change-Id: Ieab5807d17fcd54fce3e8ae2f59a18b42eb79408
parent ba1bed68
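
For readers skimming the diff, here is a minimal standalone sketch of the gating pattern this commit introduces: when the compile-time flag is set, the per-block transform-type decision collapses to a single default derived from the prediction mode, and the search over candidate tx_types is skipped. The types and helpers below (block_ctx_t, default_tx_type_for(), rd_cost()) are simplified stand-ins for illustration only, not the actual vp10 structures or API.

/* Minimal sketch of the FIXED_TX_TYPE gating pattern (simplified stand-ins,
 * not the actual vp10 data structures or RD search). */
#include <stdio.h>

#define FIXED_TX_TYPE 1   /* 1: skip the tx_type search, 0: full search */

typedef enum { DCT_DCT, ADST_DCT, DCT_ADST, ADST_ADST, TX_TYPES } TX_TYPE;

/* Hypothetical per-block context: only the fields this sketch needs. */
typedef struct {
  int is_inter;        /* inter-coded block? */
  int intra_mode;      /* intra prediction mode index */
} block_ctx_t;

/* Stand-in for get_default_tx_type(): derive one tx_type from the mode. */
static TX_TYPE default_tx_type_for(const block_ctx_t *blk) {
  if (blk->is_inter)
    return DCT_DCT;
  return (TX_TYPE)(blk->intra_mode % TX_TYPES);  /* placeholder mapping */
}

/* Stand-in for the RD cost of coding the block with transform type t. */
static double rd_cost(const block_ctx_t *blk, TX_TYPE t) {
  return 100.0 + 3.0 * ((t + blk->intra_mode) % TX_TYPES);
}

/* The pattern the commit adds: either take the default tx_type directly,
 * or search all candidates and keep the cheapest. */
static TX_TYPE choose_tx_type(const block_ctx_t *blk) {
  if (FIXED_TX_TYPE)
    return default_tx_type_for(blk);   /* skip the search entirely */

  TX_TYPE best = DCT_DCT;
  double best_cost = rd_cost(blk, DCT_DCT);
  for (int t = DCT_DCT + 1; t < TX_TYPES; ++t) {
    double c = rd_cost(blk, (TX_TYPE)t);
    if (c < best_cost) {
      best_cost = c;
      best = (TX_TYPE)t;
    }
  }
  return best;
}

int main(void) {
  block_ctx_t blk = { /*is_inter=*/0, /*intra_mode=*/5 };
  printf("chosen tx_type = %d\n", choose_tx_type(&blk));
  return 0;
}

As the hunks below show, the same compile-time check also wraps the tx_type reads in read_intra_frame_mode_info()/read_inter_frame_mode_info() and the writes in pack_inter_mode_mvs()/write_mb_modes_kf(), so with the feature enabled the transform type is never signaled in the bitstream.
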
@@ -423,12 +423,30 @@ static const TX_TYPE filter_intra_mode_to_tx_type_lookup[FILTER_INTRA_MODES] = {
int pick_intra_filter(int angle);
#endif // CONFIG_EXT_INTRA
#define FIXED_TX_TYPE 0
static INLINE TX_TYPE get_default_tx_type(PLANE_TYPE plane_type,
const MACROBLOCKD *xd,
int block_idx, TX_SIZE tx_size) {
const MB_MODE_INFO *const mbmi = &xd->mi[0]->mbmi;
if (is_inter_block(mbmi) || plane_type != PLANE_TYPE_Y ||
xd->lossless[mbmi->segment_id] || tx_size >= TX_32X32)
return DCT_DCT;
return intra_mode_to_tx_type_context[plane_type == PLANE_TYPE_Y ?
get_y_mode(xd->mi[0], block_idx) : mbmi->uv_mode];
}
static INLINE TX_TYPE get_tx_type(PLANE_TYPE plane_type,
const MACROBLOCKD *xd,
int block_idx, TX_SIZE tx_size) {
const MODE_INFO *const mi = xd->mi[0];
const MB_MODE_INFO *const mbmi = &mi->mbmi;
if (FIXED_TX_TYPE)
return get_default_tx_type(plane_type, xd, block_idx, tx_size);
#if CONFIG_EXT_INTRA
if (!is_inter_block(mbmi)) {
const int use_ext_intra_mode_info =
......
@@ -564,6 +564,7 @@ static void read_intra_frame_mode_info(VP10_COMMON *const cm,
mbmi->mode == DC_PRED)
read_palette_mode_info(cm, xd, r);
if (!FIXED_TX_TYPE) {
#if CONFIG_EXT_TX
if (get_ext_tx_types(mbmi->tx_size, mbmi->sb_type, 0) > 1 &&
cm->base_qindex > 0 && !mbmi->skip &&
@@ -577,26 +578,27 @@ static void read_intra_frame_mode_info(VP10_COMMON *const cm,
cm->fc->intra_ext_tx_prob[eset][mbmi->tx_size][mbmi->mode]);
if (counts)
++counts->intra_ext_tx[eset][mbmi->tx_size][mbmi->mode]
[mbmi->tx_type];
[mbmi->tx_type];
}
} else {
mbmi->tx_type = DCT_DCT;
}
#else
if (mbmi->tx_size < TX_32X32 &&
cm->base_qindex > 0 && !mbmi->skip &&
!segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
FRAME_COUNTS *counts = xd->counts;
TX_TYPE tx_type_nom = intra_mode_to_tx_type_context[mbmi->mode];
mbmi->tx_type = vpx_read_tree(
r, vp10_ext_tx_tree,
cm->fc->intra_ext_tx_prob[mbmi->tx_size][tx_type_nom]);
if (counts)
++counts->intra_ext_tx[mbmi->tx_size][tx_type_nom][mbmi->tx_type];
} else {
mbmi->tx_type = DCT_DCT;
}
if (mbmi->tx_size < TX_32X32 &&
cm->base_qindex > 0 && !mbmi->skip &&
!segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
FRAME_COUNTS *counts = xd->counts;
TX_TYPE tx_type_nom = intra_mode_to_tx_type_context[mbmi->mode];
mbmi->tx_type = vpx_read_tree(
r, vp10_ext_tx_tree,
cm->fc->intra_ext_tx_prob[mbmi->tx_size][tx_type_nom]);
if (counts)
++counts->intra_ext_tx[mbmi->tx_size][tx_type_nom][mbmi->tx_type];
} else {
mbmi->tx_type = DCT_DCT;
}
#endif // CONFIG_EXT_TX
}
#if CONFIG_EXT_INTRA
mbmi->ext_intra_mode_info.use_ext_intra_mode[0] = 0;
@@ -1376,64 +1378,66 @@ static void read_inter_frame_mode_info(VP10Decoder *const pbi,
else
read_intra_block_mode_info(cm, xd, mi, r);
if (!FIXED_TX_TYPE) {
#if CONFIG_EXT_TX
if (get_ext_tx_types(mbmi->tx_size, mbmi->sb_type, inter_block) > 1 &&
cm->base_qindex > 0 && !mbmi->skip &&
if (get_ext_tx_types(mbmi->tx_size, mbmi->sb_type, inter_block) > 1 &&
cm->base_qindex > 0 && !mbmi->skip &&
#if CONFIG_SUPERTX
!supertx_enabled &&
!supertx_enabled &&
#endif // CONFIG_SUPERTX
!segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
int eset = get_ext_tx_set(mbmi->tx_size, mbmi->sb_type,
inter_block);
FRAME_COUNTS *counts = xd->counts;
!segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
int eset = get_ext_tx_set(mbmi->tx_size, mbmi->sb_type,
inter_block);
FRAME_COUNTS *counts = xd->counts;
if (inter_block) {
if (eset > 0) {
mbmi->tx_type =
vpx_read_tree(r, vp10_ext_tx_inter_tree[eset],
cm->fc->inter_ext_tx_prob[eset][mbmi->tx_size]);
if (counts)
++counts->inter_ext_tx[eset][mbmi->tx_size][mbmi->tx_type];
}
} else if (ALLOW_INTRA_EXT_TX) {
if (eset > 0) {
mbmi->tx_type = vpx_read_tree(r, vp10_ext_tx_intra_tree[eset],
cm->fc->intra_ext_tx_prob[eset]
[mbmi->tx_size][mbmi->mode]);
if (counts)
++counts->intra_ext_tx[eset][mbmi->tx_size]
[mbmi->mode][mbmi->tx_type];
if (inter_block) {
if (eset > 0) {
mbmi->tx_type =
vpx_read_tree(r, vp10_ext_tx_inter_tree[eset],
cm->fc->inter_ext_tx_prob[eset][mbmi->tx_size]);
if (counts)
++counts->inter_ext_tx[eset][mbmi->tx_size][mbmi->tx_type];
}
} else if (ALLOW_INTRA_EXT_TX) {
if (eset > 0) {
mbmi->tx_type = vpx_read_tree(r, vp10_ext_tx_intra_tree[eset],
cm->fc->intra_ext_tx_prob[eset]
[mbmi->tx_size][mbmi->mode]);
if (counts)
++counts->intra_ext_tx[eset][mbmi->tx_size]
[mbmi->mode][mbmi->tx_type];
}
}
} else {
mbmi->tx_type = DCT_DCT;
}
} else {
mbmi->tx_type = DCT_DCT;
}
#else
if (mbmi->tx_size < TX_32X32 &&
cm->base_qindex > 0 && !mbmi->skip &&
if (mbmi->tx_size < TX_32X32 &&
cm->base_qindex > 0 && !mbmi->skip &&
#if CONFIG_SUPERTX
!supertx_enabled &&
!supertx_enabled &&
#endif // CONFIG_SUPERTX
!segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
FRAME_COUNTS *counts = xd->counts;
if (inter_block) {
mbmi->tx_type = vpx_read_tree(
r, vp10_ext_tx_tree,
cm->fc->inter_ext_tx_prob[mbmi->tx_size]);
if (counts)
++counts->inter_ext_tx[mbmi->tx_size][mbmi->tx_type];
!segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
FRAME_COUNTS *counts = xd->counts;
if (inter_block) {
mbmi->tx_type = vpx_read_tree(
r, vp10_ext_tx_tree,
cm->fc->inter_ext_tx_prob[mbmi->tx_size]);
if (counts)
++counts->inter_ext_tx[mbmi->tx_size][mbmi->tx_type];
} else {
const TX_TYPE tx_type_nom = intra_mode_to_tx_type_context[mbmi->mode];
mbmi->tx_type = vpx_read_tree(
r, vp10_ext_tx_tree,
cm->fc->intra_ext_tx_prob[mbmi->tx_size][tx_type_nom]);
if (counts)
++counts->intra_ext_tx[mbmi->tx_size][tx_type_nom][mbmi->tx_type];
}
} else {
const TX_TYPE tx_type_nom = intra_mode_to_tx_type_context[mbmi->mode];
mbmi->tx_type = vpx_read_tree(
r, vp10_ext_tx_tree,
cm->fc->intra_ext_tx_prob[mbmi->tx_size][tx_type_nom]);
if (counts)
++counts->intra_ext_tx[mbmi->tx_size][tx_type_nom][mbmi->tx_type];
mbmi->tx_type = DCT_DCT;
}
} else {
mbmi->tx_type = DCT_DCT;
}
#endif // CONFIG_EXT_TX
}
}
void vp10_read_mode_info(VP10Decoder *const pbi, MACROBLOCKD *xd,
......
@@ -1120,55 +1120,57 @@ static void pack_inter_mode_mvs(VP10_COMP *cpi, const MODE_INFO *mi,
#endif // CONFIG_EXT_INTERP
}
if (!FIXED_TX_TYPE) {
#if CONFIG_EXT_TX
if (get_ext_tx_types(mbmi->tx_size, bsize, is_inter) > 1 &&
cm->base_qindex > 0 && !mbmi->skip &&
if (get_ext_tx_types(mbmi->tx_size, bsize, is_inter) > 1 &&
cm->base_qindex > 0 && !mbmi->skip &&
#if CONFIG_SUPERTX
!supertx_enabled &&
!supertx_enabled &&
#endif // CONFIG_SUPERTX
!segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
int eset = get_ext_tx_set(mbmi->tx_size, bsize, is_inter);
if (is_inter) {
if (eset > 0)
vp10_write_token(w, vp10_ext_tx_inter_tree[eset],
cm->fc->inter_ext_tx_prob[eset][mbmi->tx_size],
&ext_tx_inter_encodings[eset][mbmi->tx_type]);
} else if (ALLOW_INTRA_EXT_TX) {
if (eset > 0)
vp10_write_token(
w, vp10_ext_tx_intra_tree[eset],
cm->fc->intra_ext_tx_prob[eset][mbmi->tx_size][mbmi->mode],
&ext_tx_intra_encodings[eset][mbmi->tx_type]);
}
}
!segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
int eset = get_ext_tx_set(mbmi->tx_size, bsize, is_inter);
if (is_inter) {
if (eset > 0)
vp10_write_token(w, vp10_ext_tx_inter_tree[eset],
cm->fc->inter_ext_tx_prob[eset][mbmi->tx_size],
&ext_tx_inter_encodings[eset][mbmi->tx_type]);
} else if (ALLOW_INTRA_EXT_TX) {
if (eset > 0)
vp10_write_token(
w, vp10_ext_tx_intra_tree[eset],
cm->fc->intra_ext_tx_prob[eset][mbmi->tx_size][mbmi->mode],
&ext_tx_intra_encodings[eset][mbmi->tx_type]);
}
}
#else
if (mbmi->tx_size < TX_32X32 &&
cm->base_qindex > 0 && !mbmi->skip &&
if (mbmi->tx_size < TX_32X32 &&
cm->base_qindex > 0 && !mbmi->skip &&
#if CONFIG_SUPERTX
!supertx_enabled &&
!supertx_enabled &&
#endif // CONFIG_SUPERTX
!segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
if (is_inter) {
vp10_write_token(
w, vp10_ext_tx_tree,
cm->fc->inter_ext_tx_prob[mbmi->tx_size],
&ext_tx_encodings[mbmi->tx_type]);
} else {
vp10_write_token(
w, vp10_ext_tx_tree,
cm->fc->intra_ext_tx_prob[mbmi->tx_size]
[intra_mode_to_tx_type_context[mbmi->mode]],
&ext_tx_encodings[mbmi->tx_type]);
}
} else {
if (!mbmi->skip) {
!segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
if (is_inter) {
vp10_write_token(
w, vp10_ext_tx_tree,
cm->fc->inter_ext_tx_prob[mbmi->tx_size],
&ext_tx_encodings[mbmi->tx_type]);
} else {
vp10_write_token(
w, vp10_ext_tx_tree,
cm->fc->intra_ext_tx_prob[mbmi->tx_size]
[intra_mode_to_tx_type_context[mbmi->mode]],
&ext_tx_encodings[mbmi->tx_type]);
}
} else {
if (!mbmi->skip) {
#if CONFIG_SUPERTX
if (!supertx_enabled)
if (!supertx_enabled)
#endif // CONFIG_SUPERTX
assert(mbmi->tx_type == DCT_DCT);
}
}
assert(mbmi->tx_type == DCT_DCT);
}
}
#endif // CONFIG_EXT_TX
}
}
static void write_palette_mode_info(const VP10_COMMON *cm,
@@ -1264,30 +1266,31 @@ static void write_mb_modes_kf(const VP10_COMMON *cm, const MACROBLOCKD *xd,
mbmi->mode == DC_PRED)
write_palette_mode_info(cm, xd, mi, w);
if (!FIXED_TX_TYPE) {
#if CONFIG_EXT_TX
if (get_ext_tx_types(mbmi->tx_size, bsize, 0) > 1 &&
cm->base_qindex > 0 && !mbmi->skip &&
!segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP) &&
ALLOW_INTRA_EXT_TX) {
int eset = get_ext_tx_set(mbmi->tx_size, bsize, 0);
if (eset > 0)
vp10_write_token(
w, vp10_ext_tx_intra_tree[eset],
cm->fc->intra_ext_tx_prob[eset][mbmi->tx_size][mbmi->mode],
&ext_tx_intra_encodings[eset][mbmi->tx_type]);
}
if (get_ext_tx_types(mbmi->tx_size, bsize, 0) > 1 &&
cm->base_qindex > 0 && !mbmi->skip &&
!segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP) &&
ALLOW_INTRA_EXT_TX) {
int eset = get_ext_tx_set(mbmi->tx_size, bsize, 0);
if (eset > 0)
vp10_write_token(
w, vp10_ext_tx_intra_tree[eset],
cm->fc->intra_ext_tx_prob[eset][mbmi->tx_size][mbmi->mode],
&ext_tx_intra_encodings[eset][mbmi->tx_type]);
}
#else
if (mbmi->tx_size < TX_32X32 &&
cm->base_qindex > 0 && !mbmi->skip &&
!segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
vp10_write_token(
w, vp10_ext_tx_tree,
cm->fc->intra_ext_tx_prob[mbmi->tx_size]
[intra_mode_to_tx_type_context[mbmi->mode]],
&ext_tx_encodings[mbmi->tx_type]);
}
if (mbmi->tx_size < TX_32X32 &&
cm->base_qindex > 0 && !mbmi->skip &&
!segfeature_active(&cm->seg, mbmi->segment_id, SEG_LVL_SKIP)) {
vp10_write_token(
w, vp10_ext_tx_tree,
cm->fc->intra_ext_tx_prob[mbmi->tx_size]
[intra_mode_to_tx_type_context[mbmi->mode]],
&ext_tx_encodings[mbmi->tx_type]);
}
#endif // CONFIG_EXT_TX
}
#if CONFIG_EXT_INTRA
if (bsize >= BLOCK_8X8)
......
@@ -1094,7 +1094,8 @@ static void choose_tx_size_from_rd(VP10_COMP *cpi, MACROBLOCK *x,
last_rd = INT64_MAX;
for (n = start_tx; n >= end_tx; --n) {
const int r_tx_size = vp10_cost_tx_size(n, max_tx_size, tx_probs);
if (FIXED_TX_TYPE && tx_type != get_default_tx_type(0, xd, 0, n))
continue;
#if CONFIG_EXT_TX
ext_tx_set = get_ext_tx_set(n, bs, is_inter);
if (is_inter) {
@@ -1149,7 +1150,7 @@ static void choose_tx_size_from_rd(VP10_COMP *cpi, MACROBLOCK *x,
cpi->sf.use_fast_coef_costing);
if (n < TX_32X32 &&
!xd->lossless[xd->mi[0]->mbmi.segment_id] &&
r != INT_MAX) {
r != INT_MAX && !FIXED_TX_TYPE) {
if (is_inter)
r += cpi->inter_tx_type_costs[mbmi->tx_size][mbmi->tx_type];
else
......