Commit fdd0a614 authored by Vladimir Kazakov, committed by Thomas Daede

Add Default to MotionVector

Deriving the Default trait makes it easier to create motion vectors initialized with default (zero) values.
parent 04dfb2b0
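
For illustration, a minimal standalone sketch of what the derive provides: Default for i16 is 0, so MotionVector::default() is equivalent to the MotionVector { row: 0, col: 0 } literals that the diff below replaces. The PartialEq derive and the main function belong only to this sketch, not to the commit.

// Sketch only: PartialEq is derived here just so the assertions compile;
// the commit itself derives Clone, Copy, Debug and Default.
#[derive(Clone, Copy, Debug, Default, PartialEq)]
pub struct MotionVector {
  pub row: i16,
  pub col: i16
}

fn main() {
  // The derived Default zero-initializes both fields.
  let mv = MotionVector::default();
  assert_eq!(mv, MotionVector { row: 0, col: 0 });

  // Copy + Default also allow the array initializers used throughout the diff.
  let pair = [MotionVector::default(); 2];
  assert_eq!(pair, [mv; 2]);
}
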
@@ -1222,7 +1222,7 @@ impl Block {
 partition: PartitionType::PARTITION_NONE,
 skip: false,
 ref_frames: [INTRA_FRAME; 2],
-mv: [ MotionVector { row:0, col: 0 }; 2],
+mv: [ MotionVector::default(); 2],
 neighbors_ref_counts: [0; TOTAL_REFS_PER_FRAME],
 cdef_index: 0,
 n4_w: BLOCK_64X64.width_mi(),
@@ -2123,7 +2123,7 @@ impl ContextWriter {
 if !found_match && mv_stack.len() < MAX_REF_MV_STACK_SIZE {
 let mv_cand = CandidateMV {
 this_mv: blk.mv[i],
-comp_mv: MotionVector { row: 0, col: 0 },
+comp_mv: MotionVector::default(),
 weight
 };
@@ -2193,7 +2193,7 @@ impl ContextWriter {
 if !self.find_matching_mv(mv, mv_stack) {
 let mv_cand = CandidateMV {
 this_mv: mv,
-comp_mv: MotionVector { row: 0, col: 0 },
+comp_mv: MotionVector::default(),
 weight: 2
 };
 mv_stack.push(mv_cand);
@@ -2450,8 +2450,8 @@ impl ContextWriter {
 let mut ref_id_count = [0 as usize; 2];
 let mut ref_diff_count = [0 as usize; 2];
-let mut ref_id_mvs = [[MotionVector { row: 0, col: 0 }; 2]; 2];
-let mut ref_diff_mvs = [[MotionVector { row: 0, col: 0 }; 2]; 2];
+let mut ref_id_mvs = [[MotionVector::default(); 2]; 2];
+let mut ref_diff_mvs = [[MotionVector::default(); 2]; 2];
 for pass in passes {
 let mut idx = 0;
@@ -2477,7 +2477,7 @@ impl ContextWriter {
 }
 if is_compound {
-let mut combined_mvs = [[MotionVector { row: 0, col: 0}; 2]; 2];
+let mut combined_mvs = [[MotionVector::default(); 2]; 2];
 for list in 0..2 {
 let mut comp_count = 0;
...
@@ -422,7 +422,7 @@ impl<T: Pixel> FrameState<T> {
 deblock: Default::default(),
 segmentation: Default::default(),
 restoration: rs,
-frame_mvs: vec![vec![MotionVector{row: 0, col: 0}; fi.w_in_b * fi.h_in_b]; REF_FRAMES]
+frame_mvs: vec![vec![MotionVector::default(); fi.w_in_b * fi.h_in_b]; REF_FRAMES]
 }
 }
 }
@@ -1166,7 +1166,7 @@ pub fn encode_block_b<T: Pixel>(
 let ref_mvs = if num_mv_found > 0 {
 [mv_stack[ref_mv_idx].this_mv, mv_stack[ref_mv_idx].comp_mv]
 } else {
-[MotionVector{ row: 0, col: 0 }; 2]
+[MotionVector::default(); 2]
 };
 let mv_precision = if fi.force_integer_mv != 0 {
...
@@ -177,7 +177,7 @@ pub fn get_subset_predictors<T: Pixel>(
 }
 if predictors.len() > 0 {
-let mut median_mv = MotionVector{row: 0, col: 0};
+let mut median_mv = MotionVector::default();
 for mv in predictors.iter() {
 median_mv = median_mv + *mv;
 }
@@ -186,7 +186,7 @@ pub fn get_subset_predictors<T: Pixel>(
 predictors.push(median_mv.quantize_to_fullpel());
 }
-predictors.push(MotionVector{row: 0, col: 0});
+predictors.push(MotionVector::default());
 // Coarse motion estimation.
@@ -242,7 +242,7 @@ pub fn motion_estimation<T: Pixel>(
 // Full-pixel motion estimation
 let mut lowest_cost = std::u64::MAX;
-let mut best_mv = MotionVector { row: 0, col: 0 };
+let mut best_mv = MotionVector::default();
 let frame_mvs = &fs.frame_mvs[ref_slot];
 let frame_ref = &fi.rec_buffer.frames[fi.ref_frames[0] as usize];
@@ -329,7 +329,7 @@ pub fn motion_estimation<T: Pixel>(
 blk_w,
 blk_h,
 [ref_frame, NONE_FRAME],
-[cand_mv, MotionVector { row: 0, col: 0 }]
+[cand_mv, MotionVector::default()]
 );
 }
@@ -354,7 +354,7 @@ pub fn motion_estimation<T: Pixel>(
 best_mv
 }
-None => MotionVector { row: 0, col: 0 }
+None => MotionVector::default()
 }
 }
@@ -366,7 +366,7 @@ fn get_best_predictor<T: Pixel>(
 mvx_min: isize, mvx_max: isize, mvy_min: isize, mvy_max: isize,
 blk_w: usize, blk_h: usize,
 center_mv: &mut MotionVector, center_mv_cost: &mut u64) {
-*center_mv = MotionVector{row: 0, col: 0};
+*center_mv = MotionVector::default();
 *center_mv_cost = std::u64::MAX;
 for &init_mv in predictors.iter() {
@@ -401,7 +401,7 @@ fn diamond_me_search<T: Pixel>(
 loop {
 let mut best_diamond_rd_cost = std::u64::MAX;
-let mut best_diamond_mv = MotionVector { row: 0, col: 0 };
+let mut best_diamond_mv = MotionVector::default();
 for p in diamond_pattern.iter() {
@@ -543,7 +543,7 @@ pub fn estimate_motion_ss4<T: Pixel>(
 let y_hi = po.y + (((range_y).min(mvy_max / 8)) >> 2);
 let mut lowest_cost = std::u64::MAX;
-let mut best_mv = MotionVector { row: 0, col: 0 };
+let mut best_mv = MotionVector::default();
 // Divide by 16 to account for subsampling, 0.125 is a fudge factor
 let lambda = (fi.me_lambda * 256.0 / 16.0 * 0.125) as u32;
@@ -563,7 +563,7 @@ pub fn estimate_motion_ss4<T: Pixel>(
 1,
 fi.sequence.bit_depth,
 lambda,
-[MotionVector { row: 0, col: 0 }; 2],
+[MotionVector::default(); 2],
 fi.allow_high_precision_mv
 );
@@ -589,7 +589,7 @@ pub fn estimate_motion_ss2<T: Pixel>(
 let (mvx_min, mvx_max, mvy_min, mvy_max) = get_mv_range(fi.w_in_b, fi.h_in_b, &bo_adj, blk_w, blk_h);
 let mut lowest_cost = std::u64::MAX;
-let mut best_mv = MotionVector { row: 0, col: 0 };
+let mut best_mv = MotionVector::default();
 // Divide by 4 to account for subsampling, 0.125 is a fudge factor
 let lambda = (fi.me_lambda * 256.0 / 4.0 * 0.125) as u32;
@@ -616,7 +616,7 @@ pub fn estimate_motion_ss2<T: Pixel>(
 1,
 fi.sequence.bit_depth,
 lambda,
-[MotionVector { row: 0, col: 0 }; 2],
+[MotionVector::default(); 2],
 fi.allow_high_precision_mv
 );
 }
...
@@ -732,7 +732,7 @@ pub enum FilterIntraMode {
 FILTER_INTRA_MODES
 }
-#[derive(Copy, Debug, Clone)]
+#[derive(Clone, Copy, Debug, Default)]
 pub struct MotionVector {
 pub row: i16,
 pub col: i16
...
@@ -343,7 +343,7 @@ impl Default for EncodingSettings {
 skip: false,
 rd: std::f64::MAX,
 ref_frames: [INTRA_FRAME, NONE_FRAME],
-mvs: [MotionVector { row: 0, col: 0 }; 2],
+mvs: [MotionVector::default(); 2],
 tx_size: TxSize::TX_4X4,
 tx_type: TxType::DCT_DCT
 }
@@ -406,7 +406,7 @@ pub fn rdo_mode_decision<T: Pixel>(
 mode_contexts.push(cw.find_mvrefs(bo, ref_frames, &mut mv_stack, bsize, fi, false));
 if fi.frame_type == FrameType::INTER {
-let mut pmv = [MotionVector{ row: 0, col: 0 }; 2];
+let mut pmv = [MotionVector::default(); 2];
 if mv_stack.len() > 0 { pmv[0] = mv_stack[0].this_mv; }
 if mv_stack.len() > 1 { pmv[1] = mv_stack[1].this_mv; }
 let ref_slot = ref_slot_set[i] as usize;
@@ -425,7 +425,7 @@ pub fn rdo_mode_decision<T: Pixel>(
 mvs_from_me.push([
 b_me,
-MotionVector { row: 0, col: 0 }
+MotionVector::default()
 ]);
 for &x in RAV1E_INTER_MODES_MINIMAL {
@@ -586,19 +586,19 @@ pub fn rdo_mode_decision<T: Pixel>(
 PredictionMode::NEARESTMV | PredictionMode::NEAREST_NEARESTMV => if mv_stacks[i].len() > 0 {
 [mv_stacks[i][0].this_mv, mv_stacks[i][0].comp_mv]
 } else {
-[MotionVector { row: 0, col: 0 }; 2]
+[MotionVector::default(); 2]
 },
 PredictionMode::NEAR0MV => if mv_stacks[i].len() > 1 {
 [mv_stacks[i][1].this_mv, mv_stacks[i][1].comp_mv]
 } else {
-[MotionVector { row: 0, col: 0 }; 2]
+[MotionVector::default(); 2]
 },
 PredictionMode::NEAR1MV | PredictionMode::NEAR2MV =>
 [mv_stacks[i][luma_mode as usize - PredictionMode::NEAR0MV as usize + 1].this_mv,
 mv_stacks[i][luma_mode as usize - PredictionMode::NEAR0MV as usize + 1].comp_mv],
 PredictionMode::NEAREST_NEWMV => [mv_stacks[i][0].this_mv, mvs_from_me[i][1]],
 PredictionMode::NEW_NEARESTMV => [mvs_from_me[i][0], mv_stacks[i][0].comp_mv],
-_ => [MotionVector { row: 0, col: 0 }; 2]
+_ => [MotionVector::default(); 2]
 };
 let mode_set_chroma = vec![luma_mode];
@@ -686,7 +686,7 @@ pub fn rdo_mode_decision<T: Pixel>(
 });
 modes.iter().take(num_modes_rdo).for_each(|&luma_mode| {
-let mvs = [MotionVector { row: 0, col: 0 }; 2];
+let mvs = [MotionVector::default(); 2];
 let ref_frames = [INTRA_FRAME, NONE_FRAME];
 let mut mode_set_chroma = vec![luma_mode];
 if is_chroma_block && luma_mode != PredictionMode::DC_PRED {
...