Commit 9629ea6a authored by Jean-Marc Valin

Fine-tuning the scripts

parent 0f7fe64d
@@ -108,8 +108,8 @@ features = features[:, :, :nb_used_features]
 features = features[:, :num_subframes, :]
 #variable quantizer depending on the delay
-q0 = 2
-q1 = 10
+q0 = 3
+q1 = 15
 quant_id = np.round(q1 + (q0-q1)*np.arange(args.num_redundancy_frames//2)/args.num_redundancy_frames).astype('int16')
 #print(quant_id)
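For context, the quant_id line right after these values maps each redundancy offset to a quantizer id by interpolating between q1 and q0. The snippet below is a standalone NumPy sketch of that schedule with the new values; num_redundancy_frames is an assumed example value, not taken from the commit.

import numpy as np

num_redundancy_frames = 40   # assumed example value for illustration
q0 = 3
q1 = 15
quant_id = np.round(q1 + (q0 - q1) * np.arange(num_redundancy_frames // 2)
                    / num_redundancy_frames).astype('int16')
print(quant_id)
# quant_id starts at q1 (15) and decreases linearly toward the q0/q1 midpoint
# over the first half of the redundancy frames, i.e. one quantizer id per delay.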
@@ -154,7 +154,10 @@ hard_distr_embed = np.broadcast_to(hard_distr_embed, (sym_batch.shape[0], sym_ba
 fake_lambda = np.ones((sym_batch.shape[0], sym_batch.shape[1], 1), dtype='float32')
 rate_input = np.concatenate((sym_batch, hard_distr_embed, fake_lambda), axis=-1)
 rates = sq_rate_metric(None, rate_input, reduce=False).numpy()
-print("rate = ", np.mean(rates))
+print(rates.shape)
+print("average rate = ", np.mean(rates[args.num_redundancy_frames:,:]))
 #sym_batch.tofile('qsyms.f32')
 sym_batch = sym_batch / quant_scale
 print(sym_batch.shape, quant_state.shape)
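The reporting change above prints the shape of the per-frame rate estimates and averages them only from index num_redundancy_frames onward. A minimal sketch of that slicing, with placeholder shapes standing in for the real sq_rate_metric output:

import numpy as np

num_redundancy_frames = 40                         # assumed, as in the sketch above
rates = np.random.rand(200, 16).astype('float32')  # placeholder for the sq_rate_metric output
print(rates.shape)
print("average rate = ", np.mean(rates[num_redundancy_frames:, :]))
# The slice drops the first num_redundancy_frames rows along the first axis,
# so the printed average is taken over the remaining entries only.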
@@ -167,8 +167,8 @@ def pvq_quant_search(x, k):
     abs_kx = tf.abs(kx)
     kk=tf.reduce_sum(abs_y, axis=-1)
     #print("sums = ", kk)
-    plus = 1.0001*tf.reduce_min((abs_y+.5)/(abs_kx+1e-15), axis=-1)
-    minus = .9999*tf.reduce_max((abs_y-.5)/(abs_kx+1e-15), axis=-1)
+    plus = 1.000001*tf.reduce_min((abs_y+.5)/(abs_kx+1e-15), axis=-1)
+    minus = .999999*tf.reduce_max((abs_y-.5)/(abs_kx+1e-15), axis=-1)
     #print("plus = ", plus)
     #print("minus = ", minus)
     factor = tf.where(kk>k, minus, plus)
@@ -183,7 +183,7 @@ def pvq_quant_search(x, k):
     y = tf.round(kx)
     #print(y)
     #print(K.mean(K.sum(K.abs(y), axis=-1)))
     return y

 def pvq_quantize(x, k):
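Both hunks touch pvq_quant_search, which rescales k*x until the rounded vector carries exactly k unit pulses. Per vector, the plus and minus expressions are the smallest upscale and largest downscale that move one element across a rounding boundary; tightening the tolerances from 1.0001/.9999 to 1.000001/.999999 makes each step overshoot that boundary by a much smaller margin. The NumPy sketch below re-creates the search loop under assumed shapes and iteration count; it is an illustration, not the exact TensorFlow implementation.

import numpy as np

def pvq_quant_search_np(x, k, iters=10):
    """Scale k*x until round(k*x) has (approximately) k unit pulses."""
    x = x / (1e-15 + np.sum(np.abs(x), axis=-1, keepdims=True))   # L1-normalize
    kx = k * x
    for _ in range(iters):
        y = np.round(kx)
        abs_y = np.abs(y)
        abs_kx = np.abs(kx)
        kk = np.sum(abs_y, axis=-1)
        # Smallest upscale that pushes one element past the next rounding boundary,
        # and largest downscale that pulls one element below its boundary.
        plus = 1.000001 * np.min((abs_y + .5) / (abs_kx + 1e-15), axis=-1)
        minus = .999999 * np.max((abs_y - .5) / (abs_kx + 1e-15), axis=-1)
        # Too many pulses: shrink; too few: grow; exactly k: keep the scale.
        factor = np.where(kk > k, minus, np.where(kk < k, plus, 1.0))
        kx = kx * factor[..., np.newaxis]
    return np.round(kx)

y = pvq_quant_search_np(np.random.randn(4, 24).astype('float32'), k=30)
print(np.sum(np.abs(y), axis=-1))   # pulse counts should land at (or very near) 30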
@@ -281,7 +281,7 @@ def new_split_decoder(decoder):
     range_select = Lambda(lambda x: x[0][:,x[1]:x[2],:])
     elem_select = Lambda(lambda x: x[0][:,x[1],:])
-    points = [0, 64, 128, 192, 256]
+    points = [0, 100, 200, 300, 400]
     outputs = []
     for i in range(len(points)-1):
        begin = points[i]//bunch
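This hunk changes the split points used by new_split_decoder, so each of the four decoder splits now spans 100 frames instead of 64. The small sketch below shows how the points are turned into per-split ranges; the value of bunch is an assumption for illustration, since it is defined elsewhere in the script.

bunch = 4                        # assumed for illustration; defined elsewhere in the script
points = [0, 100, 200, 300, 400]
for i in range(len(points) - 1):
    begin = points[i] // bunch
    end = points[i + 1] // bunch
    print(f"split {i}: subframes [{begin}, {end}) -> frames [{points[i]}, {points[i+1]})")
# Each of the four splits now covers 100 frames, up from the previous 64.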