fixed dropout and prenet bug

Michael Nguyen 2018-09-05 12:38:03 -05:00
parent 487a34c49b
commit 9ee48c9286
4 changed files with 6 additions and 3 deletions

analyze.py Normal file
View File

@@ -0,0 +1 @@
+# visualisation tools for mimic2

View File

@@ -34,7 +34,7 @@ hparams = tf.contrib.training.HParams(
   adam_beta2=0.999,
   initial_learning_rate=0.0015,
   learning_rate_decay_halflife=100000,
-  use_cmudict=True,   # Use CMUDict during training to learn pronunciation of ARPAbet phonemes
+  use_cmudict=False,  # Use CMUDict during training to learn pronunciation of ARPAbet phonemes
   # Eval:
   max_iters=200,

View File

@@ -8,7 +8,7 @@ def prenet(inputs, is_training, layer_sizes=[256, 128], scope=None):
   with tf.variable_scope(scope or 'prenet'):
     for i, size in enumerate(layer_sizes):
       dense = tf.layers.dense(x, units=size, activation=tf.nn.relu, name='dense_%d' % (i+1))
-      x = tf.layers.dropout(dense, rate=drop_rate, training=True, name='dropout_%d' % (i+1))
+      x = tf.layers.dropout(dense, rate=drop_rate, training=is_training, name='dropout_%d' % (i+1))
   return x

View File

@@ -49,11 +49,13 @@ class Tacotron():
     # Attention
     attention_cell = AttentionWrapper(
-      DecoderPrenetWrapper(GRUCell(256), is_training),
+      GRUCell(256),
       LocationSensitiveAttention(256, encoder_outputs),
       alignment_history=True,
       output_attention=False)  # [N, T_in, 256]
+    attention_cell = DecoderPrenetWrapper(attention_cell, is_training)
     # Concatenate attention context vector and RNN cell output into a 512D vector.
     concat_cell = ConcatOutputAndAttentionWrapper(attention_cell)  # [N, T_in, 512]
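This is the prenet half of the fix, a reordering of the wrappers. The default cell_input_fn of tf.contrib.seq2seq.AttentionWrapper concatenates each input with the previous attention context before calling the wrapped cell, so with DecoderPrenetWrapper on the inside the prenet was applied to that concatenation; hoisting it outside applies the prenet to the raw decoder frame alone, with the attention context concatenated afterwards, matching the prenet placement in the Tacotron paper. A sketch of the resulting construction, assuming DecoderPrenetWrapper runs the prenet over its inputs before delegating to the wrapped cell; the import paths for the repo-local DecoderPrenetWrapper and LocationSensitiveAttention helpers are assumptions for illustration:

import tensorflow as tf
from tensorflow.contrib.rnn import GRUCell
from tensorflow.contrib.seq2seq import AttentionWrapper
# Repo-local helpers; these module paths are assumptions for illustration.
from models.rnn_wrappers import DecoderPrenetWrapper
from models.attention import LocationSensitiveAttention

def build_attention_cell(encoder_outputs, is_training):
  # Old: AttentionWrapper(DecoderPrenetWrapper(GRUCell(256), is_training), ...)
  # ran the prenet over concat(decoder_frame, previous_attention_context).
  attention_cell = AttentionWrapper(
    GRUCell(256),
    LocationSensitiveAttention(256, encoder_outputs),
    alignment_history=True,
    output_attention=False)
  # New: the prenet sees only the raw decoder frame; AttentionWrapper
  # concatenates the attention context to the prenet output afterwards.
  return DecoderPrenetWrapper(attention_cell, is_training)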