Make embedding dimension configurable

tuning
Keith Ito 2018-03-17 14:22:24 -07:00
parent 11a6a096d9
commit 14ee2615eb
2 changed files with 2 additions and 1 deletion

View File

@ -20,6 +20,7 @@ hparams = tf.contrib.training.HParams(
# Model:
# TODO: add more configurable hparams
outputs_per_step=5,
embedding_dim=256,
# Training:
batch_size=32,

View File

@ -39,7 +39,7 @@ class Tacotron():
# Embeddings
embedding_table = tf.get_variable(
'embedding', [len(symbols), 256], dtype=tf.float32,
'embedding', [len(symbols), hp.embedding_dim], dtype=tf.float32,
initializer=tf.truncated_normal_initializer(stddev=0.5))
embedded_inputs = tf.nn.embedding_lookup(embedding_table, inputs) # [N, T_in, embedding_dim]