remove ignore generate eval flag

pull/581/head
Edresson 2021-07-15 03:34:28 -03:00
parent d906fea08c
commit b1620d1f3f
4 changed files with 7 additions and 9 deletions

View File

@@ -32,8 +32,8 @@ args = parser.parse_args()
 c_dataset = load_config(args.config_dataset_path)
-train_files, dev_files = load_meta_data(c_dataset.datasets, eval_split=args.eval, ignore_generated_eval=True)
-wav_files = train_files + dev_files
+meta_data_train, meta_data_eval = load_meta_data(c_dataset.datasets, eval_split=args.eval)
+wav_files = meta_data_train + meta_data_eval
 speaker_manager = SpeakerManager(encoder_model_path=args.model_path, encoder_config_path=args.config_path, use_cuda=args.use_cuda)

View File

@@ -227,7 +227,7 @@ def main(args): # pylint: disable=redefined-outer-name
     ap = AudioProcessor(**c.audio)
     # load data instances
-    meta_data_train, meta_data_eval = load_meta_data(c.datasets, eval_split=args.eval, ignore_generated_eval=True)
+    meta_data_train, meta_data_eval = load_meta_data(c.datasets, eval_split=args.eval)
     # use eval and training partitions
     meta_data = meta_data_train + meta_data_eval

View File

@@ -24,7 +24,7 @@ def main():
     c = load_config(args.config_path)
     # load all datasets
-    train_items, eval_items = load_meta_data(c.datasets, eval_split=True, ignore_generated_eval=True)
+    train_items, eval_items = load_meta_data(c.datasets, eval_split=True)
     items = train_items + eval_items
     texts = "".join(item[0] for item in items)

View File

@@ -30,7 +30,7 @@ def split_dataset(items):
     return items[:eval_split_size], items[eval_split_size:]
 
 
-def load_meta_data(datasets, eval_split=True, ignore_generated_eval=False):
+def load_meta_data(datasets, eval_split=True):
     meta_data_train_all = []
     meta_data_eval_all = [] if eval_split else None
     for dataset in datasets:
@@ -47,11 +47,9 @@ def load_meta_data(datasets, eval_split=True, ignore_generated_eval=False):
         if eval_split:
             if meta_file_val:
                 meta_data_eval = preprocessor(root_path, meta_file_val)
-                meta_data_eval_all += meta_data_eval
-            elif not ignore_generated_eval:
+            else:
                 meta_data_eval, meta_data_train = split_dataset(meta_data_train)
-                meta_data_eval_all += meta_data_eval
-
+            meta_data_eval_all += meta_data_eval
         meta_data_train_all += meta_data_train
         # load attention masks for duration predictor training
         if dataset.meta_file_attn_mask:
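
For context, the net effect of this change on load_meta_data can be sketched as follows. This is only an illustrative reconstruction from the hunk above: split_train_eval is a hypothetical wrapper that does not exist in the codebase, and split_dataset is reduced to a stand-in; only the if/else restructuring itself is taken from the diff.

# Illustrative sketch of the per-dataset eval handling after this commit.
def split_dataset(items):
    # stand-in: the real split_dataset() sizes and balances the eval split
    eval_split_size = max(1, int(len(items) * 0.01))
    return items[:eval_split_size], items[eval_split_size:]


def split_train_eval(meta_data_train, preprocessor, root_path, meta_file_val, eval_split=True):
    """Hypothetical wrapper around the logic shown in the hunk above."""
    if not eval_split:
        return meta_data_train, None
    if meta_file_val:
        # an explicit validation meta file takes precedence
        meta_data_eval = preprocessor(root_path, meta_file_val)
    else:
        # otherwise an eval split is always carved out of the training items;
        # the removed ignore_generated_eval flag used to make this step optional
        meta_data_eval, meta_data_train = split_dataset(meta_data_train)
    return meta_data_train, meta_data_eval

With the flag gone, callers that request an eval split always get one back and concatenate the two partitions themselves (meta_data_train + meta_data_eval), as the first three hunks show.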