add dataset_id to description
MorrisNein committed Nov 11, 2023
1 parent 6d4267b commit af5e2da
Showing 1 changed file with 4 additions and 2 deletions.
experiments/fedot_warm_start/run.py (6 changes: 4 additions & 2 deletions)
@@ -372,7 +372,8 @@ def main():
     # Gathering knowledge base
     train_histories = {}
     with open(progress_file_path, 'a') as progress_file:
-        for dataset_id in tqdm(dataset_ids, 'FEDOT, all datasets', file=progress_file):
+        for dataset_id in (pbar := tqdm(dataset_ids, 'FEDOT, all datasets', file=progress_file)):
+            pbar.set_description(pbar.desc + f' ({dataset_id})')
             try:
                 timeout = TRAIN_TIMEOUT if dataset_id in dataset_ids_test else TEST_TIMEOUT
                 dataset = algorithm.components.datasets_loader.load_single(dataset_id)
@@ -393,7 +394,8 @@ def main():
         pickle.dump(algorithm, meta_learner_file)
 
     with open(progress_file_path, 'a') as progress_file:
-        for dataset_id in tqdm(dataset_ids_test, 'MetaFEDOT, Test datasets', file=progress_file):
+        for dataset_id in (pbar := tqdm(dataset_ids_test, 'MetaFEDOT, Test datasets', file=progress_file)):
+            pbar.set_description(pbar.desc + f' ({dataset_id})')
             try:
                 # Run meta AutoML
                 # 1
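
The change captures the tqdm progress bar with a walrus assignment so the loop body can append the current dataset_id to the bar's description, making per-dataset progress visible in the progress file. Below is a minimal standalone sketch of the same pattern; the dataset ids and the run_fedot_on helper are hypothetical placeholders rather than the repository's actual loader. The sketch keeps the base description in a variable because tqdm's set_description appends ': ' to the stored desc, so concatenating onto pbar.desc (as the commit does) lets the label grow across iterations.

from tqdm import tqdm

# Hypothetical stand-ins for the experiment's real dataset ids and per-dataset work.
dataset_ids = ['adult', 'credit-g', 'vehicle']

def run_fedot_on(dataset_id):
    ...  # placeholder for the actual training / evaluation step

base_desc = 'FEDOT, all datasets'
with open('progress.txt', 'a') as progress_file:
    # The walrus assignment keeps a handle on the tqdm bar so the loop body
    # can update its description.
    for dataset_id in (pbar := tqdm(dataset_ids, base_desc, file=progress_file)):
        # Rebuild the label from the base text each iteration; set_description
        # re-stores desc with ': ' appended, so reusing pbar.desc directly
        # would accumulate suffixes over successive iterations.
        pbar.set_description(f'{base_desc} ({dataset_id})')
        run_fedot_on(dataset_id)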
