Skip to content
Snippets Groups Projects
Commit feea7c69 authored by Tim O'Donnell's avatar Tim O'Donnell
Browse files

more logging

parent 61595c92
No related branches found
No related tags found
No related merge requests found
......@@ -162,6 +162,7 @@ def run(argv=sys.argv[1:]):
os.mkdir(args.out_models_dir)
print("Done.")
start = time.time()
for (h, hyperparameters) in enumerate(hyperparameters_lst):
n_models = None
if 'n_models' in hyperparameters:
......@@ -214,16 +215,20 @@ def run(argv=sys.argv[1:]):
# which it adds models to, so no merging is required. It also saves
# as it goes so no saving is required at the end.
start = time.time()
data_trained_on = 0
for _ in tqdm.trange(len(work_items)):
item = work_items.pop(0) # want to keep freeing up memory
work_predictor = work_entrypoint(item)
assert work_predictor is predictor
print("*" * 30)
print("Trained %d networks in %0.2f sec." % (
len(predictor.neural_networks), time.time() - start))
print("*" * 30)
if args.percent_rank_calibration_num_peptides_per_length > 0:
start = time.time()
print("Performing percent rank calibration.")
start = time.time()
predictor.calibrate_percentile_ranks(
num_peptides_per_length=args.percent_rank_calibration_num_peptides_per_length,
worker_pool=worker_pool)
......@@ -292,6 +297,5 @@ def process_work(
return predictor
# Script entry point: run the training pipeline defined by run() above
# when this file is executed directly (not when imported as a module).
if __name__ == '__main__':
run()
0% — Loading, or an error occurred.
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment