diff --git a/mhcflurry/parallelism.py b/mhcflurry/parallelism.py
index 5b04401c29c80e6f66b8aa34fa7cd9533afcff8a..88913986287c7694ceef2e506196612eb330a20e 100644
--- a/mhcflurry/parallelism.py
+++ b/mhcflurry/parallelism.py
@@ -221,7 +221,7 @@ def worker_init_entry_point(
 def worker_init(keras_backend=None, gpu_device_nums=None, worker_log_dir=None):
     if worker_log_dir:
         sys.stderr = sys.stdout = open(
-            os.path.join(worker_log_dir, "LOG-worker.%d.txt" % os.getpid(), "w"))
+            os.path.join(worker_log_dir, "LOG-worker.%d.txt" % os.getpid()), "w")
 
     # Each worker needs distinct random numbers
     numpy.random.seed()
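
Note on the fix: in the removed line, "w" was passed as a third component to os.path.join rather than as the mode argument of open(), so the worker tried to open a path ending in .../LOG-worker.<pid>.txt/w in the default read mode. A minimal standalone sketch of the two call shapes (the directory and pid below are illustrative, standing in for worker_log_dir and os.getpid()):

    import os

    worker_log_dir = "/tmp/worker-logs"  # illustrative path
    pid = 12345                          # stands in for os.getpid()

    # Buggy shape: "w" becomes a path component, and open() would then
    # default to read mode, failing instead of creating the log file.
    buggy_path = os.path.join(worker_log_dir, "LOG-worker.%d.txt" % pid, "w")
    print(buggy_path)  # /tmp/worker-logs/LOG-worker.12345.txt/w

    # Fixed shape: the path ends at the filename, and "w" is the mode
    # argument of open(), as intended.
    fixed_path = os.path.join(worker_log_dir, "LOG-worker.%d.txt" % pid)
    print(fixed_path)  # /tmp/worker-logs/LOG-worker.12345.txt
    # log_file = open(fixed_path, "w")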