From b43602faab8c7a9b29c9d99f93bab3443c99c1a8 Mon Sep 17 00:00:00 2001
From: Tim O'Donnell <timodonnell@gmail.com>
Date: Wed, 19 Jun 2019 12:55:44 -0400
Subject: [PATCH] Pass verbose through fit_generator and return its result

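Pass the caller's verbose flag through to fit_generator and to its
EarlyStopping callback (previously hardcoded to verbose=1), and
return the result of the generator-based fit. Also rewrap an
overlong line in worker_init.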
---
 mhcflurry/class1_neural_network.py | 4 +++-
 mhcflurry/parallelism.py           | 3 ++-
 2 files changed, 5 insertions(+), 2 deletions(-)

diff --git a/mhcflurry/class1_neural_network.py b/mhcflurry/class1_neural_network.py
index ffb3435b..e434e1b2 100644
--- a/mhcflurry/class1_neural_network.py
+++ b/mhcflurry/class1_neural_network.py
@@ -519,14 +519,16 @@ class Class1NeuralNetwork(object):
             use_multiprocessing=False,
             workers=1,
             validation_data=(validation_x_dict, validation_y_dict),
+            verbose=verbose,
             callbacks=[keras.callbacks.EarlyStopping(
                 monitor="val_loss",
                 patience=patience,
-                verbose=1)]
+                verbose=verbose)]
         )
         if verbose > 0:
             print("fit_generator completed in %0.2f sec (%d total points)" % (
                 time.time() - start, yielded_values_box[0]))
+        return result
 
 
     def fit(
diff --git a/mhcflurry/parallelism.py b/mhcflurry/parallelism.py
index 88913986..0e652a96 100644
--- a/mhcflurry/parallelism.py
+++ b/mhcflurry/parallelism.py
@@ -221,7 +221,8 @@ def worker_init_entry_point(
 def worker_init(keras_backend=None, gpu_device_nums=None, worker_log_dir=None):
     if worker_log_dir:
         sys.stderr = sys.stdout = open(
-            os.path.join(worker_log_dir, "LOG-worker.%d.txt" % os.getpid()), "w")
+            os.path.join(
+                worker_log_dir, "LOG-worker.%d.txt" % os.getpid()), "w")
 
     # Each worker needs distinct random numbers
     numpy.random.seed()
-- 
GitLab