From 81afbadcfee596f235ea7ea9faba7cc739a1e1f1 Mon Sep 17 00:00:00 2001
From: Tim O'Donnell <timodonnell@gmail.com>
Date: Mon, 17 Jun 2019 18:25:15 -0400
Subject: [PATCH] fix: clamp non-positive IC50 values before the log transform in from_ic50

---
 mhcflurry/regression_target.py               | 2 +-
 mhcflurry/train_pan_allele_models_command.py | 1 +
 test/test_train_pan_allele_models_command.py | 7 +++++++
 3 files changed, 9 insertions(+), 1 deletion(-)

diff --git a/mhcflurry/regression_target.py b/mhcflurry/regression_target.py
index d1189178..3c9ed6a7 100644
--- a/mhcflurry/regression_target.py
+++ b/mhcflurry/regression_target.py
@@ -14,7 +14,7 @@ def from_ic50(ic50, max_ic50=50000.0):
     numpy.array of float
 
     """
-    x = 1.0 - (numpy.log(ic50) / numpy.log(max_ic50))
+    x = 1.0 - (numpy.log(numpy.maximum(ic50, 1e-12)) / numpy.log(max_ic50))
     return numpy.minimum(
         1.0,
         numpy.maximum(0.0, x))
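
Context for the hunk above: a minimal sketch (not part of the patch), assuming only numpy, of what the clamp changes. The helper names are illustrative, not mhcflurry API; they simply mirror the transform before and after the fix.

    import numpy


    def from_ic50_unclamped(ic50, max_ic50=50000.0):
        # Pre-patch transform: ic50 == 0 hits log(0) (-inf plus a RuntimeWarning),
        # and ic50 < 0 yields nan, which survives the min/max clipping below.
        x = 1.0 - (numpy.log(ic50) / numpy.log(max_ic50))
        return numpy.minimum(1.0, numpy.maximum(0.0, x))


    def from_ic50_clamped(ic50, max_ic50=50000.0):
        # Patched transform: non-positive IC50s are clamped to a tiny positive
        # value first, so the output is always a finite number in [0, 1].
        x = 1.0 - (numpy.log(numpy.maximum(ic50, 1e-12)) / numpy.log(max_ic50))
        return numpy.minimum(1.0, numpy.maximum(0.0, x))


    ic50s = numpy.array([-1.0, 0.0, 500.0, 50000.0])
    print(from_ic50_unclamped(ic50s))  # [nan 1. 0.43 0.] plus runtime warnings
    print(from_ic50_clamped(ic50s))    # [1.  1. 0.43 0.] -- all finite

The observable change is only for non-positive inputs: the patched transform maps them to 1.0 (maximal target) instead of producing nan or emitting divide-by-zero warnings that can poison downstream training targets.
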
diff --git a/mhcflurry/train_pan_allele_models_command.py b/mhcflurry/train_pan_allele_models_command.py
index 1d67f003..ae5bb8d8 100644
--- a/mhcflurry/train_pan_allele_models_command.py
+++ b/mhcflurry/train_pan_allele_models_command.py
@@ -336,6 +336,7 @@ def main(args):
     start = time.time()
 
     worker_pool = worker_pool_with_gpu_assignments_from_args(args)
+    print("Worker pool", worker_pool)
 
     if worker_pool:
         print("Processing %d work items in parallel." % len(work_items))
diff --git a/test/test_train_pan_allele_models_command.py b/test/test_train_pan_allele_models_command.py
index d422ea89..397ccee6 100644
--- a/test/test_train_pan_allele_models_command.py
+++ b/test/test_train_pan_allele_models_command.py
@@ -136,3 +136,10 @@ if os.environ.get("KERAS_BACKEND") != "theano":
 
 def test_run_serial():
     run_and_check(n_jobs=1)
+
+if __name__ == "__main__":
+    test_run_serial()
+    #for (name, value) in list(globals().items()):
+    #    if name.startswith("test_"):
+    #        print("Running test", name)
+    #        value()
\ No newline at end of file
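
Usage note on the hunk above: with the added __main__ guard, the test module can be run directly (for example, python test/test_train_pan_allele_models_command.py from the repository root), which executes only test_run_serial; the commented-out loop sketches how every test_* function in the module could be run instead.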
-- 
GitLab