diff --git a/mhcflurry/testing_utils.py b/mhcflurry/testing_utils.py
index 157a11af9f62891be7e61d64f1fae93b5463e830..64e4965b25c6f169ac69b0bdeb5e4ff89cee2473 100644
--- a/mhcflurry/testing_utils.py
+++ b/mhcflurry/testing_utils.py
@@ -2,6 +2,14 @@
 Utilities used in MHCflurry unit tests.
 """
 from . import Class1NeuralNetwork
+from .common import set_keras_backend
+
+
+def startup():
+    """
+    Configure Keras backend for running unit tests.
+    """
+    set_keras_backend("tensorflow-cpu", num_threads=2)
 
 
 def cleanup():
diff --git a/test/test_calibrate_percentile_ranks_command.py b/test/test_calibrate_percentile_ranks_command.py
index 2dc44a0b018a74113b5314bff7ecc79486115fb3..a5656ac35e5480fb2771a514d5a89dfb19e1aad3 100644
--- a/test/test_calibrate_percentile_ranks_command.py
+++ b/test/test_calibrate_percentile_ranks_command.py
@@ -14,9 +14,9 @@ from mhcflurry.downloads import get_path
 
 os.environ["CUDA_VISIBLE_DEVICES"] = ""
 
-from mhcflurry.testing_utils import cleanup
+from mhcflurry.testing_utils import cleanup, startup
 teardown = cleanup
-
+setup = startup
 
 def run_and_check(n_jobs=0, delete=True, additional_args=[]):
     source_models_dir = get_path("models_class1_pan", "models.with_mass_spec")
diff --git a/test/test_changing_allele_representations.py b/test/test_changing_allele_representations.py
index 38b9e12c339fd8bb2c6d40624c84bab92b4df636..271cb3d976fa1f53eb4d818e8806044a54f192fa 100644
--- a/test/test_changing_allele_representations.py
+++ b/test/test_changing_allele_representations.py
@@ -8,8 +8,9 @@ from mhcflurry.downloads import get_path
 
 from numpy.testing import assert_equal
 
-from mhcflurry.testing_utils import cleanup
+from mhcflurry.testing_utils import cleanup, startup
 teardown = cleanup
+setup = startup
 
 ALLELE_TO_SEQUENCE = pandas.read_csv(
     get_path(
diff --git a/test/test_class1_affinity_predictor.py b/test/test_class1_affinity_predictor.py
index 8f2b4aa057bf1e42abc99752330ac45d28dded72..aa70a25da2f657578cf9caea84505847a6158a8d 100644
--- a/test/test_class1_affinity_predictor.py
+++ b/test/test_class1_affinity_predictor.py
@@ -15,13 +15,14 @@ from nose.tools import eq_, assert_raises
 from numpy import testing
 
 from mhcflurry.downloads import get_path
 
-import mhcflurry.testing_utils
+from mhcflurry.testing_utils import cleanup, startup
 
 DOWNLOADED_PREDICTOR = Class1AffinityPredictor.load()
 
 
 def setup():
     global DOWNLOADED_PREDICTOR
+    startup()
     DOWNLOADED_PREDICTOR = Class1AffinityPredictor.load()
     logging.basicConfig(level=logging.DEBUG)
@@ -29,7 +30,7 @@ def setup():
 def teardown():
     global DOWNLOADED_PREDICTOR
     DOWNLOADED_PREDICTOR = None
-    mhcflurry.testing_utils.cleanup()
+    cleanup()
 
 
 # To hunt down a weird warning we were seeing in pandas.
diff --git a/test/test_class1_neural_network.py b/test/test_class1_neural_network.py
index 4620334a38e96b917f41ec9db515ac33384711ce..47be495cb94c0f4c9133df458bc31ca97ed861df 100644
--- a/test/test_class1_neural_network.py
+++ b/test/test_class1_neural_network.py
@@ -13,8 +13,9 @@ from mhcflurry.class1_neural_network import Class1NeuralNetwork
 from mhcflurry.downloads import get_path
 from mhcflurry.common import random_peptides
 
-from mhcflurry.testing_utils import cleanup
+from mhcflurry.testing_utils import cleanup, startup
 teardown = cleanup
+setup = startup
 
 
 def test_class1_neural_network_a0205_training_accuracy():
diff --git a/test/test_class1_pan.py b/test/test_class1_pan.py
index 9ad87d69e1ced68490c29fca4a657ab2473da2a3..0528ef60da5b6f82d4381452030cbe51c2ce83ef 100644
--- a/test/test_class1_pan.py
+++ b/test/test_class1_pan.py
@@ -11,8 +11,9 @@ from mhcflurry import Class1AffinityPredictor,Class1NeuralNetwork
 from mhcflurry.allele_encoding import AlleleEncoding
 from mhcflurry.downloads import get_path
 
-from mhcflurry.testing_utils import cleanup
+from mhcflurry.testing_utils import cleanup, startup
 teardown = cleanup
+setup = startup
 
 
 HYPERPARAMETERS = {
diff --git a/test/test_custom_loss.py b/test/test_custom_loss.py
index 2426f48db03242e4427b1bf7fd59a527551401aa..98ee4ab52495b6163f15189c06969455c78062a5 100644
--- a/test/test_custom_loss.py
+++ b/test/test_custom_loss.py
@@ -11,8 +11,9 @@ import keras.backend as K
 
 from mhcflurry.custom_loss import CUSTOM_LOSSES
 
-from mhcflurry.testing_utils import cleanup
+from mhcflurry.testing_utils import cleanup, startup
 teardown = cleanup
+setup = startup
 
 
 def evaluate_loss(loss, y_true, y_pred):
diff --git a/test/test_download_models_class1.py b/test/test_download_models_class1.py
index ffbf8b3cf6ac172cef426a2cadd9d0efa9ecd101..29a4d47a3dc12ba553e26f6fc1af93a338f543ea 100644
--- a/test/test_download_models_class1.py
+++ b/test/test_download_models_class1.py
@@ -5,14 +5,14 @@
 from numpy.testing import assert_equal
 
 from mhcflurry import Class1AffinityPredictor, Class1NeuralNetwork
 
-from mhcflurry.testing_utils import cleanup
-
+from mhcflurry.testing_utils import cleanup, startup
 
 DOWNLOADED_PREDICTOR = None
 
 
 def setup():
     global DOWNLOADED_PREDICTOR
+    startup()
     DOWNLOADED_PREDICTOR = Class1AffinityPredictor.load()
diff --git a/test/test_multi_output.py b/test/test_multi_output.py
index b4254b56d7b1e7282c3127b00d15cf6593b98795..452f7a767c09a653be5f37113d20b0f117f44fd7 100644
--- a/test/test_multi_output.py
+++ b/test/test_multi_output.py
@@ -12,8 +12,9 @@ logging.getLogger('tensorflow').disabled = True
 from mhcflurry.class1_neural_network import Class1NeuralNetwork
 from mhcflurry.common import random_peptides
 
-from mhcflurry.testing_utils import cleanup
+from mhcflurry.testing_utils import cleanup, startup
 teardown = cleanup
+setup = startup
 
 
 def test_multi_output():
diff --git a/test/test_network_merging.py b/test/test_network_merging.py
index 56683a52dbd8bfe63ad60b06ddaa76111d5d186f..69eab37dea033739fdb2f112b35ff3e6655cae8f 100644
--- a/test/test_network_merging.py
+++ b/test/test_network_merging.py
@@ -5,8 +5,7 @@ from mhcflurry import Class1AffinityPredictor, Class1NeuralNetwork
 from mhcflurry.common import random_peptides
 from mhcflurry.downloads import get_path
-from mhcflurry.testing_utils import cleanup
-
+from mhcflurry.testing_utils import cleanup, startup
 
 logging.getLogger('tensorflow').disabled = True
 
 PAN_ALLELE_PREDICTOR = None
@@ -14,6 +13,7 @@ PAN_ALLELE_PREDICTOR = None
 
 def setup():
     global PAN_ALLELE_PREDICTOR
+    startup()
     PAN_ALLELE_PREDICTOR = Class1AffinityPredictor.load(
         get_path("models_class1_pan", "models.with_mass_spec"),
         max_models=4,
diff --git a/test/test_predict_command.py b/test/test_predict_command.py
index 507d7c4a6c62939636ccb7315707957850a99c0a..c3f0a5c1aa22a634b06c862056c9216452849a35 100644
--- a/test/test_predict_command.py
+++ b/test/test_predict_command.py
@@ -6,8 +6,9 @@ from numpy.testing import assert_equal
 
 from mhcflurry import predict_command
 
-from mhcflurry.testing_utils import cleanup
+from mhcflurry.testing_utils import cleanup, startup
 teardown = cleanup
+setup = startup
 
 TEST_CSV = '''
 Allele,Peptide,Experiment
diff --git a/test/test_released_predictors_on_hpv_dataset.py b/test/test_released_predictors_on_hpv_dataset.py
index 3369257455157cc4047aedde6818139dee54b52f..26bf0227a74160187a3e385d1f633573b0400926 100644
--- a/test/test_released_predictors_on_hpv_dataset.py
+++ b/test/test_released_predictors_on_hpv_dataset.py
@@ -13,8 +13,7 @@ from nose.tools import eq_, assert_less, assert_greater, assert_almost_equal
 
 from mhcflurry import Class1AffinityPredictor
 from mhcflurry.downloads import get_path
-from mhcflurry.testing_utils import cleanup
-
+from mhcflurry.testing_utils import cleanup, startup
 
 def data_path(name):
     '''
@@ -30,6 +29,7 @@ PREDICTORS = None
 
 def setup():
     global PREDICTORS
+    startup()
     PREDICTORS = {
         'allele-specific': Class1AffinityPredictor.load(
             get_path("models_class1", "models")),
diff --git a/test/test_released_predictors_well_correlated.py b/test/test_released_predictors_well_correlated.py
index 82071d653e3db2c6147e73d97e8d1f146b785d84..5040843ffa76338b5dcc0f242be7a02784d1c5b2 100644
--- a/test/test_released_predictors_well_correlated.py
+++ b/test/test_released_predictors_well_correlated.py
@@ -15,13 +15,14 @@ from mhcflurry.encodable_sequences import EncodableSequences
 from mhcflurry.downloads import get_path
 from mhcflurry.common import random_peptides
 
-from mhcflurry.testing_utils import cleanup
+from mhcflurry.testing_utils import cleanup, startup
 
 PREDICTORS = None
 
 
 def setup():
     global PREDICTORS
+    startup()
     PREDICTORS = {
         'allele-specific': Class1AffinityPredictor.load(
             get_path("models_class1", "models")),
@@ -38,7 +39,7 @@ def teardown():
 
 def test_correlation(
         alleles=None,
-        num_peptides_per_length=500,
+        num_peptides_per_length=100,
         lengths=[8, 9, 10],
         debug=False):
     peptides = []
diff --git a/test/test_speed.py b/test/test_speed.py
index eb604a5087458df6c517b7dde5db189d7ebe11e0..037ae61c637a34f70d1e33eb5f382587107537c0 100644
--- a/test/test_speed.py
+++ b/test/test_speed.py
@@ -18,46 +18,41 @@ from mhcflurry.encodable_sequences import EncodableSequences
 from mhcflurry.common import random_peptides
 from mhcflurry.downloads import get_path
 
-from mhcflurry.testing_utils import cleanup
+from mhcflurry.testing_utils import cleanup, startup
 
-ALLELE_SPECIFIC_PREDICTOR = Class1AffinityPredictor.load(
-    get_path("models_class1", "models"))
-PAN_ALLELE_PREDICTOR = Class1AffinityPredictor.load(
-    get_path("models_class1_pan", "models.with_mass_spec"))
-
-
-PREDICTORS = None
+ALLELE_SPECIFIC_PREDICTOR = None
+PAN_ALLELE_PREDICTOR = None
 
 
 def setup():
-    global PREDICTORS
-    PREDICTORS = {
-        'allele-specific': Class1AffinityPredictor.load(
-            get_path("models_class1", "models")),
-        'pan-allele': Class1AffinityPredictor.load(
-            get_path("models_class1_pan", "models.with_mass_spec"))
-    }
+    global ALLELE_SPECIFIC_PREDICTOR, PAN_ALLELE_PREDICTOR
+    startup()
+    ALLELE_SPECIFIC_PREDICTOR = Class1AffinityPredictor.load(
+        get_path("models_class1", "models"))
+
+    PAN_ALLELE_PREDICTOR = Class1AffinityPredictor.load(
+        get_path("models_class1_pan", "models.with_mass_spec"))
 
 
 def teardown():
-    global PREDICTORS
-    PREDICTORS = None
+    global ALLELE_SPECIFIC_PREDICTOR, PAN_ALLELE_PREDICTOR
+    ALLELE_SPECIFIC_PREDICTOR = None
+    PAN_ALLELE_PREDICTOR = None
     cleanup()
 
 
 DEFAULT_NUM_PREDICTIONS = 10000
 
 
-def test_speed_allele_specific(
-        profile=False,
-        predictor=ALLELE_SPECIFIC_PREDICTOR,
-        num=DEFAULT_NUM_PREDICTIONS):
-
+def test_speed_allele_specific(profile=False, num=DEFAULT_NUM_PREDICTIONS):
+    global ALLELE_SPECIFIC_PREDICTOR
     starts = collections.OrderedDict()
     timings = collections.OrderedDict()
     profilers = collections.OrderedDict()
 
+    predictor = ALLELE_SPECIFIC_PREDICTOR
+
     def start(name):
         starts[name] = time.time()
         if profile:
@@ -101,15 +96,14 @@ def test_speed_allele_specific(
         (key, pstats.Stats(value))
         for (key, value) in profilers.items())
 
-def test_speed_pan_allele(
-        profile=False,
-        predictor=PAN_ALLELE_PREDICTOR,
-        num=DEFAULT_NUM_PREDICTIONS):
-
+def test_speed_pan_allele(profile=False, num=DEFAULT_NUM_PREDICTIONS):
+    global PAN_ALLELE_PREDICTOR
    starts = collections.OrderedDict()
     timings = collections.OrderedDict()
     profilers = collections.OrderedDict()
 
+    predictor = PAN_ALLELE_PREDICTOR
+
     def start(name):
         starts[name] = time.time()
         if profile:
diff --git a/test/test_train_and_related_commands.py b/test/test_train_and_related_commands.py
index 7b893f7be0914669115e0477284aa70fc300a88d..92efbdbda6bf56fc580adf8c70b037c1d5a04086 100644
--- a/test/test_train_and_related_commands.py
+++ b/test/test_train_and_related_commands.py
@@ -14,8 +14,9 @@ from numpy.testing import assert_array_less, assert_equal
 
 from mhcflurry import Class1AffinityPredictor
 from mhcflurry.downloads import get_path
 
-from mhcflurry.testing_utils import cleanup
+from mhcflurry.testing_utils import cleanup, startup
 teardown = cleanup
+setup = startup
 
 os.environ["CUDA_VISIBLE_DEVICES"] = ""
diff --git a/test/test_train_pan_allele_models_command.py b/test/test_train_pan_allele_models_command.py
index 51c7c3ffb5bc16ba4e92504702e06fe8e4ee8d58..53c5bb254de227e455aa598fd35182e5e19901eb 100644
--- a/test/test_train_pan_allele_models_command.py
+++ b/test/test_train_pan_allele_models_command.py
@@ -15,8 +15,9 @@ from numpy.testing import assert_equal, assert_array_less
 
 from mhcflurry import Class1AffinityPredictor,Class1NeuralNetwork
 from mhcflurry.downloads import get_path
 
-from mhcflurry.testing_utils import cleanup
+from mhcflurry.testing_utils import cleanup, startup
 teardown = cleanup
+setup = startup
 
 os.environ["CUDA_VISIBLE_DEVICES"] = ""
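
Note on the new startup() hook: nose runs a module-level setup function before any test in a module, just as it runs teardown afterwards, so the setup = startup assignments above guarantee that set_keras_backend("tensorflow-cpu", num_threads=2) executes before any model is loaded. The sketch below is hypothetical, not mhcflurry's actual common.set_keras_backend; it only illustrates the kind of CPU-only, thread-capped TensorFlow session such a call presumably installs, assuming Keras running on TensorFlow 1.x, and the helper name configure_tensorflow_cpu is invented for illustration.

# Hypothetical sketch, not mhcflurry's actual common.set_keras_backend.
# Illustrates the kind of configuration a call like
# set_keras_backend("tensorflow-cpu", num_threads=2) presumably applies:
# hide GPUs and cap TensorFlow's thread pools so parallel test workers
# do not oversubscribe the machine.
import tensorflow as tf
import keras.backend as K


def configure_tensorflow_cpu(num_threads=2):
    config = tf.ConfigProto(
        device_count={"GPU": 0},                    # CPU only
        intra_op_parallelism_threads=num_threads,   # threads within one op
        inter_op_parallelism_threads=num_threads)   # threads across ops
    K.set_session(tf.Session(config=config))


if __name__ == "__main__":
    configure_tensorflow_cpu(num_threads=2)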