diff --git a/mhcflurry/calibrate_percentile_ranks_command.py b/mhcflurry/calibrate_percentile_ranks_command.py
index b441262f78f70049a1eb554605286e75a6132248..c11dd4b1040f0eaca2bf2115da9f15f1fff53f55 100644
--- a/mhcflurry/calibrate_percentile_ranks_command.py
+++ b/mhcflurry/calibrate_percentile_ranks_command.py
@@ -108,7 +108,12 @@ def run(argv=sys.argv[1:]):
 
     configure_logging(verbose=args.verbosity > 1)
 
-    predictor = Class1AffinityPredictor.load(args.models_dir)
+    # Load with optimization_level=0 to avoid triggering a Keras import here,
+    # which would break local parallelism (tensorflow backend).
+    predictor = Class1AffinityPredictor.load(
+        args.models_dir,
+        optimization_level=0,
+    )
 
     if args.allele:
         alleles = [normalize_allele_name(a) for a in args.allele]
@@ -236,6 +241,7 @@ def calibrate_percentile_ranks(
         model_kwargs={}):
     if verbose:
         print("Calibrating", allele)
+    predictor.optimize()  # optimization was skipped at load (optimization_level=0)
     start = time.time()
     summary_results = predictor.calibrate_percentile_ranks(
         peptides=peptides,
diff --git a/mhcflurry/random_negative_peptides.py b/mhcflurry/random_negative_peptides.py
index 80fe7ab4e4cbad42376596550c14f7998a16ac0f..1afa30454eb0384ff700b845a12907f7988b0bab 100644
--- a/mhcflurry/random_negative_peptides.py
+++ b/mhcflurry/random_negative_peptides.py
@@ -7,6 +7,7 @@ import pandas
 from .hyperparameters import HyperparameterDefaults
 from .common import amino_acid_distribution, random_peptides
 
+
 class RandomNegativePeptides(object):
     hyperparameter_defaults = HyperparameterDefaults(
         random_negative_rate=0.0,
diff --git a/test/test_calibrate_percentile_ranks_command.py b/test/test_calibrate_percentile_ranks_command.py
index a5656ac35e5480fb2771a514d5a89dfb19e1aad3..f9ef02a29d37d25b2db4cb2e7e8f9e173a65ca49 100644
--- a/test/test_calibrate_percentile_ranks_command.py
+++ b/test/test_calibrate_percentile_ranks_command.py
@@ -18,6 +18,7 @@ from mhcflurry.testing_utils import cleanup, startup
 teardown = cleanup
 setup = startup
 
+
 def run_and_check(n_jobs=0, delete=True, additional_args=[]):
     source_models_dir = get_path("models_class1_pan", "models.with_mass_spec")
     dest_models_dir = tempfile.mkdtemp(prefix="mhcflurry-test-models")
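
Not part of the patch: a minimal sketch of the deferred-optimization pattern the first two hunks rely on, assuming a fork-based multiprocessing start method. MODELS_DIR, the peptide sample, the example alleles, and the alleles= keyword passed to calibrate_percentile_ranks are placeholders/assumptions for illustration, not code taken from this diff.

    import multiprocessing

    from mhcflurry import Class1AffinityPredictor
    from mhcflurry.common import random_peptides

    MODELS_DIR = "/path/to/models"  # placeholder

    # Parent process: loading with optimization_level=0 skips the model-merging
    # step so (per the comment in the patch) no Keras/TensorFlow import happens
    # before the worker processes are forked.
    predictor = Class1AffinityPredictor.load(MODELS_DIR, optimization_level=0)


    def calibrate_one(allele):
        # Worker process: paying the Keras/TensorFlow import cost is safe here,
        # so apply the optimization that was skipped at load time before doing
        # the expensive calibration work.
        predictor.optimize()
        return predictor.calibrate_percentile_ranks(
            peptides=random_peptides(1000, length=9),
            alleles=[allele],  # assumed keyword for per-allele calibration
        )


    if __name__ == "__main__":
        with multiprocessing.Pool(processes=2) as pool:
            pool.map(calibrate_one, ["HLA-A*02:01", "HLA-B*07:02"])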