diff --git a/mhcflurry/class1_neural_network.py b/mhcflurry/class1_neural_network.py
index 6ff042aa09693f1e3154a68760a8c8bb79513baa..3aa75abc0a72a70a549f1fb89b5b5c3f7638fdbb 100644
--- a/mhcflurry/class1_neural_network.py
+++ b/mhcflurry/class1_neural_network.py
@@ -163,10 +163,7 @@ class Class1NeuralNetwork(object):
         self.network_weights = None
         self.network_weights_loader = None
 
-        self.loss_history = None
-        self.fit_seconds = None
-        self.fit_num_points = []
-
+        self.fit_info = []
         self.prediction_cache = weakref.WeakKeyDictionary()
 
     KERAS_MODELS_CACHE = {}
@@ -310,7 +307,6 @@ class Class1NeuralNetwork(object):
         """
         config = dict(config)
         instance = cls(**config.pop('hyperparameters'))
-        assert all(hasattr(instance, key) for key in config), config.keys()
         instance.__dict__.update(config)
         instance.network_weights = weights
         instance.network_weights_loader = weights_loader
@@ -471,9 +467,6 @@ class Class1NeuralNetwork(object):
             How often (in seconds) to print progress update. Set to None to
             disable.
         """
-
-        self.fit_num_points.append(len(peptides))
-
         encodable_peptides = EncodableSequences.create(peptides)
         peptide_encoding = self.peptides_to_network_input(encodable_peptides)
 
 
@@ -629,7 +622,7 @@ class Class1NeuralNetwork(object):
         min_val_loss_iteration = None
         min_val_loss = None
 
-        self.loss_history = collections.defaultdict(list)
+        fit_info = collections.defaultdict(list)
         start = time.time()
         last_progress_print = None
         x_dict_with_random_negatives = {}
@@ -692,7 +685,7 @@ class Class1NeuralNetwork(object):
                 sample_weight=sample_weights_with_random_negatives)
 
             for (key, value) in fit_history.history.items():
-                self.loss_history[key].extend(value)
+                fit_info[key].extend(value)
 
             # Print progress no more often than once every few seconds.
             if progress_print_interval is not None and (
@@ -704,13 +697,13 @@ class Class1NeuralNetwork(object):
                        "Min val loss (%s) at epoch %s" % (
                            i,
                            self.hyperparameters['max_epochs'],
-                           self.loss_history['loss'][-1],
+                           fit_info['loss'][-1],
                            str(min_val_loss),
                            min_val_loss_iteration)).strip())
                 last_progress_print = time.time()
 
             if self.hyperparameters['validation_split']:
-                val_loss = self.loss_history['val_loss'][-1]
+                val_loss = fit_info['val_loss'][-1]
                 val_losses.append(val_loss)
 
                 if min_val_loss is None or val_loss <= min_val_loss:
@@ -728,11 +721,14 @@ class Class1NeuralNetwork(object):
                                 "Min val loss (%s) at epoch %s" % (
                                     i,
                                     self.hyperparameters['max_epochs'],
-                                    self.loss_history['loss'][-1],
+                                    fit_info['loss'][-1],
                                     str(min_val_loss),
                                     min_val_loss_iteration)).strip())
                         break
-        self.fit_seconds = time.time() - start
+
+        fit_info["time"] = time.time() - start
+        fit_info["num_points"] = len(peptides)
+        self.fit_info.append(dict(fit_info))
 
     def predict(self, peptides, allele_encoding=None, batch_size=4096):
         """
diff --git a/test/test_class1_neural_network.py b/test/test_class1_neural_network.py
index b082f28cf89276bf842cc427d98ad7defab0985a..8ba330cff8bd19154784c879ead66b2edc40a182 100644
--- a/test/test_class1_neural_network.py
+++ b/test/test_class1_neural_network.py
@@ -89,9 +89,12 @@ def test_inequalities():
     # Memorize the dataset.
     hyperparameters = dict(
         loss="custom:mse_with_inequalities",
+        peptide_amino_acid_encoding="one-hot",
         activation="tanh",
         layer_sizes=[16],
         max_epochs=50,
+        minibatch_size=32,
+        random_negative_rate=0.0,
         early_stopping=False,
         validation_split=0.0,
         locally_connected_layers=[
diff --git a/test/test_train_allele_specific_models_command.py b/test/test_train_allele_specific_models_command.py
index fab19ce7e8e11400c7e7a4f7e5448e4cdb364759..a1e597ca0e9b37069358e1e61e79ca2433a4c9f5 100644
--- a/test/test_train_allele_specific_models_command.py
+++ b/test/test_train_allele_specific_models_command.py
@@ -12,7 +12,7 @@ from mhcflurry.downloads import get_path
 HYPERPARAMETERS = [
     {
         "n_models": 2,
-        "max_epochs": 2,
+        "max_epochs": 20,
         "patience": 10,
         "early_stopping": True,
         "validation_split": 0.2,