diff --git a/downloads-generation/models_class1/hyperparameters.yaml b/downloads-generation/models_class1/hyperparameters.yaml
index 964cc7e5c893eb7744e71947794b6d3040bc60d6..fd23856c27a8f6ecbefa42839c114f2cec24ff32 100644
--- a/downloads-generation/models_class1/hyperparameters.yaml
+++ b/downloads-generation/models_class1/hyperparameters.yaml
@@ -42,9 +42,7 @@
 ],
 "activation": "relu",
 "output_activation": "sigmoid",
-"layer_sizes": [
-    16
-],
+"layer_sizes": [32],
 "dense_layer_l1_regularization": 0.001,
 "batch_normalization": false,
 "dropout_probability": 0.0,
diff --git a/downloads-generation/models_class1_experiments1/GENERATE.sh b/downloads-generation/models_class1_experiments1/GENERATE.sh
index da0354ea6cd618727fce57af1eaf05ef8f60d98c..adc6ecda2ef3ffc946850f5264d0dc49f1a87a9e 100755
--- a/downloads-generation/models_class1_experiments1/GENERATE.sh
+++ b/downloads-generation/models_class1_experiments1/GENERATE.sh
@@ -40,7 +40,7 @@ time mhcflurry-class1-train-allele-specific-models \
     --allele $ALLELES 2>&1 | tee -a LOG.standard.txt &
 
 # Model variations on qualitative + quantitative
-for mod in 0local_noL1 0local 2local widelocal dense8 dense32 dense64 noL1 onehot embedding
+for mod in 0local_noL1 0local 2local widelocal dense16 dense64 noL1 onehot embedding
 do
     cp $SCRIPT_DIR/hyperparameters-${mod}.yaml .
     mkdir models-${mod}
diff --git a/downloads-generation/models_class1_experiments1/hyperparameters-0local.yaml b/downloads-generation/models_class1_experiments1/hyperparameters-0local.yaml
index 25e4410c7c305c83e9f3f18036b59c4b2fe4270f..08f1525c66b26ba42706749114ca6d92fc46bd43 100644
--- a/downloads-generation/models_class1_experiments1/hyperparameters-0local.yaml
+++ b/downloads-generation/models_class1_experiments1/hyperparameters-0local.yaml
@@ -37,7 +37,7 @@
 "activation": "relu",
 "output_activation": "sigmoid",
 "layer_sizes": [
-    16 
+    32
 ],
 "dense_layer_l1_regularization": 0.001,
 "batch_normalization": false,
diff --git a/downloads-generation/models_class1_experiments1/hyperparameters-0local_noL1.yaml b/downloads-generation/models_class1_experiments1/hyperparameters-0local_noL1.yaml
index abe4d296fa4c3d0a6d6bd3967ea28facfcff4e6a..888713425597703b70726e89c61c196d469da5e2 100644
--- a/downloads-generation/models_class1_experiments1/hyperparameters-0local_noL1.yaml
+++ b/downloads-generation/models_class1_experiments1/hyperparameters-0local_noL1.yaml
@@ -37,7 +37,7 @@
 "activation": "relu",
 "output_activation": "sigmoid",
 "layer_sizes": [
-    16
+    32
 ],
 "dense_layer_l1_regularization": 0.0,
 "batch_normalization": false,
diff --git a/downloads-generation/models_class1_experiments1/hyperparameters-2local.yaml b/downloads-generation/models_class1_experiments1/hyperparameters-2local.yaml
index 61319daa13694bdf23086f62cd8ad1a7a4624ed1..a06ab8f76d7660f6bd8488425ffd9a0e5a72e2e2 100644
--- a/downloads-generation/models_class1_experiments1/hyperparameters-2local.yaml
+++ b/downloads-generation/models_class1_experiments1/hyperparameters-2local.yaml
@@ -47,7 +47,7 @@
 "activation": "relu",
 "output_activation": "sigmoid",
 "layer_sizes": [
-   16 
+   32
 ],
 "dense_layer_l1_regularization": 0.001,
 "batch_normalization": false,
diff --git a/downloads-generation/models_class1_experiments1/hyperparameters-dense8.yaml b/downloads-generation/models_class1_experiments1/hyperparameters-dense16.yaml
similarity index 99%
rename from downloads-generation/models_class1_experiments1/hyperparameters-dense8.yaml
rename to downloads-generation/models_class1_experiments1/hyperparameters-dense16.yaml
index 1c2d48d31c2cb047b8313083b8e87ef2fcea4f3b..a714d2ad7253583392bb7ff672e1162201661888 100644
--- a/downloads-generation/models_class1_experiments1/hyperparameters-dense8.yaml
+++ b/downloads-generation/models_class1_experiments1/hyperparameters-dense16.yaml
@@ -42,7 +42,7 @@
 "activation": "relu",
 "output_activation": "sigmoid",
 "layer_sizes": [
-   8 
+   16
 ],
 "dense_layer_l1_regularization": 0.001,
 "batch_normalization": false,
diff --git a/downloads-generation/models_class1_experiments1/hyperparameters-dense32.yaml b/downloads-generation/models_class1_experiments1/hyperparameters-dense32.yaml
deleted file mode 100644
index a1dd1af483730927a17dae79dc992e2babb799bd..0000000000000000000000000000000000000000
--- a/downloads-generation/models_class1_experiments1/hyperparameters-dense32.yaml
+++ /dev/null
@@ -1,50 +0,0 @@
-[{
-##########################################
-# ENSEMBLE SIZE
-##########################################
-"n_models": 8,
-
-##########################################
-# OPTIMIZATION
-##########################################
-"max_epochs": 500,
-"patience": 10,
-"early_stopping": true,
-"validation_split": 0.2,
-"minibatch_size": 128,
-
-##########################################
-# RANDOM NEGATIVE PEPTIDES
-##########################################
-"random_negative_rate": 0.0,
-"random_negative_constant": 25,
-"random_negative_affinity_min": 20000.0,
-"random_negative_affinity_max": 50000.0,
-
-##########################################
-# PEPTIDE REPRESENTATION
-##########################################
-# One of "one-hot", "embedding", or "BLOSUM62".
-"peptide_amino_acid_encoding": "BLOSUM62",
-"use_embedding": false,  # maintained for backward compatability
-"kmer_size": 15,
-
-##########################################
-# NEURAL NETWORK ARCHITECTURE
-##########################################
-"locally_connected_layers": [
-    {
-        "filters": 8,
-        "activation": "tanh",
-        "kernel_size": 3
-    }
-],
-"activation": "relu",
-"output_activation": "sigmoid",
-"layer_sizes": [
-   32 
-],
-"dense_layer_l1_regularization": 0.001,
-"batch_normalization": false,
-"dropout_probability": 0.0,
-}]
diff --git a/downloads-generation/models_class1_experiments1/hyperparameters-embedding.yaml b/downloads-generation/models_class1_experiments1/hyperparameters-embedding.yaml
index 0bfed0e1dfff9921a4927e085465c11d321b4d20..1598f3a101357f202bef82634e07e71998c2d7b3 100644
--- a/downloads-generation/models_class1_experiments1/hyperparameters-embedding.yaml
+++ b/downloads-generation/models_class1_experiments1/hyperparameters-embedding.yaml
@@ -43,7 +43,7 @@
 "activation": "relu",
 "output_activation": "sigmoid",
 "layer_sizes": [
-   16 
+   32
 ],
 "dense_layer_l1_regularization": 0.001,
 "batch_normalization": false,
diff --git a/downloads-generation/models_class1_experiments1/hyperparameters-noL1.yaml b/downloads-generation/models_class1_experiments1/hyperparameters-noL1.yaml
index 4d3f9ced58eb9d56d5e6183209f88224617bc079..cb910a2ee47244f07a87f7245d03628c10e309ae 100644
--- a/downloads-generation/models_class1_experiments1/hyperparameters-noL1.yaml
+++ b/downloads-generation/models_class1_experiments1/hyperparameters-noL1.yaml
@@ -42,7 +42,7 @@
 "activation": "relu",
 "output_activation": "sigmoid",
 "layer_sizes": [
-   16 
+   32
 ],
 "dense_layer_l1_regularization": 0.0,
 "batch_normalization": false,
diff --git a/downloads-generation/models_class1_experiments1/hyperparameters-onehot.yaml b/downloads-generation/models_class1_experiments1/hyperparameters-onehot.yaml
index a8c14b146241d8ac9ac9c6df57fa149a4e54c334..dad568de1a06b0f64f53c1d9375fc478398bd1d7 100644
--- a/downloads-generation/models_class1_experiments1/hyperparameters-onehot.yaml
+++ b/downloads-generation/models_class1_experiments1/hyperparameters-onehot.yaml
@@ -42,7 +42,7 @@
 "activation": "relu",
 "output_activation": "sigmoid",
 "layer_sizes": [
-   16 
+   32
 ],
 "dense_layer_l1_regularization": 0.001,
 "batch_normalization": false,
diff --git a/downloads-generation/models_class1_experiments1/hyperparameters-widelocal.yaml b/downloads-generation/models_class1_experiments1/hyperparameters-widelocal.yaml
index a927bdd5170f96072b86d8ae1f1ee0917e8d84ba..c2a39f14de772c5f1b3c4235698b92b71e56bcc3 100644
--- a/downloads-generation/models_class1_experiments1/hyperparameters-widelocal.yaml
+++ b/downloads-generation/models_class1_experiments1/hyperparameters-widelocal.yaml
@@ -43,7 +43,7 @@
 "activation": "relu",
 "output_activation": "sigmoid",
 "layer_sizes": [
-   16 
+   32
 ],
 "dense_layer_l1_regularization": 0.001,
 "batch_normalization": false,