Commit 28041d1c authored by Tim O'Donnell

Switch back to dense=16

parent bdc672f4
Showing 11 changed files with 11 additions and 11 deletions
@@ -42,7 +42,7 @@
   ],
   "activation": "relu",
   "output_activation": "sigmoid",
-  "layer_sizes": [32],
+  "layer_sizes": [16],
   "dense_layer_l1_regularization": 0.001,
   "batch_normalization": false,
   "dropout_probability": 0.0,
...
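These hyperparameters describe a small feed-forward network; the same 32-to-16 swap repeats across the variant files below. The following is a minimal sketch, not mhcflurry's actual model-building code, of how fields like these typically map onto a Keras 2-style dense network; build_network, the flat hyperparameter dict, and the input_dim value are illustrative assumptions.

# A minimal sketch (NOT mhcflurry's actual builder) of how these
# hyperparameters plausibly map onto a Keras 2-style dense network.
from keras.models import Sequential
from keras.layers import Dense
from keras.regularizers import l1

def build_network(input_dim, hp):
    model = Sequential()
    # One hidden layer per entry in "layer_sizes"; this commit moves the
    # single hidden layer back from 32 to 16 units.
    for i, size in enumerate(hp["layer_sizes"]):
        kwargs = {"input_dim": input_dim} if i == 0 else {}
        model.add(Dense(
            size,
            activation=hp["activation"],  # "relu"
            kernel_regularizer=l1(hp["dense_layer_l1_regularization"]),
            **kwargs))
    # Sigmoid output keeps predictions in (0, 1). With
    # dropout_probability 0.0 and batch_normalization false, no Dropout
    # or BatchNormalization layers would be inserted.
    model.add(Dense(1, activation=hp["output_activation"]))  # "sigmoid"
    return model

hp = {"activation": "relu", "output_activation": "sigmoid",
      "layer_sizes": [16], "dense_layer_l1_regularization": 0.001}
model = build_network(input_dim=128, hp=hp)  # input_dim is illustrative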
@@ -40,7 +40,7 @@ time mhcflurry-class1-train-allele-specific-models \
     --allele $ALLELES 2>&1 | tee -a LOG.standard.txt &

 # Model variations on qualitative + quantitative
-for mod in 0local_noL1 0local 2local widelocal dense16 dense64 noL1 onehot embedding
+for mod in 0local_noL1 0local 2local widelocal dense8 dense32 noL1 onehot embedding
do
     cp $SCRIPT_DIR/hyperparameters-${mod}.yaml .
     mkdir models-${mod}
...
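This loop trains one model directory per hyperparameter variant, so after the rename the script expects hyperparameters-dense8.yaml and hyperparameters-dense32.yaml on disk (the size swaps appear in the hunks below). A hypothetical sanity check, assuming for illustration that each YAML file's top level exposes a layer_sizes key (the real files may nest it differently):

# Hypothetical check (not part of the repo): confirm each renamed variant's
# hyperparameters file carries the layer size its name implies.
import yaml

for mod in ["dense8", "dense32"]:
    with open("hyperparameters-%s.yaml" % mod) as f:
        hp = yaml.safe_load(f)
    # Expect [8] and [32] respectively after this commit.
    print(mod, hp["layer_sizes"])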
@@ -37,7 +37,7 @@
   "activation": "relu",
   "output_activation": "sigmoid",
   "layer_sizes": [
-    32
+    16
   ],
   "dense_layer_l1_regularization": 0.001,
   "batch_normalization": false,
...
@@ -37,7 +37,7 @@
   "activation": "relu",
   "output_activation": "sigmoid",
   "layer_sizes": [
-    32
+    16
   ],
   "dense_layer_l1_regularization": 0.0,
   "batch_normalization": false,
...
@@ -47,7 +47,7 @@
   "activation": "relu",
   "output_activation": "sigmoid",
   "layer_sizes": [
-    32
+    16
   ],
   "dense_layer_l1_regularization": 0.001,
   "batch_normalization": false,
...
@@ -42,7 +42,7 @@
   "activation": "relu",
   "output_activation": "sigmoid",
   "layer_sizes": [
-    16
+    32
   ],
   "dense_layer_l1_regularization": 0.001,
   "batch_normalization": false,
...
@@ -42,7 +42,7 @@
   "activation": "relu",
   "output_activation": "sigmoid",
   "layer_sizes": [
-    64
+    8
   ],
   "dense_layer_l1_regularization": 0.001,
   "batch_normalization": false,
...
@@ -43,7 +43,7 @@
   "activation": "relu",
   "output_activation": "sigmoid",
   "layer_sizes": [
-    32
+    16
   ],
   "dense_layer_l1_regularization": 0.001,
   "batch_normalization": false,
...
@@ -42,7 +42,7 @@
   "activation": "relu",
   "output_activation": "sigmoid",
   "layer_sizes": [
-    32
+    16
   ],
   "dense_layer_l1_regularization": 0.0,
   "batch_normalization": false,
...
@@ -42,7 +42,7 @@
   "activation": "relu",
   "output_activation": "sigmoid",
   "layer_sizes": [
-    32
+    16
   ],
   "dense_layer_l1_regularization": 0.001,
   "batch_normalization": false,
...
@@ -43,7 +43,7 @@
   "activation": "relu",
   "output_activation": "sigmoid",
   "layer_sizes": [
-    32
+    16
   ],
   "dense_layer_l1_regularization": 0.001,
   "batch_normalization": false,
...