From d589cd2b69c1e3627b33605842021c168db0c74d Mon Sep 17 00:00:00 2001
From: Tim O'Donnell <timodonnell@gmail.com>
Date: Wed, 15 May 2019 16:51:22 -0400
Subject: [PATCH] first cut on models_class1_pan download

---
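Note: GENERATE.sh resolves its inputs via mhcflurry-downloads path, so the
data_curated, allele_sequences, and random_peptide_predictions downloads must
be available locally before running it, e.g. (assuming the standard
mhcflurry-downloads fetch subcommand):

    mhcflurry-downloads fetch data_curated allele_sequences random_peptide_predictions
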
 .../models_class1_pan_unselected/GENERATE.sh  | 56 +++++++++++++++++++
 .../models_class1_pan_unselected/README.md    | 13 ++++
 .../generate_hyperparameters.py               | 57 +++++++++++++++++++
 3 files changed, 126 insertions(+)
 create mode 100755 downloads-generation/models_class1_pan_unselected/GENERATE.sh
 create mode 100644 downloads-generation/models_class1_pan_unselected/README.md
 create mode 100644 downloads-generation/models_class1_pan_unselected/generate_hyperparameters.py

diff --git a/downloads-generation/models_class1_pan_unselected/GENERATE.sh b/downloads-generation/models_class1_pan_unselected/GENERATE.sh
new file mode 100755
index 00000000..569677da
--- /dev/null
+++ b/downloads-generation/models_class1_pan_unselected/GENERATE.sh
@@ -0,0 +1,56 @@
+#!/bin/bash
+#
+# Train pan-allele MHCflurry Class I models.
+#
+set -e
+set -x
+
+DOWNLOAD_NAME=models_class1_pan_unselected
+SCRATCH_DIR=${TMPDIR-/tmp}/mhcflurry-downloads-generation
+SCRIPT_ABSOLUTE_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/$(basename "${BASH_SOURCE[0]}")"
+SCRIPT_DIR=$(dirname "$SCRIPT_ABSOLUTE_PATH")
+
+mkdir -p "$SCRATCH_DIR"
+rm -rf "$SCRATCH_DIR/$DOWNLOAD_NAME"
+mkdir "$SCRATCH_DIR/$DOWNLOAD_NAME"
+
+# Send stdout and stderr to a logfile included with the archive.
+exec >  >(tee -ia "$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt")
+exec 2> >(tee -ia "$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt" >&2)
+
+# Log some environment info
+date
+pip freeze
+git status
+
+cd "$SCRATCH_DIR/$DOWNLOAD_NAME"
+
+mkdir models
+
+python "$SCRIPT_DIR/generate_hyperparameters.py" > hyperparameters.yaml
+
+GPUS=$(nvidia-smi -L 2> /dev/null | wc -l) || GPUS=0
+echo "Detected GPUS: $GPUS"
+
+PROCESSORS=$(getconf _NPROCESSORS_ONLN)
+echo "Detected processors: $PROCESSORS"
+
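+# Train the unselected ensemble; 25% of measurements per allele (at most
+# 100) are held out from training.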
+time mhcflurry-class1-train-pan-allele-models \
+    --data "$(mhcflurry-downloads path data_curated)/curated_training_data.with_mass_spec.csv.bz2" \
+    --allele-sequences "$(mhcflurry-downloads path allele_sequences)/allele_sequences.csv" \
+    --pretrain-data "$(mhcflurry-downloads path random_peptide_predictions)/predictions.csv.bz2" \
+    --held-out-measurements-per-allele-fraction-and-max 0.25 100 \
+    --ensemble-size 4 \
+    --hyperparameters hyperparameters.yaml \
+    --out-models-dir models
+
+# Parallelization flags (currently disabled):
+#    --num-jobs $(expr $PROCESSORS \* 2) --gpus $GPUS --max-workers-per-gpu 2 --max-tasks-per-worker 50
+
+cp "$SCRIPT_ABSOLUTE_PATH" .
+bzip2 LOG.txt
+tar -cjf "../${DOWNLOAD_NAME}.tar.bz2" *
+
+echo "Created archive: $SCRATCH_DIR/$DOWNLOAD_NAME.tar.bz2"
diff --git a/downloads-generation/models_class1_pan_unselected/README.md b/downloads-generation/models_class1_pan_unselected/README.md
new file mode 100644
index 00000000..add1df33
--- /dev/null
+++ b/downloads-generation/models_class1_pan_unselected/README.md
@@ -0,0 +1,13 @@
+# Class I pan-allele models (ensemble)
+
+This download contains trained pan-allele MHCflurry Class I models (prior to model selection).
+
+To generate this download, run:
+
+```
+./GENERATE.sh
+```
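+
+The script trains the models in a scratch directory and packages them, along
+with a log and a copy of the generation script, into
+`models_class1_pan_unselected.tar.bz2`.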
diff --git a/downloads-generation/models_class1_pan_unselected/generate_hyperparameters.py b/downloads-generation/models_class1_pan_unselected/generate_hyperparameters.py
new file mode 100644
index 00000000..6a9c2782
--- /dev/null
+++ b/downloads-generation/models_class1_pan_unselected/generate_hyperparameters.py
@@ -0,0 +1,57 @@
+"""
+Generate grid of hyperparameters
+"""
+
+from sys import stdout
+from copy import deepcopy
+from yaml import dump
+
+base_hyperparameters = {
+    'activation': 'tanh',
+    'allele_dense_layer_sizes': [],
+    'batch_normalization': False,
+    'dense_layer_l1_regularization': 0.0,
+    'dense_layer_l2_regularization': 0.0,
+    'dropout_probability': 0.5,
+    'early_stopping': True,
+    'init': 'glorot_uniform',
+    'layer_sizes': [1024, 512],
+    'learning_rate': None,
+    'locally_connected_layers': [],
+    'loss': 'custom:mse_with_inequalities',
+    'max_epochs': 5000,
+    'minibatch_size': 128,
+    'optimizer': 'rmsprop',
+    'output_activation': 'sigmoid',
+    "patience": 20,
+    'peptide_encoding': {
+        'vector_encoding_name': 'BLOSUM62',
+        'alignment_method': 'left_pad_centered_right_pad',
+        'max_length': 15,
+    },
+    'peptide_allele_merge_activation': '',
+    'peptide_allele_merge_method': 'concatenate',
+    'peptide_amino_acid_encoding': 'BLOSUM62',
+    'peptide_dense_layer_sizes': [],
+    'random_negative_affinity_max': 50000.0,
+    'random_negative_affinity_min': 20000.0,
+    'random_negative_constant': 25,
+    'random_negative_distribution_smoothing': 0.0,
+    'random_negative_match_distribution': True,
+    'random_negative_rate': 0.2,
+    'train_data': {},
+    'validation_split': 0.1,
+}
+
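+# Sweep layer sizes (width and depth) and dense-layer L1 regularization:
+# 5 architectures x 4 penalties = 20 hyperparameter sets.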
+grid = []
+for layer_sizes in [[1024], [1024 * 10], [1024, 512], [512, 512], [1024, 1024]]:
+    for l1 in [0.0, 0.0001, 0.001, 0.01]:
+        new = deepcopy(base_hyperparameters)
+        new["layer_sizes"] = layer_sizes
+        new["dense_layer_l1_regularization"] = l1
+        if new not in grid:
+            grid.append(new)
+
+dump(grid, stdout)
-- 
GitLab