From 700535fa58a6854280f15f75fdf0df8f49764bf6 Mon Sep 17 00:00:00 2001
From: Tim O'Donnell <timodonnell@gmail.com>
Date: Sat, 25 Nov 2017 14:34:59 -0500
Subject: [PATCH] add widelocal variant

---
 .../models_class1_experiments1/GENERATE.sh    | 11 ++--
 .../hyperparameters-widelocal.yaml            | 50 +++++++++++++++++++
 2 files changed, 57 insertions(+), 4 deletions(-)
 create mode 100644 downloads-generation/models_class1_experiments1/hyperparameters-widelocal.yaml

diff --git a/downloads-generation/models_class1_experiments1/GENERATE.sh b/downloads-generation/models_class1_experiments1/GENERATE.sh
index 3afe151f..ae40082a 100755
--- a/downloads-generation/models_class1_experiments1/GENERATE.sh
+++ b/downloads-generation/models_class1_experiments1/GENERATE.sh
@@ -34,10 +34,10 @@ time mhcflurry-class1-train-allele-specific-models \
     --hyperparameters hyperparameters-standard.yaml \
     --out-models-dir models-standard-quantitative \
     --percent-rank-calibration-num-peptides-per-length 0 \
-    --alleles $ALLELES &
+    --allele $ALLELES 2>&1 | tee -a LOG.standard.txt &
 
 # Model variations on qualitative + quantitative
-for mod in 0local_noL1 0local 2local dense16 dense64 noL1 onehot embedding
+for mod in 0local_noL1 0local 2local widelocal dense16 dense64 noL1 onehot embedding
 do
     cp $SCRIPT_DIR/hyperparameters-${mod}.yaml .
     mkdir models-${mod}
@@ -46,12 +46,15 @@ do
         --hyperparameters hyperparameters-${mod}.yaml \
         --out-models-dir models-${mod} \
         --percent-rank-calibration-num-peptides-per-length 0 \
-        --alleles $ALLELES &
+        --allele $ALLELES 2>&1 | tee -a LOG.${mod}.txt &
 done
 wait
 
 cp $SCRIPT_ABSOLUTE_PATH .
-bzip2 LOG.txt
+for i in $(ls *.txt)
+do
+    bzip2 $i
+done
 tar -cjf "../${DOWNLOAD_NAME}.tar.bz2" *
 
 echo "Created archive: $SCRATCH_DIR/$DOWNLOAD_NAME.tar.bz2"

diff --git a/downloads-generation/models_class1_experiments1/hyperparameters-widelocal.yaml b/downloads-generation/models_class1_experiments1/hyperparameters-widelocal.yaml
new file mode 100644
index 00000000..cfde02c2
--- /dev/null
+++ b/downloads-generation/models_class1_experiments1/hyperparameters-widelocal.yaml
@@ -0,0 +1,50 @@
+[{
+##########################################
+# ENSEMBLE SIZE
+##########################################
+"n_models": 8,
+
+##########################################
+# OPTIMIZATION
+##########################################
+"max_epochs": 500,
+"patience": 10,
+"early_stopping": true,
+"validation_split": 0.2,
+
+##########################################
+# RANDOM NEGATIVE PEPTIDES
+##########################################
+"random_negative_rate": 0.0,
+"random_negative_constant": 25,
+"random_negative_affinity_min": 20000.0,
+"random_negative_affinity_max": 50000.0,
+
+##########################################
+# PEPTIDE REPRESENTATION
+##########################################
+# One of "one-hot", "embedding", or "BLOSUM62".
+"peptide_amino_acid_encoding": "BLOSUM62",
+"use_embedding": false,  # maintained for backward compatability
+"embedding_output_dim": 8,  # only used if using embedding
+"kmer_size": 15,
+
+##########################################
+# NEURAL NETWORK ARCHITECTURE
+##########################################
+"locally_connected_layers": [
+    {
+        "filters": 8,
+        "activation": "tanh",
+        "kernel_size": 5
+    }
+],
+"activation": "relu",
+"output_activation": "sigmoid",
+"layer_sizes": [
+    32
+],
+"dense_layer_l1_regularization": 0.001,
+"batch_normalization": false,
+"dropout_probability": 0.0,
+}]
--
GitLab
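
Note (not part of the patch): hyperparameters-widelocal.yaml follows the same convention as the other variant files in this download, a YAML list holding a single hyperparameter dict, where the wider locally connected kernel (8 filters, kernel_size 5) presumably gives the variant its name. A minimal Python sketch for inspecting the file, assuming PyYAML is installed and the file sits in the current directory:

    # Minimal sketch (not part of this patch): inspect the widelocal
    # hyperparameters with PyYAML. The filename and working directory
    # are assumptions for illustration.
    import yaml

    with open("hyperparameters-widelocal.yaml") as f:
        hyperparameters = yaml.safe_load(f)  # a list of hyperparameter dicts

    (params,) = hyperparameters  # this file defines a single architecture
    for layer in params["locally_connected_layers"]:
        # the "wide" local receptive field: 8 filters over a 5-position window
        print(layer["filters"], layer["kernel_size"], layer["activation"])

The file has to be parsed as YAML rather than strict JSON because it carries inline # comments.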