From 323094944d374aefefc6ed06d8afaada061fdd40 Mon Sep 17 00:00:00 2001
From: Timothy ODonnell <odonnt02@li03c03.chimera.hpc.mssm.edu>
Date: Wed, 2 Oct 2019 20:54:45 -0400
Subject: [PATCH] fix

---
 .../GENERATE.WITH_HPC_CLUSTER.sh              |  7 ++--
 ...er_submit_script_header.mssm_hpc.nogpu.lsf | 32 +++++++++++++++++++
 2 files changed, 35 insertions(+), 4 deletions(-)
 create mode 100644 downloads-generation/data_mass_spec_benchmark/cluster_submit_script_header.mssm_hpc.nogpu.lsf

diff --git a/downloads-generation/data_mass_spec_benchmark/GENERATE.WITH_HPC_CLUSTER.sh b/downloads-generation/data_mass_spec_benchmark/GENERATE.WITH_HPC_CLUSTER.sh
index 7be43470..e57f2354 100755
--- a/downloads-generation/data_mass_spec_benchmark/GENERATE.WITH_HPC_CLUSTER.sh
+++ b/downloads-generation/data_mass_spec_benchmark/GENERATE.WITH_HPC_CLUSTER.sh
@@ -27,6 +27,7 @@ cd $SCRATCH_DIR/$DOWNLOAD_NAME
 
 cp $SCRIPT_DIR/write_proteome_peptides.py .
 cp $SCRIPT_DIR/run_mhcflurry.py .
+cp $SCRIPT_DIR/run_thirdparty_predictors.py .
 cp $SCRIPT_DIR/write_allele_list.py .
 
 PEPTIDES=$(mhcflurry-downloads path data_mass_spec_annotated)/annotated_ms.csv.bz2
@@ -100,7 +101,7 @@ python write_proteome_peptides.py \
 python run_thirdparty_predictors.py \
     proteome_peptides.all.csv \
     --predictor netmhcpan4 \
-    --chunk-size 100000 \
+    --chunk-size 10000 \
     --allele $(cat alleles.txt) \
     --out "predictions/all.netmhcpan4" \
     --worker-log-dir "$SCRATCH_DIR/$DOWNLOAD_NAME" \
@@ -108,9 +109,7 @@ python run_thirdparty_predictors.py \
     --cluster-max-retries 15 \
     --cluster-submit-command bsub \
     --cluster-results-workdir ~/mhcflurry-scratch \
-    --cluster-script-prefix-path $SCRIPT_DIR/cluster_submit_script_header.mssm_hpc.lsf
-
-
+    --cluster-script-prefix-path $SCRIPT_DIR/cluster_submit_script_header.mssm_hpc.nogpu.lsf
 
 
 bzip2 proteome_peptides.chr1.csv
diff --git a/downloads-generation/data_mass_spec_benchmark/cluster_submit_script_header.mssm_hpc.nogpu.lsf b/downloads-generation/data_mass_spec_benchmark/cluster_submit_script_header.mssm_hpc.nogpu.lsf
new file mode 100644
index 00000000..7b85917f
--- /dev/null
+++ b/downloads-generation/data_mass_spec_benchmark/cluster_submit_script_header.mssm_hpc.nogpu.lsf
@@ -0,0 +1,32 @@
+#!/bin/bash
+#BSUB -J MHCf-{work_item_num} # Job name
+#BSUB -P acc_nkcancer # allocation account or Unix group
+#BSUB -q express # queue
+#BSUB -R span[hosts=1] # one node
+#BSUB -n 1 # number of compute cores
+#BSUB -W 46:00 # walltime in HH:MM
+#BSUB -R rusage[mem=30000] # mb memory requested
+#BSUB -o {work_dir}/%J.stdout # output log (%J : JobID)
+#BSUB -eo {work_dir}/STDERR # error log
+#BSUB -L /bin/bash # Initialize the execution environment
+#
+
+set -e
+set -x
+
+echo "Subsequent stderr output redirected to stdout" >&2
+exec 2>&1
+
+export TMPDIR=/local/JOBS/mhcflurry-{work_item_num}
+export PATH=$HOME/.conda/envs/py36b/bin/:$PATH
+export PYTHONUNBUFFERED=1
+export KMP_SETTINGS=1
+export NETMHC_BUNDLE_HOME=$HOME/sinai/git/netmhc-bundle
+export NETMHC_BUNDLE_TMPDIR=/local/JOBS/netmhctmp-{work_item_num}
+export PATH=$NETMHC_BUNDLE_HOME/bin:$PATH
+
+free -m
+env
+
+cd {work_dir}
+
-- 
GitLab