#!/bin/bash
#
# Train pan-allele MHCflurry Class I models. Supports re-starting a failed run.
#
# Uses an HPC cluster (the Mount Sinai chimera cluster, which uses the LSF
# job scheduler). This would need to be modified for other sites (a rough
# sketch follows).
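#
# As a rough, untested sketch of what adapting to another scheduler might
# involve: the scheduler-specific arguments passed to the training command
# below would change, e.g. for a SLURM site something along the lines of
#
#     --cluster-submit-command sbatch \
#     --cluster-script-prefix-path "$SCRIPT_DIR"/my_site_submit_header.slurm \
#
# where the header file name is hypothetical and would contain your site's
# submit-script preamble, replacing the bsub command and the .lsf header
# used for chimera.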
#
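# Exit on the first error and echo each command as it runs.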
set -e
set -x

# Name of the generated download and the scratch space used to build it.
DOWNLOAD_NAME=models_class1_pan_unselected
SCRATCH_DIR=${TMPDIR-/tmp}/mhcflurry-downloads-generation

# Absolute path to this script and the directory containing it.
SCRIPT_ABSOLUTE_PATH="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)/$(basename "${BASH_SOURCE[0]}")"
SCRIPT_DIR=$(dirname "$SCRIPT_ABSOLUTE_PATH")

mkdir -p "$SCRATCH_DIR"
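
# A fresh run starts with an empty output directory; passing
# "continue-incomplete" as the first argument reuses an existing one.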
if [ "$1" != "continue-incomplete" ]
then
    echo "Fresh run"
    rm -rf "$SCRATCH_DIR/$DOWNLOAD_NAME"
    mkdir "$SCRATCH_DIR/$DOWNLOAD_NAME"
else
    echo "Continuing incomplete run"
fi

# Send stdout and stderr to a logfile included with the archive.
LOG="$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.$(date +%s).txt"
exec >  >(tee -ia "$LOG")
exec 2> >(tee -ia "$LOG" >&2)

# Log some environment info
echo "Invocation: $0 $@"
date
pip freeze
git status

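# Fetch the upstream downloads this run depends on: curated training data,
# allele sequences, and random peptide predictions (used below as
# pre-training data).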
mhcflurry-downloads fetch data_curated allele_sequences random_peptide_predictions

cd "$SCRATCH_DIR/$DOWNLOAD_NAME"

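# Limit numeric libraries to a single OpenMP thread and disable Python
# output buffering so the logs stream promptly.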
export OMP_NUM_THREADS=1
export PYTHONUNBUFFERED=1

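# On a fresh run, generate hyperparameters.yaml using the generator script
# shipped alongside this one.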
if [ "$1" != "continue-incomplete" ]
then
    cp "$SCRIPT_DIR/generate_hyperparameters.py" .
    python generate_hyperparameters.py > hyperparameters.yaml
fi

# Train one set of models per training-data variant: with and without mass
# spec data.
for kind in with_mass_spec no_mass_spec
do
    EXTRA_TRAIN_ARGS=""
    if [ "$1" == "continue-incomplete" ] && [ -d "models.${kind}" ]
    then
        echo "Will continue existing run: $kind"
        EXTRA_TRAIN_ARGS="--continue-incomplete"
    fi

    # Launch pan-allele training; work is parallelized across the cluster via bsub.
    mhcflurry-class1-train-pan-allele-models \
        --data "$(mhcflurry-downloads path data_curated)/curated_training_data.${kind}.csv.bz2" \
        --allele-sequences "$(mhcflurry-downloads path allele_sequences)/allele_sequences.csv" \
        --pretrain-data "$(mhcflurry-downloads path random_peptide_predictions)/predictions.csv.bz2" \
        --held-out-measurements-per-allele-fraction-and-max 0.25 100 \
        --ensemble-size 4 \
        --hyperparameters hyperparameters.yaml \
        --out-models-dir "$(pwd)/models.${kind}" \
        --worker-log-dir "$SCRATCH_DIR/$DOWNLOAD_NAME" \
        --verbosity 0 \
        --cluster-parallelism \
        --cluster-submit-command bsub \
        --cluster-results-workdir ~/mhcflurry-scratch \
        --cluster-script-prefix-path "$SCRIPT_DIR/cluster_submit_script_header.mssm_hpc.lsf" \
        $EXTRA_TRAIN_ARGS
done

cp "$SCRIPT_ABSOLUTE_PATH" .
# Compress the main log and any per-worker logs before building the archive.
bzip2 -f "$LOG"
for i in LOG-worker.*.txt ; do if [ -e "$i" ] ; then bzip2 -f "$i" ; fi ; done
RESULT="$SCRATCH_DIR/${DOWNLOAD_NAME}.$(date +%Y%m%d).tar.bz2"
tar -cjf "$RESULT" *
echo "Created archive: $RESULT"

# Split into <2GB chunks for GitHub
PARTS="${RESULT}.part."
# Check for pre-existing part files and rename them.
for i in "${PARTS}"*
do
    if [ -e "$i" ]
    then
        DEST="${i}.OLD.$(date +%s)"
        echo "WARNING: already exists: $i . Moving to $DEST"
        mv "$i" "$DEST"
    fi
done
split -b 2000M "$RESULT" "$PARTS"
echo "Split into parts:"
ls -lh "${PARTS}"*
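
# For reference: split's output parts sort lexicographically, so the archive
# could later be reconstructed with something like:
#     cat "${PARTS}"* > "$RESULT"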