Commit 07351b16 authored by Timothy O'Donnell, committed by Tim O'Donnell

fix

parent 70dd074b
@@ -15,8 +15,8 @@ rm -rf "$SCRATCH_DIR/$DOWNLOAD_NAME"
 mkdir "$SCRATCH_DIR/$DOWNLOAD_NAME"
 # Send stdout and stderr to a logfile included with the archive.
-#exec > >(tee -ia "$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt")
-#exec 2> >(tee -ia "$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt" >&2)
+exec > >(tee -ia "$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt")
+exec 2> >(tee -ia "$SCRATCH_DIR/$DOWNLOAD_NAME/LOG.txt" >&2)
 # Log some environment info
 date
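
The re-enabled exec lines use bash process substitution with tee so that everything the script writes to stdout and stderr is both shown on the console and appended to a LOG.txt shipped inside the download archive. A minimal standalone sketch of the same idiom (the log path here is a placeholder, not from the repository):

    #!/bin/bash
    LOG=/tmp/example-log.txt            # placeholder path, not the script's real LOG.txt
    exec > >(tee -ia "$LOG")            # duplicate stdout into the log, append mode
    exec 2> >(tee -ia "$LOG" >&2)       # duplicate stderr into the log, keep it on stderr
    echo "this line goes to the console and to $LOG"
    echo "so does this error message" >&2
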
@@ -29,17 +29,15 @@ cp $SCRIPT_DIR/write_proteome_peptides.py .
 cp $SCRIPT_DIR/run_mhcflurry.py .
 cp $SCRIPT_DIR/write_allele_list.py .
 PEPTIDES=$(mhcflurry-downloads path data_mass_spec_annotated)/annotated_ms.csv.bz2
 REFERENCES_DIR=$(mhcflurry-downloads path data_references)
-#python write_proteome_peptides.py \
-#    "$PEPTIDES" \
-#    "${REFERENCES_DIR}/uniprot_proteins.csv.bz2" \
-#    --out proteome_peptides.csv
-#ls -lh proteome_peptides.csv
-#bzip2 proteome_peptides.csv
-ln -s ~/Dropbox/sinai/projects/201808-mhcflurry-pan/20190622-models/proteome_peptides.csv.bz2 proteome_peptides.csv.bz2
+python write_proteome_peptides.py \
+    "$PEPTIDES" \
+    "${REFERENCES_DIR}/uniprot_proteins.csv.bz2" \
+    --out proteome_peptides.csv
+ls -lh proteome_peptides.csv
+bzip2 proteome_peptides.csv
 python write_allele_list.py "$PEPTIDES" --out alleles.txt
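
With this change the proteome peptide table is generated during the run instead of being symlinked from a precomputed local Dropbox copy: write_proteome_peptides.py matches the annotated mass-spec peptides against the UniProt protein sequences and writes proteome_peptides.csv, which is then bzip2-compressed. A small, hedged sketch of how the resulting file might be spot-checked afterwards (not part of the script):

    # Assumes the step above produced proteome_peptides.csv.bz2 in the working directory.
    bzcat proteome_peptides.csv.bz2 | head -5               # peek at the header and first rows
    bzcat proteome_peptides.csv.bz2 | tail -n +2 | wc -l    # approximate row count, excluding the header
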
@@ -55,7 +55,7 @@ parser.add_argument(
 parser.add_argument(
     "--chunk-size",
     type=int,
-    default=1000000,
+    default=100000000,
     help="Num peptides per job. Default: %(default)s")
 parser.add_argument(
     "--batch-size",
@@ -12,8 +12,6 @@ import pandas
 import tqdm # progress bar
 tqdm.monitor_interval = 0 # see https://github.com/tqdm/tqdm/issues/481
-import shellinford
 parser = argparse.ArgumentParser(usage=__doc__)