From 03f44c8f4ad20d162eabb2f7e81ff47ca9e0c86c Mon Sep 17 00:00:00 2001
From: Tim O'Donnell <timodonnell@gmail.com>
Date: Fri, 23 Sep 2016 12:28:56 -0400
Subject: [PATCH] Update docs

---
 mhcflurry/class1_allele_specific/cross_validation.py |  6 +++---
 mhcflurry/class1_allele_specific/train.py            |  4 +++-
 mhcflurry/parallelism.py                             | 11 +++++++++++
 mhcflurry/predict.py                                 |  7 ++++++-
 4 files changed, 23 insertions(+), 5 deletions(-)

diff --git a/mhcflurry/class1_allele_specific/cross_validation.py b/mhcflurry/class1_allele_specific/cross_validation.py
index 6bcd092c..5ceeb4b7 100644
--- a/mhcflurry/class1_allele_specific/cross_validation.py
+++ b/mhcflurry/class1_allele_specific/cross_validation.py
@@ -126,9 +126,9 @@ def cross_validation_folds(
     impute_kwargs : dict, optional
         Additional kwargs to pass to mhcflurry.Dataset.impute_missing_values.
 
-    n_jobs : integer, optional
-        The number of jobs to run in parallel. If -1, then the number of jobs
-        is set to the number of cores.
+    parallel_backend : mhcflurry.parallelism.ParallelBackend, optional
+        Futures implementation to use for running on multiple threads,
+        processes, or nodes.
 
     Returns
     -----------
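A minimal usage sketch of the keyword documented above, assuming an mhcflurry.Dataset has already been loaded; the variable name train_data and any arguments other than parallel_backend are hypothetical, not taken from this patch:

    from mhcflurry.class1_allele_specific.cross_validation import cross_validation_folds
    from mhcflurry.parallelism import ConcurrentFuturesParallelBackend

    # "train_data" is a hypothetical Dataset loaded elsewhere; only the
    # parallel_backend keyword comes from the documentation above.
    backend = ConcurrentFuturesParallelBackend(num_workers=4, processes=True)
    folds = cross_validation_folds(train_data, parallel_backend=backend)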
diff --git a/mhcflurry/class1_allele_specific/train.py b/mhcflurry/class1_allele_specific/train.py
index c4f29e49..7e5824b8 100644
--- a/mhcflurry/class1_allele_specific/train.py
+++ b/mhcflurry/class1_allele_specific/train.py
@@ -259,7 +259,9 @@ def train_across_models_and_folds(
     return_predictors : boolean, optional
         Include the trained predictors in the result.
 
-    parallel_backend : parallel backend, optional
+    parallel_backend : mhcflurry.parallelism.ParallelBackend, optional
+        Futures implementation to use for running on multiple threads,
+        processes, or nodes.
 
     Returns
     -----------
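A similarly hedged sketch for the keyword documented above; the positional arguments (here models and folds) are hypothetical placeholders for whatever train_across_models_and_folds actually takes, which this patch does not show:

    from mhcflurry.class1_allele_specific.train import train_across_models_and_folds
    from mhcflurry.parallelism import ConcurrentFuturesParallelBackend

    backend = ConcurrentFuturesParallelBackend(num_workers=2)
    # "models" and "folds" are hypothetical stand-ins for the function's
    # other arguments.
    results = train_across_models_and_folds(
        models,
        folds,
        return_predictors=True,
        parallel_backend=backend)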
diff --git a/mhcflurry/parallelism.py b/mhcflurry/parallelism.py
index 9bce9c01..18008b4e 100644
--- a/mhcflurry/parallelism.py
+++ b/mhcflurry/parallelism.py
@@ -5,6 +5,10 @@ DEFAULT_BACKEND = None
 
 
 class ParallelBackend(object):
+    """
+    Thin wrapper around futures implementations. Designed to support
+    concurrent.futures as well as dask.distributed's workalike implementation.
+    """
     def __init__(self, executor, module, verbose=1):
         self.executor = executor
         self.module = module
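The constructor shown here takes an executor plus the module providing the futures API, so a standard-library executor can be wrapped directly; a minimal sketch:

    from concurrent import futures
    from mhcflurry.parallelism import ParallelBackend

    # Wrap a stdlib thread pool; the same pattern works for any object that
    # exposes the concurrent.futures executor interface.
    backend = ParallelBackend(
        executor=futures.ThreadPoolExecutor(max_workers=4),
        module=futures,
        verbose=0)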
@@ -33,6 +37,9 @@ class ParallelBackend(object):
 
 
 class DaskDistributedParallelBackend(ParallelBackend):
+    """
+    ParallelBackend that uses dask.distributed.
+    """
     def __init__(self, scheduler_ip_and_port, verbose=1):
         from dask import distributed  # pylint: disable=import-error
         executor = distributed.Executor(scheduler_ip_and_port)
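A minimal sketch of connecting this backend to a running dask.distributed scheduler; the address below is a placeholder, not a value from this patch:

    from mhcflurry.parallelism import DaskDistributedParallelBackend

    # Requires a dask.distributed scheduler already listening at this address.
    backend = DaskDistributedParallelBackend("127.0.0.1:8786")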
@@ -46,6 +53,10 @@ class DaskDistributedParallelBackend(ParallelBackend):
 
 
 class ConcurrentFuturesParallelBackend(ParallelBackend):
+    """
+    ParallelBackend that uses Python's concurrent.futures module.
+    Can use either threads or processes.
+    """
     def __init__(self, num_workers=1, processes=False, verbose=1):
         if processes:
             executor = futures.ProcessPoolExecutor(num_workers)
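A minimal sketch of the two modes this backend supports, per the constructor shown above:

    from mhcflurry.parallelism import ConcurrentFuturesParallelBackend

    # Threads by default; pass processes=True to use a process pool instead.
    thread_backend = ConcurrentFuturesParallelBackend(num_workers=4)
    process_backend = ConcurrentFuturesParallelBackend(num_workers=4, processes=True)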
diff --git a/mhcflurry/predict.py b/mhcflurry/predict.py
index 8b886b4e..6534971e 100644
--- a/mhcflurry/predict.py
+++ b/mhcflurry/predict.py
@@ -22,6 +22,9 @@ from .common import normalize_allele_name, UnsupportedAllele
 
 def predict(alleles, peptides, loaders=None):
     """
+    Make predictions across all combinations of the specified alleles and
+    peptides.
+
     Parameters
     ----------
     alleles : list of str
@@ -30,6 +33,9 @@ def predict(alleles, peptides, loaders=None):
     peptides : list of str
         Peptide amino acid sequences.
 
+    loaders : list of Class1AlleleSpecificPredictorLoader, optional
+        Loaders to try; they are attempted in the order given.
+
     Returns DataFrame with columns "Allele", "Peptide", and "Prediction"
     """
     if loaders is None:
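A minimal sketch of the documented call, using placeholder allele and peptide values; the returned DataFrame has the "Allele", "Peptide", and "Prediction" columns named in the docstring:

    from mhcflurry.predict import predict

    # Predictions (IC50) for every allele/peptide combination.
    df = predict(alleles=["HLA-A0201"], peptides=["SIINFEKL"])
    print(df)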
@@ -64,4 +70,3 @@ def predict(alleles, peptides, loaders=None):
             result_dict["Peptide"].append(peptides[i])
             result_dict["Prediction"].append(ic50)
     return pd.DataFrame(result_dict)
- 
-- 
GitLab