Commit 03f44c8f authored by Tim O'Donnell

Update docs

parent 9727cf7e
@@ -126,9 +126,9 @@ def cross_validation_folds(
impute_kwargs : dict, optional
Additional kwargs to pass to mhcflurry.Dataset.impute_missing_values.
n_jobs : integer, optional
The number of jobs to run in parallel. If -1, then the number of jobs
is set to the number of cores.
parallel_backend : mhcflurry.parallelism.ParallelBackend, optional
Futures implementation to use for running on multiple threads,
processes, or nodes
Returns
-----------
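A side note on the `n_jobs` convention documented above: -1 is a common scientific-Python shorthand for "one job per CPU core". A minimal standard-library sketch of how such a value is typically resolved (the helper name is hypothetical, not part of mhcflurry):

```python
import multiprocessing


def resolve_n_jobs(n_jobs):
    """Hypothetical helper: translate the n_jobs convention into a worker count."""
    if n_jobs == -1:
        # -1 means "use as many jobs as there are CPU cores".
        return multiprocessing.cpu_count()
    return n_jobs
```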
@@ -259,7 +259,9 @@ def train_across_models_and_folds(
return_predictors : boolean, optional
Include the trained predictors in the result.
- parallel_backend : parallel backend, optional
+ parallel_backend : mhcflurry.parallelism.ParallelBackend, optional
Futures implementation to use for running on multiple threads,
processes, or nodes
Returns
-----------
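Both `cross_validation_folds` and `train_across_models_and_folds` take the same `parallel_backend` argument. A minimal sketch of constructing a backend to pass there, using only the constructor signatures visible in this diff (the helper name and the scheduler address are hypothetical):

```python
from mhcflurry.parallelism import (
    ConcurrentFuturesParallelBackend,
    DaskDistributedParallelBackend,
)


def make_backend(scheduler_ip_and_port=None, num_workers=4):
    """Hypothetical helper: build an object suitable for parallel_backend=."""
    if scheduler_ip_and_port is not None:
        # Run on an existing dask.distributed cluster, e.g. "127.0.0.1:8786".
        return DaskDistributedParallelBackend(scheduler_ip_and_port)
    # Otherwise run locally on a pool of worker processes.
    return ConcurrentFuturesParallelBackend(num_workers, processes=True)
```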
@@ -5,6 +5,10 @@ DEFAULT_BACKEND = None
class ParallelBackend(object):
"""
Thin wrapper of futures implementations. Designed to support
concurrent.futures as well as dask.distributed's workalike implementation.
"""
def __init__(self, executor, module, verbose=1):
self.executor = executor
self.module = module
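The constructor shown above takes an executor and the module it came from; a usage sketch wrapping a standard-library thread pool directly (in practice the subclasses below do this for you):

```python
from concurrent import futures

from mhcflurry.parallelism import ParallelBackend

# Wrap a concurrent.futures executor in the generic ParallelBackend, matching
# the __init__(executor, module, verbose=1) signature shown above.
executor = futures.ThreadPoolExecutor(max_workers=4)
backend = ParallelBackend(executor, futures)
```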
@@ -33,6 +37,9 @@ class ParallelBackend(object):
class DaskDistributedParallelBackend(ParallelBackend):
"""
ParallelBackend that uses dask.distributed
"""
def __init__(self, scheduler_ip_and_port, verbose=1):
from dask import distributed # pylint: disable=import-error
executor = distributed.Executor(scheduler_ip_and_port)
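A usage sketch for the dask.distributed backend; the scheduler address here is a placeholder, and a running scheduler with workers is assumed:

```python
from mhcflurry.parallelism import DaskDistributedParallelBackend

# Connect to an existing dask.distributed scheduler ("host:port").
backend = DaskDistributedParallelBackend("127.0.0.1:8786")
```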
@@ -46,6 +53,10 @@ class DaskDistributedParallelBackend(ParallelBackend):
class ConcurrentFuturesParallelBackend(ParallelBackend):
"""
ParallelBackend that uses Python's concurrent.futures module.
Can use either threads or processes.
"""
def __init__(self, num_workers=1, processes=False, verbose=1):
if processes:
executor = futures.ProcessPoolExecutor(num_workers)
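A usage sketch for the concurrent.futures backend, showing the thread- and process-pool variants exposed by the constructor above:

```python
from mhcflurry.parallelism import ConcurrentFuturesParallelBackend

# Thread-based pool (the default, processes=False).
thread_backend = ConcurrentFuturesParallelBackend(num_workers=4)

# Process-based pool, useful for CPU-bound work that threads alone
# would not speed up.
process_backend = ConcurrentFuturesParallelBackend(num_workers=4, processes=True)
```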
@@ -22,6 +22,9 @@ from .common import normalize_allele_name, UnsupportedAllele
def predict(alleles, peptides, loaders=None):
"""
Make predictions across all combinations of the specified alleles and
peptides.
Parameters
----------
alleles : list of str
@@ -30,6 +33,9 @@ def predict(alleles, peptides, loaders=None):
peptides : list of str
Peptide amino acid sequences.
loaders : list of Class1AlleleSpecificPredictorLoader, optional
Loaders to try. Will be tried in the order given.
Returns DataFrame with columns "Allele", "Peptide", and "Prediction"
"""
if loaders is None:
@@ -64,4 +70,3 @@ def predict(alleles, peptides, loaders=None):
result_dict["Peptide"].append(peptides[i])
result_dict["Prediction"].append(ic50)
return pd.DataFrame(result_dict)
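A minimal usage sketch of `predict` based on the docstring above; the allele and peptide values are illustrative, and the top-level import assumes the function is re-exported by the mhcflurry package:

```python
from mhcflurry import predict

# One prediction per (allele, peptide) combination; the result is a
# pandas DataFrame with "Allele", "Peptide", and "Prediction" (IC50) columns.
df = predict(alleles=["HLA-A0201"], peptides=["SIINFEKL", "SIINFEKQ"])
print(df)
```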