    def predict(
            self,
peptides,
allele_encoding=None,
batch_size=4096,
output_index=0):
        """
        Predict affinities.

        If peptides are specified as EncodableSequences, then the predictions
        will be cached for this predictor as long as the EncodableSequences
        object remains in memory. The cache is keyed on the object identity of
        the EncodableSequences, not the sequences themselves.

        Parameters
        ----------
        peptides : EncodableSequences or list of string
        allele_encoding : AlleleEncoding, optional
            Only required when this model is a pan-allele model.
        batch_size : int
            batch_size passed to Keras

        Returns
        -------
        numpy.array of IC50 affinity predictions (nM)
        """
use_cache = (
allele_encoding is None and
isinstance(peptides, EncodableSequences))
if use_cache and peptides in self.prediction_cache:
return self.prediction_cache[peptides].copy()
x_dict = {
'peptide': self.peptides_to_network_input(peptides)
}
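        # Keys in x_dict must match the network's Input layer names
        # ('peptide', plus 'allele' for pan-allele models).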
if allele_encoding is not None:
(allele_encoding_input, allele_representations) = (
self.allele_encoding_to_network_input(allele_encoding))
x_dict['allele'] = allele_encoding_input
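            # Load the per-allele representation matrix into the network's
            # "allele_representation" embedding before predicting.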
self.set_allele_representations(allele_representations)
network = self.network()
else:
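            # No allele-specific weights need to be set here, so a shared
            # ("borrowed") network instance can be used as-is.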
network = self.network(borrow=True)
raw_predictions = network.predict(x_dict, batch_size=batch_size)
        predictions = numpy.array(raw_predictions, dtype="float64")
if output_index is not None:
predictions = predictions[:,output_index]
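        # to_ic50 converts the network's 0-1 regression output to nanomolar
        # affinities.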
result = to_ic50(predictions)
if use_cache:
self.prediction_cache[peptides] = result
return result
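    # Illustrative usage sketch (not from this file; assumes a trained instance
    # of this predictor named `model` and that EncodableSequences provides a
    # `create` classmethod):
    #
    #     peptides = EncodableSequences.create(["SIINFEKL", "SIINFEKLL"])
    #     ic50s = model.predict(peptides)        # computed, then cached
    #     ic50s_again = model.predict(peptides)  # returned from the cache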
    def make_network(
            self,
            allele_dense_layer_sizes,
peptide_dense_layer_sizes,
peptide_allele_merge_method,
peptide_allele_merge_activation,
layer_sizes,
dense_layer_l1_regularization,
dense_layer_l2_regularization,
activation,
init,
output_activation,
dropout_probability,
batch_normalization,
            locally_connected_layers,
            num_outputs=1,
            allele_representations=None):
"""
Helper function to make a keras network for class1 affinity prediction.
"""
# We import keras here to avoid tensorflow debug output, etc. unless we
# are actually about to use Keras.
        from keras.layers import Input
        import keras.layers
        from keras.layers.core import Dense, Flatten, Dropout
        from keras.layers.embeddings import Embedding
        from keras.layers.normalization import BatchNormalization
peptide_encoding_shape = self.peptides_to_network_input([]).shape[1:]
peptide_input = Input(
shape=peptide_encoding_shape,
dtype='float32',
name='peptide')
        current_layer = peptide_input
        inputs = [peptide_input]
kernel_regularizer = None
l1 = dense_layer_l1_regularization
l2 = dense_layer_l2_regularization
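        # When L1/L2 penalties are configured, a single shared regularizer is
        # applied to the dense layers below.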
if l1 > 0 or l2 > 0:
kernel_regularizer = keras.regularizers.l1_l2(l1, l2)
for (i, locally_connected_params) in enumerate(locally_connected_layers):
current_layer = keras.layers.LocallyConnected1D(
**locally_connected_params)(current_layer)
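        # Flatten the per-position peptide features before the dense layers.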
current_layer = Flatten(name="flattened_0")(current_layer)
for (i, layer_size) in enumerate(peptide_dense_layer_sizes):
current_layer = Dense(
layer_size,
name="peptide_dense_%d" % i,
kernel_regularizer=kernel_regularizer,
activation=activation)(current_layer)
        if batch_normalization:
            current_layer = BatchNormalization(name="batch_norm_early")(
                current_layer)
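        # Pan-allele models take a second input: an allele index that selects a
        # fixed allele representation from the embedding defined below.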
if allele_representations is not None:
            allele_input = Input(
                shape=(1,),
                dtype='float32',
                name='allele')
            inputs.append(allele_input)

            allele_layer = Embedding(
                name="allele_representation",
                input_dim=allele_representations.shape[0],
                output_dim=numpy.product(allele_representations.shape[1:], dtype=int),
                input_length=1)(allele_input)
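            # The embedding is a lookup table of per-allele representations;
            # its weights are filled in by set_allele_representations() below.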
for (i, layer_size) in enumerate(allele_dense_layer_sizes):
allele_layer = Dense(
layer_size,
name="allele_dense_%d" % i,
kernel_regularizer=kernel_regularizer,
activation=activation)(allele_layer)
allele_layer = Flatten(name="allele_flat")(allele_layer)
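            # Combine the peptide and allele representations, either by
            # concatenation or elementwise multiplication.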
if peptide_allele_merge_method == 'concatenate':
current_layer = keras.layers.concatenate([
current_layer, allele_layer
], name="allele_peptide_merged")
elif peptide_allele_merge_method == 'multiply':
current_layer = keras.layers.multiply([
                    current_layer, allele_layer
                ], name="allele_peptide_merged")
            else:
raise ValueError(
"Unsupported peptide_allele_encoding_merge_method: %s"
% peptide_allele_merge_method)
if peptide_allele_merge_activation:
current_layer = keras.layers.Activation(
peptide_allele_merge_activation,
name="alelle_peptide_merged_%s" %
peptide_allele_merge_activation)(current_layer)
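        # Main dense stack, with optional batch normalization and dropout
        # after each layer.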
for (i, layer_size) in enumerate(layer_sizes):
            current_layer = Dense(
                layer_size,
                activation=activation,
                kernel_regularizer=kernel_regularizer,
                name="dense_%d" % i)(current_layer)
            if batch_normalization:
                current_layer = BatchNormalization(
                    name="batch_norm_%d" % i)(current_layer)
            if dropout_probability > 0:
                current_layer = Dropout(
                    rate=1 - dropout_probability,
                    name="dropout_%d" % i)(current_layer)
output = Dense(
num_outputs,
kernel_initializer=init,
activation=output_activation,
name="output")(current_layer)
model = keras.models.Model(
inputs=inputs,
outputs=[output],
name="predictor")
        return model
def set_allele_representations(self, allele_representations):
"""
Parameters
----------
model
allele_representations
"""
        reshaped = allele_representations.reshape(
            (allele_representations.shape[0], -1))
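        # The embedding's weight matrix must already have the matching shape;
        # resizing it ("network surgery") is not implemented.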
layer = self.network().get_layer("allele_representation")
(existing,) = layer.get_weights()
if existing.shape == reshaped.shape:
layer.set_weights([reshaped])
else:
            raise NotImplementedError(
                "Network surgery required: %s != %s" % (
                    existing.shape, reshaped.shape))