From 20d68c2b36222f1c5fe7f6d970984f1dd23342ae Mon Sep 17 00:00:00 2001
From: Tim O'Donnell <timodonnell@gmail.com>
Date: Wed, 14 Feb 2018 11:42:11 -0500
Subject: [PATCH] add learning rate hyperparameter

---
 mhcflurry/class1_neural_network.py | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/mhcflurry/class1_neural_network.py b/mhcflurry/class1_neural_network.py
index 5a1f6d56..9b65a4aa 100644
--- a/mhcflurry/class1_neural_network.py
+++ b/mhcflurry/class1_neural_network.py
@@ -61,6 +61,7 @@ class Class1NeuralNetwork(object):
     compile_hyperparameter_defaults = HyperparameterDefaults(
         loss="custom:mse_with_inequalities",
         optimizer="rmsprop",
+        learning_rate=None,
     )
     """
     Loss and optimizer hyperparameters. Any values supported by keras may be
@@ -575,6 +576,12 @@ class Class1NeuralNetwork(object):
             loss=loss_name_or_function,
             optimizer=self.hyperparameters['optimizer'])
 
+        if self.hyperparameters['learning_rate'] is not None:
+            from keras import backend as K
+            K.set_value(
+                self.network().optimizer.lr,
+                self.hyperparameters['learning_rate'])
+
         if loss_supports_inequalities:
             # Do not sample negative affinities: just use an inequality.
             random_negative_ic50 = self.hyperparameters['random_negative_affinity_min']
-- 
GitLab