From 19caf8616fc194402678aa67917db334ad02852a Mon Sep 17 00:00:00 2001
From: Obe Okaiwele
Date: Mon, 12 Oct 2020 13:16:20 -0400
Subject: [PATCH 1/3] Remove sklearn.externals.six usage

---
 mlrose/neural.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/mlrose/neural.py b/mlrose/neural.py
index 16c8971a..da79c412 100644
--- a/mlrose/neural.py
+++ b/mlrose/neural.py
@@ -9,7 +9,6 @@
 import numpy as np
 from sklearn.base import BaseEstimator, ClassifierMixin, RegressorMixin
 from sklearn.metrics import mean_squared_error, log_loss
-from sklearn.externals import six
 from .activation import identity, relu, sigmoid, softmax, tanh
 from .algorithms import random_hill_climb, simulated_annealing, genetic_alg
 from .opt_probs import ContinuousOpt
@@ -370,7 +369,7 @@ def calculate_updates(self):
         return updates_list


-class BaseNeuralNetwork(six.with_metaclass(ABCMeta, BaseEstimator)):
+class BaseNeuralNetwork(BaseEstimator, metaclass=ABCMeta):
     """Base class for neural networks.

     Warning: This class should not be used directly.

From 6eecd99b39ad4f037f8fa2cc8371194f9c780bb0 Mon Sep 17 00:00:00 2001
From: Obe Okaiwele
Date: Mon, 12 Oct 2020 13:19:24 -0400
Subject: [PATCH 2/3] Enable GridSearchCV estimator cloning

---
 mlrose/neural.py | 85 +++++++++---------------------------------------
 1 file changed, 16 insertions(+), 69 deletions(-)

diff --git a/mlrose/neural.py b/mlrose/neural.py
index da79c412..55e3769e 100644
--- a/mlrose/neural.py
+++ b/mlrose/neural.py
@@ -9,6 +9,7 @@
 import numpy as np
 from sklearn.base import BaseEstimator, ClassifierMixin, RegressorMixin
 from sklearn.metrics import mean_squared_error, log_loss
+from sklearn.utils.multiclass import unique_labels
 from .activation import identity, relu, sigmoid, softmax, tanh
 from .algorithms import random_hill_climb, simulated_annealing, genetic_alg
 from .opt_probs import ContinuousOpt
@@ -394,15 +395,7 @@ def __init__(self, hidden_nodes=None,
                  random_state=None,
                  curve=False):

-        if hidden_nodes is None:
-            self.hidden_nodes = []
-        else:
-            self.hidden_nodes = hidden_nodes
-
-        self.activation_dict = {'identity': identity,
-                                'relu': relu,
-                                'sigmoid': sigmoid,
-                                'tanh': tanh}
+        self.hidden_nodes = hidden_nodes
         self.activation = activation
         self.algorithm = algorithm
         self.max_iters = max_iters
@@ -419,13 +412,6 @@ def __init__(self, hidden_nodes=None,
         self.random_state = random_state
         self.curve = curve

-        self.node_list = []
-        self.fitted_weights = []
-        self.loss = np.inf
-        self.output_activation = None
-        self.predicted_probs = []
-        self.fitness_curve = []
-
     def _validate(self):
         if (not isinstance(self.max_iters, int) and self.max_iters != np.inf
                 and not self.max_iters.is_integer()) or (self.max_iters < 0):
@@ -489,6 +475,19 @@ def fit(self, X, y=None, init_weights=None):
             Numpy array containing starting weights for algorithm.
             If :code:`None`, then a random state is used.
""" + if self.hidden_nodes is None: + self.hidden_nodes = [] + self.activation_dict = {'identity': identity, + 'relu': relu, + 'sigmoid': sigmoid, + 'tanh': tanh} + self.node_list = [] + self.fitted_weights = [] + self.loss = np.inf + self.output_activation = None + self.predicted_probs = [] + self.fitness_curve = [] + self._validate() # Make sure y is an array and not a list @@ -508,6 +507,7 @@ def fit(self, X, y=None, init_weights=None): node_list = [input_nodes] + self.hidden_nodes + [output_nodes] num_nodes = 0 + self.classes_ = unique_labels(y) for i in range(len(node_list) - 1): num_nodes += node_list[i]*node_list[i+1] @@ -688,59 +688,6 @@ def predict(self, X): return y_pred - def get_params(self, deep=False): - """Get parameters for this estimator. - - Returns - ------- - params : dictionary - Parameter names mapped to their values. - """ - params = {'hidden_nodes': self.hidden_nodes, - 'max_iters': self.max_iters, - 'bias': self.bias, - 'is_classifier': self.is_classifier, - 'learning_rate': self.learning_rate, - 'early_stopping': self.early_stopping, - 'clip_max': self.clip_max, - 'restarts': self.restarts, - 'schedule': self.schedule, - 'pop_size': self.pop_size, - 'mutation_prob': self.mutation_prob} - - return params - - def set_params(self, **in_params): - """Set the parameters of this estimator. - - Parameters - ------- - in_params: dictionary - Dictionary of parameters to be set and the value to be set to. - """ - if 'hidden_nodes' in in_params.keys(): - self.hidden_nodes = in_params['hidden_nodes'] - if 'max_iters' in in_params.keys(): - self.max_iters = in_params['max_iters'] - if 'bias' in in_params.keys(): - self.bias = in_params['bias'] - if 'is_classifier' in in_params.keys(): - self.is_classifier = in_params['is_classifier'] - if 'learning_rate' in in_params.keys(): - self.learning_rate = in_params['learning_rate'] - if 'early_stopping' in in_params.keys(): - self.early_stopping = in_params['early_stopping'] - if 'clip_max' in in_params.keys(): - self.clip_max = in_params['clip_max'] - if 'restarts' in in_params.keys(): - self.restarts = in_params['restarts'] - if 'schedule' in in_params.keys(): - self.schedule = in_params['schedule'] - if 'pop_size' in in_params.keys(): - self.pop_size = in_params['pop_size'] - if 'mutation_prob' in in_params.keys(): - self.mutation_prob = in_params['mutation_prob'] - class NeuralNetwork(BaseNeuralNetwork, ClassifierMixin): """Class for defining neural network classifier weights optimization From 3d7c47a50924d51730215003ddcd9fab9e443405 Mon Sep 17 00:00:00 2001 From: Obe Okaiwele Date: Mon, 12 Oct 2020 13:21:13 -0400 Subject: [PATCH 3/3] Bump version to 1.4.0 --- docs/conf.py | 2 +- setup.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/docs/conf.py b/docs/conf.py index ccdca71f..35e7130a 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -26,7 +26,7 @@ # The short X.Y version version = '' # The full version, including alpha/beta/rc tags -release = '1.3.0' +release = '1.4.0' # -- General configuration --------------------------------------------------- diff --git a/setup.py b/setup.py index 9e8eec20..b79fa2f2 100644 --- a/setup.py +++ b/setup.py @@ -15,7 +15,7 @@ def readme(): setup(name='mlrose', - version='1.3.0', + version='1.4.0', description="MLROSe: Machine Learning, Randomized Optimization and" + " Search", long_description=readme(),