import numpy as np
from sklearn.base import BaseEstimator, ClassifierMixin
from sklearn.utils import validation
from sklearn.utils.multiclass import unique_labels
class _LvqBaseModel(BaseEstimator, ClassifierMixin):
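    """Base class for Learning Vector Quantization (LVQ) classifiers.

    Validates the hyper-parameters, initializes the prototypes and delegates
    the actual optimization to ``_optimize``, which concrete subclasses must
    implement.

    Parameters
    ----------
    prototypes_per_class : int or array-like of int, optional (default=1)
        Number of prototypes per class: a single value used for every class,
        or one value per class.
    initial_prototypes : array-like, shape = [n_prototypes, n_features + 1],
        optional (default=None)
        Prototypes to start with; the last column holds the class label.
        If None, prototypes are initialized close to the class means.
    max_iter : int, optional (default=2500)
        Maximum number of optimization iterations.
    gtol : float, optional (default=1e-5)
        Gradient tolerance used as the stopping criterion.
    display : bool, optional (default=False)
        Print information about the optimization process.
    random_state : int, RandomState instance or None, optional (default=None)
        Seed or generator used for the random prototype initialization.
    """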
def __init__(self, prototypes_per_class=1, initial_prototypes=None,
max_iter=2500, gtol=1e-5, display=False, random_state=None):
self.random_state = random_state
self.initial_prototypes = initial_prototypes
self.prototypes_per_class = prototypes_per_class
self.display = display
self.max_iter = max_iter
self.gtol = gtol
def _validate_train_parms(self, train_set, train_lab):
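        """Check the hyper-parameters and the training data, and initialize
        the prototypes ``self.w_`` and their labels ``self.c_w_``.

        Returns the validated training set, the labels and a ``RandomState``
        instance derived from ``self.random_state``.
        """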
random_state = validation.check_random_state(self.random_state)
if not isinstance(self.display, bool):
raise ValueError("display must be a boolean")
if not isinstance(self.max_iter, int) or self.max_iter < 1:
raise ValueError("max_iter must be an positive integer")
if not isinstance(self.gtol, float) or self.gtol <= 0:
raise ValueError("gtol must be a positive float")
train_set, train_lab = validation.check_X_y(train_set, train_lab)
self.classes_ = unique_labels(train_lab)
nb_classes = len(self.classes_)
        nb_features = train_set.shape[1]
# set prototypes per class
        if isinstance(self.prototypes_per_class, int):
            if self.prototypes_per_class < 1:
                raise ValueError(
                    "prototypes_per_class must be a positive int")
            nb_ppc = np.ones([nb_classes],
                             dtype='int') * self.prototypes_per_class
else:
nb_ppc = validation.column_or_1d(
validation.check_array(self.prototypes_per_class,
ensure_2d=False, dtype='int'))
if nb_ppc.min() <= 0:
raise ValueError(
"values in prototypes_per_class must be positive")
            if nb_ppc.size != nb_classes:
                raise ValueError(
                    "length of prototypes_per_class does not fit the"
                    " number of classes: classes=%d, length=%d"
                    % (nb_classes, nb_ppc.size))
# initialize prototypes
if self.initial_prototypes is None:
self.w_ = np.empty([np.sum(nb_ppc), nb_features], dtype=np.double)
self.c_w_ = np.empty([nb_ppc.sum()], dtype=self.classes_.dtype)
pos = 0
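            # Each class gets nb_ppc[actClass] prototypes, placed at the
            # class mean plus per-feature uniform noise: rand() lies in
            # [0, 1), so "* 2 - 1" shifts it to [-1, 1).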
for actClass in range(nb_classes):
nb_prot = nb_ppc[actClass]
mean = np.mean(
train_set[train_lab == self.classes_[actClass], :], 0)
self.w_[pos:pos + nb_prot] = mean + (
random_state.rand(nb_prot, nb_features) * 2 - 1)
self.c_w_[pos:pos + nb_prot] = self.classes_[actClass]
pos += nb_prot
else:
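            # User-supplied prototypes: expected shape is
            # [n_prototypes, n_features + 1], with the class label of each
            # prototype in the last column.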
x = validation.check_array(self.initial_prototypes)
self.w_ = x[:, :-1]
self.c_w_ = x[:, -1]
if self.w_.shape != (np.sum(nb_ppc), nb_features):
raise ValueError("the initial prototypes have wrong shape\n"
"found=(%d,%d)\n"
"expected=(%d,%d)" % (
self.w_.shape[0], self.w_.shape[1],
nb_ppc.sum(), nb_features))
            if set(self.c_w_) != set(self.classes_):
                raise ValueError(
                    "prototype labels and training data classes do not match\n"
                    "classes={}\n"
                    "prototype labels={}\n".format(self.classes_, self.c_w_))
return train_set, train_lab, random_state
def fit(self, x, y):
"""Fit the LVQ model to the given training data and parameters using
l-bfgs-b.
Parameters
----------
x : array-like, shape = [n_samples, n_features]
Training vector, where n_samples in the number of samples and
n_features is the number of features.
y : array, shape = [n_samples]
Target values (integers in classification, real numbers in
regression)
Returns
--------
self
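
        Notes
        -----
        y must contain at least two distinct classes; fitting with a single
        class raises a ValueError.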
"""
x, y, random_state = self._validate_train_parms(x, y)
if len(np.unique(y)) == 1:
raise ValueError("fitting " + type(
self).__name__ + " with only one class is not possible")
self._optimize(x, y, random_state)
return self
def project(self, x, dims, print_variance_covered=False):
"""Projects the data input data X using the relevance matrix of trained
model to dimension dim
Parameters
----------
x : array-like, shape = [n,n_features]
input data for project
dims : int
dimension to project to
print_variance_covered : boolean
flag to print the covered variance of the projection
Returns
--------
C : array, shape = [n,n_features]
Returns predicted values.
"""
if print_variance_covered:
print('not implemented!')
return x[:, :dims]
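

# --- Usage sketch (illustrative only, not part of the library API) ---------
# _LvqBaseModel leaves the actual optimization to ``_optimize``, which a
# concrete subclass must provide. The hypothetical ``_DemoLvq`` below skips
# optimization entirely so the validation and prototype-initialization code
# above can be exercised on its own; a real subclass (e.g. a GLVQ model)
# would move the prototypes during ``_optimize``.
if __name__ == "__main__":
    class _DemoLvq(_LvqBaseModel):
        def _optimize(self, x, y, random_state):
            pass  # keep the randomly initialized prototypes

    x = np.array([[0.0, 0.0], [0.1, 0.2], [1.0, 1.0], [0.9, 1.1]])
    y = np.array([0, 0, 1, 1])
    model = _DemoLvq(prototypes_per_class=1, random_state=0).fit(x, y)
    print(model.w_.shape)              # (2, 2): one prototype per class
    print(model.project(x, 1).shape)   # (4, 1): keep only the first feature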