forked from jmetzen/kernel_regression
-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathkernel_regression.py
More file actions
160 lines (131 loc) · 5.94 KB
/
kernel_regression.py
File metadata and controls
160 lines (131 loc) · 5.94 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
"""The :mod:`sklearn.kernel_regressor` module implements the Kernel Regressor.
"""
# Author: Jan Hendrik Metzen <janmetzen@mailbox.de>
#
# License: BSD 3 clause
import numpy as np
# from sklearn.metrics.pairwise import pairwise_kernels
from pairwise import pairwise_kernels
from sklearn.base import BaseEstimator, RegressorMixin
class KernelRegression(BaseEstimator, RegressorMixin):
    """Nadaraya-Watson kernel regression with automatic bandwidth selection.

    This implements Nadaraya-Watson kernel regression with (optional) automatic
    bandwidth selection of the kernel via leave-one-out cross-validation (or,
    when an independent sample is supplied to ``fit``, parallel validation).
    Kernel regression is a simple non-parametric kernelized technique for
    learning a non-linear relationship between input variable(s) and a target
    variable.

    Parameters
    ----------
    kernel : string or callable, default="rbf"
        Kernel map to be approximated. A callable should accept two arguments
        and the keyword arguments passed to this object as kernel_params, and
        should return a floating point number.
    gamma : float or sequence of float, default=None
        Gamma parameter for the RBF ("bandwidth"), polynomial,
        exponential chi2 and sigmoid kernels. Interpretation of the default
        value is left to the kernel; see the documentation for
        sklearn.metrics.pairwise. Ignored by other kernels. If a sequence of
        values is given, the one minimizing the mean-squared-error of
        leave-one-out cross-validation (or parallel validation) is selected.

    See also
    --------
    sklearn.metrics.pairwise.kernel_metrics : List of built-in kernels.
    """

    def __init__(self, kernel="rbf", gamma=None):
        self.kernel = kernel
        self.gamma = gamma

    def fit(self, X, y, indX=None, pary=None, plotcv=False):
        """Fit the model.

        Parameters
        ----------
        X : array-like of shape = [n_samples, n_features]
            The training input samples.
        y : array-like, shape = [n_samples]
            The target values.
        indX : array-like of indices, optional
            If provided, ``pary`` is taken to be evaluated at ``X[indX, :]``
            rather than at all of ``X``.
        pary : array-like, optional
            Independent sample of y values evaluated at ``X`` (or at
            ``X[indX, :]`` when ``indX`` is given). When supplied, gamma is
            selected by parallel validation instead of cross-validation.
        plotcv : bool, default=False
            If True, plot validation MSE versus gamma (requires matplotlib).

        Returns
        -------
        self : object
            Returns self.
        """
        self.X = X
        self.y = y
        if hasattr(self.gamma, "__iter__"):
            # np.asarray so that .min()/.max() below also work when gamma was
            # supplied as a plain list or tuple of candidate values.
            gamma_in = np.asarray(self.gamma)
            if pary is not None:
                parX = X[indX, :] if indX is not None else X
                self.gamma = self._optimize_gamma_pv(gamma_in, pary, parX,
                                                     plot=plotcv)
            else:
                self.gamma = self._optimize_gamma(gamma_in, plot=plotcv)
            # Warn when the search hit the boundary of the candidate range:
            # the true optimum may lie outside the supplied choices.
            if (self.gamma == gamma_in.min()) or (self.gamma == gamma_in.max()):
                print('Chosen gamma at extreme of input choices:')
                print(self.gamma)
                print(gamma_in.min(), gamma_in.max())
        return self

    def predict(self, X):
        """Predict target values for X.

        Parameters
        ----------
        X : array-like of shape = [n_samples, n_features]
            The input samples.

        Returns
        -------
        y : array of shape = [n_samples]
            The predicted target value.
        """
        K = pairwise_kernels(self.X, X,
                             metric=self.kernel,
                             gamma=self.gamma)
        # Only relative values along axis 0 matter; subtracting the column
        # max keeps the subsequent np.exp numerically stable.
        K = K - K.max(axis=0)[np.newaxis, :]
        np.exp(K, K)  # in-place exponentiation
        return ysmooth(self.y, K)

    def _optimize_gamma(self, gamma_values, plot=False):
        # Select the gamma from gamma_values minimizing the mean-squared
        # error in leave-one-out cross-validation.
        # dtype=float: np.float was removed in NumPy 1.24.
        mse = np.empty_like(gamma_values, dtype=float)
        for i, gamma in enumerate(gamma_values):
            K = pairwise_kernels(self.X, self.X, metric=self.kernel,
                                 gamma=gamma)
            np.fill_diagonal(K, -np.inf)  # leave-one-out: exponentiates to zero
            K = K - K.max(axis=0)[np.newaxis, :]  # only relative values matter
            np.exp(K, K)
            y_pred = ysmooth(self.y, K)
            mse[i] = ((y_pred - self.y) ** 2).mean()
        return self._select_gamma(gamma_values, mse, plot)

    def _optimize_gamma_pv(self, gamma_values, pary, parX, plot=False):
        # Select the gamma from gamma_values minimizing the mean-squared
        # error against the independent (parallel-validation) sample pary.
        mse = np.empty_like(gamma_values, dtype=float)
        for i, gamma in enumerate(gamma_values):
            K = pairwise_kernels(self.X, parX, metric=self.kernel,
                                 gamma=gamma)
            K = K - K.max(axis=0)[np.newaxis, :]  # only relative values matter
            np.exp(K, K)
            y_pred = ysmooth(self.y, K)
            mse[i] = ((y_pred - pary) ** 2).mean()
        return self._select_gamma(gamma_values, mse, plot)

    def _select_gamma(self, gamma_values, mse, plot):
        # Shared tail of the two gamma searches: optionally plot the MSE
        # curve, record the best achieved MSE on self, and return the gamma
        # minimizing it (NaN entries ignored).
        best = np.nanargmin(mse)
        if plot:
            import matplotlib.pyplot as plt
            plt.semilogx(gamma_values, mse, 'bd-')
            plt.plot(gamma_values, gamma_values * 0, 'kd')
            plt.plot(gamma_values[best], mse[best], 'rs')
            plt.show()
        self.mse = np.nanmin(mse)
        return gamma_values[best]
def ysmooth(y, K):
    """Nadaraya-Watson smoothing of y onto new points via kernel weights.

    Parameters
    ----------
    y : array of shape = [n_old]
        Values measured at the old points.
    K : array of shape = [n_old, n_new]
        Kernel weights K[old, new] mapping old points to new points.

    Returns
    -------
    array of shape = [n_new]
        For each new point, the weighted average of y with the weights of
        the corresponding column of K.
    """
    numerator = (y[:, np.newaxis] * K).sum(axis=0)
    denominator = K.sum(axis=0)
    return numerator / denominator