machine learning - Implementation of Gaussian Process Regression in Python: y (n_samples, n_targets)


I am working on price data with x = day1, day2, day3, ... etc. On day1 I have, let's say, 15 price points (y), on day2 I have 30 price points (y2), and so on.

When I read the documentation of Gaussian process regression: http://scikit-learn.org/stable/modules/generated/sklearn.gaussian_process.gaussianprocess.html#sklearn.gaussian_process.gaussianprocess.fit

it says that y has shape (n_samples, n_targets): the observations of the output to be predicted.

I assume n_targets refers to the price points observed on each day. However, the number of price points on each day is not the same. I wonder how to deal with a case like this?
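To make the shape issue concrete, here is a small sketch with made-up numbers of what my data looks like. The per-day arrays have different lengths, so they cannot be stacked into a single (n_samples, n_targets) array; the only workaround I can think of is flattening everything into one (day, price) pair per row:

import numpy as np

# Made-up prices: a different number of price points on each day
prices_per_day = [
    np.random.uniform(95, 105, size=15),   # day 1: 15 price points
    np.random.uniform(96, 106, size=30),   # day 2: 30 price points
    np.random.uniform(94, 104, size=22),   # day 3: 22 price points
]

# Flattened alternative: one (day, price) pair per row, so y is 1-D
X = np.concatenate(
    [np.full(len(p), day + 1) for day, p in enumerate(prices_per_day)]
).reshape(-1, 1)
y = np.concatenate(prices_per_day)
print(X.shape, y.shape)   # (67, 1) (67,)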

Many thanks!

I have made an implementation of Gaussian process regression in Python using NumPy. My aim was to understand it by implementing it. It may be helpful to you.

https://github.com/muatik/machine-learning-examples/blob/master/gaussianprocess2.ipynb

import numpy as np
from matplotlib import pyplot as plt
import seaborn as sns
sns.set(color_codes=True)

%matplotlib inline


class GP(object):

    @classmethod
    def kernel_bell_shape(cls, x, y, delta=1.0):
        # Squared-exponential (RBF) kernel
        return np.exp(-1 / 2.0 * np.power(x - y, 2) / delta)

    @classmethod
    def kernel_laplacian(cls, x, y, delta=1):
        # Laplacian (absolute-distance) kernel
        return np.exp(-1 / 2.0 * np.abs(x - y) / delta)

    @classmethod
    def generate_kernel(cls, kernel, delta=1):
        # Returns the kernel with the width parameter delta fixed
        def wrapper(*args, **kwargs):
            kwargs.update({"delta": delta})
            return kernel(*args, **kwargs)
        return wrapper

    def __init__(self, x, y, cov_f=None, r=0):
        super().__init__()
        self.x = x
        self.y = y
        self.n = len(self.x)
        self.r = r

        self.sigma = []
        self.mean = []
        self.cov_f = cov_f if cov_f else self.kernel_bell_shape
        self.setup_sigma()

    @classmethod
    def calculate_sigma(cls, x, cov_f, r=0):
        # Covariance matrix of the training inputs, with r added on the diagonal
        n = len(x)
        sigma = np.ones((n, n))
        for i in range(n):
            for j in range(i + 1, n):
                cov = cov_f(x[i], x[j])
                sigma[i][j] = cov
                sigma[j][i] = cov

        sigma = sigma + r * np.eye(n)
        return sigma

    def setup_sigma(self):
        self.sigma = self.calculate_sigma(self.x, self.cov_f, self.r)

    def predict(self, x):
        cov = 1 + self.r * self.cov_f(x, x)
        sigma_1_2 = np.zeros((self.n, 1))
        for i in range(self.n):
            sigma_1_2[i] = self.cov_f(self.x[i], x)

        # m_expt = sigma_1_2.T * sigma_1_1.I * (y.T - m), with prior mean m = 0
        m_expt = (sigma_1_2.T * np.mat(self.sigma).I) * np.mat(self.y).T
        # sigma_expt = cov - (sigma_1_2.T * np.mat(self.sigma).I) * sigma_1_2
        sigma_expt = cov + self.r - (sigma_1_2.T * np.mat(self.sigma).I) * sigma_1_2
        return m_expt, sigma_expt

    @staticmethod
    def get_probability(sigma, y, r):
        # Zero-mean Gaussian density of y under covariance sigma
        multiplier = np.power(np.linalg.det(2 * np.pi * sigma), -0.5)
        return multiplier * np.exp(
            (-0.5) * (np.mat(y) * np.dot(np.mat(sigma).I, y).T))

    def optimize(self, r_list, b_list):
        # Grid-search the regularization term r and the kernel width b (delta)

        def cov_f_proxy(delta, f):
            def wrapper(*args, **kwargs):
                kwargs.update({"delta": delta})
                return f(*args, **kwargs)
            return wrapper

        best = (0, 0, 0)
        history = []
        for r in r_list:
            best_beta = (0, 0)
            for b in b_list:
                sigma = self.calculate_sigma(self.x, cov_f_proxy(b, self.cov_f), r)
                marginal = b * float(self.get_probability(sigma, self.y, r))
                if marginal > best_beta[0]:
                    best_beta = (marginal, b)
            history.append((best_beta[0], r, best_beta[1]))
        return sorted(history)[-1], np.mat(history)
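For reference, predict() follows the usual zero-mean GP posterior equations, with sigma playing the role of K + rI and sigma_1_2 the vector k_* of covariances between the training points and the query point (the code also adds extra r-dependent terms to the predictive variance to account for observation noise):

\mu_* = k_*^\top (K + rI)^{-1} y, \qquad
\sigma_*^2 = k(x_*, x_*) - k_*^\top (K + rI)^{-1} k_*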

Now you can try it as follows:

# setting up the GP
x = np.array([-2, -1, 0, 3.5, 4])
y = np.array([4.1, 0.9, 2, 12.3, 15.8])
gaus = GP(x, y)

x_guess = np.linspace(-5, 16, 400)
y_pred = np.vectorize(gaus.predict)(x_guess)

# posterior mean in blue, +/- 3 standard deviations as dotted red lines
plt.scatter(x, y, c="black")
plt.plot(x_guess, y_pred[0], c="b")
plt.plot(x_guess, y_pred[0] - np.sqrt(y_pred[1]) * 3, "r:")
plt.plot(x_guess, y_pred[0] + np.sqrt(y_pred[1]) * 3, "r:")


The effects of the regularization parameter r (added to the diagonal of the covariance matrix):

def create_case(kernel, r=0):
    x = np.array([-2, -1, 0, 3.5, 4])
    y = np.array([4.1, 0.9, 2, 12.3, 15.8])
    gaus = GP(x, y, kernel, r=r)

    x_guess = np.linspace(-4, 6, 400)
    y_pred = np.vectorize(gaus.predict)(x_guess)

    plt.scatter(x, y, c="black")
    plt.plot(x_guess, y_pred[0], c="b")
    plt.plot(x_guess, y_pred[0] - np.sqrt(y_pred[1]) * 3, "r:")
    plt.plot(x_guess, y_pred[0] + np.sqrt(y_pred[1]) * 3, "r:")

plt.figure(figsize=(16, 16))
for i, r in enumerate([0.0001, 0.03, 0.09, 0.8, 1.5, 5.0]):
    plt.subplot(3, 2, i + 1)
    plt.title("kernel={}, delta={}, beta={}".format("bell shape", 1, r))
    create_case(
        GP.generate_kernel(GP.kernel_bell_shape, delta=1), r=r)


plt.figure(figsize=(16, 16))
for i, d in enumerate([0.05, 0.5, 1, 3.2, 5.0, 7.0]):
    plt.subplot(3, 2, i + 1)
    plt.title("kernel={}, delta={}, beta={}".format("kernel_laplacian", d, 1))
    create_case(
        GP.generate_kernel(GP.kernel_laplacian, delta=d), r=0)
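The two sweeps above fix delta and the regularization term by hand; the optimize method of the class is meant to grid-search both using the Gaussian likelihood computed by get_probability. The notebook does not show a call to it, so the following is only a usage sketch, and the parameter grids are just example values:

# Grid-search the regularization term r and the kernel width ("b" = delta).
# Returns the best (score, r, delta) triple plus the full search history.
gaus = GP(x, y)   # x, y from the first example above
best, history = gaus.optimize(
    r_list=[0.0001, 0.01, 0.1, 0.5],
    b_list=[0.5, 1.0, 2.0, 5.0])
print("score={}, r={}, delta={}".format(*best))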
