# try.py
import torch
import gpytorch

torch.set_default_dtype(torch.float64)
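
# Exact GP over both function values and gradients: RBFKernelGrad is the joint
# kernel of [f, df/dx_1, ..., df/dx_d], and ConstantMeanGrad pairs a constant
# mean for f with zero means for the derivatives, so the model returns a
# (d+1)-task multitask normal at each input.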
class GPModelWithDerivatives(gpytorch.models.ExactGP):
    def __init__(self, train_x, train_y, likelihood):
        super(GPModelWithDerivatives, self).__init__(train_x, train_y, likelihood)
        self.mean_module = gpytorch.means.ConstantMeanGrad()
        self.base_kernel = gpytorch.kernels.RBFKernelGrad()
        self.base_kernel.lengthscale = 0.2
        self.covar_module = gpytorch.kernels.ScaleKernel(self.base_kernel)

    def forward(self, x):
        mean_x = self.mean_module(x)
        covar_x = self.covar_module(x)
        return gpytorch.distributions.MultitaskMultivariateNormal(mean_x, covar_x)
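

# Sample-path gradient descent: each step draws a joint sample of
# (f(x), grad f(x)) from the GP conditioned on everything drawn so far,
# steps along -grad f, and folds the new draw back into the training data
# so the next sample stays consistent with the path sampled so far.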
num_dims = 6
alpha = 0.01  # gradient-descent step size

likelihood = gpytorch.likelihoods.MultitaskGaussianLikelihood(num_tasks=num_dims + 1)  # value + derivatives
likelihood.noise = 1e-3
likelihood.task_noises = torch.tensor([1e-4] * (num_dims + 1))
# No explicit likelihood.cuda() needed: model.cuda() below also moves the
# registered likelihood.

stop_iters = []
for j in range(30):  # Repeat the experiment 30 times
    train_x = None
    train_y = None
    model = GPModelWithDerivatives(train_x, train_y, likelihood)  # prior-only model
    model.cuda()
    model.eval()
    x = torch.tensor([[0.0] * num_dims]).cuda()  # start each run at the origin
    for i in range(250):
        pred = model(x)
        func_and_grad = pred.rsample()  # joint sample of f(x) and df/dx
        y = func_and_grad[-1, 0]
        dy = func_and_grad[-1, 1:]  # for d dimensions, the gradient is d-dimensional
        if train_x is None:
            train_x = x  # First training point
            train_y = func_and_grad  # First train_y (both function and derivative)
        else:
            train_x = torch.cat((train_x, x), dim=0)
            train_y = torch.cat((train_y, func_and_grad[-1, :].unsqueeze(-2)), dim=0)
        # Gradient-descent update of x
        x = x - alpha * dy
        if torch.norm(dy) < 1e-3:
            print(f'Convergence reached after {i} iterations')
            stop_iters.append(i)
            break
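        # Rebuilding the model conditions the next draw on every (x, f, grad f)
        # sampled so far; note this re-factorizes the full kernel each step
        # (GPyTorch's get_fantasy_model would do the same update incrementally).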
        # Update model with new data
        model = GPModelWithDerivatives(train_x, train_y, likelihood)
        model.cuda()
        model.eval()
    # Computes cond(K + \sigma^2 I) at the final point of this run
    covar_check = model.covar_module(x).add_jitter(likelihood.noise.item()).evaluate()
    cond_num = torch.linalg.cond(covar_check)
    print(f'Covariance:\n{covar_check}')
    print(f'Condition number: {cond_num}')
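    # (The condition number tends to grow as the sampled points cluster near a
    # stationary point and rows of K become nearly identical, so this print
    # serves as a numerical-stability check for the growing training set.)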

experiment_mean = torch.tensor(stop_iters).float().mean()
print(f'Mean convergence iterations: {experiment_mean}\n')