Skip to content

Commit a0f5cfb

Browse files
committed
implement Surrogate class for fitting a GP on existing data from optimizations
implements methods for: - fitting the GP - estimating the objective functions - estimating the feasible objective space - conditioning the GP
1 parent d52e14e commit a0f5cfb

File tree

4 files changed

+134
-0
lines changed

4 files changed

+134
-0
lines changed

CADETProcess/optimization/__init__.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -83,3 +83,4 @@
8383
from .optimizer import *
8484
from .scipyAdapter import COBYLA, TrustConstr, NelderMead, SLSQP
8585
from .pymooAdapter import NSGA2, U_NSGA3
86+
from .surrogate import Surrogate

CADETProcess/optimization/optimizationProblem.py

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -143,6 +143,10 @@ def wrapper(self, x, *args, get_dependent_values=False, **kwargs):
143143

144144
return wrapper
145145

146+
def get_variable_index(self, variable):
    """Return the position of a variable in the list of variable names.

    Parameters
    ----------
    variable : str
        Name of the optimization variable.

    Returns
    -------
    int
        Index of the variable in ``self.variable_names``.

    Raises
    ------
    ValueError
        If the variable name is not found.
    """
    # list.index is the idiomatic single-pass lookup; the previous
    # comprehension scanned the whole list and raised a bare IndexError
    # on a miss instead of a descriptive ValueError.
    return self.variable_names.index(variable)
149+
146150
def ensures2d(func):
147151
"""Make sure population is ndarray with ndmin=2."""
148152
@wraps(func)

CADETProcess/optimization/results.py

Lines changed: 10 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -390,6 +390,8 @@ def plot_figures(self, show=True):
390390
show=show, plot_directory=self.plot_directory
391391
)
392392

393+
self.plot_partial_dependence()
394+
393395
def plot_objectives(
394396
self,
395397
include_meta=True,
@@ -540,6 +542,14 @@ def plot_corner(self, *args, **kwargs):
540542
except AssertionError:
541543
pass
542544

545+
546+
547+
548+
def plot_partial_dependence(
    self,
):
    """Plot partial dependence of the objectives on the variables.

    NOTE(review): placeholder — currently does nothing. It is invoked
    from ``plot_figures`` in this commit, so the implementation is
    expected to follow.
    """
    pass
552+
543553
def setup_convergence_figure(self, target, plot_individual=False):
544554
if target == 'objectives':
545555
n = self.optimization_problem.n_objectives
Lines changed: 119 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,119 @@
1+
import numpy as np
2+
3+
from sklearn.gaussian_process import (
4+
GaussianProcessRegressor, GaussianProcessClassifier)
5+
from sklearn.base import BaseEstimator
6+
7+
from CADETProcess.optimization import (
8+
Population, OptimizationProblem, OptimizationResults)
9+
10+
class Surrogate:
    """Gaussian-process surrogate of an optimization problem.

    Fits GP models on the data of an existing :class:`Population` and uses
    them to estimate objectives (F) and, when present, nonlinear
    constraints (G), meta scores (M), and constraint violations (CV) for
    new parameter vectors. Also supports sampling the (estimated) feasible
    objective space and conditioning the surrogate on fixed variable
    values.
    """

    def __init__(
            self,
            optimization_problem: OptimizationProblem,
            population: Population
    ):
        """Fit the surrogate models on the given population.

        Parameters
        ----------
        optimization_problem : OptimizationProblem
            Problem defining variables, bounds, and constraints.
        population : Population
            Previously evaluated individuals used as training data.
        """
        self.optimization_problem = optimization_problem
        self.surrogate_model_F: BaseEstimator = None
        self.surrogate_model_G: BaseEstimator = None
        self.surrogate_model_M: BaseEstimator = None
        self.surrogate_model_CV: BaseEstimator = None
        self.fit_gaussian_process(population)

        # Back up the bounds so they can be restored after being
        # temporarily narrowed in `condition_objectives`.
        self.lower_bounds_copy = optimization_problem.lower_bounds.copy()
        self.upper_bounds_copy = optimization_problem.upper_bounds.copy()

    def _reset_bounds_on_variables(self):
        """Restore the original variable bounds from the saved backup."""
        for var, lb, ub in zip(
                self.optimization_problem.variables,
                self.lower_bounds_copy,
                self.upper_bounds_copy):
            var.lb = lb
            var.ub = ub

    def fit_gaussian_process(self, population: Population):
        """Fit GP models on the population data.

        A regressor is always fit for the objectives F; regressors for G
        and M and a classifier for CV are fit only when the population
        provides those values.

        Parameters
        ----------
        population : Population
            Training data; ``x`` are the parameters, ``f``/``g``/``m``/
            ``cv`` the corresponding evaluations.
        """
        X = population.x
        F = population.f
        G = population.g
        M = population.m
        CV = population.cv

        gp_f = GaussianProcessRegressor()
        gp_f.fit(X, F)
        self.surrogate_model_F = gp_f

        if G is not None:
            gp_g = GaussianProcessRegressor()
            gp_g.fit(X, G)
            self.surrogate_model_G = gp_g

        if M is not None:
            gp_m = GaussianProcessRegressor()
            gp_m.fit(X, M)
            self.surrogate_model_M = gp_m

        if CV is not None:
            # NOTE(review): GaussianProcessClassifier expects discrete
            # class labels — confirm population.cv is categorical
            # (e.g. feasible/infeasible) rather than a continuous
            # violation measure.
            gp_cv = GaussianProcessClassifier()
            gp_cv.fit(X, CV)
            self.surrogate_model_CV = gp_cv

    def estimate_objectives(self, X):
        """Estimate objectives (and G/M/CV where fitted) at points X.

        Parameters
        ----------
        X : array-like
            Parameter vectors at which to evaluate the surrogate.

        Returns
        -------
        numpy.ndarray
            Stacked predictions, one column per fitted model output.
        """
        estimates = [self.surrogate_model_F.predict(X)]

        if self.surrogate_model_G is not None:
            estimates.append(self.surrogate_model_G.predict(X))

        if self.surrogate_model_M is not None:
            estimates.append(self.surrogate_model_M.predict(X))

        if self.surrogate_model_CV is not None:
            estimates.append(self.surrogate_model_CV.predict(X))

        # NOTE(review): the transpose assumes each prediction is 1-D
        # (shape (n_samples,)); with multi-output predictions this yields
        # a 3-D array — confirm against the shapes of population.f etc.
        return np.array(estimates).T

    def estimate_feasible_objectives_space(self, n_samples=1000):
        """Sample the parameter space and estimate objectives there.

        Parameters
        ----------
        n_samples : int, optional
            Number of random samples drawn from the feasible region.

        Returns
        -------
        tuple of numpy.ndarray
            Sampled parameters ``X`` and estimated objectives ``F``.
        """
        X = self.optimization_problem.create_initial_values(
            n_samples=n_samples,
            method="random",
        )
        F = self.estimate_objectives(X)

        return X, F

    def condition_objectives(
            self,
            conditional_vars: dict = None,
            n_samples=1000,
            eps=1e-5
    ):
        """Estimate objectives with some variables fixed to given values.

        Conditioned variables are pinned by temporarily narrowing their
        bounds to ``value ± eps``; the remaining variables are sampled
        freely. The original bounds are restored afterwards.

        Parameters
        ----------
        conditional_vars : dict, optional
            Mapping of variable name to the value it is conditioned on.
        n_samples : int, optional
            Number of random samples to draw.
        eps : float, optional
            Half-width of the interval used to pin conditioned variables.

        Returns
        -------
        tuple
            Sampled parameters ``X``, estimated objectives ``F``, and a
            dict mapping free variable names to their indices.
        """
        # Avoid a mutable default argument (shared across calls).
        if conditional_vars is None:
            conditional_vars = {}

        # TODO: should check if the condition is inside the constraints,
        # otherwise Hopsy throws an error.
        free_vars = {}
        for var in self.optimization_problem.variables:
            var_index = self.optimization_problem.get_variable_index(var.name)
            if var.name in conditional_vars:
                conditioning_value = conditional_vars[var.name]
                var.lb = conditioning_value - eps
                var.ub = conditioning_value + eps
            else:
                free_vars.update({var.name: var_index})

        # Bug fix: original called self.approximate_objectives, which does
        # not exist; estimate_feasible_objectives_space is the intended
        # method. Restore bounds even if sampling fails.
        try:
            X, F = self.estimate_feasible_objectives_space(
                n_samples=n_samples)
        finally:
            self._reset_bounds_on_variables()

        return X, F, free_vars

0 commit comments

Comments
 (0)