mutation.py
import numpy as np

from pymoo.core.mutation import Mutation
from pymoo.core.variable import get, Real
from pymoo.operators.crossover.binx import mut_binomial
from pymoo.operators.repair.to_bound import set_to_bounds_if_outside


def mut_pm(X, xl, xu, eta, prob, at_least_once):
    """Polynomial mutation using exponential (instead of uniform) random numbers."""
    n, n_var = X.shape
    assert len(eta) == n
    assert len(prob) == n

    Xp = np.full(X.shape, np.inf)

    # boolean mask of the entries to mutate; never mutate fixed variables (xl == xu)
    mut = mut_binomial(n, n_var, prob, at_least_once=at_least_once)
    mut[:, xl == xu] = False

    Xp[:, :] = X

    # bounds, values, and eta of the selected entries only
    _xl = np.repeat(xl[None, :], X.shape[0], axis=0)[mut]
    _xu = np.repeat(xu[None, :], X.shape[0], axis=0)[mut]
    X = X[mut]
    eta = np.tile(eta[:, None], (1, n_var))[mut]

    # normalized distances to the lower and upper bounds
    delta1 = (X - _xl) / (_xu - _xl)
    delta2 = (_xu - X) / (_xu - _xl)

    mut_pow = 1.0 / (eta + 1.0)

    # draw exponential noise and rescale it to [0, 1] via min-max normalization;
    # with a single entry, just clip it to 1 if it exceeds the range
    rand = np.random.exponential(scale=0.2, size=X.shape)
    if rand.size >= 2:
        rand = (rand - np.min(rand)) / (np.max(rand) - np.min(rand))
    if rand.size == 1 and rand[0] > 1:
        rand[0] = 1

    mask = rand <= 0.5
    mask_not = np.logical_not(mask)

    deltaq = np.zeros(X.shape)

    # perturbation towards the lower bound
    xy = 1.0 - delta1
    val = 2.0 * rand + (1.0 - 2.0 * rand) * np.power(xy, eta + 1.0)
    d = np.power(val, mut_pow) - 1.0
    deltaq[mask] = d[mask]

    # perturbation towards the upper bound
    xy = 1.0 - delta2
    val = 2.0 * (1.0 - rand) + 2.0 * (rand - 0.5) * np.power(xy, eta + 1.0)
    d = 1.0 - np.power(val, mut_pow)
    deltaq[mask_not] = d[mask_not]

    # mutated values, clipped back into the bounds
    _Y = X + deltaq * (_xu - _xl)
    _Y[_Y < _xl] = _xl[_Y < _xl]
    _Y[_Y > _xu] = _xu[_Y > _xu]

    # write the mutated entries back and repair any remaining violations
    Xp[mut] = _Y
    Xp = set_to_bounds_if_outside(Xp, xl, xu)

    return Xp


class CustomMutation(Mutation):

    def __init__(self, prob=0.9, eta=20, at_least_once=False, **kwargs):
        super().__init__(prob=prob, **kwargs)
        self.at_least_once = at_least_once
        # distribution index: larger eta keeps offspring closer to the parent
        self.eta = Real(eta, bounds=(3.0, 30.0), strict=(1.0, 100.0))

    def _do(self, problem, X, params=None, **kwargs):
        X = X.astype(float)

        # per-individual eta and per-variable mutation probability
        eta = get(self.eta, size=len(X))
        prob_var = self.get_prob_var(problem, size=len(X))

        return mut_pm(X, problem.xl, problem.xu, eta, prob_var,
                      at_least_once=self.at_least_once)
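

# --- Usage sketch (illustrative, not part of the original operator code) ---
# A minimal, hedged example of how this operator could be exercised: first a
# direct call to mut_pm on a toy population to show the expected shapes, then
# plugging CustomMutation into a single-objective GA. It assumes a standard
# pymoo >= 0.6 installation; the benchmark problem ("rastrigin"), population
# size, and generation count are illustrative choices, not taken from this file.
if __name__ == "__main__":
    # direct call: eta and prob must be 1-D arrays with one entry per individual
    X_toy = np.random.random((4, 3))
    xl_toy, xu_toy = np.zeros(3), np.ones(3)
    Xp_toy = mut_pm(X_toy, xl_toy, xu_toy,
                    eta=np.full(4, 20.0), prob=np.full(4, 0.5),
                    at_least_once=True)
    print("toy mutation output shape:", Xp_toy.shape)  # (4, 3)

    from pymoo.algorithms.soo.nonconvex.ga import GA
    from pymoo.optimize import minimize
    from pymoo.problems import get_problem

    problem = get_problem("rastrigin")
    algorithm = GA(pop_size=50, mutation=CustomMutation(prob=0.9, eta=20))
    res = minimize(problem, algorithm, ("n_gen", 50), seed=1, verbose=False)
    print("best solution:", res.X)
    print("objective value:", res.F)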