
Commit fb15ba6

committed
Genetic algorithm for the traveling salesman problem
1 parent ad3af26 commit fb15ba6

File tree

2 files changed: +299 -0 lines changed


cities.txt

+127
@@ -0,0 +1,127 @@
1 9860 14152
2 9396 14616
3 11252 14848
4 11020 13456
5 9512 15776
6 10788 13804
7 10208 14384
8 11600 13456
9 11252 14036
10 10672 15080
11 11136 14152
12 9860 13108
13 10092 14964
14 9512 13340
15 10556 13688
16 9628 14036
17 10904 13108
18 11368 12644
19 11252 13340
20 10672 13340
21 11020 13108
22 11020 13340
23 11136 13572
24 11020 13688
25 8468 11136
26 8932 12064
27 9512 12412
28 7772 11020
29 8352 10672
30 9164 12876
31 9744 12528
32 8352 10324
33 8236 11020
34 8468 12876
35 8700 14036
36 8932 13688
37 9048 13804
38 8468 12296
39 8352 12644
40 8236 13572
41 9164 13340
42 8004 12760
43 8584 13108
44 7772 14732
45 7540 15080
46 7424 17516
47 8352 17052
48 7540 16820
49 7888 17168
50 9744 15196
51 9164 14964
52 9744 16240
53 7888 16936
54 8236 15428
55 9512 17400
56 9164 16008
57 8700 15312
58 11716 16008
59 12992 14964
60 12412 14964
61 12296 15312
62 12528 15196
63 15312 6612
64 11716 16124
65 11600 19720
66 10324 17516
67 12412 13340
68 12876 12180
69 13688 10904
70 13688 11716
71 13688 12528
72 11484 13224
73 12296 12760
74 12064 12528
75 12644 10556
76 11832 11252
77 11368 12296
78 11136 11020
79 10556 11948
80 10324 11716
81 11484 9512
82 11484 7540
83 11020 7424
84 11484 9744
85 16936 12180
86 17052 12064
87 16936 11832
88 17052 11600
89 13804 18792
90 12064 14964
91 12180 15544
92 14152 18908
93 5104 14616
94 6496 17168
95 5684 13224
96 15660 10788
97 5336 10324
98 812 6264
99 14384 20184
100 11252 15776
101 9744 3132
102 10904 3480
103 7308 14848
104 16472 16472
105 10440 14036
106 10672 13804
107 1160 18560
108 10788 13572
109 15660 11368
110 15544 12760
111 5336 18908
112 6264 19140
113 11832 17516
114 10672 14152
115 10208 15196
116 12180 14848
117 11020 10208
118 7656 17052
119 16240 8352
120 10440 14732
121 9164 15544
122 8004 11020
123 5684 11948
124 9512 16472
125 13688 17516
126 11484 8468
127 3248 14152

genetic_algorithm.py

+172
@@ -0,0 +1,172 @@
import numpy as np
import math
import time


def load_data(data_path):
    """
    Load the data file and return the city coordinates.
    :param data_path: path of the data file, str
    :return: coordinates of all cities, 2-D list
    """
    cities = []
    with open(data_path, 'r') as f:
        lines = f.readlines()
        for line in lines:
            x_str, y_str = line.split()[1:]
            x, y = int(x_str), int(y_str)
            cities.append((x, y))
    return cities


def get_cities_distance(cities):
    """
    Compute the pairwise distances between all cities.
    :param cities: coordinates of all cities, 2-D list
    :return: city distance matrix, ndarray
    """
    n_cities = len(cities)
    dist_matrix = np.zeros((n_cities, n_cities))
    for i in range(n_cities - 1):
        for j in range(i + 1, n_cities):
            dist = get_two_cities_dist(cities[i], cities[j])
            dist_matrix[i, j] = dist
            dist_matrix[j, i] = dist
    return dist_matrix


def get_two_cities_dist(city1, city2):
    """
    Compute the distance between two cities.
    :param city1: first city, sequence of length 2
    :param city2: second city, sequence of length 2
    :return: distance between the two cities, float
    """
    x_1, y_1 = city1
    x_2, y_2 = city2
    return math.sqrt(math.pow(x_1 - x_2, 2) + math.pow(y_1 - y_2, 2))


def get_route_fitness_value(route, dist_matrix):
    """
    Compute the fitness of one route (the reciprocal of its total length).
    :param route: route, ndarray of length n_cities
    :param dist_matrix: distance matrix, ndarray
    :return: fitness of the route, float
    """
    dist_sum = 0
    for i in range(len(route) - 1):
        dist_sum += dist_matrix[route[i], route[i + 1]]
    dist_sum += dist_matrix[route[-1], route[0]]  # return to the starting city
    return 1 / dist_sum


def get_all_routes_fitness_value(routes, dist_matrix):
    """
    Compute the fitness of every route.
    :param routes: all routes, ndarray
    :param dist_matrix: distance matrix, ndarray
    :return: fitness of all routes, ndarray
    """
    fitness_values = np.zeros(len(routes))
    for i in range(len(routes)):
        f_value = get_route_fitness_value(routes[i], dist_matrix)
        fitness_values[i] = f_value
    return fitness_values


def init_route(n_route, n_cities):
    """
    Initialize the routes randomly.
    :param n_route: number of routes to initialize, int
    :param n_cities: number of cities, int
    :return: route matrix, 2-D ndarray
    """
    routes = np.zeros((n_route, n_cities)).astype(int)
    for i in range(n_route):
        routes[i] = np.random.choice(range(n_cities), size=n_cities, replace=False)
    return routes


def selection(routes, fitness_values):
    """
    Selection operator (fitness-proportionate, roulette-wheel selection).
    :param routes: all routes, ndarray
    :param fitness_values: fitness of all routes, ndarray
    :return: selected routes, ndarray
    """
    selected_routes = np.zeros(routes.shape).astype(int)
    probability = fitness_values / np.sum(fitness_values)
    n_routes = routes.shape[0]
    for i in range(n_routes):
        choice = np.random.choice(range(n_routes), p=probability)
        selected_routes[i] = routes[choice]
    return selected_routes


def crossover(routes, n_cities):
    """
    Crossover operator: each pair of routes exchanges the tail segment after a
    random cut point; the remaining cities keep their original relative order.
    Assumes the number of routes is even.
    :param routes: all routes, ndarray
    :param n_cities: number of cities, int
    :return: routes after crossover, ndarray
    """
    for i in range(0, len(routes), 2):
        r1_new, r2_new = np.zeros(n_cities, dtype=int), np.zeros(n_cities, dtype=int)
        seg_point = np.random.randint(0, n_cities)
        cross_len = n_cities - seg_point
        r1, r2 = routes[i], routes[i + 1]
        r1_cross, r2_cross = r2[seg_point:], r1[seg_point:]
        r1_non_cross = r1[~np.in1d(r1, r1_cross)]
        r2_non_cross = r2[~np.in1d(r2, r2_cross)]
        r1_new[:cross_len], r2_new[:cross_len] = r1_cross, r2_cross
        r1_new[cross_len:], r2_new[cross_len:] = r1_non_cross, r2_non_cross
        routes[i], routes[i + 1] = r1_new, r2_new
    return routes


def mutation(routes, n_cities):
    """
    Mutation operator: with probability 0.01 a route swaps two random cities.
    :param routes: all routes, ndarray
    :param n_cities: number of cities, int
    :return: routes after mutation, ndarray
    """
    prob = 0.01
    p_rand = np.random.rand(len(routes))
    for i in range(len(routes)):
        if p_rand[i] < prob:
            mut_position = np.random.choice(range(n_cities), size=2, replace=False)
            l, r = mut_position[0], mut_position[1]
            routes[i, l], routes[i, r] = routes[i, r], routes[i, l]
    return routes


if __name__ == '__main__':
    start = time.time()

    n_routes = 100   # population size (number of routes)
    epoch = 100000   # maximum number of iterations

    cities = load_data('./cities.txt')                    # load the data
    dist_matrix = get_cities_distance(cities)             # compute the city distance matrix
    routes = init_route(n_routes, dist_matrix.shape[0])   # initialize all routes
    fitness_values = get_all_routes_fitness_value(routes, dist_matrix)  # fitness of the initial routes
    best_index = fitness_values.argmax()
    best_route, best_fitness = routes[best_index], fitness_values[best_index]  # keep the best route and its fitness

    # start iterating
    not_improve_time = 0
    for i in range(epoch):
        routes = selection(routes, fitness_values)   # selection
        routes = crossover(routes, len(cities))      # crossover
        routes = mutation(routes, len(cities))       # mutation
        fitness_values = get_all_routes_fitness_value(routes, dist_matrix)
        best_route_index = fitness_values.argmax()
        if fitness_values[best_route_index] > best_fitness:
            not_improve_time = 0
            best_route, best_fitness = routes[best_route_index], fitness_values[best_route_index]  # keep the best route and its fitness
        else:
            not_improve_time += 1
        if (i + 1) % 200 == 0:
            print('epoch: {}, current best route distance: {}'.format(i + 1, 1 / get_route_fitness_value(best_route, dist_matrix)))
        if not_improve_time >= 2000:
            print('The best route has not changed for 2000 consecutive iterations, stopping.')
            break

    print('Best route:')
    print(best_route)
    print('Total distance: {}'.format(1 / get_route_fitness_value(best_route, dist_matrix)))

    end = time.time()
    print('Elapsed time: {}s'.format(end - start))
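
The script runs directly as python genetic_algorithm.py with cities.txt in the working directory. Since the commit ships no tests, one quick way to see that the crossover and mutation operators keep every route a valid permutation (each city visited exactly once) is a small sanity check. The sketch below is an illustration under assumptions, not part of the commit: it imports the operators from genetic_algorithm.py, and is_valid_route is a hypothetical helper introduced only for this check.

# check_operators.py -- minimal sanity-check sketch (not part of this commit);
# assumes it sits next to genetic_algorithm.py so the module can be imported.
import numpy as np
from genetic_algorithm import init_route, crossover, mutation

def is_valid_route(route, n_cities):
    # hypothetical helper: a route is valid if it visits every city exactly once
    return np.array_equal(np.sort(route), np.arange(n_cities))

n_cities = 127                           # number of cities in cities.txt
routes = init_route(10, n_cities)        # small, even-sized population
routes = crossover(routes, n_cities)     # tail-segment crossover
routes = mutation(routes, n_cities)      # swap mutation with probability 0.01
assert all(is_valid_route(r, n_cities) for r in routes)
print('crossover and mutation kept every route a valid permutation')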
