
Commit 50cd289

update the project version
1 parent bfde1f9 commit 50cd289

34 files changed: 201 additions, 108 deletions

flink-learning-common/pom.xml

Lines changed: 1 addition & 1 deletion
```diff
@@ -54,7 +54,7 @@
         <dependency>
             <groupId>org.apache.flink</groupId>
             <artifactId>flink-statebackend-rocksdb_${scala.binary.version}</artifactId>
-            <version>1.9.0</version>
+            <version>${flink.version}</version>
         </dependency>
     </dependencies>
 </project>
```

flink-learning-common/src/main/java/com/zhisheng/common/utils/ExecutionEnvUtil.java

Lines changed: 7 additions & 19 deletions
```diff
@@ -7,8 +7,6 @@
 import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
 
 import java.io.IOException;
-import java.util.HashMap;
-import java.util.Map;
 
 /**
  * blog:http://www.54tianzhisheng.cn/
@@ -19,8 +17,7 @@ public static ParameterTool createParameterTool(final String[] args) throws Exce
         return ParameterTool
                 .fromPropertiesFile(ExecutionEnvUtil.class.getResourceAsStream(PropertiesConstants.PROPERTIES_FILE_NAME))
                 .mergeWith(ParameterTool.fromArgs(args))
-                .mergeWith(ParameterTool.fromSystemProperties())
-                .mergeWith(ParameterTool.fromMap(getenv()));
+                .mergeWith(ParameterTool.fromSystemProperties());
     }
 
     public static final ParameterTool PARAMETER_TOOL = createParameterTool();
@@ -29,32 +26,23 @@ private static ParameterTool createParameterTool() {
         try {
             return ParameterTool
                     .fromPropertiesFile(ExecutionEnvUtil.class.getResourceAsStream(PropertiesConstants.PROPERTIES_FILE_NAME))
-                    .mergeWith(ParameterTool.fromSystemProperties())
-                    .mergeWith(ParameterTool.fromMap(getenv()));
+                    .mergeWith(ParameterTool.fromSystemProperties());
         } catch (IOException e) {
             e.printStackTrace();
         }
-        return null;
+        return ParameterTool.fromSystemProperties();
     }
 
     public static StreamExecutionEnvironment prepare(ParameterTool parameterTool) throws Exception {
         StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
         env.setParallelism(parameterTool.getInt(PropertiesConstants.STREAM_PARALLELISM, 5));
         env.getConfig().disableSysoutLogging();
-        env.getConfig().setRestartStrategy(RestartStrategies.fixedDelayRestart(4, 10000));
+        env.getConfig().setRestartStrategy(RestartStrategies.fixedDelayRestart(4, 60000));
         if (parameterTool.getBoolean(PropertiesConstants.STREAM_CHECKPOINT_ENABLE, true)) {
-            env.enableCheckpointing(parameterTool.getInt(PropertiesConstants.STREAM_CHECKPOINT_INTERVAL, 1000)); // create a checkpoint every 5 seconds
+            env.enableCheckpointing(parameterTool.getInt(PropertiesConstants.STREAM_CHECKPOINT_INTERVAL, 10000));
         }
-        env.getConfig().setGlobalJobParameters(parameterTool); // make parameters available in the web interface
+        env.getConfig().setGlobalJobParameters(parameterTool);
         env.setStreamTimeCharacteristic(TimeCharacteristic.EventTime);
         return env;
     }
-
-    private static Map<String, String> getenv() {
-        Map<String, String> map = new HashMap<>();
-        for (Map.Entry<String, String> entry : System.getenv().entrySet()) {
-            map.put(entry.getKey(), entry.getValue());
-        }
-        return map;
-    }
-}
+}
```
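
For context, a minimal job bootstrap using these helpers might look like the sketch below (illustrative only, not part of this commit; the example source and job name are made up):

```java
import com.zhisheng.common.utils.ExecutionEnvUtil;
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

public class EnvBootstrapSketch {
    public static void main(String[] args) throws Exception {
        // merges the properties file, program args, and JVM system properties
        // (the environment-variable merge was removed in this commit)
        ParameterTool parameterTool = ExecutionEnvUtil.createParameterTool(args);
        // applies parallelism, restart strategy, checkpointing, and event time
        StreamExecutionEnvironment env = ExecutionEnvUtil.prepare(parameterTool);

        env.fromElements("a", "b", "c").print();
        env.execute("env bootstrap sketch");
    }
}
```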

flink-learning-common/src/main/java/com/zhisheng/common/utils/HttpUtil.java

Lines changed: 49 additions & 49 deletions
```diff
@@ -29,6 +29,33 @@ public class HttpUtil {
 
     private static final CloseableHttpClient httpClient = HttpClients.createDefault();
 
+    public static String doPostString(String url, String jsonParams) throws Exception {
+        CloseableHttpResponse response = null;
+        HttpPost httpPost = new HttpPost(url);
+
+        String httpStr;
+        try {
+            StringEntity entity = new StringEntity(jsonParams, "UTF-8");
+            entity.setContentEncoding("UTF-8");
+            entity.setContentType("application/json");
+
+            httpPost.setEntity(entity);
+            httpPost.setHeader("content-type", "application/json");
+            // if you need to set Basic Auth
+            // httpPost.setHeader("Authorization", getHeader());
+            response = httpClient.execute(httpPost);
+            httpStr = EntityUtils.toString(response.getEntity(), "UTF-8");
+
+        } finally {
+            if (response != null) {
+                EntityUtils.consume(response.getEntity());
+                response.close();
+            }
+        }
+        return httpStr;
+    }
+
+
     /**
      * Send an HTTP request via GET
      */
@@ -60,52 +87,21 @@ public static String doGet(String url) {
     }
 
 
-    /**
-     * Send a POST request (HTTP) with a JSON body
-     *
-     * @param url        the address to call
-     * @param jsonParams the request parameters
-     * @return
-     * @throws Exception
-     */
-    public static CloseableHttpResponse doPostResponse(String url, String jsonParams) throws Exception {
-        CloseableHttpResponse response = null;
-        HttpPost httpPost = new HttpPost(url);
-
-        try {
-            StringEntity entity = new StringEntity(jsonParams, "UTF-8");
-            entity.setContentEncoding("UTF-8");
-            entity.setContentType("application/json");
-
-            httpPost.setEntity(entity);
-            httpPost.setHeader("content-type", "application/json");
-            // if you need to set Basic Auth
-            // httpPost.setHeader("Authorization", getHeader());
-            response = httpClient.execute(httpPost);
-        } finally {
-            if (response != null) {
-                EntityUtils.consume(response.getEntity());
-            }
-        }
-        return response;
-    }
-
-
-    public static String doPostString(String url, String jsonParams) throws Exception {
+    public static String doPutString(String url, String jsonParams) throws Exception {
         CloseableHttpResponse response = null;
-        HttpPost httpPost = new HttpPost(url);
+        HttpPut httpPut = new HttpPut(url);
 
         String httpStr;
         try {
             StringEntity entity = new StringEntity(jsonParams, "UTF-8");
             entity.setContentEncoding("UTF-8");
             entity.setContentType("application/json");
 
-            httpPost.setEntity(entity);
-            httpPost.setHeader("content-type", "application/json");
+            httpPut.setEntity(entity);
+            httpPut.setHeader("content-type", "application/json");
             // if you need to set Basic Auth
-            // httpPost.setHeader("Authorization", getHeader());
-            response = httpClient.execute(httpPost);
+            // httpPut.setHeader("Authorization", getHeader());
+            response = httpClient.execute(httpPut);
             httpStr = EntityUtils.toString(response.getEntity(), "UTF-8");
 
         } finally {
@@ -118,30 +114,34 @@ public static String doPostString(String url, String jsonParams) throws Exceptio
     }
 
 
-    public static String doPutString(String url, String jsonParams) throws Exception {
+    /**
+     * Send a POST request (HTTP) with a JSON body
+     *
+     * @param url        the address to call
+     * @param jsonParams the request parameters
+     * @return
+     * @throws Exception
+     */
+    public static CloseableHttpResponse doPostResponse(String url, String jsonParams) throws Exception {
         CloseableHttpResponse response = null;
-        HttpPut httpPut = new HttpPut(url);
+        HttpPost httpPost = new HttpPost(url);
 
-        String httpStr;
         try {
             StringEntity entity = new StringEntity(jsonParams, "UTF-8");
             entity.setContentEncoding("UTF-8");
             entity.setContentType("application/json");
 
-            httpPut.setEntity(entity);
-            httpPut.setHeader("content-type", "application/json");
+            httpPost.setEntity(entity);
+            httpPost.setHeader("content-type", "application/json");
             // if you need to set Basic Auth
-            // httpPut.setHeader("Authorization", getHeader());
-            response = httpClient.execute(httpPut);
-            httpStr = EntityUtils.toString(response.getEntity(), "UTF-8");
-
+            // httpPost.setHeader("Authorization", getHeader());
+            response = httpClient.execute(httpPost);
         } finally {
             if (response != null) {
                 EntityUtils.consume(response.getEntity());
-                response.close();
             }
         }
-        return httpStr;
+        return response;
     }
 
 
@@ -156,4 +156,4 @@ private static String getHeader() {
         String authHeader = "Basic " + new String(encodedAuth);
         return authHeader;
     }
-}
+}
```
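
A quick usage sketch for the reordered POST/PUT helpers (the endpoint URL and JSON payload below are placeholders, not from this repository):

```java
import com.zhisheng.common.utils.HttpUtil;

public class HttpUtilSketch {
    public static void main(String[] args) throws Exception {
        String json = "{\"name\":\"zhisheng\"}";
        // both helpers return the response body as a UTF-8 string and
        // release/close the response in their finally blocks
        String created = HttpUtil.doPostString("http://localhost:8080/api/items", json);
        String updated = HttpUtil.doPutString("http://localhost:8080/api/items/1", json);
        System.out.println(created);
        System.out.println(updated);
    }
}
```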

flink-learning-common/src/main/java/com/zhisheng/common/utils/KafkaConfigUtil.java

Lines changed: 0 additions & 4 deletions
```diff
@@ -100,8 +100,4 @@ private static Map<KafkaTopicPartition, Long> buildOffsetByTime(Properties props
         consumer.close();
         return partitionOffset;
     }
-
-    public static SingleOutputStreamOperator<MetricEvent> parseSource(DataStreamSource<MetricEvent> dataStreamSource) {
-        return dataStreamSource.assignTimestampsAndWatermarks(new MetricWatermark());
-    }
 }
```

flink-learning-connectors/flink-learning-connectors-akka/pom.xml

Lines changed: 1 addition & 1 deletion
```diff
@@ -15,7 +15,7 @@
         <dependency>
             <groupId>com.zhisheng.flink</groupId>
             <artifactId>flink-learning-common</artifactId>
-            <version>1.0-SNAPSHOT</version>
+            <version>${project.version}</version>
         </dependency>
     </dependencies>
 </project>
```

flink-learning-connectors/flink-learning-connectors-es2/src/main/java/com/zhisheng/connectors/es2/Sink2ES2Main.java

Lines changed: 1 addition & 1 deletion
```diff
@@ -1,7 +1,7 @@
 package com.zhisheng.connectors.es2;
 
 /**
- * Desc:
+ * Desc: sink data to es2
  * Created by zhisheng on 2019/10/22 at 5:10 PM
  * blog:http://www.54tianzhisheng.cn/
  * WeChat official account: zhisheng
```

flink-learning-connectors/flink-learning-connectors-es5/src/main/java/com/zhisheng/connectors/es5/Sink2ES5Main.java

Lines changed: 1 addition & 1 deletion
```diff
@@ -1,7 +1,7 @@
 package com.zhisheng.connectors.es5;
 
 /**
- * Desc:
+ * Desc: sink data to es5
  * Created by zhisheng on 2019/10/22 at 5:10 PM
  * blog:http://www.54tianzhisheng.cn/
  * WeChat official account: zhisheng
```

flink-learning-connectors/flink-learning-connectors-es6/src/main/java/com/zhisheng/connectors/es6/Sink2ES6Main.java

Lines changed: 1 addition & 0 deletions
```diff
@@ -21,6 +21,7 @@
 import static com.zhisheng.common.constant.PropertiesConstants.*;
 
 /**
+ * sink data to es6
  * blog:http://www.54tianzhisheng.cn/
  * WeChat official account: zhisheng
  */
```

flink-learning-connectors/flink-learning-connectors-es6/src/main/java/com/zhisheng/connectors/es6/utils/ESSinkUtil.java

Lines changed: 1 addition & 1 deletion
```diff
@@ -12,7 +12,7 @@
 import java.util.List;
 
 /**
- * Desc: ES Sink util (get ES host, addSink) //todo: index template & x-pack
+ * Desc: ES Sink utils (get ES host, addSink) //todo: index template & x-pack
  * Created by zhisheng on 2019/10/21 at 3:05 PM
  * blog:http://www.54tianzhisheng.cn/
  * WeChat official account: zhisheng
```

flink-learning-connectors/flink-learning-connectors-es7/src/main/java/com/zhisheng/connectors/es7/Sink2ES7Main.java

Lines changed: 1 addition & 1 deletion
```diff
@@ -1,7 +1,7 @@
 package com.zhisheng.connectors.es7;
 
 /**
- * Desc:
+ * Desc: sink data to es7
  * Created by zhisheng on 2019/10/22 at 5:10 PM
  * blog:http://www.54tianzhisheng.cn/
  * WeChat official account: zhisheng
```

flink-learning-connectors/flink-learning-connectors-kudu/pom.xml

Lines changed: 1 addition & 1 deletion
```diff
@@ -20,7 +20,7 @@
         <dependency>
             <groupId>com.zhisheng.flink</groupId>
             <artifactId>flink-learning-common</artifactId>
-            <version>1.0-SNAPSHOT</version>
+            <version>${project.version}</version>
         </dependency>
         <dependency>
             <groupId>org.apache.kudu</groupId>
```

flink-learning-connectors/pom.xml

Lines changed: 1 addition & 1 deletion
```diff
@@ -38,7 +38,7 @@
         <dependency>
             <groupId>com.zhisheng.flink</groupId>
             <artifactId>flink-learning-common</artifactId>
-            <version>1.0-SNAPSHOT</version>
+            <version>${project.version}</version>
         </dependency>
     </dependencies>
 
```

flink-learning-data-sinks/pom.xml

Lines changed: 1 addition & 1 deletion
```diff
@@ -15,7 +15,7 @@
         <dependency>
             <groupId>com.zhisheng.flink</groupId>
             <artifactId>flink-learning-common</artifactId>
-            <version>1.0-SNAPSHOT</version>
+            <version>${project.version}</version>
         </dependency>
         <dependency>
             <groupId>mysql</groupId>
```

flink-learning-data-sources/pom.xml

Lines changed: 1 addition & 1 deletion
```diff
@@ -15,7 +15,7 @@
         <dependency>
             <groupId>com.zhisheng.flink</groupId>
             <artifactId>flink-learning-common</artifactId>
-            <version>1.0-SNAPSHOT</version>
+            <version>${project.version}</version>
         </dependency>
         <dependency>
             <groupId>mysql</groupId>
```

flink-learning-examples/pom.xml

Lines changed: 1 addition & 1 deletion
```diff
@@ -15,7 +15,7 @@
         <dependency>
             <groupId>com.zhisheng.flink</groupId>
             <artifactId>flink-learning-common</artifactId>
-            <version>1.0-SNAPSHOT</version>
+            <version>${project.version}</version>
         </dependency>
         <dependency>
             <groupId>mysql</groupId>
```

flink-learning-examples/src/main/resources/application.properties

Lines changed: 5 additions & 4 deletions
```diff
@@ -1,14 +1,15 @@
 kafka.brokers=localhost:9092
 kafka.group.id=zhisheng
 kafka.zookeeper.connect=localhost:2181
-metrics.topic=zhisheng
+metrics.topic=zhisheng_metrics
 stream.parallelism=5
 stream.checkpoint.interval=1000
 stream.checkpoint.enable=false
 #stream.checkpoint.type=memory
-stream.checkpoint.type=fs
-#stream.checkpoint.type=rocksdb
-stream.checkpoint.dir=file:///usr/local/state/
+#stream.checkpoint.type=fs
+stream.checkpoint.type=rocksdb
+#stream.checkpoint.dir=file:///usr/local/state/
+stream.checkpoint.dir=/Users/zhisheng/Desktop
 
 
 #mysql
```
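
Switching stream.checkpoint.type to rocksdb only takes effect if the job wires the property into a state backend; a minimal sketch of that wiring (a hypothetical helper, the repository's own checkpoint utility may differ):

```java
import org.apache.flink.api.java.utils.ParameterTool;
import org.apache.flink.contrib.streaming.state.RocksDBStateBackend;
import org.apache.flink.runtime.state.filesystem.FsStateBackend;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

// Hypothetical helper: maps stream.checkpoint.type / stream.checkpoint.dir onto a state backend.
public class StateBackendSketch {
    public static void configure(StreamExecutionEnvironment env, ParameterTool params) throws Exception {
        String type = params.get("stream.checkpoint.type", "memory");
        String dir = params.get("stream.checkpoint.dir", "file:///tmp/flink-checkpoints");
        if ("rocksdb".equals(type)) {
            env.setStateBackend(new RocksDBStateBackend(dir, true)); // incremental checkpoints
        } else if ("fs".equals(type)) {
            env.setStateBackend(new FsStateBackend(dir));
        }
        // "memory" keeps Flink's default MemoryStateBackend
    }
}
```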

flink-learning-libraries/flink-learning-libraries-cep/pom.xml

Lines changed: 7 additions & 2 deletions
```diff
@@ -12,7 +12,12 @@
     <artifactId>flink-learning-libraries-cep</artifactId>
 
     <dependencies>
-
+        <dependency>
+            <groupId>org.apache.flink</groupId>
+            <artifactId>flink-cep_${scala.binary.version}</artifactId>
+            <version>${flink.version}</version>
+            <!--<scope>provided</scope>-->
+        </dependency>
     </dependencies>
 
     <build>
@@ -36,7 +41,7 @@
 
                         <transformer
                                 implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
-                            <mainClass>com.zhisheng.libraries.cep.Main</mainClass>
+                            <mainClass>com.zhisheng.libraries.cep.CEPMain</mainClass>
                         </transformer>
                         <transformer
                                 implementation="org.apache.maven.plugins.shade.resource.AppendingTransformer">
```
Lines changed: 3 additions & 4 deletions
```diff
@@ -1,7 +1,6 @@
 kafka.brokers=localhost:9092
-kafka.group.id=metrics-group
+kafka.group.id=zhisheng
 kafka.zookeeper.connect=localhost:2181
-metrics.topic=alert-metrics
-stream.parallelism=5
-stream.checkpoint.interval=1000
+metrics.topic=zhisheng_metrics
+stream.parallelism=1
 stream.checkpoint.enable=false
```
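
With the flink-cep dependency added above, the module can compile pattern jobs such as the following sketch (purely illustrative; it is not the repository's CEPMain):

```java
import org.apache.flink.cep.CEP;
import org.apache.flink.cep.PatternSelectFunction;
import org.apache.flink.cep.pattern.Pattern;
import org.apache.flink.cep.pattern.conditions.SimpleCondition;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;

import java.util.List;
import java.util.Map;

public class CepSketch {
    public static void main(String[] args) throws Exception {
        StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
        DataStream<String> events = env.fromElements("login", "error", "error", "logout");

        // match two consecutive "error" events
        Pattern<String, String> pattern = Pattern.<String>begin("first")
                .where(new SimpleCondition<String>() {
                    @Override
                    public boolean filter(String value) {
                        return "error".equals(value);
                    }
                })
                .next("second")
                .where(new SimpleCondition<String>() {
                    @Override
                    public boolean filter(String value) {
                        return "error".equals(value);
                    }
                });

        CEP.pattern(events, pattern)
                .select(new PatternSelectFunction<String, String>() {
                    @Override
                    public String select(Map<String, List<String>> match) {
                        return "two consecutive errors: " + match;
                    }
                })
                .print();

        env.execute("cep sketch");
    }
}
```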

flink-learning-libraries/flink-learning-libraries-machine-learning/README.md

Lines changed: 13 additions & 1 deletion
````diff
@@ -2,4 +2,16 @@
 
 TODO:
 
-Because the flink-ml module was removed in Flink 1.9, this part is not covered for now; when the Flink version is upgraded later, more content can be added to this module, doing!
+Because the flink-ml module was removed in Flink 1.9, this part is not covered for now; when the Flink version is upgraded later, more content can be added to this module, doing!
+
+// You can still study flink-ml from Flink 1.8 by adding the dependency:
+
+```xml
+<dependency>
+    <groupId>org.apache.flink</groupId>
+    <artifactId>flink-ml_${scala.binary.version}</artifactId>
+    <version>1.8.0</version>
+</dependency>
+```
+
+The haberman.data dataset comes from a study of the survival of patients who underwent breast cancer surgery. The data is comma-separated: the first three columns are features and the last column is the class (1 if the patient survived 5 years or longer, 2 if the patient died within 5 years).
````
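
As a rough illustration of the dataset layout described above, the file could be read with the DataSet API like this (the local path is an assumption; flink-ml 1.8 would still be needed for actual model training):

```java
import org.apache.flink.api.java.ExecutionEnvironment;
import org.apache.flink.api.java.tuple.Tuple4;

// Illustrative loader for haberman.data: three integer features plus a class label (1 or 2).
public class HabermanLoaderSketch {
    public static void main(String[] args) throws Exception {
        ExecutionEnvironment env = ExecutionEnvironment.getExecutionEnvironment();

        env.readCsvFile("/path/to/haberman.data")   // assumed local path
                .types(Integer.class, Integer.class, Integer.class, Integer.class)
                .filter((Tuple4<Integer, Integer, Integer, Integer> row) -> row.f3 == 1) // keep 5-year survivors
                .print();
    }
}
```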
