Skip to content

Commit 5c3ca9c

Browse files
committed
add: image process
1 parent 4537147 commit 5c3ca9c

File tree

7 files changed

+195
-10
lines changed

7 files changed

+195
-10
lines changed

spark-best-practice/pom.xml

Lines changed: 11 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@
1111
<modules>
1212
<module>simple-demo</module>
1313
<module>spark-phoenix</module>
14+
<module>spark-image-process</module>
1415
</modules>
1516

1617
<parent>
@@ -36,12 +37,6 @@
3637
<version>2.8</version>
3738
</dependency>
3839

39-
<dependency>
40-
<groupId>io.delta</groupId>
41-
<artifactId>delta-core_2.11</artifactId>
42-
<version>0.2.0</version>
43-
</dependency>
44-
4540
<dependency>
4641
<groupId>org.apache.logging.log4j</groupId>
4742
<artifactId>log4j-core</artifactId>
@@ -75,26 +70,32 @@
7570
<dependency>
7671
<groupId>org.apache.spark</groupId>
7772
<artifactId>spark-sql_2.12</artifactId>
78-
<version>3.2.1</version>
73+
<version>3.1.2</version>
7974
</dependency>
8075

76+
<dependency>
77+
<groupId>org.apache.spark</groupId>
78+
<artifactId>spark-mllib_2.12</artifactId>
79+
<version>3.1.2</version>
80+
</dependency>
81+
8182
<dependency>
8283
<groupId>org.apache.spark</groupId>
8384
<artifactId>spark-core_2.12</artifactId>
84-
<version>3.2.1</version>
85+
<version>3.1.2</version>
8586
</dependency>
8687

8788

8889
<dependency>
8990
<groupId>org.apache.spark</groupId>
9091
<artifactId>spark-avro_2.12</artifactId>
91-
<version>3.2.1</version>
92+
<version>3.1.2</version>
9293
</dependency>
9394

9495
<dependency>
9596
<groupId>org.apache.spark</groupId>
9697
<artifactId>spark-sql-kafka-0-10_2.12</artifactId>
97-
<version>3.2.1</version>
98+
<version>3.1.2</version>
9899
</dependency>
99100

100101
<dependency>

spark-best-practice/simple-demo/src/main/java/com/wxmimperio/spark/SparkSessionShow.java

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -24,6 +24,11 @@ public static void main(String[] args) {
2424
conf.setMaster("local");
2525
try (JavaSparkContext sc = new JavaSparkContext(conf);
2626
SparkSession sparkSession = SparkSession.builder().sparkContext(sc.sc()).getOrCreate()) {
27+
28+
sparkSession.read().format("image")
29+
.load("E:\\coding\\github\\hadoop-code-snippets\\spark-best-practice\\spark-image-process\\src\\main\\resources\\gorilla_PNG18712.png")
30+
.printSchema();
31+
2732
List<String> wordList = new ArrayList<>();
2833
wordList.add("While this code used the built-in support for accumulators of type Long, programmers can also create their own types by subclassing");
2934
wordList.add("While this code used the built-in support for accumulators of type Long, programmers can also create their own types by subclassing");
Lines changed: 94 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,94 @@
<?xml version="1.0" encoding="UTF-8"?>

<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
    <parent>
        <artifactId>spark-best-practice</artifactId>
        <groupId>com.wxmimperio.spark</groupId>
        <version>1.0-SNAPSHOT</version>
    </parent>
    <modelVersion>4.0.0</modelVersion>

    <artifactId>spark-image-process</artifactId>

    <name>spark-image-process</name>
    <url>http://www.example.com</url>

    <properties>
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
        <maven.compiler.source>1.8</maven.compiler.source>
        <maven.compiler.target>1.8</maven.compiler.target>
        <!-- FIX: spark.version previously held 2.12.15 (a Scala version, duplicating
             scala.version) while the Spark dependencies hardcoded 3.1.2. It now holds
             the actual Spark version and is referenced below, so an upgrade is a
             one-line change. -->
        <spark.version>3.1.2</spark.version>
        <scala.version>2.12.15</scala.version>
    </properties>

    <dependencies>
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.12</version>
            <scope>test</scope>
        </dependency>

        <!-- Spark artifacts share the 2.12 Scala binary suffix, matching scala.version. -->
        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-sql_2.12</artifactId>
            <version>${spark.version}</version>
        </dependency>

        <dependency>
            <groupId>org.apache.spark</groupId>
            <artifactId>spark-core_2.12</artifactId>
            <version>${spark.version}</version>
        </dependency>
    </dependencies>

    <!-- Pin the Scala toolchain so transitive scala-library versions cannot diverge
         from the compiler used for this module. -->
    <dependencyManagement>
        <dependencies>
            <dependency>
                <groupId>org.scala-lang</groupId>
                <artifactId>scala-compiler</artifactId>
                <version>${scala.version}</version>
            </dependency>
            <dependency>
                <groupId>org.scala-lang</groupId>
                <artifactId>scala-reflect</artifactId>
                <version>${scala.version}</version>
            </dependency>
            <dependency>
                <groupId>org.scala-lang</groupId>
                <artifactId>scala-library</artifactId>
                <version>${scala.version}</version>
            </dependency>
        </dependencies>
    </dependencyManagement>

    <build>
        <plugins>
            <!-- NOTE(review): org.scala-tools:maven-scala-plugin is long unmaintained;
                 consider migrating to net.alchim31.maven:scala-maven-plugin. Kept as-is
                 to avoid changing build behavior in this commit. -->
            <plugin>
                <groupId>org.scala-tools</groupId>
                <artifactId>maven-scala-plugin</artifactId>
                <version>2.15.2</version>
                <executions>
                    <!-- Compile Scala sources before Java so mixed projects link. -->
                    <execution>
                        <id>scala-compile-first</id>
                        <goals>
                            <goal>compile</goal>
                        </goals>
                        <configuration>
                            <includes>
                                <include>**/*.scala</include>
                            </includes>
                        </configuration>
                    </execution>
                    <execution>
                        <id>scala-test-compile</id>
                        <goals>
                            <goal>testCompile</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>
</project>
Loading
Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,30 @@
# Rules reminder:
# DEBUG < INFO < WARN < ERROR < FATAL

# Global logging configuration:
# root logger at INFO, routed to console (stdout) plus two rolling files (D, E).
log4j.rootLogger = info,stdout,D,E

## Console output
log4j.appender.stdout=org.apache.log4j.ConsoleAppender
log4j.appender.stdout.Target=System.out
log4j.appender.stdout.layout=org.apache.log4j.PatternLayout
log4j.appender.stdout.layout.ConversionPattern=%t %d{ABSOLUTE} %5p %c{1}:%L - %m%n


### INFO level and above, written to a daily-rolling file (./logs/info.log)
log4j.appender.D = org.apache.log4j.DailyRollingFileAppender
log4j.appender.D.File = ./logs/info.log
log4j.appender.D.Append = true
log4j.appender.D.Threshold = INFO
log4j.appender.D.datePattern = '_'yyyy-MM-dd
log4j.appender.D.layout = org.apache.log4j.PatternLayout
log4j.appender.D.layout.ConversionPattern = %-d{yyyy-MM-dd HH:mm:ss} [ %t:%r ] - [ %p ] %m%n

### ERROR level and above, written to a daily-rolling file (./logs/error.log)
log4j.appender.E = org.apache.log4j.DailyRollingFileAppender
log4j.appender.E.File = ./logs/error.log
log4j.appender.E.Append = true
log4j.appender.E.Threshold = ERROR
log4j.appender.E.datePattern = '_'yyyy-MM-dd
log4j.appender.E.layout = org.apache.log4j.PatternLayout
log4j.appender.E.layout.ConversionPattern = %-d{yyyy-MM-dd HH:mm:ss} [ %t:%r ] - [ %p ] %m%n
Lines changed: 35 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,35 @@
package com.wxmimperio.spark

import org.apache.spark.SparkConf
import org.apache.spark.sql.SparkSession

/**
 * Minimal demo of Spark's built-in "image" data source: loads a single PNG,
 * prints the schema of the `image` struct column, then reports the decoded
 * pixel buffer's size for each row.
 */
object SimpleDemo {

  def main(args: Array[String]): Unit = {
    val conf = new SparkConf().setMaster("local[*]")
    val spark = SparkSession.builder()
      .config(conf)
      .getOrCreate()

    try {
      val imageDF = spark.read
        .format("image")
        .load("E:\\coding\\github\\hadoop-code-snippets\\spark-best-practice\\spark-image-process\\src\\main\\resources\\gorilla_PNG18712.png")

      imageDF.printSchema()

      // Flatten the nested `image` struct into top-level columns
      // (origin, width, height, nChannels, mode, data).
      val row = imageDF.select(
        "image.origin",
        "image.width",
        "image.height",
        "image.nChannels",
        "image.mode",
        "image.data"
      )

      // FIX 1: Dataset.foreach runs on executors, so its println output is not
      // guaranteed to reach the driver outside local mode; collect() first.
      // Safe here: the DataFrame holds exactly one small image row.
      // FIX 2: println(data) on an Array[Byte] printed an opaque JVM reference
      // (e.g. "[B@1f2a3b"); print a meaningful summary instead.
      row.collect().foreach { r =>
        val data = r.getAs[Array[Byte]]("data")
        println(s"origin=${r.getAs[String]("origin")} " +
          s"size=${r.getAs[Int]("width")}x${r.getAs[Int]("height")} " +
          s"bytes=${data.length}")
      }
    } finally {
      // FIX 3: the session was never stopped, leaking the local SparkContext.
      spark.stop()
    }
  }
}
Lines changed: 20 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
1+
package com.wxmimperio.spark;
2+
3+
import static org.junit.Assert.assertTrue;
4+
5+
import org.junit.Test;
6+
7+
/**
8+
* Unit test for simple App.
9+
*/
10+
public class AppTest
11+
{
12+
/**
13+
* Rigorous Test :-)
14+
*/
15+
@Test
16+
public void shouldAnswerWithTrue()
17+
{
18+
assertTrue( true );
19+
}
20+
}

0 commit comments

Comments
 (0)