Skip to content

Commit 1859408

Browse files
committed
wip: some patch files for Hadoop 3.4.1
1 parent 3c6b91b commit 1859408

32 files changed

+1718
-0
lines changed
Lines changed: 35 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,35 @@
1+
diff --git a/pom.xml b/pom.xml
2+
index 16a3733..c309dc6 100644
3+
--- a/pom.xml
4+
+++ b/pom.xml
5+
@@ -118,7 +118,7 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/x
6+
<spotbugs-maven-plugin.version>4.2.0</spotbugs-maven-plugin.version>
7+
<jsonschema2pojo-maven-plugin.version>1.1.1</jsonschema2pojo-maven-plugin.version>
8+
<maven-compiler-plugin.version>3.10.1</maven-compiler-plugin.version>
9+
- <cyclonedx.version>2.7.10</cyclonedx.version>
10+
+ <cyclonedx.version>2.8.0</cyclonedx.version>
11+
12+
<shell-executable>bash</shell-executable>
13+
14+
@@ -770,6 +770,11 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/x
15+
<groupId>org.cyclonedx</groupId>
16+
<artifactId>cyclonedx-maven-plugin</artifactId>
17+
<version>${cyclonedx.version}</version>
18+
+ <configuration>
19+
+ <projectType>application</projectType>
20+
+ <schemaVersion>1.5</schemaVersion>
21+
+ <skipNotDeployed>false</skipNotDeployed>
22+
+ </configuration>
23+
<executions>
24+
<execution>
25+
<phase>package</phase>
26+
@@ -778,9 +783,6 @@ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 https://maven.apache.org/x
27+
</goals>
28+
</execution>
29+
</executions>
30+
- <configuration>
31+
- <outputFormat>xml</outputFormat>
32+
- </configuration>
33+
</plugin>
34+
</plugins>
35+
</build>
Lines changed: 213 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,213 @@
1+
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java
2+
index b2d2a8d100..97e281ba85 100644
3+
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java
4+
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/serializer/avro/TestAvroSerialization.java
5+
@@ -33,7 +33,7 @@ public class TestAvroSerialization {
6+
@Test
7+
public void testSpecific() throws Exception {
8+
AvroRecord before = new AvroRecord();
9+
- before.intField = 5;
10+
+ before.setIntField(5);
11+
AvroRecord after = SerializationTestUtil.testSerialization(conf, before);
12+
assertEquals(before, after);
13+
}
14+
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobQueueChangeEvent.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobQueueChangeEvent.java
15+
index 66f3781239..aa70bbff62 100644
16+
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobQueueChangeEvent.java
17+
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobQueueChangeEvent.java
18+
@@ -31,8 +31,8 @@ public class JobQueueChangeEvent implements HistoryEvent {
19+
private JobQueueChange datum = new JobQueueChange();
20+
21+
public JobQueueChangeEvent(JobID id, String queueName) {
22+
- datum.jobid = new Utf8(id.toString());
23+
- datum.jobQueueName = new Utf8(queueName);
24+
+ datum.setJobid(new Utf8(id.toString()));
25+
+ datum.setJobQueueName(new Utf8(queueName));
26+
}
27+
28+
JobQueueChangeEvent() { }
29+
@@ -54,13 +54,13 @@ public class JobQueueChangeEvent implements HistoryEvent {
30+
31+
/** Get the Job ID */
32+
public JobID getJobId() {
33+
- return JobID.forName(datum.jobid.toString());
34+
+ return JobID.forName(datum.getJobid().toString());
35+
}
36+
37+
/** Get the new Job queue name */
38+
public String getJobQueueName() {
39+
- if (datum.jobQueueName != null) {
40+
- return datum.jobQueueName.toString();
41+
+ if (datum.getJobQueueName() != null) {
42+
+ return datum.getJobQueueName().toString();
43+
}
44+
return null;
45+
}
46+
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
47+
index bcba56eced..2d52e3ebb1 100644
48+
--- a/hadoop-project/pom.xml
49+
+++ b/hadoop-project/pom.xml
50+
@@ -37,7 +37,7 @@
51+
<!--Whether to proceed to next module if any test failures exist-->
52+
<maven.test.failure.ignore>true</maven.test.failure.ignore>
53+
<maven.test.redirectTestOutputToFile>true</maven.test.redirectTestOutputToFile>
54+
- <jetty.version>9.4.53.v20231009</jetty.version>
55+
+ <jetty.version>9.4.56.v20240826</jetty.version>
56+
<test.exclude>_</test.exclude>
57+
<test.exclude.pattern>_</test.exclude.pattern>
58+
59+
@@ -63,7 +63,7 @@
60+
<java.security.egd>file:///dev/urandom</java.security.egd>
61+
62+
<!-- avro version -->
63+
- <avro.version>1.9.2</avro.version>
64+
+ <avro.version>1.11.4</avro.version>
65+
66+
<!-- jersey version -->
67+
<jersey.version>1.19.4</jersey.version>
68+
@@ -108,7 +108,7 @@
69+
<findbugs.version>3.0.5</findbugs.version>
70+
<dnsjava.version>3.6.1</dnsjava.version>
71+
72+
- <guava.version>27.0-jre</guava.version>
73+
+ <guava.version>32.0.1-jre</guava.version>
74+
<guice.version>4.2.3</guice.version>
75+
76+
<bouncycastle.version>1.78.1</bouncycastle.version>
77+
@@ -143,7 +143,7 @@
78+
<jna.version>5.2.0</jna.version>
79+
<gson.version>2.9.0</gson.version>
80+
<metrics.version>3.2.4</metrics.version>
81+
- <netty4.version>4.1.100.Final</netty4.version>
82+
+ <netty4.version>4.1.108.Final</netty4.version>
83+
<snappy-java.version>1.1.10.4</snappy-java.version>
84+
<lz4-java.version>1.7.1</lz4-java.version>
85+
86+
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
87+
index 1213e6a46f..603b248f6e 100644
88+
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
89+
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobBuilder.java
90+
@@ -460,7 +460,7 @@ public class JobBuilder {
91+
}
92+
task.setFinishTime(event.getFinishTime());
93+
task.setTaskStatus(getPre21Value(event.getTaskStatus()));
94+
- task.incorporateCounters(((TaskFinished) event.getDatum()).counters);
95+
+ task.incorporateCounters(((TaskFinished) event.getDatum()).getCounters());
96+
}
97+
98+
private void processTaskFailedEvent(TaskFailedEvent event) {
99+
@@ -472,7 +472,7 @@ public class JobBuilder {
100+
task.setFinishTime(event.getFinishTime());
101+
task.setTaskStatus(getPre21Value(event.getTaskStatus()));
102+
TaskFailed t = (TaskFailed)(event.getDatum());
103+
- task.putDiagnosticInfo(t.error.toString());
104+
+ task.putDiagnosticInfo(t.getError().toString());
105+
// killed task wouldn't have failed attempt.
106+
if (t.getFailedDueToAttempt() != null) {
107+
task.putFailedDueToAttemptId(t.getFailedDueToAttempt().toString());
108+
@@ -542,7 +542,7 @@ public class JobBuilder {
109+
}
110+
attempt.setFinishTime(event.getFinishTime());
111+
attempt
112+
- .incorporateCounters(((TaskAttemptFinished) event.getDatum()).counters);
113+
+ .incorporateCounters(((TaskAttemptFinished) event.getDatum()).getCounters());
114+
}
115+
116+
private void processReduceAttemptFinishedEvent(
117+
@@ -568,7 +568,7 @@ public class JobBuilder {
118+
attempt.setShuffleFinished(event.getShuffleFinishTime());
119+
attempt.setSortFinished(event.getSortFinishTime());
120+
attempt
121+
- .incorporateCounters(((ReduceAttemptFinished) event.getDatum()).counters);
122+
+ .incorporateCounters(((ReduceAttemptFinished) event.getDatum()).getCounters());
123+
attempt.arraySetClockSplits(event.getClockSplits());
124+
attempt.arraySetCpuUsages(event.getCpuUsages());
125+
attempt.arraySetVMemKbytes(event.getVMemKbytes());
126+
@@ -596,7 +596,7 @@ public class JobBuilder {
127+
// is redundant, but making this will add future-proofing.
128+
attempt.setFinishTime(event.getFinishTime());
129+
attempt
130+
- .incorporateCounters(((MapAttemptFinished) event.getDatum()).counters);
131+
+ .incorporateCounters(((MapAttemptFinished) event.getDatum()).getCounters());
132+
attempt.arraySetClockSplits(event.getClockSplits());
133+
attempt.arraySetCpuUsages(event.getCpuUsages());
134+
attempt.arraySetVMemKbytes(event.getVMemKbytes());
135+
@@ -661,11 +661,11 @@ public class JobBuilder {
136+
137+
JobFinished job = (JobFinished)event.getDatum();
138+
Map<String, Long> countersMap =
139+
- JobHistoryUtils.extractCounters(job.totalCounters);
140+
+ JobHistoryUtils.extractCounters(job.getTotalCounters());
141+
result.putTotalCounters(countersMap);
142+
- countersMap = JobHistoryUtils.extractCounters(job.mapCounters);
143+
+ countersMap = JobHistoryUtils.extractCounters(job.getMapCounters());
144+
result.putMapCounters(countersMap);
145+
- countersMap = JobHistoryUtils.extractCounters(job.reduceCounters);
146+
+ countersMap = JobHistoryUtils.extractCounters(job.getReduceCounters());
147+
result.putReduceCounters(countersMap);
148+
}
149+
150+
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobHistoryUtils.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobHistoryUtils.java
151+
index 6ae87bbd40..34ef95f337 100644
152+
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobHistoryUtils.java
153+
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobHistoryUtils.java
154+
@@ -157,9 +157,9 @@ public class JobHistoryUtils {
155+
static Map<String, Long> extractCounters(JhCounters counters) {
156+
Map<String, Long> countersMap = new HashMap<String, Long>();
157+
if (counters != null) {
158+
- for (JhCounterGroup group : counters.groups) {
159+
- for (JhCounter counter : group.counts) {
160+
- countersMap.put(counter.name.toString(), counter.value);
161+
+ for (JhCounterGroup group : counters.getGroups()) {
162+
+ for (JhCounter counter : group.getCounts()) {
163+
+ countersMap.put(counter.getName().toString(), counter.getValue());
164+
}
165+
}
166+
}
167+
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java
168+
index 4ae33a7661..2308e58690 100644
169+
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java
170+
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTask.java
171+
@@ -268,11 +268,11 @@ public class LoggedTask implements DeepCompare {
172+
String counterName) {
173+
counterName = canonicalizeCounterName(counterName);
174+
175+
- for (JhCounterGroup group : counters.groups) {
176+
- for (JhCounter counter : group.counts) {
177+
+ for (JhCounterGroup group : counters.getGroups()) {
178+
+ for (JhCounter counter : group.getCounts()) {
179+
if (counterName
180+
- .equals(canonicalizeCounterName(counter.name.toString()))) {
181+
- thunk.set(counter.value);
182+
+ .equals(canonicalizeCounterName(counter.getName().toString()))) {
183+
+ thunk.set(counter.getValue());
184+
return;
185+
}
186+
}
187+
diff --git a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
188+
index 5c6abd372c..fae53b2926 100644
189+
--- a/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
190+
+++ b/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
191+
@@ -636,11 +636,11 @@ public class LoggedTaskAttempt implements DeepCompare {
192+
String counterName) {
193+
counterName = canonicalizeCounterName(counterName);
194+
195+
- for (JhCounterGroup group : counters.groups) {
196+
- for (JhCounter counter : group.counts) {
197+
+ for (JhCounterGroup group : counters.getGroups()) {
198+
+ for (JhCounter counter : group.getCounts()) {
199+
if (counterName
200+
- .equals(canonicalizeCounterName(counter.name.toString()))) {
201+
- thunk.set(counter.value);
202+
+ .equals(canonicalizeCounterName(counter.getName().toString()))) {
203+
+ thunk.set(counter.getValue());
204+
return;
205+
}
206+
}
207+
@@ -769,4 +769,4 @@ public class LoggedTaskAttempt implements DeepCompare {
208+
compare1(vMemKbytes, other.vMemKbytes, loc, "vMemKbytes");
209+
compare1(physMemKbytes, other.physMemKbytes, loc, "physMemKbytes");
210+
}
211+
-}
212+
\ No newline at end of file
213+
+}
Lines changed: 89 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,89 @@
1+
diff --git a/hadoop-assemblies/src/main/resources/assemblies/hadoop-tools.xml b/hadoop-assemblies/src/main/resources/assemblies/hadoop-tools.xml
2+
index c01d9c42820..ee9e9040ec8 100644
3+
--- a/hadoop-assemblies/src/main/resources/assemblies/hadoop-tools.xml
4+
+++ b/hadoop-assemblies/src/main/resources/assemblies/hadoop-tools.xml
5+
@@ -23,14 +23,6 @@
6+
</formats>
7+
<includeBaseDirectory>false</includeBaseDirectory>
8+
<fileSets>
9+
- <fileSet>
10+
- <directory>../hadoop-archive-logs/src/main/shellprofile.d</directory>
11+
- <includes>
12+
- <include>*</include>
13+
- </includes>
14+
- <outputDirectory>/libexec/shellprofile.d</outputDirectory>
15+
- <fileMode>0755</fileMode>
16+
- </fileSet>
17+
<fileSet>
18+
<directory>../hadoop-archives/src/main/shellprofile.d</directory>
19+
<includes>
20+
@@ -98,13 +90,6 @@
21+
<include>*-sources.jar</include>
22+
</includes>
23+
</fileSet>
24+
- <fileSet>
25+
- <directory>../hadoop-archive-logs/target</directory>
26+
- <outputDirectory>/share/hadoop/${hadoop.component}/sources</outputDirectory>
27+
- <includes>
28+
- <include>*-sources.jar</include>
29+
- </includes>
30+
- </fileSet>
31+
<fileSet>
32+
<directory>../hadoop-datajoin/target</directory>
33+
<outputDirectory>/share/hadoop/${hadoop.component}/sources</outputDirectory>
34+
diff --git a/hadoop-project/pom.xml b/hadoop-project/pom.xml
35+
index 2d52e3ebb12..84a618f8ab6 100644
36+
--- a/hadoop-project/pom.xml
37+
+++ b/hadoop-project/pom.xml
38+
@@ -646,11 +646,6 @@
39+
<artifactId>hadoop-archives</artifactId>
40+
<version>${hadoop.version}</version>
41+
</dependency>
42+
- <dependency>
43+
- <groupId>org.apache.hadoop</groupId>
44+
- <artifactId>hadoop-archive-logs</artifactId>
45+
- <version>${hadoop.version}</version>
46+
- </dependency>
47+
<dependency>
48+
<groupId>org.apache.hadoop</groupId>
49+
<artifactId>hadoop-distcp</artifactId>
50+
diff --git a/hadoop-project/src/site/site.xml b/hadoop-project/src/site/site.xml
51+
index 8e85f379ef7..014df29226d 100644
52+
--- a/hadoop-project/src/site/site.xml
53+
+++ b/hadoop-project/src/site/site.xml
54+
@@ -192,7 +192,6 @@
55+
<menu name="Tools" inherit="top">
56+
<item name="Hadoop Streaming" href="hadoop-streaming/HadoopStreaming.html"/>
57+
<item name="Hadoop Archives" href="hadoop-archives/HadoopArchives.html"/>
58+
- <item name="Hadoop Archive Logs" href="hadoop-archive-logs/HadoopArchiveLogs.html"/>
59+
<item name="DistCp" href="hadoop-distcp/DistCp.html"/>
60+
<item name="HDFS Federation Balance" href="hadoop-federation-balance/HDFSFederationBalance.html"/>
61+
<item name="GridMix" href="hadoop-gridmix/GridMix.html"/>
62+
diff --git a/hadoop-tools/hadoop-tools-dist/pom.xml b/hadoop-tools/hadoop-tools-dist/pom.xml
63+
index afd7d33b494..68a5b04a0b2 100644
64+
--- a/hadoop-tools/hadoop-tools-dist/pom.xml
65+
+++ b/hadoop-tools/hadoop-tools-dist/pom.xml
66+
@@ -60,11 +60,6 @@
67+
<artifactId>hadoop-archives</artifactId>
68+
<scope>compile</scope>
69+
</dependency>
70+
- <dependency>
71+
- <groupId>org.apache.hadoop</groupId>
72+
- <artifactId>hadoop-archive-logs</artifactId>
73+
- <scope>compile</scope>
74+
- </dependency>
75+
<dependency>
76+
<groupId>org.apache.hadoop</groupId>
77+
<artifactId>hadoop-rumen</artifactId>
78+
diff --git a/hadoop-tools/pom.xml b/hadoop-tools/pom.xml
79+
index 1818c65e313..64927f70b6e 100644
80+
--- a/hadoop-tools/pom.xml
81+
+++ b/hadoop-tools/pom.xml
82+
@@ -35,7 +35,6 @@
83+
<module>hadoop-federation-balance</module>
84+
<module>hadoop-dynamometer</module>
85+
<module>hadoop-archives</module>
86+
- <module>hadoop-archive-logs</module>
87+
<module>hadoop-rumen</module>
88+
<module>hadoop-gridmix</module>
89+
<module>hadoop-datajoin</module>

0 commit comments

Comments
 (0)