Revert "[FLINK-36181] Use Java 17 by default (apache#25898)"
This reverts commit 997b483.
JunRuiLee committed Jan 20, 2025
1 parent 72d0148 commit d5472df
Showing 12 changed files with 68 additions and 100 deletions.
6 changes: 3 additions & 3 deletions .github/workflows/ci.yml
@@ -29,11 +29,11 @@ jobs:
name: "Pre-compile Checks"
uses: ./.github/workflows/template.pre-compile-checks.yml
ci:
-name: "Default (Java 17)"
+name: "Default (Java 11)"
uses: ./.github/workflows/template.flink-ci.yml
with:
-environment: 'PROFILE="-Dinclude_hadoop_aws -Djdk17 -Pjava17-target"'
-jdk_version: 17
+environment: 'PROFILE="-Dinclude_hadoop_aws"'
+jdk_version: 11
secrets:
s3_bucket: ${{ secrets.IT_CASE_S3_BUCKET }}
s3_access_key: ${{ secrets.IT_CASE_S3_ACCESS_KEY }}
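The restored default CI job drops the JDK 17 flags and builds on JDK 11 with only `-Dinclude_hadoop_aws` in its PROFILE. A rough local equivalent is sketched below; the JDK install path is an assumption, and the Maven flag mirrors the workflow input above.

```
# Sketch: reproduce the restored default CI build locally on JDK 11.
# The JAVA_HOME path is an assumption; -Dinclude_hadoop_aws mirrors the PROFILE above.
export JAVA_HOME=/usr/lib/jvm/java-11-openjdk
./mvnw clean install -DskipTests -Dinclude_hadoop_aws
```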
21 changes: 16 additions & 5 deletions .github/workflows/nightly.yml
@@ -39,12 +39,23 @@ jobs:
s3_bucket: ${{ secrets.IT_CASE_S3_BUCKET }}
s3_access_key: ${{ secrets.IT_CASE_S3_ACCESS_KEY }}
s3_secret_key: ${{ secrets.IT_CASE_S3_SECRET_KEY }}
+java17:
+name: "Java 17"
+uses: ./.github/workflows/template.flink-ci.yml
+with:
+workflow-caller-id: java17
+environment: 'PROFILE="-Dinclude_hadoop_aws -Djdk11 -Djdk17 -Pjava17-target"'
+jdk_version: 17
+secrets:
+s3_bucket: ${{ secrets.IT_CASE_S3_BUCKET }}
+s3_access_key: ${{ secrets.IT_CASE_S3_ACCESS_KEY }}
+s3_secret_key: ${{ secrets.IT_CASE_S3_SECRET_KEY }}
java21:
name: "Java 21"
uses: ./.github/workflows/template.flink-ci.yml
with:
workflow-caller-id: java21
-environment: 'PROFILE="-Dinclude_hadoop_aws -Djdk21 -Pjava21-target"'
+environment: 'PROFILE="-Dinclude_hadoop_aws -Djdk11 -Djdk17 -Djdk21 -Pjava21-target"'
jdk_version: 21
secrets:
s3_bucket: ${{ secrets.IT_CASE_S3_BUCKET }}
@@ -55,8 +66,8 @@ jobs:
uses: ./.github/workflows/template.flink-ci.yml
with:
workflow-caller-id: hadoop313
-environment: 'PROFILE="-Dflink.hadoop.version=3.2.3 -Phadoop3-tests,hive3 -Djdk17 -Pjava17-target"'
-jdk_version: 17
+environment: 'PROFILE="-Dflink.hadoop.version=3.2.3 -Phadoop3-tests,hive3"'
+jdk_version: 11
secrets:
s3_bucket: ${{ secrets.IT_CASE_S3_BUCKET }}
s3_access_key: ${{ secrets.IT_CASE_S3_ACCESS_KEY }}
@@ -66,8 +77,8 @@ jobs:
uses: ./.github/workflows/template.flink-ci.yml
with:
workflow-caller-id: adaptive-scheduler
-environment: 'PROFILE="-Penable-adaptive-scheduler -Djdk17 -Pjava17-target"'
-jdk_version: 17
+environment: 'PROFILE="-Penable-adaptive-scheduler"'
+jdk_version: 11
secrets:
s3_bucket: ${{ secrets.IT_CASE_S3_BUCKET }}
s3_access_key: ${{ secrets.IT_CASE_S3_ACCESS_KEY }}
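With the revert, JDK 17 returns to the nightly matrix as its own job, and the Java 21 job again stacks all `-Djdk*` flags on top of the Java 11 baseline. A rough local equivalent of the restored `java17` nightly profile (the JDK path is an assumption; the Maven flags mirror the job's PROFILE):

```
# Sketch: build with the restored "Java 17" nightly profile (JAVA_HOME path is an assumption).
export JAVA_HOME=/usr/lib/jvm/java-17-openjdk
./mvnw clean install -DskipTests -Dinclude_hadoop_aws -Djdk11 -Djdk17 -Pjava17-target
```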
2 changes: 1 addition & 1 deletion .github/workflows/template.flink-ci.yml
@@ -30,7 +30,7 @@ on:
type: string
jdk_version:
description: "The Java version to use."
-default: 17
+default: 11
type: number
secrets:
s3_bucket:
4 changes: 2 additions & 2 deletions .github/workflows/template.pre-compile-checks.yml
@@ -23,15 +23,15 @@ on:
inputs:
jdk_version:
description: "The JDK version that shall be used as a default within the Flink CI Docker container."
-default: "17"
+default: "11"
type: choice
options: ["11", "17", "21"]

workflow_call:
inputs:
jdk_version:
description: "The JDK version that shall be used as a default within the Flink CI Docker container."
-default: 17
+default: 11
type: number

permissions: read-all
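Both the manual (`workflow_dispatch`) and reusable (`workflow_call`) triggers now default to JDK 11 again, while the choice input still allows 17 or 21 for a one-off run. The command below is a hypothetical example of such an override via the GitHub CLI; it assumes the workflow can be addressed by its file name.

```
# Hypothetical manual dispatch on JDK 17 (the value must be one of the declared options: 11, 17, 21).
gh workflow run template.pre-compile-checks.yml -f jdk_version=17
```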
37 changes: 3 additions & 34 deletions README.md
@@ -104,46 +104,15 @@ Prerequisites for building Flink:
* Unix-like environment (we use Linux, Mac OS X, Cygwin, WSL)
* Git
* Maven (we require version 3.8.6)
-* Java (version 11, 17, or 21)

-### Basic Build Instructions

-First, clone the repository:
+* Java 11

```
git clone https://github.com/apache/flink.git
cd flink
+./mvnw clean package -DskipTests # this will take up to 10 minutes
```

-Then, choose one of the following commands based on your preferred Java version:

-**For Java 11**

-```
-./mvnw clean package -DskipTests -Djdk11 -Pjava11-target
-```

-**For Java 17 (Default)**

-```
-./mvnw clean package -DskipTests -Djdk17 -Pjava17-target
-```

-**For Java 21**

-```
-./mvnw clean package -DskipTests -Djdk21 -Pjava21-target
-```

-The build process will take approximately 10 minutes to complete.
-Flink will be installed in `build-target`.

-### Notes

-* Make sure your JAVA_HOME environment variable points to the correct JDK version
-* The build command uses Maven wrapper (mvnw) which ensures the correct Maven version is used
-* The -DskipTests flag skips running tests to speed up the build process
-* Each Java version requires its corresponding profile (-Pjava<version>-target) and JDK flag (-Djdk<version>)
+Flink is now installed in `build-target`.

## Developing Flink

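The restored README documents only the Java 11 build and drops the per-version `-Djdk<version>`/`-Pjava<version>-target` commands. A minimal sanity check of those restored instructions, with an assumed JDK 11 install path:

```
# Sketch: verify the restored default build (the JAVA_HOME path is an assumption).
export JAVA_HOME=/usr/lib/jvm/java-11-openjdk
java -version                      # should report a Java 11 runtime
./mvnw clean package -DskipTests   # takes up to ~10 minutes
ls build-target/                   # the assembled distribution should appear here
```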
4 changes: 2 additions & 2 deletions azure-pipelines.yml
@@ -76,10 +76,10 @@ stages:
vmImage: 'ubuntu-22.04'
e2e_pool_definition:
vmImage: 'ubuntu-22.04'
-environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk17 -Pjava17-target"
+environment: PROFILE="-Dflink.hadoop.version=2.10.2"
run_end_to_end: false
container: flink-build-container
-jdk: 17
+jdk: 11
- job: docs_404_check # run on a MSFT provided machine
pool:
vmImage: 'ubuntu-22.04'
8 changes: 4 additions & 4 deletions flink-dist-scala/src/main/resources/META-INF/NOTICE
@@ -10,7 +10,7 @@ This project bundles the following dependencies under the Apache Software Licens

The following dependencies all share the same BSD license which you find under licenses/LICENSE.scala.

-- org.scala-lang:scala-compiler:2.12.20
-- org.scala-lang:scala-library:2.12.20
-- org.scala-lang:scala-reflect:2.12.20
-- org.scala-lang.modules:scala-xml_2.12:2.3.0
+- org.scala-lang:scala-compiler:2.12.7
+- org.scala-lang:scala-library:2.12.7
+- org.scala-lang:scala-reflect:2.12.7
+- org.scala-lang.modules:scala-xml_2.12:1.0.6
4 changes: 2 additions & 2 deletions flink-end-to-end-tests/test-scripts/common_docker.sh
@@ -46,10 +46,10 @@ function build_image() {
start_file_server
local server_pid=$!

-echo "Preparing Dockerfiles"
+echo "Preparing Dockeriles"
retry_times_with_exponential_backoff 5 git clone https://github.com/apache/flink-docker.git --branch dev-master --single-branch

-local java_version=17
+local java_version=11
if [[ ${PROFILE} == *"jdk17"* ]]; then
java_version=17
fi
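The script falls back to Java 11 for the Docker images and only bumps the version when the CI PROFILE carries a `-Djdk17` flag. A simplified sketch of that restored logic; the example PROFILE value and the echo message are illustrative.

```
# Simplified sketch of the restored version selection in common_docker.sh.
PROFILE="-Dinclude_hadoop_aws -Djdk11 -Djdk17 -Pjava17-target"   # example value
java_version=11
if [[ ${PROFILE} == *"jdk17"* ]]; then
  java_version=17
fi
echo "Building flink-docker images for Java ${java_version}"
```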
2 changes: 2 additions & 0 deletions flink-rpc/flink-rpc-akka/pom.xml
@@ -38,6 +38,8 @@ under the License.

<properties>
<pekko.version>1.1.2</pekko.version>
+<scala.binary.version>2.12</scala.binary.version>
+<scala.version>2.12.16</scala.version>
</properties>

<dependencies>
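The revert reintroduces module-level Scala properties for flink-rpc-akka. One way to confirm which Scala version the module resolves to, assuming the Maven Help plugin is available:

```
# Sketch: print the effective scala.version for the flink-rpc-akka module.
./mvnw -pl flink-rpc/flink-rpc-akka help:evaluate -Dexpression=scala.version -q -DforceStdout
```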
6 changes: 3 additions & 3 deletions flink-rpc/flink-rpc-akka/src/main/resources/META-INF/NOTICE
@@ -6,6 +6,6 @@ The Apache Software Foundation (http://www.apache.org/).

The following dependencies all share the same BSD license which you find under licenses/LICENSE.scala.

-- org.scala-lang:scala-compiler:2.12.20
-- org.scala-lang:scala-library:2.12.20
-- org.scala-lang:scala-reflect:2.12.20
+- org.scala-lang:scala-compiler:2.12.7
+- org.scala-lang:scala-library:2.12.7
+- org.scala-lang:scala-reflect:2.12.7
44 changes: 15 additions & 29 deletions pom.xml
@@ -124,8 +124,7 @@ under the License.
<flink.shaded.jackson.version>2.15.3</flink.shaded.jackson.version>
<flink.shaded.jsonpath.version>2.7.0</flink.shaded.jsonpath.version>
<flink.markBundledAsOptional>true</flink.markBundledAsOptional>
-<source.java.version>11</source.java.version>
-<target.java.version>17</target.java.version>
+<target.java.version>11</target.java.version>
<slf4j.version>1.7.36</slf4j.version>
<log4j.version>2.24.1</log4j.version>
<!-- Overwrite default values from parent pom.
@@ -134,7 +133,9 @@
<maven.compiler.source>${target.java.version}</maven.compiler.source>
<maven.compiler.target>${target.java.version}</maven.compiler.target>
<scala.macros.version>2.1.1</scala.macros.version>
-<scala.version>2.12.20</scala.version>
+<!-- Default scala versions, must be overwritten by build profiles, so we set something
+invalid here -->
+<scala.version>2.12.7</scala.version>
<scala.binary.version>2.12</scala.binary.version>
<chill.version>0.7.6</chill.version>
<!-- keep FlinkTestcontainersConfigurator.configureZookeeperContainer in sync -->
@@ -943,7 +944,7 @@ under the License.
<profile>
<id>scala-2.12</id>
<properties>
-<scala.version>2.12.20</scala.version>
+<scala.version>2.12.7</scala.version>
<scala.binary.version>2.12</scala.binary.version>
</properties>
<activation>
@@ -1063,34 +1064,15 @@
</build>
</profile>

-<profile>
-<id>java11-target</id>
-
-<properties>
-<target.java.version>11</target.java.version>
-</properties>
-
-<build>
-<plugins>
-<plugin>
-<groupId>org.apache.maven.plugins</groupId>
-<artifactId>maven-compiler-plugin</artifactId>
-<configuration>
-<source>11</source>
-<target>11</target>
-</configuration>
-</plugin>
-</plugins>
-</build>
-</profile>

<profile>
<id>java17</id>
<activation>
<jdk>[17,)</jdk>
</activation>

<properties>
+<!-- Bump Scala because 2.12.7 doesn't compile on Java 17. -->
+<scala.version>2.12.15</scala.version>
<surefire.excludedGroups.jdk>org.apache.flink.testutils.junit.FailsOnJava17</surefire.excludedGroups.jdk>
</properties>

@@ -1146,6 +1128,11 @@
<jdk>[21,)</jdk>
</activation>

+<properties>
+<!-- Bump Scala because before 2.12.18 doesn't compile on Java 21. -->
+<scala.version>2.12.18</scala.version>
+</properties>

<build>
<pluginManagement>
<plugins>
@@ -1399,7 +1386,7 @@
</property>
</activation>
<properties>
-<target.java.version>17</target.java.version>
+<target.java.version>11</target.java.version>
</properties>
<build>
<plugins>
@@ -1431,7 +1418,7 @@
<!-- versions for certain build tools are enforced to match the CI setup -->
<!-- the rules below should stay in sync with Flink Release wiki documentation and the CI scripts -->
<requireJavaVersion>
-<version>[${target.java.version}.*)</version>
+<version>[11.0.0,11.1.0)</version>
</requireJavaVersion>
</rules>
</configuration>
@@ -2088,8 +2075,7 @@
<artifactId>maven-compiler-plugin</artifactId>
<version>3.8.0</version>
<configuration>
-<!-- Make sure that we only use Java 11 compatible APIs -->
-<source>${source.java.version}</source>
+<source>${target.java.version}</source>
<target>${target.java.version}</target>
<!-- The semantics of this option are reversed, see MCOMPILER-209. -->
<useIncrementalCompilation>false</useIncrementalCompilation>
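Net effect of the root pom changes: `target.java.version` drops back to 11, the dedicated `java11-target` profile is removed because Java 11 is the default target again, and the `java17`/`java21` profiles regain their Scala version bumps. Two quick checks of the restored defaults, assuming the Maven Help plugin is available:

```
# Sketch: confirm the restored compiler target and list the active profiles on the current JDK.
./mvnw help:evaluate -Dexpression=target.java.version -q -DforceStdout   # expected to print 11
./mvnw help:active-profiles -N                                           # java17/java21 should only appear on newer JDKs
```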
30 changes: 15 additions & 15 deletions tools/azure-pipelines/build-apache-repo.yml
@@ -69,10 +69,10 @@ stages:
name: Default
e2e_pool_definition:
vmImage: 'ubuntu-22.04'
-environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk17 -Pjava17-target"
+environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk11 -Pjava11-target"
run_end_to_end: false
container: flink-build-container
-jdk: 17
+jdk: 11
- job: docs_404_check # run on a MSFT provided machine
pool:
vmImage: 'ubuntu-22.04'
@@ -103,50 +103,50 @@ stages:
- template: build-nightly-dist.yml
parameters:
stage_name: cron_snapshot_deployment
-environment: PROFILE="-Djdk17 -Pjava17-target"
+environment: PROFILE="-Djdk11 -Pjava11-target"
container: flink-build-container
-jdk: 17
+jdk: 11
- template: jobs-template.yml
parameters:
stage_name: cron_azure
test_pool_definition:
vmImage: 'ubuntu-22.04'
e2e_pool_definition:
vmImage: 'ubuntu-22.04'
-environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk17 -Pjava17-target"
+environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk11 -Pjava11-target"
run_end_to_end: true
container: flink-build-container
-jdk: 17
+jdk: 11
- template: jobs-template.yml
parameters:
stage_name: cron_hadoop313
test_pool_definition:
name: Default
e2e_pool_definition:
vmImage: 'ubuntu-22.04'
-environment: PROFILE="-Dflink.hadoop.version=3.2.3 -Phadoop3-tests,hive3 -Djdk17 -Pjava17-target"
+environment: PROFILE="-Dflink.hadoop.version=3.2.3 -Phadoop3-tests,hive3 -Djdk11 -Pjava11-target"
run_end_to_end: true
container: flink-build-container
-jdk: 17
+jdk: 11
- template: jobs-template.yml
parameters:
-stage_name: cron_jdk11
+stage_name: cron_jdk17
test_pool_definition:
name: Default
e2e_pool_definition:
vmImage: 'ubuntu-22.04'
-environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk11 -Pjava11-target"
+environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk11 -Djdk17 -Pjava17-target"
run_end_to_end: true
container: flink-build-container
-jdk: 11
+jdk: 17
- template: jobs-template.yml
parameters:
stage_name: cron_jdk21
test_pool_definition:
name: Default
e2e_pool_definition:
vmImage: 'ubuntu-22.04'
-environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk21 -Pjava21-target"
+environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk11 -Djdk17 -Djdk21 -Pjava21-target"
run_end_to_end: true
container: flink-build-container
jdk: 21
@@ -157,10 +157,10 @@
name: Default
e2e_pool_definition:
vmImage: 'ubuntu-22.04'
-environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Penable-adaptive-scheduler -Djdk17 -Pjava17-target"
+environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Penable-adaptive-scheduler -Djdk11 -Pjava11-target"
run_end_to_end: true
container: flink-build-container
-jdk: 17
+jdk: 11
- job: docs_404_check # run on a MSFT provided machine
pool:
vmImage: 'ubuntu-22.04'
Expand All @@ -172,5 +172,5 @@ stages:
- template: build-python-wheels.yml
parameters:
stage_name: cron_python_wheels
-environment: PROFILE="-Dflink.hadoop.version=2.10.2 -Djdk17 -Pjava17-target"
+environment: PROFILE="-Dflink.hadoop.version=2.10.2"
container: flink-build-container
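The cron stages follow the same pattern as the nightly GitHub workflow: JDK 11 is the baseline, and the JDK 17/21 stages opt in through stacked `-Djdk*` flags plus the matching `-Pjava*-target` profile. For reference, a rough local equivalent of the restored `cron_jdk21` profile (the JDK path is an assumption):

```
# Sketch: build with the restored cron_jdk21 profile locally (JAVA_HOME path is an assumption).
export JAVA_HOME=/usr/lib/jvm/java-21-openjdk
./mvnw clean install -DskipTests -Dflink.hadoop.version=2.10.2 -Djdk11 -Djdk17 -Djdk21 -Pjava21-target
```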
