[build] Bump Flink version to 1.18.0 (#2463)

This closes #2670.
Maciej Bryński authored 1 year ago · committed by GitHub
parent 07818221d2
commit 1e6b983bdd

@@ -73,8 +73,8 @@ This command automatically starts all the containers defined in the Docker Compo
We can also visit [http://localhost:5601/](http://localhost:5601/) to see if Kibana is running normally.
### Preparing Flink and JAR package required
-1. Download [Flink 1.17.0](https://archive.apache.org/dist/flink/flink-1.17.0/flink-1.17.0-bin-scala_2.12.tgz) and unzip it to the directory `flink-1.17.0`
-2. Download following JAR package required and put them under `flink-1.17.0/lib/`:
+1. Download [Flink 1.18.0](https://archive.apache.org/dist/flink/flink-1.18.0/flink-1.18.0-bin-scala_2.12.tgz) and unzip it to the directory `flink-1.18.0`
+2. Download following JAR package required and put them under `flink-1.18.0/lib/`:
**Download links are available only for stable releases, SNAPSHOT dependencies need to be built based on master or release- branches by yourself.**
- [flink-sql-connector-elasticsearch7-3.0.1-1.17.jar](https://repo.maven.apache.org/maven2/org/apache/flink/flink-sql-connector-elasticsearch7/3.0.1-1.17/flink-sql-connector-elasticsearch7-3.0.1-1.17.jar)
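A minimal shell sketch of the two download steps above, using the exact links from this hunk (illustrative; not part of the commit itself):
```shell
# Download and unpack Flink 1.18.0
wget https://archive.apache.org/dist/flink/flink-1.18.0/flink-1.18.0-bin-scala_2.12.tgz
tar -xzf flink-1.18.0-bin-scala_2.12.tgz

# Put the required connector jar under lib/
wget -P flink-1.18.0/lib/ https://repo.maven.apache.org/maven2/org/apache/flink/flink-sql-connector-elasticsearch7/3.0.1-1.17/flink-sql-connector-elasticsearch7-3.0.1-1.17.jar
```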
@@ -151,7 +151,7 @@ We can also visit [http://localhost:5601/](http://localhost:5601/) to see if Kib
1. Use the following command to change to the Flink directory:
```
-cd flink-1.17.0
+cd flink-1.18.0
```
2. Use the following command to start a Flink cluster:
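The start command itself falls outside this hunk; in a stock Flink distribution it is the standard launcher below, after which the Flink Web UI should be reachable at [http://localhost:8081/](http://localhost:8081/):
```shell
./bin/start-cluster.sh
```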
@@ -311,7 +311,7 @@ After finishing the tutorial, run the following command to stop all containers i
```shell
docker-compose down
```
-Run the following command to stop the Flink cluster in the directory of Flink `flink-1.17.0`:
+Run the following command to stop the Flink cluster in the directory of Flink `flink-1.18.0`:
```shell
./bin/stop-cluster.sh
```

@@ -63,8 +63,8 @@ This command automatically starts all the containers defined in the Docker Compo
We can also visit [http://localhost:5601/](http://localhost:5601/) to see if Kibana is running normally.
### Preparing Flink and JAR package required
-1. Download [Flink 1.17.0](https://archive.apache.org/dist/flink/flink-1.17.0/flink-1.17.0-bin-scala_2.12.tgz) and unzip it to the directory `flink-1.17.0`
-2. Download following JAR package required and put them under `flink-1.17.0/lib/`:
+1. Download [Flink 1.18.0](https://archive.apache.org/dist/flink/flink-1.18.0/flink-1.18.0-bin-scala_2.12.tgz) and unzip it to the directory `flink-1.18.0`
+2. Download following JAR package required and put them under `flink-1.18.0/lib/`:
**Download links are available only for stable releases, SNAPSHOT dependencies need to be built based on master or release- branches by yourself.**
- flink-sql-connector-mysql-cdc-2.5-SNAPSHOT.jar
@@ -116,7 +116,7 @@ We can also visit [http://localhost:5601/](http://localhost:5601/) to see if Kib
1. Use the following command to change to the Flink directory:
```
-cd flink-1.17.0
+cd flink-1.18.0
```
2. Use the following command to start a Flink cluster:
@@ -255,7 +255,7 @@ After finishing the tutorial, run the following command to stop all containers i
```shell
docker-compose down
```
-Run the following command to stop the Flink cluster in the directory of Flink `flink-1.17.0`:
+Run the following command to stop the Flink cluster in the directory of Flink `flink-1.18.0`:
```shell
./bin/stop-cluster.sh
```

@@ -69,8 +69,8 @@ docker-compose up -d
This command automatically starts all the containers defined in the Docker Compose configuration in detached mode. You can use docker ps to check whether these containers have started normally, and you can also visit [http://localhost:5601/](http://localhost:5601/) to check whether Kibana is running normally.
### Download Flink and the required dependency packages
-1. Download [Flink 1.17.0](https://archive.apache.org/dist/flink/flink-1.17.0/flink-1.17.0-bin-scala_2.12.tgz) and extract it to the directory `flink-1.17.0`
-2. Download the dependency packages listed below and put them under the directory `flink-1.17.0/lib/`:
+1. Download [Flink 1.18.0](https://archive.apache.org/dist/flink/flink-1.18.0/flink-1.18.0-bin-scala_2.12.tgz) and extract it to the directory `flink-1.18.0`
+2. Download the dependency packages listed below and put them under the directory `flink-1.18.0/lib/`:
**The download links are only valid for released versions; SNAPSHOT versions need to be built locally from the master or release- branches.**
- [flink-sql-connector-elasticsearch7-3.0.1-1.17.jar](https://repo.maven.apache.org/maven2/org/apache/flink/flink-sql-connector-elasticsearch7/3.0.1-1.17/flink-sql-connector-elasticsearch7-3.0.1-1.17.jar)
@@ -147,7 +147,7 @@ docker-compose up -d
1. Use the following command to change to the Flink directory:
```
-cd flink-1.17.0
+cd flink-1.18.0
```
2. Use the following command to start the Flink cluster:
@@ -308,7 +308,7 @@ Flink SQL> INSERT INTO enriched_orders
```shell
docker-compose down
```
-Run the following command in the Flink directory `flink-1.17.0` to stop the Flink cluster:
+Run the following command in the Flink directory `flink-1.18.0` to stop the Flink cluster:
```shell
./bin/stop-cluster.sh
```

@@ -105,8 +105,8 @@ VALUES (default, '2020-07-30 10:08:22', 'Jark', 50.50, 102, false),
```
### Download Flink and the required dependency packages
-1. Download [Flink 1.17.0](https://archive.apache.org/dist/flink/flink-1.17.0/flink-1.17.0-bin-scala_2.12.tgz) and extract it to the directory `flink-1.17.0`
-2. Download the dependency packages listed below and put them under the directory `flink-1.17.0/lib/`
+1. Download [Flink 1.18.0](https://archive.apache.org/dist/flink/flink-1.18.0/flink-1.18.0-bin-scala_2.12.tgz) and extract it to the directory `flink-1.18.0`
+2. Download the dependency packages listed below and put them under the directory `flink-1.18.0/lib/`
```The download links are only valid for released versions; SNAPSHOT versions need to be built locally from the master or release- branches.```
- For subscribing to PolarDB-X binlog: flink-sql-connector-mysql-cdc-2.5-SNAPSHOT.jar

@@ -35,7 +35,7 @@ import java.util.Map;
import java.util.Optional;
import java.util.regex.Pattern;
-import static org.apache.flink.shaded.guava30.com.google.common.base.Preconditions.checkNotNull;
+import static org.apache.flink.shaded.guava31.com.google.common.base.Preconditions.checkNotNull;
/** Parser for converting YAML formatted pipeline definition to {@link PipelineDef}. */
public class YamlPipelineDefinitionParser implements PipelineDefinitionParser {

@@ -16,7 +16,7 @@
package com.ververica.cdc.cli;
-import org.apache.flink.shaded.guava30.com.google.common.io.Resources;
+import org.apache.flink.shaded.guava31.com.google.common.io.Resources;
import com.ververica.cdc.cli.utils.FlinkEnvironmentUtils;
import com.ververica.cdc.composer.PipelineComposer;

@@ -18,8 +18,8 @@ package com.ververica.cdc.cli.parser;
import org.apache.flink.configuration.Configuration;
-import org.apache.flink.shaded.guava30.com.google.common.collect.ImmutableMap;
-import org.apache.flink.shaded.guava30.com.google.common.io.Resources;
+import org.apache.flink.shaded.guava31.com.google.common.collect.ImmutableMap;
+import org.apache.flink.shaded.guava31.com.google.common.io.Resources;
import com.ververica.cdc.composer.definition.PipelineDef;
import com.ververica.cdc.composer.definition.RouteDef;

@@ -22,7 +22,7 @@ import org.apache.flink.api.connector.source.SplitEnumerator;
import org.apache.flink.api.connector.source.SplitEnumeratorContext;
import org.apache.flink.util.FlinkRuntimeException;
-import org.apache.flink.shaded.guava30.com.google.common.collect.Lists;
+import org.apache.flink.shaded.guava31.com.google.common.collect.Lists;
import com.ververica.cdc.connectors.base.config.SourceConfig;
import com.ververica.cdc.connectors.base.source.assigner.SplitAssigner;

@@ -19,7 +19,7 @@ package com.ververica.cdc.connectors.base.source.reader.external;
import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.util.FlinkRuntimeException;
-import org.apache.flink.shaded.guava30.com.google.common.util.concurrent.ThreadFactoryBuilder;
+import org.apache.flink.shaded.guava31.com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.ververica.cdc.connectors.base.source.meta.split.SnapshotSplit;
import com.ververica.cdc.connectors.base.source.meta.split.SourceRecords;

@@ -18,7 +18,7 @@ package com.ververica.cdc.connectors.base.source.reader.external;
import org.apache.flink.util.FlinkRuntimeException;
-import org.apache.flink.shaded.guava30.com.google.common.util.concurrent.ThreadFactoryBuilder;
+import org.apache.flink.shaded.guava31.com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.ververica.cdc.connectors.base.source.meta.offset.Offset;
import com.ververica.cdc.connectors.base.source.meta.split.FinishedSnapshotSplitInfo;

@@ -37,7 +37,7 @@ import org.apache.flink.streaming.api.functions.source.RichSourceFunction;
import org.apache.flink.util.ExceptionUtils;
import org.apache.flink.util.FlinkRuntimeException;
-import org.apache.flink.shaded.guava30.com.google.common.util.concurrent.ThreadFactoryBuilder;
+import org.apache.flink.shaded.guava31.com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.ververica.cdc.debezium.internal.DebeziumChangeConsumer;
import com.ververica.cdc.debezium.internal.DebeziumChangeFetcher;

@@ -20,7 +20,7 @@ import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.util.FlinkRuntimeException;
-import org.apache.flink.shaded.guava30.com.google.common.util.concurrent.ThreadFactoryBuilder;
+import org.apache.flink.shaded.guava31.com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.github.shyiko.mysql.binlog.event.Event;
import com.github.shyiko.mysql.binlog.event.EventType;

@@ -19,7 +19,7 @@ package com.ververica.cdc.connectors.mysql.debezium.reader;
import org.apache.flink.annotation.VisibleForTesting;
import org.apache.flink.util.FlinkRuntimeException;
-import org.apache.flink.shaded.guava30.com.google.common.util.concurrent.ThreadFactoryBuilder;
+import org.apache.flink.shaded.guava31.com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.ververica.cdc.connectors.mysql.debezium.dispatcher.SignalEventDispatcher;
import com.ververica.cdc.connectors.mysql.debezium.task.MySqlBinlogSplitReadTask;

@@ -19,7 +19,7 @@ package com.ververica.cdc.connectors.mysql.source.assigners;
import org.apache.flink.util.FlinkRuntimeException;
import org.apache.flink.util.Preconditions;
-import org.apache.flink.shaded.guava30.com.google.common.util.concurrent.ThreadFactoryBuilder;
+import org.apache.flink.shaded.guava31.com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.ververica.cdc.connectors.mysql.debezium.DebeziumUtils;
import com.ververica.cdc.connectors.mysql.schema.MySqlSchema;

@@ -22,7 +22,7 @@ import org.apache.flink.api.connector.source.SplitEnumerator;
import org.apache.flink.api.connector.source.SplitEnumeratorContext;
import org.apache.flink.util.FlinkRuntimeException;
-import org.apache.flink.shaded.guava30.com.google.common.collect.Lists;
+import org.apache.flink.shaded.guava31.com.google.common.collect.Lists;
import com.ververica.cdc.connectors.mysql.source.assigners.MySqlBinlogSplitAssigner;
import com.ververica.cdc.connectors.mysql.source.assigners.MySqlHybridSplitAssigner;

@@ -19,7 +19,7 @@ package com.ververica.cdc.connectors.mysql.source.assigners;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.types.logical.RowType;
-import org.apache.flink.shaded.guava30.com.google.common.collect.Lists;
+import org.apache.flink.shaded.guava31.com.google.common.collect.Lists;
import com.ververica.cdc.connectors.mysql.source.MySqlSourceTestBase;
import com.ververica.cdc.connectors.mysql.source.assigners.state.ChunkSplitterState;

@@ -31,7 +31,7 @@ import org.apache.flink.types.Row;
import org.apache.flink.util.CloseableIterator;
import org.apache.flink.util.ExceptionUtils;
-import org.apache.flink.shaded.guava30.com.google.common.collect.Lists;
+import org.apache.flink.shaded.guava31.com.google.common.collect.Lists;
import com.ververica.cdc.connectors.mysql.debezium.DebeziumUtils;
import com.ververica.cdc.connectors.mysql.source.MySqlSourceTestBase;

@@ -22,8 +22,8 @@ import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.planner.factories.TestValuesTableFactory;
-import org.apache.flink.shaded.guava30.com.google.common.collect.Lists;
-import org.apache.flink.shaded.guava30.com.google.common.util.concurrent.RateLimiter;
+import org.apache.flink.shaded.guava31.com.google.common.collect.Lists;
+import org.apache.flink.shaded.guava31.com.google.common.util.concurrent.RateLimiter;
import org.junit.After;
import org.junit.Assume;

@@ -30,7 +30,7 @@ import org.apache.flink.streaming.api.functions.source.RichParallelSourceFunctio
import org.apache.flink.util.Collector;
import org.apache.flink.util.Preconditions;
-import org.apache.flink.shaded.guava30.com.google.common.util.concurrent.ThreadFactoryBuilder;
+import org.apache.flink.shaded.guava31.com.google.common.util.concurrent.ThreadFactoryBuilder;
import com.ververica.cdc.connectors.tidb.table.StartupMode;
import com.ververica.cdc.connectors.tidb.table.utils.TableKeyRangeUtils;

@@ -18,7 +18,7 @@ package com.ververica.cdc.connectors.tidb.table.utils;
import org.apache.flink.util.Preconditions;
-import org.apache.flink.shaded.guava30.com.google.common.collect.ImmutableList;
+import org.apache.flink.shaded.guava31.com.google.common.collect.ImmutableList;
import org.tikv.common.key.RowKey;
import org.tikv.common.util.KeyRangeUtils;

@@ -16,9 +16,9 @@
package org.tikv.cdc;
-import org.apache.flink.shaded.guava30.com.google.common.base.Preconditions;
-import org.apache.flink.shaded.guava30.com.google.common.collect.Range;
-import org.apache.flink.shaded.guava30.com.google.common.collect.TreeMultiset;
+import org.apache.flink.shaded.guava31.com.google.common.base.Preconditions;
+import org.apache.flink.shaded.guava31.com.google.common.collect.Range;
+import org.apache.flink.shaded.guava31.com.google.common.collect.TreeMultiset;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -18,7 +18,7 @@ package org.tikv.cdc;
import org.apache.flink.util.Preconditions;
-import org.apache.flink.shaded.guava30.com.google.common.collect.ImmutableSet;
+import org.apache.flink.shaded.guava31.com.google.common.collect.ImmutableSet;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -16,7 +16,7 @@
package com.ververica.cdc.connectors.vitess;
-import org.apache.flink.shaded.guava30.com.google.common.collect.Maps;
+import org.apache.flink.shaded.guava31.com.google.common.collect.Maps;
import com.ververica.cdc.debezium.Validator;
import io.debezium.connector.vitess.VitessConnector;
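All of the import changes above are one mechanical rename, `shaded.guava30` → `shaded.guava31`, matching the flink-shaded-guava bump in the root pom further down. A hedged sketch of how such a sweep could be applied from the repository root (assumes GNU grep/sed; not necessarily how this commit was produced):
```shell
# Rewrite every shaded-Guava import from guava30 to guava31 across the codebase
grep -rl 'org.apache.flink.shaded.guava30' --include='*.java' . \
  | xargs sed -i 's/org\.apache\.flink\.shaded\.guava30/org.apache.flink.shaded.guava31/g'
```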

@@ -26,11 +26,13 @@ under the License.
<artifactId>flink-cdc-source-e2e-tests</artifactId>
<properties>
-<flink-1.14>1.14.4</flink-1.14>
-<flink-1.15>1.15.2</flink-1.15>
-<flink-1.16>1.16.0</flink-1.16>
-<flink-1.17>1.17.0</flink-1.17>
-<jdbc.version-1.17>3.1.0-1.17</jdbc.version-1.17>
+<flink-1.14>1.14.6</flink-1.14>
+<flink-1.15>1.15.4</flink-1.15>
+<flink-1.16>1.16.2</flink-1.16>
+<flink-1.17>1.17.1</flink-1.17>
+<flink-1.18>1.18.0</flink-1.18>
+<jdbc.version-1.17>3.1.1-1.17</jdbc.version-1.17>
+<jdbc.version-1.18>3.1.1-1.17</jdbc.version-1.18>
<mysql.driver.version>8.0.27</mysql.driver.version>
<postgresql.driver.version>42.5.1</postgresql.driver.version>
</properties>
@@ -273,6 +275,16 @@ under the License.
</outputDirectory>
</artifactItem>
+<artifactItem>
+<groupId>org.apache.flink</groupId>
+<artifactId>flink-connector-jdbc</artifactId>
+<version>${jdbc.version-1.18}</version>
+<destFileName>jdbc-connector_${flink-1.18}.jar</destFileName>
+<type>jar</type>
+<outputDirectory>${project.build.directory}/dependencies
+</outputDirectory>
+</artifactItem>
<artifactItem>
<groupId>com.ververica</groupId>
<artifactId>flink-sql-connector-mysql-cdc</artifactId>
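The new artifactItem stages the 1.17-built JDBC connector (3.1.1-1.17) under a 1.18 file name. A quick check after building this module (illustrative; assumes the dependency-copy step is bound to the package phase):
```shell
# From flink-cdc-source-e2e-tests/ after `mvn package`
ls target/dependencies/ | grep jdbc-connector
# expected: jdbc-connector_1.18.0.jar (contents: flink-connector-jdbc 3.1.1-1.17)
```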
@@ -358,4 +370,4 @@ under the License.
</plugins>
</build>
-</project>
+</project>

@@ -124,11 +124,11 @@ public abstract class FlinkContainerTestEnvironment extends TestLogger {
@Parameterized.Parameters(name = "flinkVersion: {0}")
public static List<String> getFlinkVersion() {
-return Arrays.asList("1.14.4", "1.15.2", "1.16.0", "1.17.0");
+return Arrays.asList("1.14.6", "1.15.4", "1.16.2", "1.17.1", "1.18.0");
}
private static final List<String> FLINK_VERSION_WITH_SCALA_212 =
-Arrays.asList("1.15.2", "1.16.0", "1.17.0");
+Arrays.asList("1.15.4", "1.16.2", "1.17.1", "1.18.0");
@Before
public void before() {

@@ -71,14 +71,14 @@ under the License.
<flink.reuseForks>true</flink.reuseForks>
<!-- dependencies versions -->
-<flink.version>1.17.1</flink.version>
+<flink.version>1.18.0</flink.version>
<debezium.version>1.9.7.Final</debezium.version>
<tikv.version>3.2.0</tikv.version>
<geometry.version>2.2.0</geometry.version>
<testcontainers.version>1.18.3</testcontainers.version>
<hamcrest.version>1.3</hamcrest.version>
<version.awaitility>4.2.0</version.awaitility>
-<slf4j.version>1.7.15</slf4j.version>
+<slf4j.version>1.7.36</slf4j.version>
<log4j.version>2.17.1</log4j.version>
<spotless.version>2.4.2</spotless.version>
<oblogclient.version>1.1.0</oblogclient.version>
@@ -152,7 +152,7 @@ under the License.
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-shaded-guava</artifactId>
-<version>30.1.1-jre-16.1</version>
+<version>31.1-jre-17.0</version>
</dependency>
<!-- test dependencies -->
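With `flink.version` and the shaded-Guava artifact bumped together in the root pom, a local sanity build is the natural follow-up. A sketch assuming a standard Maven setup (flags illustrative; module name taken from the e2e pom above):
```shell
# Build everything against Flink 1.18.0 first, without tests
mvn clean install -DskipTests

# Then run the end-to-end source tests, which cover all supported Flink versions
mvn verify -pl flink-cdc-source-e2e-tests
```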
