[common] Bump Flink version to 1.15.2 (#1504)

This closes #1363. 

Co-authored-by: Hang Ruan <ruanhang1993@hotmail.com>
Authored by Tigran Manasyan, committed by GitHub
parent 8976f25925
commit 5b3dd9884e

@@ -54,14 +54,14 @@ under the License.
<dependency>
<groupId>org.apache.flink</groupId>
- <artifactId>flink-table-runtime_${scala.binary.version}</artifactId>
+ <artifactId>flink-table-runtime</artifactId>
<version>${flink.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
- <artifactId>flink-test-utils_${scala.binary.version}</artifactId>
+ <artifactId>flink-test-utils</artifactId>
<version>${flink.version}</version>
<scope>test</scope>
</dependency>
@@ -82,7 +82,7 @@ under the License.
<dependency>
<groupId>org.apache.flink</groupId>
- <artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
+ <artifactId>flink-streaming-java</artifactId>
<version>${flink.version}</version>
<type>test-jar</type>
<scope>test</scope>

@@ -30,6 +30,7 @@ under the License.
<properties>
<flink-1.13>1.13.6</flink-1.13>
<flink-1.14>1.14.4</flink-1.14>
+ <flink-1.15>1.15.2</flink-1.15>
<mysql.driver.version>8.0.27</mysql.driver.version>
</properties>
@@ -209,6 +210,16 @@ under the License.
</outputDirectory>
</artifactItem>
+ <artifactItem>
+ <groupId>org.apache.flink</groupId>
+ <artifactId>flink-connector-jdbc</artifactId>
+ <version>${flink-1.15}</version>
+ <destFileName>jdbc-connector_${flink-1.15}.jar</destFileName>
+ <type>jar</type>
+ <outputDirectory>${project.build.directory}/dependencies
+ </outputDirectory>
+ </artifactItem>
<artifactItem>
<groupId>com.ververica</groupId>
<artifactId>flink-sql-connector-mysql-cdc</artifactId>

@@ -121,7 +121,7 @@ public abstract class FlinkContainerTestEnvironment extends TestLogger {
@Parameterized.Parameters(name = "flinkVersion: {0}")
public static List<String> getFlinkVersion() {
- return Arrays.asList("1.13.6", "1.14.4");
+ return Arrays.asList("1.13.6", "1.14.4", "1.15.2");
}
@Before
@@ -258,6 +258,9 @@ public abstract class FlinkContainerTestEnvironment extends TestLogger {
}
private String getFlinkDockerImageTag() {
+ if ("1.15.2".equals(flinkVersion)) {
+ return String.format("flink:%s-scala_2.12", flinkVersion);
+ }
return String.format("flink:%s-scala_2.11", flinkVersion);
}

@@ -52,7 +52,7 @@ under the License.
<dependency>
<groupId>org.apache.flink</groupId>
- <artifactId>flink-test-utils_${scala.binary.version}</artifactId>
+ <artifactId>flink-test-utils</artifactId>
<version>${flink.version}</version>
<scope>test</scope>
</dependency>

@@ -81,7 +81,7 @@ under the License.
<dependency>
<groupId>org.apache.flink</groupId>
- <artifactId>flink-test-utils_${scala.binary.version}</artifactId>
+ <artifactId>flink-test-utils</artifactId>
<version>${flink.version}</version>
<scope>test</scope>
</dependency>
@@ -96,7 +96,7 @@ under the License.
<dependency>
<groupId>org.apache.flink</groupId>
- <artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
+ <artifactId>flink-streaming-java</artifactId>
<version>${flink.version}</version>
<type>test-jar</type>
<scope>test</scope>

@@ -108,14 +108,14 @@ under the License.
<dependency>
<groupId>org.apache.flink</groupId>
- <artifactId>flink-table-runtime_${scala.binary.version}</artifactId>
+ <artifactId>flink-table-runtime</artifactId>
<version>${flink.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
- <artifactId>flink-test-utils_${scala.binary.version}</artifactId>
+ <artifactId>flink-test-utils</artifactId>
<version>${flink.version}</version>
<scope>test</scope>
</dependency>
@@ -136,7 +136,7 @@ under the License.
<dependency>
<groupId>org.apache.flink</groupId>
- <artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
+ <artifactId>flink-streaming-java</artifactId>
<version>${flink.version}</version>
<type>test-jar</type>
<scope>test</scope>

@@ -21,7 +21,6 @@ import org.apache.flink.table.data.StringData;
import org.apache.flink.table.types.logical.ArrayType;
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.LogicalTypeFamily;
- import org.apache.flink.table.types.logical.utils.LogicalTypeChecks;
import com.esri.core.geometry.ogc.OGCGeometry;
import com.fasterxml.jackson.databind.JsonNode;
@@ -115,8 +114,7 @@ public class MySqlDeserializationConverterFactory {
private static Optional<DeserializationRuntimeConverter> createArrayConverter(
ArrayType arrayType) {
- if (LogicalTypeChecks.hasFamily(
- arrayType.getElementType(), LogicalTypeFamily.CHARACTER_STRING)) {
+ if (hasFamily(arrayType.getElementType(), LogicalTypeFamily.CHARACTER_STRING)) {
// only map MySQL SET type to Flink ARRAY<STRING> type
return Optional.of(
new DeserializationRuntimeConverter() {
@@ -148,4 +146,8 @@ public class MySqlDeserializationConverterFactory {
return Optional.empty();
}
}
+ private static boolean hasFamily(LogicalType logicalType, LogicalTypeFamily family) {
+ return logicalType.getTypeRoot().getFamilies().contains(family);
+ }
}
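For context (not part of the commit), the new private helper mirrors the family check previously delegated to LogicalTypeChecks.hasFamily, so the factory no longer relies on that utility class. A minimal sketch under that assumption; the class name HasFamilyExample and the VARCHAR element type are hypothetical:

// Illustrative only: the same family check the new hasFamily helper performs.
import org.apache.flink.table.types.logical.LogicalType;
import org.apache.flink.table.types.logical.LogicalTypeFamily;
import org.apache.flink.table.types.logical.VarCharType;

public class HasFamilyExample {
    public static void main(String[] args) {
        LogicalType elementType = new VarCharType(VarCharType.MAX_LENGTH);
        // VARCHAR belongs to the CHARACTER_STRING family, so this prints "true".
        System.out.println(
                elementType.getTypeRoot().getFamilies().contains(LogicalTypeFamily.CHARACTER_STRING));
    }
}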

@@ -16,19 +16,23 @@
package com.ververica.cdc.connectors.mysql;
+ import org.apache.flink.api.common.JobStatus;
import org.apache.flink.api.common.state.BroadcastState;
import org.apache.flink.api.common.state.KeyedStateStore;
import org.apache.flink.api.common.state.ListState;
import org.apache.flink.api.common.state.ListStateDescriptor;
import org.apache.flink.api.common.state.MapStateDescriptor;
import org.apache.flink.api.common.state.OperatorStateStore;
+ import org.apache.flink.api.common.time.Deadline;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.configuration.Configuration;
+ import org.apache.flink.core.execution.JobClient;
import org.apache.flink.runtime.state.FunctionInitializationContext;
import org.apache.flink.streaming.runtime.streamrecord.StreamRecord;
import org.apache.flink.streaming.util.MockStreamingRuntimeContext;
import org.apache.flink.util.Collector;
import org.apache.flink.util.Preconditions;
+ import org.apache.flink.util.function.SupplierWithException;
import com.ververica.cdc.connectors.mysql.testutils.UniqueDatabase;
import com.ververica.cdc.connectors.utils.TestSourceContext;
@@ -43,6 +47,7 @@ import java.util.Properties;
import java.util.Set;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;
+ import java.util.concurrent.TimeoutException;
/** Utils to help test. */
public class MySqlTestUtils {
@@ -108,6 +113,53 @@ public class MySqlTestUtils {
return allRecords;
}
+ public static void waitUntilCondition(
+ SupplierWithException<Boolean, Exception> condition,
+ Deadline timeout,
+ long retryIntervalMillis,
+ String errorMsg)
+ throws Exception {
+ while (timeout.hasTimeLeft() && !(Boolean) condition.get()) {
+ long timeLeft = Math.max(0L, timeout.timeLeft().toMillis());
+ Thread.sleep(Math.min(retryIntervalMillis, timeLeft));
+ }
+ if (!timeout.hasTimeLeft()) {
+ throw new TimeoutException(errorMsg);
+ }
+ }
+ public static void waitForJobStatus(
+ JobClient client, List<JobStatus> expectedStatus, Deadline deadline) throws Exception {
+ waitUntilCondition(
+ () -> {
+ JobStatus currentStatus = (JobStatus) client.getJobStatus().get();
+ if (expectedStatus.contains(currentStatus)) {
+ return true;
+ } else if (currentStatus.isTerminalState()) {
+ try {
+ client.getJobExecutionResult().get();
+ } catch (Exception var4) {
+ throw new IllegalStateException(
+ String.format(
+ "Job has entered %s state, but expecting %s",
+ currentStatus, expectedStatus),
+ var4);
+ }
+ throw new IllegalStateException(
+ String.format(
+ "Job has entered a terminal state %s, but expecting %s",
+ currentStatus, expectedStatus));
+ } else {
+ return false;
+ }
+ },
+ deadline,
+ 100L,
+ "Condition was not met in given timeout.");
+ }
private static Properties createDebeziumProperties(boolean useLegacyImplementation) {
Properties debeziumProps = new Properties();
if (useLegacyImplementation) {

@@ -57,8 +57,8 @@ import java.util.stream.Collectors;
import java.util.stream.Stream;
import static com.ververica.cdc.connectors.mysql.LegacyMySqlSourceTest.currentMySqlLatestOffset;
+ import static com.ververica.cdc.connectors.mysql.MySqlTestUtils.waitForJobStatus;
import static org.apache.flink.api.common.JobStatus.RUNNING;
- import static org.apache.flink.runtime.testutils.CommonTestUtils.waitForJobStatus;
import static org.junit.Assert.assertEquals;
/** Integration tests for MySQL Table source. */
@@ -70,8 +70,7 @@ public class MySqlConnectorITCase extends MySqlSourceTestBase {
private static final String TEST_USER = "mysqluser";
private static final String TEST_PASSWORD = "mysqlpw";
- private static final MySqlContainer MYSQL8_CONTAINER =
- (MySqlContainer) createMySqlContainer(MySqlVersion.V8_0).withExposedPorts(3307);
+ private static final MySqlContainer MYSQL8_CONTAINER = createMySqlContainer(MySqlVersion.V8_0);
private final UniqueDatabase inventoryDatabase =
new UniqueDatabase(MYSQL_CONTAINER, "inventory", TEST_USER, TEST_PASSWORD);
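For reference (not part of the diff), a minimal sketch of how a test can call the relocated MySqlTestUtils.waitForJobStatus imported above; the class name WaitForRunningExample and method waitUntilRunning are hypothetical:

// Illustrative only: block until the submitted job reports RUNNING before asserting on output.
import org.apache.flink.api.common.JobStatus;
import org.apache.flink.api.common.time.Deadline;
import org.apache.flink.core.execution.JobClient;

import com.ververica.cdc.connectors.mysql.MySqlTestUtils;

import java.time.Duration;
import java.util.Collections;

class WaitForRunningExample {
    // Fails with a TimeoutException if the job does not reach RUNNING within 60 seconds.
    static void waitUntilRunning(JobClient jobClient) throws Exception {
        MySqlTestUtils.waitForJobStatus(
                jobClient,
                Collections.singletonList(JobStatus.RUNNING),
                Deadline.fromNow(Duration.ofSeconds(60)));
    }
}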

@@ -71,22 +71,34 @@ under the License.
<dependency>
<groupId>org.apache.flink</groupId>
- <artifactId>flink-table-runtime_${scala.binary.version}</artifactId>
+ <artifactId>flink-table-runtime</artifactId>
<version>${flink.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
- <artifactId>flink-test-utils_${scala.binary.version}</artifactId>
+ <artifactId>flink-test-utils</artifactId>
<version>${flink.version}</version>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.testcontainers</groupId>
<artifactId>testcontainers</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
<artifactId>flink-connector-test-utils</artifactId>
<version>${flink.version}</version>
<scope>test</scope>
<exclusions>
<exclusion>
<groupId>org.testcontainers</groupId>
<artifactId>testcontainers</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
@@ -99,7 +111,7 @@ under the License.
<dependency>
<groupId>org.apache.flink</groupId>
- <artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
+ <artifactId>flink-streaming-java</artifactId>
<version>${flink.version}</version>
<type>test-jar</type>
<scope>test</scope>

@@ -87,7 +87,7 @@ under the License.
<dependency>
<groupId>org.apache.flink</groupId>
- <artifactId>flink-test-utils_${scala.binary.version}</artifactId>
+ <artifactId>flink-test-utils</artifactId>
<version>${flink.version}</version>
<scope>test</scope>
</dependency>
@@ -102,7 +102,7 @@ under the License.
<dependency>
<groupId>org.apache.flink</groupId>
- <artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
+ <artifactId>flink-streaming-java</artifactId>
<version>${flink.version}</version>
<type>test-jar</type>
<scope>test</scope>

@@ -89,7 +89,7 @@ under the License.
<dependency>
<groupId>org.apache.flink</groupId>
- <artifactId>flink-test-utils_${scala.binary.version}</artifactId>
+ <artifactId>flink-test-utils</artifactId>
<version>${flink.version}</version>
<scope>test</scope>
</dependency>
@@ -104,7 +104,7 @@ under the License.
<dependency>
<groupId>org.apache.flink</groupId>
- <artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
+ <artifactId>flink-streaming-java</artifactId>
<version>${flink.version}</version>
<type>test-jar</type>
<scope>test</scope>

@@ -77,7 +77,7 @@ under the License.
<dependency>
<groupId>org.apache.flink</groupId>
- <artifactId>flink-test-utils_${scala.binary.version}</artifactId>
+ <artifactId>flink-test-utils</artifactId>
<version>${flink.version}</version>
<scope>test</scope>
</dependency>
@@ -92,7 +92,7 @@ under the License.
<dependency>
<groupId>org.apache.flink</groupId>
- <artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
+ <artifactId>flink-streaming-java</artifactId>
<version>${flink.version}</version>
<type>test-jar</type>
<scope>test</scope>

@@ -42,7 +42,7 @@ under the License.
<dependency>
<groupId>org.apache.flink</groupId>
- <artifactId>flink-test-utils_${scala.binary.version}</artifactId>
+ <artifactId>flink-test-utils</artifactId>
<version>${flink.version}</version>
</dependency>

@@ -88,14 +88,14 @@ under the License.
<dependency>
<groupId>org.apache.flink</groupId>
- <artifactId>flink-table-runtime_${scala.binary.version}</artifactId>
+ <artifactId>flink-table-runtime</artifactId>
<version>${flink.version}</version>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
- <artifactId>flink-test-utils_${scala.binary.version}</artifactId>
+ <artifactId>flink-test-utils</artifactId>
<version>${flink.version}</version>
<scope>test</scope>
</dependency>
@@ -116,7 +116,7 @@ under the License.
<dependency>
<groupId>org.apache.flink</groupId>
- <artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
+ <artifactId>flink-streaming-java</artifactId>
<version>${flink.version}</version>
<type>test-jar</type>
<scope>test</scope>

@@ -71,7 +71,7 @@ under the License.
</distributionManagement>
<properties>
- <flink.version>1.14.4</flink.version>
+ <flink.version>1.15.2</flink.version>
<debezium.version>1.6.4.Final</debezium.version>
<tikv.version>3.2.0</tikv.version>
<geometry.version>2.2.0</geometry.version>
@@ -81,7 +81,7 @@ under the License.
See more https://github.com/testcontainers/testcontainers-java/issues/4297. -->
<testcontainers.version>1.15.3</testcontainers.version>
<java.version>1.8</java.version>
- <scala.binary.version>2.11</scala.binary.version>
+ <scala.binary.version>2.12</scala.binary.version>
<maven.compiler.source>${java.version}</maven.compiler.source>
<maven.compiler.target>${java.version}</maven.compiler.target>
<hamcrest.version>1.3</hamcrest.version>
@@ -107,7 +107,13 @@ under the License.
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
- <artifactId>flink-table-api-java-bridge_${scala.binary.version}</artifactId>
+ <artifactId>flink-connector-base</artifactId>
+ <version>${flink.version}</version>
+ <scope>provided</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.flink</groupId>
+ <artifactId>flink-table-api-java-bridge</artifactId>
<version>${flink.version}</version>
<scope>provided</scope>
</dependency>
@@ -119,7 +125,7 @@ under the License.
</dependency>
<dependency>
<groupId>org.apache.flink</groupId>
- <artifactId>flink-streaming-java_${scala.binary.version}</artifactId>
+ <artifactId>flink-streaming-java</artifactId>
<version>${flink.version}</version>
<scope>provided</scope>
</dependency>
