statements =
+ Arrays.stream(
+ Files.readAllLines(Paths.get(ddlTestFile.toURI())).stream()
+ .map(String::trim)
+ .filter(x -> !x.startsWith("--") && !x.isEmpty())
+ .map(
+ x -> {
+ final Matcher m =
+ COMMENT_PATTERN.matcher(x);
+ return m.matches() ? m.group(1) : x;
+ })
+ .collect(Collectors.joining("\n"))
+ .split(";"))
+ .collect(Collectors.toList());
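+ // run each parsed DDL statement against the test connection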
+ for (String stmt : statements) {
+ statement.execute(stmt);
+ }
+ } catch (Exception e) {
+ throw new RuntimeException(e);
+ }
+ }
+}
diff --git a/flink-connector-oceanbase-cdc/src/test/java/com/ververica/cdc/connectors/oceanbase/table/OceanBaseConnectorITCase.java b/flink-connector-oceanbase-cdc/src/test/java/com/ververica/cdc/connectors/oceanbase/table/OceanBaseConnectorITCase.java
new file mode 100644
index 000000000..20a34c3d3
--- /dev/null
+++ b/flink-connector-oceanbase-cdc/src/test/java/com/ververica/cdc/connectors/oceanbase/table/OceanBaseConnectorITCase.java
@@ -0,0 +1,455 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.ververica.cdc.connectors.oceanbase.table;
+
+import org.apache.flink.runtime.minicluster.RpcServiceSharing;
+import org.apache.flink.runtime.testutils.MiniClusterResourceConfiguration;
+import org.apache.flink.streaming.api.CheckpointingMode;
+import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
+import org.apache.flink.table.api.EnvironmentSettings;
+import org.apache.flink.table.api.TableResult;
+import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
+import org.apache.flink.table.planner.factories.TestValuesTableFactory;
+import org.apache.flink.table.utils.LegacyRowResource;
+import org.apache.flink.test.util.MiniClusterWithClientResource;
+
+import com.ververica.cdc.connectors.oceanbase.OceanBaseTestBase;
+import org.junit.Before;
+import org.junit.ClassRule;
+import org.junit.Rule;
+import org.junit.Test;
+
+import java.sql.Connection;
+import java.sql.Statement;
+import java.time.Instant;
+import java.time.ZoneId;
+import java.time.ZoneOffset;
+import java.util.Arrays;
+import java.util.List;
+
+import static org.junit.Assert.assertTrue;
+
+/** Integration tests for OceanBase change stream event SQL source. */
+public class OceanBaseConnectorITCase extends OceanBaseTestBase {
+
+ private static final int DEFAULT_PARALLELISM = 2;
+
+ private final StreamExecutionEnvironment env =
+ StreamExecutionEnvironment.getExecutionEnvironment()
+ .setParallelism(DEFAULT_PARALLELISM);
+ private final StreamTableEnvironment tEnv =
+ StreamTableEnvironment.create(
+ env,
+ EnvironmentSettings.newInstance().useBlinkPlanner().inStreamingMode().build());
+
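+ // use the legacy Row.toString() format so records render as +I(...) / +U(...) / -D(...),
+ // matching the expected strings in these tests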
+ @ClassRule public static LegacyRowResource usesLegacyRows = LegacyRowResource.INSTANCE;
+
+ @Rule
+ public final MiniClusterWithClientResource miniClusterResource =
+ new MiniClusterWithClientResource(
+ new MiniClusterResourceConfiguration.Builder()
+ .setNumberTaskManagers(1)
+ .setNumberSlotsPerTaskManager(DEFAULT_PARALLELISM)
+ .setRpcServiceSharing(RpcServiceSharing.DEDICATED)
+ .withHaLeadershipControl()
+ .build());
+
+ @Before
+ public void before() {
+ TestValuesTableFactory.clearAllData();
+ env.enableCheckpointing(1000);
+ env.getCheckpointConfig().setCheckpointingMode(CheckpointingMode.AT_LEAST_ONCE);
+ env.getCheckpointConfig().setMinPauseBetweenCheckpoints(500);
+ }
+
+ @Test
+ public void testConsumingAllEvents() throws Exception {
+ initializeTable("inventory");
+
+ String sourceDDL =
+ String.format(
+ "CREATE TABLE ob_source ("
+ + " `id` INT NOT NULL,"
+ + " name STRING,"
+ + " description STRING,"
+ + " weight DECIMAL(20, 10),"
+ + " PRIMARY KEY (`id`) NOT ENFORCED"
+ + ") WITH ("
+ + " 'connector' = 'oceanbase-cdc',"
+ + " 'scan.startup.mode' = 'initial',"
+ + " 'username' = '%s',"
+ + " 'password' = '%s',"
+ + " 'tenant-name' = '%s',"
+ + " 'database-name' = '%s',"
+ + " 'table-name' = '%s',"
+ + " 'hostname' = '%s',"
+ + " 'port' = '%s',"
+ + " 'rootserver-list' = '%s',"
+ + " 'logproxy.host' = '%s',"
+ + " 'logproxy.port' = '%s'"
+ + ")",
+ OB_SYS_USERNAME,
+ OB_SYS_PASSWORD,
+ "sys",
+ "inventory",
+ "products",
+ OB_WITH_LOG_PROXY.getContainerIpAddress(),
+ OB_WITH_LOG_PROXY.getMappedPort(OB_SERVER_SQL_PORT),
+ "127.0.0.1:2882:2881",
+ OB_WITH_LOG_PROXY.getContainerIpAddress(),
+ OB_WITH_LOG_PROXY.getMappedPort(OB_LOG_PROXY_PORT));
+
+ String sinkDDL =
+ "CREATE TABLE sink ("
+ + " `id` INT NOT NULL,"
+ + " name STRING,"
+ + " description STRING,"
+ + " weight DECIMAL(20, 10),"
+ + " PRIMARY KEY (`id`) NOT ENFORCED"
+ + ") WITH ("
+ + " 'connector' = 'values',"
+ + " 'sink-insert-only' = 'false',"
+ + " 'sink-expected-messages-num' = '30'"
+ + ")";
+
+ tEnv.executeSql(sourceDDL);
+ tEnv.executeSql(sinkDDL);
+
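+ // async submit job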
+ TableResult result = tEnv.executeSql("INSERT INTO sink SELECT * FROM ob_source");
+
+ waitForSinkSize("sink", 9);
+ int snapshotSize = sinkSize("sink");
+
+ try (Connection connection = getJdbcConnection("inventory");
+ Statement statement = connection.createStatement()) {
+ statement.execute(
+ "UPDATE products SET description='18oz carpenter hammer' WHERE id=106;");
+ statement.execute("UPDATE products SET weight='5.1' WHERE id=107;");
+ statement.execute(
+ "INSERT INTO products VALUES (default,'jacket','water resistent white wind breaker',0.2);"); // 110
+ statement.execute(
+ "INSERT INTO products VALUES (default,'scooter','Big 2-wheel scooter ',5.18);");
+ statement.execute(
+ "UPDATE products SET description='new water resistent white wind breaker', weight='0.5' WHERE id=110;");
+ statement.execute("UPDATE products SET weight='5.17' WHERE id=111;");
+ statement.execute("DELETE FROM products WHERE id=111;");
+ }
+
+ waitForSinkSize("sink", snapshotSize + 7);
+
+ /*
+ *
+ * The final database table looks like this:
+ *
+ * > SELECT * FROM products;
+ * +-----+--------------------+---------------------------------------------------------+--------+
+ * | id | name | description | weight |
+ * +-----+--------------------+---------------------------------------------------------+--------+
+ * | 101 | scooter | Small 2-wheel scooter | 3.14 |
+ * | 102 | car battery | 12V car battery | 8.1 |
+ * | 103 | 12-pack drill bits | 12-pack of drill bits with sizes ranging from #40 to #3 | 0.8 |
+ * | 104 | hammer | 12oz carpenter's hammer | 0.75 |
+ * | 105 | hammer | 14oz carpenter's hammer | 0.875 |
+ * | 106 | hammer | 18oz carpenter hammer | 1 |
+ * | 107 | rocks | box of assorted rocks | 5.1 |
+ * | 108 | jacket | water resistent black wind breaker | 0.1 |
+ * | 109 | spare tire | 24 inch spare tire | 22.2 |
+ * | 110 | jacket | new water resistent white wind breaker | 0.5 |
+ * +-----+--------------------+---------------------------------------------------------+--------+
+ *
+ */
+
+ List<String> expected =
+ Arrays.asList(
+ "+I(101,scooter,Small 2-wheel scooter,3.1400000000)",
+ "+I(102,car battery,12V car battery,8.1000000000)",
+ "+I(103,12-pack drill bits,12-pack of drill bits with sizes ranging from #40 to #3,0.8000000000)",
+ "+I(104,hammer,12oz carpenter's hammer,0.7500000000)",
+ "+I(105,hammer,14oz carpenter's hammer,0.8750000000)",
+ "+I(106,hammer,16oz carpenter's hammer,1.0000000000)",
+ "+I(107,rocks,box of assorted rocks,5.3000000000)",
+ "+I(108,jacket,water resistent black wind breaker,0.1000000000)",
+ "+I(109,spare tire,24 inch spare tire,22.2000000000)",
+ "+U(106,hammer,18oz carpenter hammer,1.0000000000)",
+ "+U(107,rocks,box of assorted rocks,5.1000000000)",
+ "+I(110,jacket,water resistent white wind breaker,0.2000000000)",
+ "+I(111,scooter,Big 2-wheel scooter ,5.1800000000)",
+ "+U(110,jacket,new water resistent white wind breaker,0.5000000000)",
+ "+U(111,scooter,Big 2-wheel scooter ,5.1700000000)",
+ "-D(111,scooter,Big 2-wheel scooter ,5.1700000000)");
+ List<String> actual = TestValuesTableFactory.getRawResults("sink");
+ assertContainsInAnyOrder(expected, actual);
+
+ result.getJobClient().get().cancel().get();
+ }
+
+ @Test
+ public void testMetadataColumns() throws Exception {
+ initializeTable("inventory_meta");
+
+ String sourceDDL =
+ String.format(
+ "CREATE TABLE ob_source ("
+ + " tenant STRING METADATA FROM 'tenant_name' VIRTUAL,"
+ + " database STRING METADATA FROM 'database_name' VIRTUAL,"
+ + " `table` STRING METADATA FROM 'table_name' VIRTUAL,"
+ + " `id` INT NOT NULL,"
+ + " name STRING,"
+ + " description STRING,"
+ + " weight DECIMAL(20, 10),"
+ + " PRIMARY KEY (`id`) NOT ENFORCED"
+ + ") WITH ("
+ + " 'connector' = 'oceanbase-cdc',"
+ + " 'scan.startup.mode' = 'initial',"
+ + " 'username' = '%s',"
+ + " 'password' = '%s',"
+ + " 'tenant-name' = '%s',"
+ + " 'database-name' = '%s',"
+ + " 'table-name' = '%s',"
+ + " 'hostname' = '%s',"
+ + " 'port' = '%s',"
+ + " 'rootserver-list' = '%s',"
+ + " 'logproxy.host' = '%s',"
+ + " 'logproxy.port' = '%s'"
+ + ")",
+ OB_SYS_USERNAME,
+ OB_SYS_PASSWORD,
+ "sys",
+ "inventory_meta",
+ "products",
+ OB_WITH_LOG_PROXY.getContainerIpAddress(),
+ OB_WITH_LOG_PROXY.getMappedPort(OB_SERVER_SQL_PORT),
+ "127.0.0.1:2882:2881",
+ OB_WITH_LOG_PROXY.getContainerIpAddress(),
+ OB_WITH_LOG_PROXY.getMappedPort(OB_LOG_PROXY_PORT));
+
+ String sinkDDL =
+ "CREATE TABLE sink ("
+ + " tenant STRING,"
+ + " database STRING,"
+ + " `table` STRING,"
+ + " `id` DECIMAL(20, 0) NOT NULL,"
+ + " name STRING,"
+ + " description STRING,"
+ + " weight DECIMAL(20, 10),"
+ + " primary key (tenant, database, `table`, `id`) not enforced"
+ + ") WITH ("
+ + " 'connector' = 'values',"
+ + " 'sink-insert-only' = 'false',"
+ + " 'sink-expected-messages-num' = '20'"
+ + ")";
+ tEnv.executeSql(sourceDDL);
+ tEnv.executeSql(sinkDDL);
+
+ // async submit job
+ TableResult result = tEnv.executeSql("INSERT INTO sink SELECT * FROM ob_source");
+
+ waitForSinkSize("sink", 9);
+ int snapshotSize = sinkSize("sink");
+
+ try (Connection connection = getJdbcConnection("inventory_meta");
+ Statement statement = connection.createStatement()) {
+ statement.execute(
+ "UPDATE products SET description='18oz carpenter hammer' WHERE id=106;");
+ }
+
+ waitForSinkSize("sink", snapshotSize + 1);
+
+ List<String> expected =
+ Arrays.asList(
+ "+I(sys,inventory_meta,products,101,scooter,Small 2-wheel scooter,3.1400000000)",
+ "+I(sys,inventory_meta,products,102,car battery,12V car battery,8.1000000000)",
+ "+I(sys,inventory_meta,products,103,12-pack drill bits,12-pack of drill bits with sizes ranging from #40 to #3,0.8000000000)",
+ "+I(sys,inventory_meta,products,104,hammer,12oz carpenter's hammer,0.7500000000)",
+ "+I(sys,inventory_meta,products,105,hammer,14oz carpenter's hammer,0.8750000000)",
+ "+I(sys,inventory_meta,products,106,hammer,16oz carpenter's hammer,1.0000000000)",
+ "+I(sys,inventory_meta,products,107,rocks,box of assorted rocks,5.3000000000)",
+ "+I(sys,inventory_meta,products,108,jacket,water resistent black wind breaker,0.1000000000)",
+ "+I(sys,inventory_meta,products,109,spare tire,24 inch spare tire,22.2000000000)",
+ "+U(sys,inventory_meta,products,106,hammer,18oz carpenter hammer,1.0000000000)");
+ List<String> actual = TestValuesTableFactory.getRawResults("sink");
+ assertContainsInAnyOrder(expected, actual);
+ result.getJobClient().get().cancel().get();
+ }
+
+ @Test
+ public void testAllDataTypes() throws Exception {
+ ZoneId serverTimeZone = ZoneId.systemDefault();
+ ZoneOffset zoneOffset = serverTimeZone.getRules().getOffset(Instant.now());
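+ // align the database time zone with the JVM's, so TIMESTAMP values read back
+ // from the changelog match the locally rendered expectations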
+ try (Connection connection = getJdbcConnection("");
+ Statement statement = connection.createStatement()) {
+ statement.execute(String.format("SET GLOBAL time_zone = '%s';", zoneOffset.getId()));
+ }
+ tEnv.getConfig().setLocalTimeZone(serverTimeZone);
+ initializeTable("column_type_test");
+ String sourceDDL =
+ String.format(
+ "CREATE TABLE ob_source (\n"
+ + " `id` INT NOT NULL,\n"
+ + " bool_c TINYINT,\n"
+ + " tiny_c TINYINT,\n"
+ + " tiny_un_c SMALLINT,\n"
+ + " small_c SMALLINT ,\n"
+ + " small_un_c INT ,\n"
+ + " medium_c INT,\n"
+ + " medium_un_c INT,\n"
+ + " int_c INT,\n"
+ + " int_un_c BIGINT,\n"
+ + " big_c BIGINT,\n"
+ + " big_un_c DECIMAL(20, 0),\n"
+ + " real_c FLOAT,\n"
+ + " float_c FLOAT,\n"
+ + " double_c DOUBLE,\n"
+ + " decimal_c DECIMAL(8, 4),\n"
+ + " numeric_c DECIMAL(6, 0),\n"
+ + " big_decimal_c STRING,\n"
+ + " date_c DATE,\n"
+ + " time_c TIME(0),\n"
+ + " datetime3_c TIMESTAMP(3),\n"
+ + " datetime6_c TIMESTAMP(6),\n"
+ + " timestamp_c TIMESTAMP,\n"
+ + " char_c CHAR(3),\n"
+ + " varchar_c VARCHAR(255),\n"
+ + " bit_c BINARY(8),\n"
+ + " text_c STRING,\n"
+ + " tiny_blob_c BYTES,\n"
+ + " medium_blob_c BYTES,\n"
+ + " long_blob_c BYTES,\n"
+ + " blob_c BYTES,\n"
+ + " year_c INT,\n"
+ + " set_c STRING,\n"
+ + " enum_c STRING,\n"
+ + " primary key (`id`) not enforced"
+ + ") WITH ("
+ + " 'connector' = 'oceanbase-cdc',"
+ + " 'scan.startup.mode' = 'initial',"
+ + " 'username' = '%s',"
+ + " 'password' = '%s',"
+ + " 'tenant-name' = '%s',"
+ + " 'database-name' = '%s',"
+ + " 'table-name' = '%s',"
+ + " 'hostname' = '%s',"
+ + " 'port' = '%s',"
+ + " 'rootserver-list' = '%s',"
+ + " 'logproxy.host' = '%s',"
+ + " 'logproxy.port' = '%s',"
+ + " 'server-time-zone' = '%s'"
+ + ")",
+ OB_SYS_USERNAME,
+ OB_SYS_PASSWORD,
+ "sys",
+ "column_type_test",
+ "full_types",
+ OB_WITH_LOG_PROXY.getContainerIpAddress(),
+ OB_WITH_LOG_PROXY.getMappedPort(OB_SERVER_SQL_PORT),
+ "127.0.0.1:2882:2881",
+ OB_WITH_LOG_PROXY.getContainerIpAddress(),
+ OB_WITH_LOG_PROXY.getMappedPort(OB_LOG_PROXY_PORT),
+ serverTimeZone);
+ String sinkDDL =
+ "CREATE TABLE sink ("
+ + " `id` INT NOT NULL,\n"
+ + " bool_c TINYINT,\n"
+ + " tiny_c TINYINT,\n"
+ + " tiny_un_c SMALLINT,\n"
+ + " small_c SMALLINT ,\n"
+ + " small_un_c INT ,\n"
+ + " medium_c INT,\n"
+ + " medium_un_c INT,\n"
+ + " int_c INT,\n"
+ + " int_un_c BIGINT,\n"
+ + " big_c BIGINT,\n"
+ + " big_un_c DECIMAL(20, 0),\n"
+ + " real_c FLOAT,\n"
+ + " float_c FLOAT,\n"
+ + " double_c DOUBLE,\n"
+ + " decimal_c DECIMAL(8, 4),\n"
+ + " numeric_c DECIMAL(6, 0),\n"
+ + " big_decimal_c STRING,\n"
+ + " date_c DATE,\n"
+ + " time_c TIME(0),\n"
+ + " datetime3_c TIMESTAMP(3),\n"
+ + " datetime6_c TIMESTAMP(6),\n"
+ + " timestamp_c TIMESTAMP,\n"
+ + " char_c CHAR(3),\n"
+ + " varchar_c VARCHAR(255),\n"
+ + " bit_c BINARY(8),\n"
+ + " text_c STRING,\n"
+ + " tiny_blob_c BYTES,\n"
+ + " medium_blob_c BYTES,\n"
+ + " blob_c BYTES,\n"
+ + " long_blob_c BYTES,\n"
+ + " year_c INT,\n"
+ + " enum_c STRING,\n"
+ + " set_c STRING,\n"
+ + " primary key (`id`) not enforced"
+ + ") WITH ("
+ + " 'connector' = 'values',"
+ + " 'sink-insert-only' = 'false',"
+ + " 'sink-expected-messages-num' = '3'"
+ + ")";
+ tEnv.executeSql(sourceDDL);
+ tEnv.executeSql(sinkDDL);
+
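+ // async submit job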
+ TableResult result = tEnv.executeSql("INSERT INTO sink SELECT * FROM ob_source");
+
+ waitForSinkSize("sink", 1);
+ int snapshotSize = sinkSize("sink");
+
+ try (Connection connection = getJdbcConnection("column_type_test");
+ Statement statement = connection.createStatement()) {
+ statement.execute(
+ "UPDATE full_types SET timestamp_c = '2020-07-17 18:33:22' WHERE id=1;");
+ }
+
+ waitForSinkSize("sink", snapshotSize + 1);
+
+ List<String> expected =
+ Arrays.asList(
+ "+I(1,1,127,255,32767,65535,8388607,16777215,2147483647,4294967295,9223372036854775807,18446744073709551615,123.102,123.102,404.4443,123.4567,346,34567892.1,2020-07-17,18:00:22,2020-07-17T18:00:22.123,2020-07-17T18:00:22.123456,2020-07-17T18:00:22,abc,Hello World,[4, 4, 4, 4, 4, 4, 4, 4],text,[16],[16],[16],[16],2022,a,red)",
+ "+U(1,1,127,255,32767,65535,8388607,16777215,2147483647,4294967295,9223372036854775807,18446744073709551615,123.102,123.102,404.4443,123.4567,346,34567892.1,2020-07-17,18:00:22,2020-07-17T18:00:22.123,2020-07-17T18:00:22.123456,2020-07-17T18:33:22,abc,Hello World,[4, 4, 4, 4, 4, 4, 4, 4],text,[16],[16],[16],[16],2022,a,red)");
+
+ List<String> actual = TestValuesTableFactory.getRawResults("sink");
+ assertContainsInAnyOrder(expected, actual);
+ result.getJobClient().get().cancel().get();
+ }
+
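+ /** Blocks the test thread, polling the named sink every 100 ms until it holds at least {@code expectedSize} records. */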
+ private static void waitForSinkSize(String sinkName, int expectedSize)
+ throws InterruptedException {
+ while (sinkSize(sinkName) < expectedSize) {
+ Thread.sleep(100);
+ }
+ }
+
+ private static int sinkSize(String sinkName) {
+ synchronized (TestValuesTableFactory.class) {
+ try {
+ return TestValuesTableFactory.getRawResults(sinkName).size();
+ } catch (IllegalArgumentException e) {
+ // job is not started yet
+ return 0;
+ }
+ }
+ }
+
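+ /** Asserts that every expected record appears in the actual results, in any order; surplus records in {@code actual} are not reported. */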
+ public static void assertContainsInAnyOrder(List<String> expected, List<String> actual) {
+ assertTrue(expected != null && actual != null);
+ assertTrue(actual.containsAll(expected));
+ }
+}
diff --git a/flink-connector-oceanbase-cdc/src/test/java/com/ververica/cdc/connectors/oceanbase/table/OceanBaseTableFactoryTest.java b/flink-connector-oceanbase-cdc/src/test/java/com/ververica/cdc/connectors/oceanbase/table/OceanBaseTableFactoryTest.java
new file mode 100644
index 000000000..55e7bc868
--- /dev/null
+++ b/flink-connector-oceanbase-cdc/src/test/java/com/ververica/cdc/connectors/oceanbase/table/OceanBaseTableFactoryTest.java
@@ -0,0 +1,222 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.ververica.cdc.connectors.oceanbase.table;
+
+import org.apache.flink.configuration.Configuration;
+import org.apache.flink.table.api.DataTypes;
+import org.apache.flink.table.api.Schema;
+import org.apache.flink.table.catalog.CatalogTable;
+import org.apache.flink.table.catalog.Column;
+import org.apache.flink.table.catalog.ObjectIdentifier;
+import org.apache.flink.table.catalog.ResolvedCatalogTable;
+import org.apache.flink.table.catalog.ResolvedSchema;
+import org.apache.flink.table.catalog.UniqueConstraint;
+import org.apache.flink.table.connector.source.DynamicTableSource;
+import org.apache.flink.table.factories.FactoryUtil;
+import org.apache.flink.util.ExceptionUtils;
+
+import org.junit.Test;
+
+import java.time.Duration;
+import java.time.ZoneId;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+/** Test for {@link OceanBaseTableSource} created by {@link OceanBaseTableSourceFactory}. */
+public class OceanBaseTableFactoryTest {
+
+ private static final ResolvedSchema SCHEMA =
+ new ResolvedSchema(
+ Arrays.asList(
+ Column.physical("aaa", DataTypes.INT().notNull()),
+ Column.physical("bbb", DataTypes.STRING().notNull()),
+ Column.physical("ccc", DataTypes.DOUBLE()),
+ Column.physical("ddd", DataTypes.DECIMAL(31, 18)),
+ Column.physical("eee", DataTypes.TIMESTAMP(3))),
+ Collections.emptyList(),
+ UniqueConstraint.primaryKey("pk", Collections.singletonList("aaa")));
+
+ private static final ResolvedSchema SCHEMA_WITH_METADATA =
+ new ResolvedSchema(
+ Arrays.asList(
+ Column.physical("aaa", DataTypes.INT().notNull()),
+ Column.physical("bbb", DataTypes.STRING().notNull()),
+ Column.physical("ccc", DataTypes.DOUBLE()),
+ Column.physical("ddd", DataTypes.DECIMAL(31, 18)),
+ Column.physical("eee", DataTypes.TIMESTAMP(3)),
+ Column.metadata("time", DataTypes.TIMESTAMP_LTZ(3), "op_ts", true),
+ Column.metadata("tenant", DataTypes.STRING(), "tenant_name", true),
+ Column.metadata("database", DataTypes.STRING(), "database_name", true),
+ Column.metadata("table", DataTypes.STRING(), "table_name", true)),
+ Collections.emptyList(),
+ UniqueConstraint.primaryKey("pk", Collections.singletonList("aaa")));
+
+ private static final String STARTUP_MODE = "latest-offset";
+ private static final String USERNAME = "user@sys";
+ private static final String PASSWORD = "pswd";
+ private static final String TENANT_NAME = "sys";
+ private static final String DATABASE_NAME = "db";
+ private static final String TABLE_NAME = "table";
+ private static final String RS_LIST = "127.0.0.1:2882:2881";
+ private static final String LOG_PROXY_HOST = "127.0.0.1";
+ private static final String LOG_PROXY_PORT = "2983";
+
+ @Test
+ public void testCommonProperties() {
+ Map<String, String> properties = getRequiredOptions();
+
+ DynamicTableSource actualSource = createTableSource(SCHEMA, properties);
+ OceanBaseTableSource expectedSource =
+ new OceanBaseTableSource(
+ SCHEMA,
+ StartupMode.LATEST_OFFSET,
+ null,
+ USERNAME,
+ PASSWORD,
+ TENANT_NAME,
+ DATABASE_NAME,
+ TABLE_NAME,
+ null,
+ null,
+ Duration.ofSeconds(30),
+ ZoneId.of("UTC"),
+ RS_LIST,
+ LOG_PROXY_HOST,
+ 2983);
+ assertEquals(expectedSource, actualSource);
+ }
+
+ @Test
+ public void testOptionalProperties() {
+ Map<String, String> options = getRequiredOptions();
+ options.put("scan.startup.mode", "timestamp");
+ options.put("scan.startup.timestamp", "0");
+ options.put("hostname", "127.0.0.1");
+ options.put("port", "2881");
+ DynamicTableSource actualSource = createTableSource(SCHEMA, options);
+
+ OceanBaseTableSource expectedSource =
+ new OceanBaseTableSource(
+ SCHEMA,
+ StartupMode.TIMESTAMP,
+ 0L,
+ USERNAME,
+ PASSWORD,
+ TENANT_NAME,
+ DATABASE_NAME,
+ TABLE_NAME,
+ "127.0.0.1",
+ 2881,
+ Duration.ofSeconds(30),
+ ZoneId.of("UTC"),
+ RS_LIST,
+ LOG_PROXY_HOST,
+ 2983);
+ assertEquals(expectedSource, actualSource);
+ }
+
+ @Test
+ public void testMetadataColumns() {
+ Map<String, String> properties = getRequiredOptions();
+
+ DynamicTableSource actualSource = createTableSource(SCHEMA_WITH_METADATA, properties);
+ OceanBaseTableSource oceanBaseTableSource = (OceanBaseTableSource) actualSource;
+ oceanBaseTableSource.applyReadableMetadata(
+ Arrays.asList("op_ts", "tenant_name", "database_name", "table_name"),
+ SCHEMA_WITH_METADATA.toSourceRowDataType());
+ actualSource = oceanBaseTableSource.copy();
+
+ OceanBaseTableSource expectedSource =
+ new OceanBaseTableSource(
+ SCHEMA_WITH_METADATA,
+ StartupMode.LATEST_OFFSET,
+ null,
+ USERNAME,
+ PASSWORD,
+ TENANT_NAME,
+ DATABASE_NAME,
+ TABLE_NAME,
+ null,
+ null,
+ Duration.ofSeconds(30),
+ ZoneId.of("UTC"),
+ RS_LIST,
+ LOG_PROXY_HOST,
+ 2983);
+ expectedSource.producedDataType = SCHEMA_WITH_METADATA.toSourceRowDataType();
+ expectedSource.metadataKeys =
+ Arrays.asList("op_ts", "tenant_name", "database_name", "table_name");
+
+ assertEquals(expectedSource, actualSource);
+ }
+
+ @Test
+ public void testValidation() {
+ try {
+ Map<String, String> properties = getRequiredOptions();
+ properties.put("unknown", "abc");
+
+ createTableSource(SCHEMA, properties);
+ fail("exception expected");
+ } catch (Throwable t) {
+ assertTrue(
+ ExceptionUtils.findThrowableWithMessage(t, "Unsupported options:\n\nunknown")
+ .isPresent());
+ }
+ }
+
+ private Map<String, String> getRequiredOptions() {
+ Map<String, String> options = new HashMap<>();
+ options.put("connector", "oceanbase-cdc");
+ options.put("scan.startup.mode", STARTUP_MODE);
+ options.put("username", USERNAME);
+ options.put("password", PASSWORD);
+ options.put("tenant-name", TENANT_NAME);
+ options.put("database-name", DATABASE_NAME);
+ options.put("table-name", TABLE_NAME);
+ options.put("rootserver-list", RS_LIST);
+ options.put("logproxy.host", LOG_PROXY_HOST);
+ options.put("logproxy.port", LOG_PROXY_PORT);
+ return options;
+ }
+
+ private static DynamicTableSource createTableSource(
+ ResolvedSchema schema, Map<String, String> options) {
+ return FactoryUtil.createTableSource(
+ null,
+ ObjectIdentifier.of("default", "default", "t1"),
+ new ResolvedCatalogTable(
+ CatalogTable.of(
+ Schema.newBuilder().fromResolvedSchema(schema).build(),
+ "mock source",
+ new ArrayList<>(),
+ options),
+ schema),
+ new Configuration(),
+ OceanBaseTableFactoryTest.class.getClassLoader(),
+ false);
+ }
+}
diff --git a/flink-connector-oceanbase-cdc/src/test/resources/ddl/column_type_test.sql b/flink-connector-oceanbase-cdc/src/test/resources/ddl/column_type_test.sql
new file mode 100644
index 000000000..bf33870be
--- /dev/null
+++ b/flink-connector-oceanbase-cdc/src/test/resources/ddl/column_type_test.sql
@@ -0,0 +1,67 @@
+-- Licensed to the Apache Software Foundation (ASF) under one
+-- or more contributor license agreements. See the NOTICE file
+-- distributed with this work for additional information
+-- regarding copyright ownership. The ASF licenses this file
+-- to you under the Apache License, Version 2.0 (the
+-- "License"); you may not use this file except in compliance
+-- with the License. You may obtain a copy of the License at
+-- http://www.apache.org/licenses/LICENSE-2.0
+-- Unless required by applicable law or agreed to in writing,
+-- software distributed under the License is distributed on an
+-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+-- KIND, either express or implied. See the License for the
+-- specific language governing permissions and limitations
+-- under the License.
+
+-- ----------------------------------------------------------------------------------------------------------------
+-- DATABASE: column_type_test
+-- ----------------------------------------------------------------------------------------------------------------
+
+CREATE DATABASE column_type_test;
+USE column_type_test;
+
+CREATE TABLE full_types
+(
+ id INT AUTO_INCREMENT NOT NULL,
+ bool_c BOOLEAN,
+ tiny_c TINYINT,
+ tiny_un_c TINYINT UNSIGNED,
+ small_c SMALLINT,
+ small_un_c SMALLINT UNSIGNED,
+ medium_c MEDIUMINT,
+ medium_un_c MEDIUMINT UNSIGNED,
+ int_c INTEGER,
+ int_un_c INTEGER UNSIGNED,
+ big_c BIGINT,
+ big_un_c BIGINT UNSIGNED,
+ real_c REAL,
+ float_c FLOAT,
+ double_c DOUBLE,
+ decimal_c DECIMAL(8, 4),
+ numeric_c NUMERIC(6, 0),
+ big_decimal_c DECIMAL(65, 1),
+ date_c DATE,
+ time_c TIME(0),
+ datetime3_c TIMESTAMP(3),
+ datetime6_c TIMESTAMP(6),
+ timestamp_c TIMESTAMP,
+ char_c CHAR(3),
+ varchar_c VARCHAR(255),
+ bit_c BIT(64),
+ text_c TEXT,
+ tiny_blob_c TINYBLOB,
+ medium_blob_c MEDIUMBLOB,
+ blob_c BLOB,
+ long_blob_c LONGBLOB,
+ year_c YEAR,
+ set_c SET ('a', 'b'),
+ enum_c ENUM ('red', 'green', 'blue'),
+ PRIMARY KEY (id)
+) DEFAULT CHARSET = utf8mb4;
+
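+-- one row covering every column type; the BIT(64) literal encodes eight 0x04 bytes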
+INSERT INTO full_types
+VALUES (DEFAULT, true, 127, 255, 32767, 65535, 8388607, 16777215, 2147483647, 4294967295, 9223372036854775807,
+ 18446744073709551615, 123.102, 123.102, 404.4443, 123.4567, 345.6, 34567892.1, '2020-07-17', '18:00:22',
+ '2020-07-17 18:00:22.123', '2020-07-17 18:00:22.123456', '2020-07-17 18:00:22', 'abc', 'Hello World',
+ b'0000010000000100000001000000010000000100000001000000010000000100', 'text', UNHEX(HEX(16)), UNHEX(HEX(16)),
+ UNHEX(HEX(16)), UNHEX(HEX(16)), 2022, 'a', 'red');
diff --git a/flink-connector-oceanbase-cdc/src/test/resources/ddl/inventory.sql b/flink-connector-oceanbase-cdc/src/test/resources/ddl/inventory.sql
new file mode 100644
index 000000000..cd96f9186
--- /dev/null
+++ b/flink-connector-oceanbase-cdc/src/test/resources/ddl/inventory.sql
@@ -0,0 +1,42 @@
+-- Licensed to the Apache Software Foundation (ASF) under one
+-- or more contributor license agreements. See the NOTICE file
+-- distributed with this work for additional information
+-- regarding copyright ownership. The ASF licenses this file
+-- to you under the Apache License, Version 2.0 (the
+-- "License"); you may not use this file except in compliance
+-- with the License. You may obtain a copy of the License at
+-- http://www.apache.org/licenses/LICENSE-2.0
+-- Unless required by applicable law or agreed to in writing,
+-- software distributed under the License is distributed on an
+-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+-- KIND, either express or implied. See the License for the
+-- specific language governing permissions and limitations
+-- under the License.
+
+-- ----------------------------------------------------------------------------------------------------------------
+-- DATABASE: inventory
+-- ----------------------------------------------------------------------------------------------------------------
+
+CREATE DATABASE inventory;
+USE inventory;
+
+-- Create and populate our products using a single insert with many rows
+CREATE TABLE products
+(
+ id INTEGER NOT NULL AUTO_INCREMENT PRIMARY KEY,
+ name VARCHAR(255) NOT NULL DEFAULT 'flink',
+ description VARCHAR(512),
+ weight DECIMAL(20, 10)
+);
+ALTER TABLE products AUTO_INCREMENT = 101;
+
+INSERT INTO products
+VALUES (default, "scooter", "Small 2-wheel scooter", 3.14),
+ (default, "car battery", "12V car battery", 8.1),
+ (default, "12-pack drill bits", "12-pack of drill bits with sizes ranging from #40 to #3", 0.8),
+ (default, "hammer", "12oz carpenter's hammer", 0.75),
+ (default, "hammer", "14oz carpenter's hammer", 0.875),
+ (default, "hammer", "16oz carpenter's hammer", 1.0),
+ (default, "rocks", "box of assorted rocks", 5.3),
+ (default, "jacket", "water resistent black wind breaker", 0.1),
+ (default, "spare tire", "24 inch spare tire", 22.2);
diff --git a/flink-connector-oceanbase-cdc/src/test/resources/ddl/inventory_meta.sql b/flink-connector-oceanbase-cdc/src/test/resources/ddl/inventory_meta.sql
new file mode 100644
index 000000000..a53f3e61a
--- /dev/null
+++ b/flink-connector-oceanbase-cdc/src/test/resources/ddl/inventory_meta.sql
@@ -0,0 +1,42 @@
+-- Licensed to the Apache Software Foundation (ASF) under one
+-- or more contributor license agreements. See the NOTICE file
+-- distributed with this work for additional information
+-- regarding copyright ownership. The ASF licenses this file
+-- to you under the Apache License, Version 2.0 (the
+-- "License"); you may not use this file except in compliance
+-- with the License. You may obtain a copy of the License at
+-- http://www.apache.org/licenses/LICENSE-2.0
+-- Unless required by applicable law or agreed to in writing,
+-- software distributed under the License is distributed on an
+-- "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+-- KIND, either express or implied. See the License for the
+-- specific language governing permissions and limitations
+-- under the License.
+
+-- ----------------------------------------------------------------------------------------------------------------
+-- DATABASE: inventory_meta
+-- ----------------------------------------------------------------------------------------------------------------
+
+CREATE DATABASE inventory_meta;
+USE inventory_meta;
+
+-- Create and populate our products using a single insert with many rows
+CREATE TABLE products
+(
+ id INTEGER NOT NULL AUTO_INCREMENT PRIMARY KEY,
+ name VARCHAR(255) NOT NULL DEFAULT 'flink',
+ description VARCHAR(512),
+ weight DECIMAL(20, 10)
+);
+ALTER TABLE products AUTO_INCREMENT = 101;
+
+INSERT INTO products
+VALUES (default, "scooter", "Small 2-wheel scooter", 3.14),
+ (default, "car battery", "12V car battery", 8.1),
+ (default, "12-pack drill bits", "12-pack of drill bits with sizes ranging from #40 to #3", 0.8),
+ (default, "hammer", "12oz carpenter's hammer", 0.75),
+ (default, "hammer", "14oz carpenter's hammer", 0.875),
+ (default, "hammer", "16oz carpenter's hammer", 1.0),
+ (default, "rocks", "box of assorted rocks", 5.3),
+ (default, "jacket", "water resistent black wind breaker", 0.1),
+ (default, "spare tire", "24 inch spare tire", 22.2);
diff --git a/flink-connector-oceanbase-cdc/src/test/resources/log4j2-test.properties b/flink-connector-oceanbase-cdc/src/test/resources/log4j2-test.properties
new file mode 100644
index 000000000..b82a9606d
--- /dev/null
+++ b/flink-connector-oceanbase-cdc/src/test/resources/log4j2-test.properties
@@ -0,0 +1,28 @@
+################################################################################
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+################################################################################
+
+# Set root logger level to OFF to not flood build logs
+# set manually to INFO for debugging purposes
+rootLogger.level = OFF
+rootLogger.appenderRef.test.ref = TestLogger
+
+appender.testlogger.name = TestLogger
+appender.testlogger.type = CONSOLE
+appender.testlogger.target = SYSTEM_ERR
+appender.testlogger.layout.type = PatternLayout
+appender.testlogger.layout.pattern = %-4r [%t] %-5p %c - %m%n
diff --git a/flink-sql-connector-oceanbase-cdc/pom.xml b/flink-sql-connector-oceanbase-cdc/pom.xml
new file mode 100644
index 000000000..5fc948a8f
--- /dev/null
+++ b/flink-sql-connector-oceanbase-cdc/pom.xml
@@ -0,0 +1,121 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+Licensed to the Apache Software Foundation (ASF) under one
+or more contributor license agreements. See the NOTICE file
+distributed with this work for additional information
+regarding copyright ownership. The ASF licenses this file
+to you under the Apache License, Version 2.0 (the
+"License"); you may not use this file except in compliance
+with the License. You may obtain a copy of the License at
+
+http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <parent>
+        <artifactId>flink-cdc-connectors</artifactId>
+        <groupId>com.ververica</groupId>
+        <version>2.2-SNAPSHOT</version>
+    </parent>
+    <modelVersion>4.0.0</modelVersion>
+
+    <artifactId>flink-sql-connector-oceanbase-cdc</artifactId>
+
+    <dependencies>
+        <dependency>
+            <groupId>com.ververica</groupId>
+            <artifactId>flink-connector-oceanbase-cdc</artifactId>
+            <version>${project.version}</version>
+        </dependency>
+    </dependencies>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.apache.maven.plugins</groupId>
+                <artifactId>maven-shade-plugin</artifactId>
+                <version>3.2.4</version>
+                <executions>
+                    <execution>
+                        <id>shade-flink</id>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>shade</goal>
+                        </goals>
+                        <configuration>
+                            <shadeTestJar>false</shadeTestJar>
+                            <artifactSet>
+                                <includes>
+                                    <include>io.debezium:debezium-api</include>
+                                    <include>io.debezium:debezium-embedded</include>
+                                    <include>io.debezium:debezium-core</include>
+                                    <include>com.ververica:flink-connector-debezium</include>
+                                    <include>com.ververica:flink-connector-oceanbase-cdc</include>
+                                    <include>mysql:mysql-connector-java</include>
+                                    <include>com.oceanbase.logclient:*</include>
+                                    <include>io.netty:netty-all</include>
+                                    <include>com.google.protobuf:protobuf-java</include>
+                                    <include>commons-codec:commons-codec</include>
+                                    <include>org.lz4:lz4-java</include>
+                                    <include>org.apache.avro:avro</include>
+                                    <include>org.apache.commons:*</include>
+                                    <include>org.apache.kafka:*</include>
+                                    <include>com.fasterxml.*:*</include>
+                                    <include>com.google.guava:*</include>
+                                    <include>org.apache.flink:flink-shaded-guava</include>
+                                </includes>
+                            </artifactSet>
+                            <filters>
+                                <filter>
+                                    <artifact>org.apache.kafka:*</artifact>
+                                    <excludes>
+                                        <exclude>kafka/kafka-version.properties</exclude>
+                                        <exclude>LICENSE</exclude>
+                                        <exclude>NOTICE</exclude>
+                                        <exclude>common/**</exclude>
+                                    </excludes>
+                                </filter>
+                            </filters>
+                            <relocations>
+                                <relocation>
+                                    <pattern>org.apache.kafka</pattern>
+                                    <shadedPattern>com.ververica.cdc.connectors.shaded.org.apache.kafka</shadedPattern>
+                                </relocation>
+                                <relocation>
+                                    <pattern>org.apache.avro</pattern>
+                                    <shadedPattern>com.ververica.cdc.connectors.shaded.org.apache.avro</shadedPattern>
+                                </relocation>
+                                <relocation>
+                                    <pattern>com.fasterxml</pattern>
+                                    <shadedPattern>com.ververica.cdc.connectors.shaded.com.fasterxml</shadedPattern>
+                                </relocation>
+                                <relocation>
+                                    <pattern>com.google</pattern>
+                                    <shadedPattern>com.ververica.cdc.connectors.shaded.com.google</shadedPattern>
+                                </relocation>
+                            </relocations>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+
+</project>
diff --git a/flink-sql-connector-oceanbase-cdc/src/main/java/com/ververica/cdc/connectors/oceanbase/DummyDocs.java b/flink-sql-connector-oceanbase-cdc/src/main/java/com/ververica/cdc/connectors/oceanbase/DummyDocs.java
new file mode 100644
index 000000000..ae8ed7569
--- /dev/null
+++ b/flink-sql-connector-oceanbase-cdc/src/main/java/com/ververica/cdc/connectors/oceanbase/DummyDocs.java
@@ -0,0 +1,22 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.ververica.cdc.connectors.oceanbase;
+
+/** This is used to generate a dummy docs jar for this module to pass OSS repository rule. */
+public class DummyDocs {}
diff --git a/flink-sql-connector-oceanbase-cdc/src/main/resources/META-INF/NOTICE b/flink-sql-connector-oceanbase-cdc/src/main/resources/META-INF/NOTICE
new file mode 100644
index 000000000..3b14d567b
--- /dev/null
+++ b/flink-sql-connector-oceanbase-cdc/src/main/resources/META-INF/NOTICE
@@ -0,0 +1,6 @@
+flink-sql-connector-oceanbase-cdc
+Copyright 2020 Ververica Inc.
+
+This project bundles the following dependencies under the Apache Software License 2.0. (http://www.apache.org/licenses/LICENSE-2.0.txt)
+
+- org.apache.kafka:kafka-clients:2.7.0
diff --git a/pom.xml b/pom.xml
index 088b66a7b..ca7f081a6 100644
--- a/pom.xml
+++ b/pom.xml
@@ -39,12 +39,14 @@ under the License.
 <module>flink-connector-postgres-cdc</module>
 <module>flink-connector-oracle-cdc</module>
 <module>flink-connector-mongodb-cdc</module>
+<module>flink-connector-oceanbase-cdc</module>
 <module>flink-connector-sqlserver-cdc</module>
 <module>flink-connector-tidb-cdc</module>
 <module>flink-sql-connector-mysql-cdc</module>
 <module>flink-sql-connector-postgres-cdc</module>
 <module>flink-sql-connector-mongodb-cdc</module>
 <module>flink-sql-connector-oracle-cdc</module>
+<module>flink-sql-connector-oceanbase-cdc</module>
 <module>flink-sql-connector-sqlserver-cdc</module>
 <module>flink-sql-connector-tidb-cdc</module>
 <module>flink-cdc-e2e-tests</module>
@@ -88,6 +90,7 @@ under the License.
 <slf4j.version>1.7.15</slf4j.version>
 <log4j.version>2.17.1</log4j.version>
 <spotless.version>2.4.2</spotless.version>
+<oblogclient.version>1.0.2</oblogclient.version>
 <flink.forkCount>1</flink.forkCount>
 <flink.reuseForks>true</flink.reuseForks>