[FLINK-37124][build] Simplify logs in test cases to avoid flooding GHA outputs

This closes #3860
yuxiqian 2 weeks ago committed by GitHub
parent b5d9673258
commit e2b8f70e0d
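
The changes below follow two mechanical patterns: each log4j2-test.properties moves its root logger to ERROR, and each test that printed to System.out/System.err now routes through an SLF4J logger, so one property controls test verbosity. For reference, a minimal sketch of the resulting test logging config; the rootLogger and appender-name lines appear in the diff itself, while the appender type and layout lines are assumptions based on Flink's usual TestLogger setup:

    # Set root logger level to ERROR to not flood build logs
    # set manually to INFO for debugging purposes
    rootLogger.level = ERROR
    rootLogger.appenderRef.test.ref = TestLogger

    appender.testlogger.name = TestLogger
    appender.testlogger.type = CONSOLE
    appender.testlogger.target = SYSTEM_ERR
    appender.testlogger.layout.type = PatternLayout
    appender.testlogger.layout.pattern = %-4r [%t] %-5p %c %x - %m%n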

@@ -11,9 +11,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
-# Set root logger level to OFF to not flood build logs
+# Set root logger level to ERROR to not flood build logs
 # set manually to INFO for debugging purposes
-rootLogger.level = INFO
+rootLogger.level = ERROR
 rootLogger.appenderRef.test.ref = TestLogger
 appender.testlogger.name = TestLogger

@@ -13,9 +13,9 @@
 # limitations under the License.
 ################################################################################
-# Set root logger level to OFF to not flood build logs
+# Set root logger level to ERROR to not flood build logs
 # set manually to INFO for debugging purposes
-rootLogger.level = OFF
+rootLogger.level = ERROR
 rootLogger.appenderRef.test.ref = TestLogger
 appender.testlogger.name = TestLogger

@@ -47,6 +47,8 @@ import org.junit.jupiter.api.Timeout;
 import org.junit.jupiter.api.extension.RegisterExtension;
 import org.junit.jupiter.params.ParameterizedTest;
 import org.junit.jupiter.params.provider.EnumSource;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.ByteArrayOutputStream;
 import java.io.PrintStream;
@@ -65,6 +67,9 @@ import static org.apache.flink.configuration.CoreOptions.ALWAYS_PARENT_FIRST_LOA
 @Timeout(value = 600, unit = java.util.concurrent.TimeUnit.SECONDS)
 class FlinkParallelizedPipelineITCase {
 
+    private static final Logger LOG =
+            LoggerFactory.getLogger(FlinkParallelizedPipelineITCase.class);
+
     private static final int MAX_PARALLELISM = 4;
     private static final int UPSTREAM_TABLE_COUNT = 4;
     private static final List<RouteDef> ROUTING_RULES;
@@ -144,11 +149,11 @@ class FlinkParallelizedPipelineITCase {
     @AfterEach
     void cleanup() {
         System.setOut(standardOut);
-        System.out.println(
+        LOG.debug(
                 "NOTICE: This is a semi-fuzzy test. Please also check if value sink prints expected events:");
-        System.out.println("================================");
-        System.out.print(outCaptor);
-        System.out.println("================================");
+        LOG.debug("================================");
+        LOG.debug(outCaptor.toString());
+        LOG.debug("================================");
         outCaptor.reset();
     }
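
The hunk above keeps the stdout-capture mechanism and only changes where the captured text goes. A self-contained sketch of the same pattern; the field names mirror the diff, while the class and the lifecycle wiring are hypothetical:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    import java.io.ByteArrayOutputStream;
    import java.io.PrintStream;

    class StdoutCaptureSketch {
        private static final Logger LOG = LoggerFactory.getLogger(StdoutCaptureSketch.class);

        private final PrintStream standardOut = System.out;
        private final ByteArrayOutputStream outCaptor = new ByteArrayOutputStream();

        void setup() {
            // Redirect everything the job prints into an in-memory buffer.
            System.setOut(new PrintStream(outCaptor));
        }

        void cleanup() {
            // Restore stdout, then emit the captured output only at DEBUG,
            // so CI logs stay quiet unless the level is raised for debugging.
            System.setOut(standardOut);
            LOG.debug(outCaptor.toString());
            outCaptor.reset();
        }
    }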

@@ -13,9 +13,9 @@
 # limitations under the License.
 ################################################################################
-# Set root logger level to OFF to not flood build logs
+# Set root logger level to ERROR to not flood build logs
 # set manually to INFO for debugging purposes
-rootLogger.level=INFO
+rootLogger.level=ERROR
 rootLogger.appenderRef.test.ref = TestLogger
 appender.testlogger.name = TestLogger

@@ -13,9 +13,9 @@
 # limitations under the License.
 ################################################################################
-# Set root logger level to OFF to not flood build logs
+# Set root logger level to ERROR to not flood build logs
 # set manually to INFO for debugging purposes
-rootLogger.level=INFO
+rootLogger.level=ERROR
 rootLogger.appenderRef.test.ref = TestLogger
 appender.testlogger.name = TestLogger

@@ -86,7 +86,7 @@ public class Elasticsearch6DataSinkITCaseTest {
     @Test
     public void testElasticsearchSink() throws Exception {
         TableId tableId = TableId.tableId("default", "schema", "table");
-        List<Event> events = ElasticsearchTestUtils.createTestEvents(tableId); // 使用工具类
+        List<Event> events = ElasticsearchTestUtils.createTestEvents(tableId);
 
         runJobWithEvents(events);
@@ -121,7 +121,7 @@ public class Elasticsearch6DataSinkITCaseTest {
     @Test
     public void testElasticsearchInsertAndDelete() throws Exception {
         TableId tableId = TableId.tableId("default", "schema", "table");
-        List<Event> events = ElasticsearchTestUtils.createTestEventsWithDelete(tableId); // 使用工具类
+        List<Event> events = ElasticsearchTestUtils.createTestEventsWithDelete(tableId);
 
         runJobWithEvents(events);
@@ -131,7 +131,7 @@ public class Elasticsearch6DataSinkITCaseTest {
     @Test
     public void testElasticsearchAddColumn() throws Exception {
         TableId tableId = TableId.tableId("default", "schema", "table");
-        List<Event> events = ElasticsearchTestUtils.createTestEventsWithAddColumn(tableId); // 使用工具类
+        List<Event> events = ElasticsearchTestUtils.createTestEventsWithAddColumn(tableId);
 
         runJobWithEvents(events);

@@ -92,7 +92,7 @@ public class Elasticsearch7DataSinkITCaseTest {
     @Test
     public void testElasticsearchSink() throws Exception {
         TableId tableId = TableId.tableId("default", "schema", "table");
-        List<Event> events = ElasticsearchTestUtils.createTestEvents(tableId); // 使用工具类
+        List<Event> events = ElasticsearchTestUtils.createTestEvents(tableId);
 
         runJobWithEvents(events);
@@ -127,7 +127,7 @@ public class Elasticsearch7DataSinkITCaseTest {
     @Test
     public void testElasticsearchInsertAndDelete() throws Exception {
         TableId tableId = TableId.tableId("default", "schema", "table");
-        List<Event> events = ElasticsearchTestUtils.createTestEventsWithDelete(tableId); // 使用工具类
+        List<Event> events = ElasticsearchTestUtils.createTestEventsWithDelete(tableId);
 
         runJobWithEvents(events);
@@ -137,7 +137,7 @@ public class Elasticsearch7DataSinkITCaseTest {
     @Test
     public void testElasticsearchAddColumn() throws Exception {
         TableId tableId = TableId.tableId("default", "schema", "table");
-        List<Event> events = ElasticsearchTestUtils.createTestEventsWithAddColumn(tableId); // 使用工具类
+        List<Event> events = ElasticsearchTestUtils.createTestEventsWithAddColumn(tableId);
 
         runJobWithEvents(events);

@@ -1,18 +1,20 @@
 /*
- * Licensed to the Apache Software Foundation (ASF) under one
- * contributor license agreements. See the NOTICE file distributed with
- * this work for additional regarding copyright ownership.
- * The ASF licenses this file to You under the Apache License, Version 2.0
- * (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
 *
- * http://www.apache.org/licenses/LICENSE-2.0
+ *   http://www.apache.org/licenses/LICENSE-2.0
 *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
 */
 
 package org.apache.flink.cdc.connectors.elasticsearch.sink;
@@ -29,6 +31,8 @@ import org.apache.flink.shaded.guava31.com.google.common.collect.ImmutableMap;
 import org.assertj.core.api.Assertions;
 import org.junit.jupiter.api.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.util.HashMap;
 import java.util.List;
@@ -38,6 +42,9 @@ import java.util.stream.Collectors;
 /** Tests for {@link ElasticsearchDataSinkFactory}. */
 public class ElasticsearchDataSinkFactoryTest {
 
+    private static final Logger LOG =
+            LoggerFactory.getLogger(ElasticsearchDataSinkFactoryTest.class);
+
     private static final String ELASTICSEARCH_IDENTIFIER = "elasticsearch";
 
     /** Tests the creation of an Elasticsearch DataSink with valid configuration. */
@@ -57,15 +64,11 @@ public class ElasticsearchDataSinkFactoryTest {
         DataSinkFactory sinkFactory = getElasticsearchDataSinkFactory();
 
         List<String> requiredKeys = getRequiredKeys(sinkFactory);
         for (String requiredKey : requiredKeys) {
-            // 创建一个新的配置 Map包含所有必需选项
             Map<String, String> options = new HashMap<>(createValidOptions());
-            // 移除当前正在测试的必需选项
             options.remove(requiredKey);
             Configuration conf = Configuration.fromMap(options);
 
-            // 打印日志以确保我们在测试缺少必需选项的情况
-            System.out.println("Testing missing required option: " + requiredKey);
+            LOG.info("Testing missing required option: {}", requiredKey);
 
-            // 添加创建 DataSink 对象的代码
             Assertions.assertThatThrownBy(() -> createDataSink(sinkFactory, conf))
                     // Assertions to check for missing required option
@@ -92,8 +95,7 @@ public class ElasticsearchDataSinkFactoryTest {
                         .put("unsupported_key", "unsupported_value")
                         .build());
 
-        // 打印日志以确保我们在测试不受支持的选项
-        System.out.println("Testing unsupported option");
+        LOG.info("Testing unsupported option");
 
         // Assertions to check for unsupported options
         Assertions.assertThatThrownBy(() -> createDataSink(sinkFactory, conf))
@@ -121,8 +123,7 @@ public class ElasticsearchDataSinkFactoryTest {
                         .put("version", "7") // Added version to the test configuration
                         .build());
 
-        // 打印日志以确保我们在测试带前缀的必需选项
-        System.out.println("Testing prefixed required option");
+        LOG.info("Testing prefixed required option");
 
         DataSink dataSink = createDataSink(sinkFactory, conf);
         Assertions.assertThat(dataSink).isInstanceOf(ElasticsearchDataSink.class);

@@ -94,7 +94,7 @@ public class ElasticsearchDataSinkITCaseTest {
     @Test
     public void testElasticsearchSink() throws Exception {
         TableId tableId = TableId.tableId("default", "schema", "table");
-        List<Event> events = ElasticsearchTestUtils.createTestEvents(tableId); // 使用工具类
+        List<Event> events = ElasticsearchTestUtils.createTestEvents(tableId);
 
         runJobWithEvents(events);
@@ -129,7 +129,7 @@ public class ElasticsearchDataSinkITCaseTest {
     @Test
     public void testElasticsearchInsertAndDelete() throws Exception {
         TableId tableId = TableId.tableId("default", "schema", "table");
-        List<Event> events = ElasticsearchTestUtils.createTestEventsWithDelete(tableId); // 使用工具类
+        List<Event> events = ElasticsearchTestUtils.createTestEventsWithDelete(tableId);
 
         runJobWithEvents(events);
@@ -139,7 +139,7 @@ public class ElasticsearchDataSinkITCaseTest {
     @Test
    public void testElasticsearchAddColumn() throws Exception {
         TableId tableId = TableId.tableId("default", "schema", "table");
-        List<Event> events = ElasticsearchTestUtils.createTestEventsWithAddColumn(tableId); // 使用工具类
+        List<Event> events = ElasticsearchTestUtils.createTestEventsWithAddColumn(tableId);
 
         runJobWithEvents(events);

@@ -13,9 +13,9 @@
 # limitations under the License.
 ################################################################################
-# Set root logger level to OFF to not flood build logs
+# Set root logger level to ERROR to not flood build logs
 # set manually to INFO for debugging purposes
-rootLogger.level = INFO
+rootLogger.level = ERROR
 rootLogger.appenderRef.test.ref = TestLogger
 appender.testlogger.name = TestLogger

@@ -14,9 +14,9 @@
 # limitations under the License.
 ################################################################################
-# Set root logger level to OFF to not flood build logs
+# Set root logger level to ERROR to not flood build logs
 # set manually to INFO for debugging purposes
-rootLogger.level=INFO
+rootLogger.level=ERROR
 rootLogger.appenderRef.test.ref = TestLogger
 appender.testlogger.name = TestLogger

@@ -22,7 +22,10 @@ import org.apache.flink.cdc.connectors.maxcompute.utils.MaxComputeUtils;
 
 import com.aliyun.odps.Odps;
 import org.junit.ClassRule;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 import org.testcontainers.containers.GenericContainer;
+import org.testcontainers.containers.output.Slf4jLogConsumer;
 import org.testcontainers.containers.wait.strategy.Wait;
 import org.testcontainers.utility.DockerImageName;
@@ -34,6 +37,9 @@ import java.net.UnknownHostException;
 /** init maxcompute-emulator use for e2e test. */
 public class EmulatorTestBase {
 
+    private static final Logger LOG = LoggerFactory.getLogger(EmulatorTestBase.class);
+
     public static final DockerImageName MAXCOMPUTE_IMAGE =
             DockerImageName.parse("maxcompute/maxcompute-emulator:v0.0.4");
@@ -43,7 +49,7 @@ public class EmulatorTestBase {
                     .withExposedPorts(8080)
                     .waitingFor(
                             Wait.forLogMessage(".*Started MaxcomputeEmulatorApplication.*\\n", 1))
-                    .withLogConsumer(frame -> System.out.print(frame.getUtf8String()));
+                    .withLogConsumer(new Slf4jLogConsumer(LOG));
 
     public final MaxComputeOptions testOptions =
             MaxComputeOptions.builder("ak", "sk", getEndpoint(), "mocked_mc").build();
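
Testcontainers' Slf4jLogConsumer forwards a container's stdout/stderr to the given SLF4J logger, so the emulator output now obeys the log4j2-test.properties level instead of always printing to System.out. A minimal sketch of that wiring; only the image name and port are taken from the diff, the class is hypothetical:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;
    import org.testcontainers.containers.GenericContainer;
    import org.testcontainers.containers.output.Slf4jLogConsumer;
    import org.testcontainers.utility.DockerImageName;

    class EmulatorContainerSketch {
        private static final Logger LOG = LoggerFactory.getLogger(EmulatorContainerSketch.class);

        // Container logs are routed through LOG, and are therefore muted at the
        // default ERROR test level instead of being printed unconditionally.
        final GenericContainer<?> maxcompute =
                new GenericContainer<>(
                                DockerImageName.parse("maxcompute/maxcompute-emulator:v0.0.4"))
                        .withExposedPorts(8080)
                        .withLogConsumer(new Slf4jLogConsumer(LOG));
    }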

@@ -13,9 +13,9 @@
 # limitations under the License.
 ################################################################################
-# Set root logger level to OFF to not flood build logs
+# Set root logger level to ERROR to not flood build logs
 # set manually to INFO for debugging purposes
-rootLogger.level=INFO
+rootLogger.level=ERROR
 rootLogger.appenderRef.test.ref = TestLogger
 appender.testlogger.name = TestLogger

@@ -13,9 +13,9 @@
 # limitations under the License.
 ################################################################################
-# Set root logger level to OFF to not flood build logs
+# Set root logger level to ERROR to not flood build logs
 # set manually to INFO for debugging purposes
-rootLogger.level=INFO
+rootLogger.level=ERROR
 rootLogger.appenderRef.test.ref = TestLogger
 appender.testlogger.name = TestLogger

@@ -13,9 +13,9 @@
 # limitations under the License.
 ################################################################################
-# Set root logger level to OFF to not flood build logs
+# Set root logger level to ERROR to not flood build logs
 # set manually to INFO for debugging purposes
-rootLogger.level=OFF
+rootLogger.level=ERROR
 rootLogger.appenderRef.test.ref = TestLogger
 appender.testlogger.name = TestLogger

@@ -13,9 +13,9 @@
 # limitations under the License.
 ################################################################################
-# Set root logger level to OFF to not flood build logs
+# Set root logger level to ERROR to not flood build logs
 # set manually to INFO for debugging purposes
-rootLogger.level=OFF
+rootLogger.level=ERROR
 rootLogger.appenderRef.test.ref = TestLogger
 appender.testlogger.name = TestLogger

@@ -13,9 +13,9 @@
 # limitations under the License.
 ################################################################################
-# Set root logger level to OFF to not flood build logs
+# Set root logger level to ERROR to not flood build logs
 # set manually to INFO for debugging purposes
-rootLogger.level=OFF
+rootLogger.level=ERROR
 rootLogger.appenderRef.test.ref = TestLogger
 appender.testlogger.name = TestLogger

@@ -13,9 +13,9 @@
 # limitations under the License.
 ################################################################################
-# Set root logger level to OFF to not flood build logs
+# Set root logger level to ERROR to not flood build logs
 # set manually to INFO for debugging purposes
-rootLogger.level=INFO
+rootLogger.level=ERROR
 rootLogger.appenderRef.test.ref = TestLogger
 appender.testlogger.name = TestLogger

@@ -28,7 +28,7 @@ public final class StartupOptions {
     /**
      * Performs an initial snapshot on the monitored database tables upon first startup, and
-     * continue to read change events from the databases redo logs.
+     * continue to read change events from the database's redo logs.
      */
     public static StartupOptions initial() {
         return new StartupOptions(StartupMode.INITIAL);

@@ -13,9 +13,9 @@
 # limitations under the License.
 ################################################################################
-# Set root logger level to OFF to not flood build logs
+# Set root logger level to ERROR to not flood build logs
 # set manually to INFO for debugging purposes
-rootLogger.level=INFO
+rootLogger.level=ERROR
 rootLogger.appenderRef.test.ref = TestLogger
 appender.testlogger.name = TestLogger

@@ -13,9 +13,9 @@
 # limitations under the License.
 ################################################################################
-# Set root logger level to OFF to not flood build logs
+# Set root logger level to ERROR to not flood build logs
 # set manually to INFO for debugging purposes
-rootLogger.level=INFO
+rootLogger.level=ERROR
 rootLogger.appenderRef.test.ref = TestLogger
 appender.testlogger.name = TestLogger

@@ -44,7 +44,7 @@ import java.util.concurrent.TimeUnit;
 /**
  * Debezium converts the datetime type in MySQL into a UTC timestamp by default ({@link
- * io.debezium.time.Timestamp} )The time zone is hard-coded and cannot be changed. causing
+ * io.debezium.time.Timestamp} ), The time zone is hard-coded and cannot be changed. causing
  * conversion errors part of the time Enable this converter to convert the four times "DATE",
  * "DATETIME", "TIME", and "TIMESTAMP" into the format corresponding to the configured time zone
  * (for example, yyyy-MM-dd)

@@ -13,9 +13,9 @@
 # limitations under the License.
 ################################################################################
-# Set root logger level to OFF to not flood build logs
+# Set root logger level to ERROR to not flood build logs
 # set manually to INFO for debugging purposes
-rootLogger.level=INFO
+rootLogger.level=ERROR
 rootLogger.appenderRef.test.ref = TestLogger
 appender.testlogger.name = TestLogger

@@ -13,9 +13,9 @@
 # limitations under the License.
 ################################################################################
-# Set root logger level to OFF to not flood build logs
+# Set root logger level to ERROR to not flood build logs
 # set manually to INFO for debugging purposes
-rootLogger.level=INFO
+rootLogger.level=ERROR
 rootLogger.appenderRef.test.ref = TestLogger
 appender.testlogger.name = TestLogger

@@ -13,9 +13,9 @@
 # limitations under the License.
 ################################################################################
-# Set root logger level to OFF to not flood build logs
+# Set root logger level to ERROR to not flood build logs
 # set manually to INFO for debugging purposes
-rootLogger.level=INFO
+rootLogger.level=ERROR
 rootLogger.appenderRef.test.ref = TestLogger
 appender.testlogger.name = TestLogger

@@ -13,9 +13,9 @@
 # limitations under the License.
 ################################################################################
-# Set root logger level to OFF to not flood build logs
+# Set root logger level to ERROR to not flood build logs
 # set manually to INFO for debugging purposes
-rootLogger.level=INFO
+rootLogger.level=ERROR
 rootLogger.appenderRef.test.ref = TestLogger
 appender.testlogger.name = TestLogger

@@ -13,9 +13,9 @@
 # limitations under the License.
 ################################################################################
-# Set root logger level to OFF to not flood build logs
+# Set root logger level to ERROR to not flood build logs
 # set manually to INFO for debugging purposes
-rootLogger.level=INFO
+rootLogger.level=ERROR
 rootLogger.appenderRef.test.ref = TestLogger
 appender.testlogger.name = TestLogger

@@ -13,9 +13,9 @@
 # limitations under the License.
 ################################################################################
-# Set root logger level to OFF to not flood build logs
+# Set root logger level to ERROR to not flood build logs
 # set manually to INFO for debugging purposes
-rootLogger.level=INFO
+rootLogger.level=ERROR
 rootLogger.appenderRef.test.ref = TestLogger
 appender.testlogger.name = TestLogger

@@ -13,9 +13,9 @@
 # limitations under the License.
 ################################################################################
-# Set root logger level to OFF to not flood build logs
+# Set root logger level to ERROR to not flood build logs
 # set manually to INFO for debugging purposes
-rootLogger.level=INFO
+rootLogger.level=ERROR
 rootLogger.appenderRef.test.ref = TestLogger
 appender.testlogger.name = TestLogger

@@ -37,6 +37,7 @@ import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.testcontainers.containers.GenericContainer;
+import org.testcontainers.containers.output.Slf4jLogConsumer;
 import org.testcontainers.containers.wait.strategy.Wait;
 import org.testcontainers.utility.DockerImageName;
@@ -64,7 +65,7 @@ public class MaxComputeE2eITCase extends PipelineTestEnvironment {
                     .withExposedPorts(8080)
                     .waitingFor(
                             Wait.forLogMessage(".*Started MaxcomputeEmulatorApplication.*\\n", 1))
-                    .withLogConsumer(frame -> System.out.print(frame.getUtf8String()));
+                    .withLogConsumer(new Slf4jLogConsumer(LOG));
 
     public final MaxComputeOptions testOptions =
             MaxComputeOptions.builder("ak", "sk", getEndpoint(), "mocked_mc")
@@ -80,7 +81,7 @@ public class MaxComputeE2eITCase extends PipelineTestEnvironment {
                 "select * from table1 order by col1;");
         instance.waitForSuccess();
         List<Record> result = SQLTask.getResult(instance);
-        System.out.println(result);
+        LOG.info("{}", result);
         Assert.assertEquals(2, result.size());
         // 2,x
         Assert.assertEquals("2", result.get(0).get(0));

@@ -14,9 +14,9 @@
 # limitations under the License.
 ################################################################################
-# Set root logger level to OFF to not flood build logs
+# Set root logger level to ERROR to not flood build logs
 # set manually to INFO for debugging purposes
-rootLogger.level=INFO
+rootLogger.level=ERROR
 rootLogger.appenderRef.test.ref = TestLogger
 appender.testlogger.name = TestLogger

@@ -186,7 +186,8 @@ public abstract class FlinkContainerTestEnvironment extends TestLogger {
                     dockerClient.removeContainerCmd(container.getId()).exec();
                 });
 
-        // List all images and remove the ones that are not flink、mysql、testcontainers related.
+        // List all images and remove the ones that are not Flink, MySQL, and TestContainers
+        // related.
         dockerClient.listImagesCmd().exec().stream()
                 .filter(
                         image ->

@@ -13,9 +13,9 @@
 # limitations under the License.
 ################################################################################
-# Set root logger level to OFF to not flood build logs
+# Set root logger level to ERROR to not flood build logs
 # set manually to INFO for debugging purposes
-rootLogger.level=INFO
+rootLogger.level=ERROR
 rootLogger.appenderRef.test.ref = TestLogger
 appender.testlogger.name = TestLogger

@@ -13,9 +13,9 @@
 # limitations under the License.
 ################################################################################
-# Set root logger level to OFF to not flood build logs
+# Set root logger level to ERROR to not flood build logs
 # set manually to INFO for debugging purposes
-rootLogger.level=INFO
+rootLogger.level=ERROR
 rootLogger.appenderRef.test.ref = TestLogger
 appender.testlogger.name = TestLogger

@@ -38,6 +38,8 @@ import org.apache.commons.lang3.SerializationException;
 import org.junit.Assert;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.extension.ExtendWith;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
@@ -65,6 +67,8 @@ import static org.junit.Assert.fail;
 @ExtendWith(TestLoggerExtension.class)
 public abstract class SerializerTestBase<T> {
 
+    private static final Logger LOG = LoggerFactory.getLogger(SerializerTestBase.class);
+
     private final DeeplyEqualsChecker checker;
 
     protected SerializerTestBase() {
@@ -123,8 +127,7 @@ public abstract class SerializerTestBase<T> {
                                 + instance.getClass());
             }
         } catch (Exception e) {
-            System.err.println(e.getMessage());
-            e.printStackTrace();
+            LOG.error("", e);
             fail("Exception in test: " + e.getMessage());
         }
     }
@@ -181,8 +184,7 @@ public abstract class SerializerTestBase<T> {
             TypeSerializer<T> serializer = getSerializer();
             assertEquals(len, serializer.getLength());
         } catch (Exception e) {
-            System.err.println(e.getMessage());
-            e.printStackTrace();
+            LOG.error("", e);
             fail("Exception in test: " + e.getMessage());
         }
     }
@@ -199,8 +201,7 @@ public abstract class SerializerTestBase<T> {
                 deepEquals("Copied element is not equal to the original element.", datum, copy);
             }
         } catch (Exception e) {
-            System.err.println(e.getMessage());
-            e.printStackTrace();
+            LOG.error("", e);
             fail("Exception in test: " + e.getMessage());
         }
     }
@@ -217,8 +218,7 @@ public abstract class SerializerTestBase<T> {
                 deepEquals("Copied element is not equal to the original element.", datum, copy);
             }
         } catch (Exception e) {
-            System.err.println(e.getMessage());
-            e.printStackTrace();
+            LOG.error("", e);
             fail("Exception in test: " + e.getMessage());
         }
     }
@@ -238,8 +238,7 @@ public abstract class SerializerTestBase<T> {
                 target = copy;
             }
         } catch (Exception e) {
-            System.err.println(e.getMessage());
-            e.printStackTrace();
+            LOG.error("", e);
             fail("Exception in test: " + e.getMessage());
         }
     }
@@ -265,8 +264,7 @@ public abstract class SerializerTestBase<T> {
                 assertTrue("Trailing data available after deserialization.", in.available() == 0);
             }
         } catch (Exception e) {
-            System.err.println(e.getMessage());
-            e.printStackTrace();
+            LOG.error("", e);
             fail("Exception in test: " + e.getMessage());
         }
     }
@@ -296,8 +294,7 @@ public abstract class SerializerTestBase<T> {
                 reuseValue = deserialized;
             }
         } catch (Exception e) {
-            System.err.println(e.getMessage());
-            e.printStackTrace();
+            LOG.error("", e);
             fail("Exception in test: " + e.getMessage());
         }
     }
@@ -326,8 +323,7 @@ public abstract class SerializerTestBase<T> {
             assertEquals("Wrong number of elements deserialized.", testData.length, num);
         } catch (Exception e) {
-            System.err.println(e.getMessage());
-            e.printStackTrace();
+            LOG.error("", e);
             fail("Exception in test: " + e.getMessage());
         }
     }
@@ -358,8 +354,7 @@ public abstract class SerializerTestBase<T> {
             assertEquals("Wrong number of elements deserialized.", testData.length, num);
         } catch (Exception e) {
-            System.err.println(e.getMessage());
-            e.printStackTrace();
+            LOG.error("", e);
             fail("Exception in test: " + e.getMessage());
         }
     }
@@ -392,8 +387,7 @@ public abstract class SerializerTestBase<T> {
                         toVerify.available() == 0);
             }
         } catch (Exception e) {
-            System.err.println(e.getMessage());
-            e.printStackTrace();
+            LOG.error("", e);
             fail("Exception in test: " + e.getMessage());
         }
     }
@@ -428,8 +422,7 @@ public abstract class SerializerTestBase<T> {
             assertEquals("Wrong number of elements copied.", testData.length, num);
         } catch (Exception e) {
-            System.err.println(e.getMessage());
-            e.printStackTrace();
+            LOG.error("", e);
             fail("Exception in test: " + e.getMessage());
         }
     }
@@ -449,8 +442,7 @@ public abstract class SerializerTestBase<T> {
             assertEquals(
                     "The copy of the serializer is not equal to the original one.", ser1, ser2);
         } catch (Exception e) {
-            System.err.println(e.getMessage());
-            e.printStackTrace();
+            LOG.error("", e);
             fail("Exception in test: " + e.getMessage());
         }
     }
@@ -461,8 +453,7 @@ public abstract class SerializerTestBase<T> {
         try {
             NullableSerializer.checkIfNullSupported(serializer);
         } catch (Throwable t) {
-            System.err.println(t.getMessage());
-            t.printStackTrace();
+            LOG.error("", t);
            fail("Unexpected failure of null value handling: " + t.getMessage());
        }
    }
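
This repeated replacement works because SLF4J treats a trailing Throwable argument specially: LOG.error("", e) emits both the exception message and the full stack trace, covering what the old println(e.getMessage()) plus printStackTrace() pair produced while remaining subject to the configured level. A small self-contained sketch of that behavior; the class and the thrown exception are hypothetical:

    import org.slf4j.Logger;
    import org.slf4j.LoggerFactory;

    class ExceptionLoggingSketch {
        private static final Logger LOG = LoggerFactory.getLogger(ExceptionLoggingSketch.class);

        void run() {
            try {
                throw new IllegalStateException("boom"); // stand-in for a failing test body
            } catch (Exception e) {
                // SLF4J logs the message and full stack trace of a trailing
                // Throwable argument, replacing println + printStackTrace.
                LOG.error("", e);
            }
        }
    }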

@@ -28,6 +28,8 @@ import org.apache.flink.core.memory.DataOutputView;
 import org.apache.flink.core.memory.DataOutputViewStreamWrapper;
 
 import org.junit.jupiter.api.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
 
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
@@ -40,6 +42,9 @@ import static org.junit.jupiter.api.Assertions.assertNotNull;
 /** A test for the {@link DataTypeSerializer}. */
 public class DataTypeSerializerTest extends SerializerTestBase<DataType> {
 
+    private static final Logger LOG = LoggerFactory.getLogger(DataTypeSerializerTest.class);
+
     @Override
     protected TypeSerializer<DataType> createSerializer() {
         return new DataTypeSerializer();
@@ -104,12 +109,12 @@ public class DataTypeSerializerTest extends SerializerTestBase<DataType> {
         // Log to ensure INSTANCE is initialized
         assertNotNull(RowTypeSerializer.INSTANCE, "RowTypeSerializer.INSTANCE is null");
-        System.out.println("RowTypeSerializer.INSTANCE is initialized");
+        LOG.info("RowTypeSerializer.INSTANCE is initialized");
 
         // Copy the RowType
         RowType copiedRow = (RowType) serializer.copy(outerRowType);
         assertNotNull(copiedRow, "Copied RowType is null");
-        System.out.println("Copied RowType: " + copiedRow);
+        LOG.info("Copied RowType: {}", copiedRow);
 
         // Serialize the RowType
         ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
@@ -126,6 +131,6 @@ public class DataTypeSerializerTest extends SerializerTestBase<DataType> {
         assertNotNull(deserializedRow, "Deserialized RowType is null");
         assertEquals(
                 outerRowType, deserializedRow, "Deserialized RowType does not match the original");
-        System.out.println("Deserialized RowType: " + deserializedRow);
+        LOG.info("Deserialized RowType: {}", deserializedRow);
     }
 }

@@ -13,9 +13,9 @@
 # limitations under the License.
 ################################################################################
-# Set root logger level to OFF to not flood build logs
+# Set root logger level to ERROR to not flood build logs
 # set manually to INFO for debugging purposes
-rootLogger.level = INFO
+rootLogger.level = ERROR
 rootLogger.appenderRef.test.ref = TestLogger
 appender.testlogger.name = TestLogger
