# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

name: Flink CDC CI
on:
  push:
    branches:
      - master
      - release-*
    paths-ignore:
      - 'docs/**'
      - 'README.md'
  pull_request:
    branches:
      - master
      - release-*
    paths-ignore:
      - 'docs/**'
      - 'README.md'

# Concurrency strategy:
# github.workflow: distinguish this workflow from others
# github.event_name: distinguish `push` event from `pull_request` event
# github.event.number: set to the number of the pull request if `pull_request` event
# github.run_id: otherwise, it's a `push` or `schedule` event, only cancel if we rerun the workflow
#
# Reference:
# https://docs.github.com/en/actions/using-jobs/using-concurrency
# https://docs.github.com/en/actions/learn-github-actions/contexts#github-context
concurrency:
  group: ${{ github.workflow }}-${{ github.event_name }}-${{ github.event.number || github.run_id }}
  cancel-in-progress: true
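# For example (illustrative numbers): a pull_request event on PR #1234 evaluates to the
# group "Flink CDC CI-pull_request-1234", so pushing new commits to that PR cancels any
# run still in flight, while push events fall back to the unique run_id and are only
# cancelled when the same run is manually re-triggered.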

env:
  MODULES_CORE: "\
    flink-cdc-cli,\
    flink-cdc-common,\
    flink-cdc-composer,\
    flink-cdc-runtime"

  MODULES_PIPELINE_CONNECTORS: "\
    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-values,\
    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-mysql,\
    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-doris,\
    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-starrocks,\
    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-kafka,\
    flink-cdc-connect/flink-cdc-pipeline-connectors/flink-cdc-pipeline-connector-paimon"

  MODULES_MYSQL: "\
    flink-cdc-connect/flink-cdc-source-connectors/flink-connector-mysql-cdc,\
    flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-mysql-cdc"

  MODULES_POSTGRES: "\
    flink-cdc-connect/flink-cdc-source-connectors/flink-connector-postgres-cdc,\
    flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-postgres-cdc"

  MODULES_ORACLE: "\
    flink-cdc-connect/flink-cdc-source-connectors/flink-connector-oracle-cdc,\
    flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-oracle-cdc"

  MODULES_MONGODB: "\
    flink-cdc-connect/flink-cdc-source-connectors/flink-connector-mongodb-cdc,\
    flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-mongodb-cdc"

  MODULES_SQLSERVER: "\
    flink-cdc-connect/flink-cdc-source-connectors/flink-connector-sqlserver-cdc,\
    flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-sqlserver-cdc"

  MODULES_TIDB: "\
    flink-cdc-connect/flink-cdc-source-connectors/flink-connector-tidb-cdc,\
    flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-tidb-cdc"

  MODULES_OCEANBASE: "\
    flink-cdc-connect/flink-cdc-source-connectors/flink-connector-oceanbase-cdc,\
    flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-oceanbase-cdc"

  MODULES_DB2: "\
    flink-cdc-connect/flink-cdc-source-connectors/flink-connector-db2-cdc,\
    flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-db2-cdc"

  MODULES_VITESS: "\
    flink-cdc-connect/flink-cdc-source-connectors/flink-connector-vitess-cdc,\
    flink-cdc-connect/flink-cdc-source-connectors/flink-sql-connector-vitess-cdc"

  MODULES_PIPELINE_E2E: "\
    flink-cdc-e2e-tests/flink-cdc-pipeline-e2e-tests"

  MODULES_SOURCE_E2E: "\
    flink-cdc-e2e-tests/flink-cdc-source-e2e-tests"
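
# Note on the quoting above: inside YAML double-quoted scalars, a trailing backslash
# escapes the line break, so each MODULES_* value collapses into a single comma-separated
# list with no embedded whitespace, which is the form Maven's -pl option expects in the
# build steps below.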

jobs:
  license_check:
    runs-on: ubuntu-latest
    steps:
      - name: Check out repository code
        uses: actions/checkout@v4
        with:
          submodules: true
      - name: Set up Ruby environment
        uses: ruby/setup-ruby@v1
        with:
          ruby-version: '3.3'
      - name: Compile jar packages
        run: mvn --no-snapshot-updates -B package -DskipTests
      - name: Run license check
        run: gem install rubyzip -v 2.3.0 && ./tools/ci/license_check.rb
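
  # Note: the jars are packaged before the license check because license_check.rb
  # evidently inspects the bundled artifacts rather than the source tree; rubyzip 2.3.0
  # is presumably what lets the Ruby script open those jars as zip archives.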

  compile_and_test:
    needs: license_check
    # Only run the CI pipeline for the flink-cdc-connectors repository
    # if: github.repository == 'apache/flink-cdc-connectors'
    runs-on: ubuntu-latest
    strategy:
      matrix:
        jdk: [ 8 ]
        module: [ "core",
                  "pipeline_connectors",
                  "mysql",
                  "postgres",
                  "oracle",
                  "mongodb",
                  "sqlserver",
                  "tidb",
                  "oceanbase",
                  "db2",
                  "vitess",
                  "pipeline_e2e",
                  "source_e2e"
        ]
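    # With a single JDK version and 13 module groups, this matrix fans out into 13
    # parallel jobs, each compiling and testing only its own slice of the repository.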
    timeout-minutes: 120
    env:
      MVN_COMMON_OPTIONS: -Dmaven.wagon.http.pool=false \
        -Dorg.slf4j.simpleLogger.showDateTime=true \
        -Dorg.slf4j.simpleLogger.dateTimeFormat=HH:mm:ss.SSS \
        -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn \
        --no-snapshot-updates -B \
        --settings /home/vsts/work/1/s/tools/ci/google-mirror-settings.xml \
        -Dfast -Dlog.dir=/home/vsts/work/_temp/debug_files \
        -Dlog4j.configurationFile=file:///home/vsts/work/1/s/tools/ci/log4j.properties
    steps:
      - run: echo "Running CI pipeline for JDK version ${{ matrix.jdk }}"

      - name: Clean up disk space
        run: |
          set -euo pipefail

          echo "Disk space before cleanup"
          df -h

          echo "Cleaning up disk space"
          sudo rm -rf /usr/share/dotnet
          sudo rm -rf /usr/local/lib/android
          sudo rm -rf /opt/ghc
          sudo rm -rf /opt/hostedtoolcache/CodeQL
          sudo docker image prune --all --force

          echo "Disk space after cleanup"
          df -h
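
      # The paths removed above belong to toolchains preinstalled on the ubuntu-latest
      # runner (.NET, the Android SDK, GHC, CodeQL) that this build never uses; removing
      # them and pruning cached Docker images frees space for the many container images
      # the connector tests pull.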

      - name: Check out repository code
        uses: actions/checkout@v4
        with:
          submodules: true

      - name: Set JDK
        uses: actions/setup-java@v4
        with:
          java-version: ${{ matrix.jdk }}
          distribution: 'temurin'
          cache: 'maven'

      - name: Set Maven 3.8.6
        uses: stCarolas/setup-maven@v5
        with:
          maven-version: 3.8.6

      - name: Compile and test ${{ matrix.module }}
        timeout-minutes: 90
        run: |
          set -o pipefail

          case ${{ matrix.module }} in
            ("core")
              modules=${{ env.MODULES_CORE }}
            ;;
            ("pipeline_connectors")
              modules=${{ env.MODULES_PIPELINE_CONNECTORS }}
            ;;
            ("mysql")
              modules=${{ env.MODULES_MYSQL }}
            ;;
            ("postgres")
              modules=${{ env.MODULES_POSTGRES }}
            ;;
            ("oracle")
              modules=${{ env.MODULES_ORACLE }}
            ;;
            ("mongodb")
              modules=${{ env.MODULES_MONGODB }}
            ;;
            ("sqlserver")
              modules=${{ env.MODULES_SQLSERVER }}
            ;;
            ("tidb")
              modules=${{ env.MODULES_TIDB }}
            ;;
            ("oceanbase")
              modules=${{ env.MODULES_OCEANBASE }}
            ;;
            ("db2")
              modules=${{ env.MODULES_DB2 }}
            ;;
            ("vitess")
              modules=${{ env.MODULES_VITESS }}
            ;;
            ("pipeline_e2e")
              compile_modules="${{ env.MODULES_CORE }},${{ env.MODULES_PIPELINE_CONNECTORS }},${{ env.MODULES_MYSQL }},${{ env.MODULES_POSTGRES }},${{ env.MODULES_ORACLE }},${{ env.MODULES_MONGODB }},${{ env.MODULES_SQLSERVER }},${{ env.MODULES_TIDB }},${{ env.MODULES_OCEANBASE }},${{ env.MODULES_DB2 }},${{ env.MODULES_VITESS }},${{ env.MODULES_PIPELINE_E2E }}"
              modules=${{ env.MODULES_PIPELINE_E2E }}
            ;;
            ("source_e2e")
              compile_modules="${{ env.MODULES_CORE }},${{ env.MODULES_PIPELINE_CONNECTORS }},${{ env.MODULES_MYSQL }},${{ env.MODULES_POSTGRES }},${{ env.MODULES_ORACLE }},${{ env.MODULES_MONGODB }},${{ env.MODULES_SQLSERVER }},${{ env.MODULES_TIDB }},${{ env.MODULES_OCEANBASE }},${{ env.MODULES_DB2 }},${{ env.MODULES_VITESS }},${{ env.MODULES_SOURCE_E2E }}"
              modules=${{ env.MODULES_SOURCE_E2E }}
            ;;
          esac

          if [ ${{ matrix.module }} != "pipeline_e2e" ] && [ ${{ matrix.module }} != "source_e2e" ]; then
            compile_modules=$modules
          fi

          mvn --no-snapshot-updates -B -DskipTests -pl $compile_modules -am install && mvn --no-snapshot-updates -B -pl $modules verify
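
      # Two Maven passes: the first installs $compile_modules plus their in-repo
      # dependencies (-pl restricts the reactor to the listed modules, -am adds the
      # modules they depend on), the second runs `verify` on $modules alone. For the
      # e2e flavors this means the whole connector tree is compiled, but only the
      # e2e module itself is tested.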

      - name: Print JVM thread dumps when the job fails
        if: ${{ failure() }}
        run: |
          # ----------------------------------------------------------------------------
          # Copyright 2023 The Netty Project
          #
          # ----------------------------------------------------------------------------
          # Source: https://github.com/netty/netty/blob/main/.github/actions/thread-dump-jvms/action.yml
          echo "$OSTYPE"
          if [[ "$OSTYPE" == "linux-gnu"* ]] && command -v sudo &> /dev/null; then
            echo "Setting up JVM thread dumps"
            # use jattach so that Java processes in docker containers are also covered
            # download jattach
            curl -s -L -o /tmp/jattach https://github.com/apangin/jattach/releases/download/v2.1/jattach
            if command -v sha256sum &> /dev/null; then
              # verify hash of jattach binary
              sha256sum -c <(echo "07885fdc782e02e7302c6d190f54c3930afa10a38140365adf54076ec1086a8e  /tmp/jattach") || exit 1
            fi
            chmod +x /tmp/jattach
            for java_pid in $(sudo pgrep java); do
              echo "----------------------- pid $java_pid -----------------------"
              echo "command line: $(sudo cat /proc/$java_pid/cmdline | xargs -0 echo)"
              sudo /tmp/jattach $java_pid jcmd VM.command_line || true
              sudo /tmp/jattach $java_pid jcmd "Thread.print -l"
              sudo /tmp/jattach $java_pid jcmd GC.heap_info || true
            done
          else
            for java_pid in $(jps -q -J-XX:+PerfDisableSharedMem); do
              echo "----------------------- pid $java_pid -----------------------"
              jcmd $java_pid VM.command_line || true
              jcmd $java_pid Thread.print -l
              jcmd $java_pid GC.heap_info || true
            done
          fi
          exit 0
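
      # Diagnostics only: the final `exit 0` guarantees this step never masks the real
      # build failure, and jattach (rather than plain jcmd) is used on Linux so that,
      # per the inline comment above, JVMs running inside Docker containers are
      # captured as well.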

  migration_test_ut:
    needs: license_check
    runs-on: ubuntu-latest
    steps:
      - name: Check out repository code
        uses: actions/checkout@v4
        with:
          submodules: true
      - name: Compile snapshot CDC version
        run: mvn --no-snapshot-updates -B install -DskipTests
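      # `install` (not just `package`) is deliberate here: it publishes the snapshot
      # artifacts to the local ~/.m2 repository so the separate flink-cdc-migration-tests
      # reactor below can resolve them.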
      - name: Run migration tests
        run: cd flink-cdc-migration-tests && mvn clean verify

  pipeline_migration_test:
    needs: migration_test_ut
    runs-on: ubuntu-latest
    strategy:
      matrix:
        java-version: [ '8', '11' ]

    steps:
      - uses: actions/checkout@v4
      - name: Set up Ruby
        uses: ruby/setup-ruby@v1
        with:
          ruby-version: 3.0
          bundler-cache: true # runs 'bundle install' and caches installed gems automatically
      - uses: actions/setup-java@v4
        with:
          java-version: ${{ matrix.java-version }}
          distribution: temurin
          cache: maven
      - name: Install dependencies
        run: gem install terminal-table
      - name: Prepare CDC versions
        run: CDC_SOURCE_HOME=$PWD ruby tools/mig-test/prepare_libs.rb
      - name: Prepare Flink distro
        run: wget https://dlcdn.apache.org/flink/flink-1.18.1/flink-1.18.1-bin-scala_2.12.tgz && tar -xzvf flink-1.18.1-bin-scala_2.12.tgz
        working-directory: ./tools/mig-test
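      # Caveat: dlcdn.apache.org hosts only current releases, so if Flink 1.18.1
      # rotates out this wget may start failing and the URL would likely need to move
      # to https://archive.apache.org/dist/flink/.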
      - name: Patch Flink configs
        run: FLINK_HOME=./flink-1.18.1/ ruby misc/patch_flink_conf.rb
        working-directory: ./tools/mig-test
      - name: Start containers
        run: cd conf && docker compose up -d
        working-directory: ./tools/mig-test
      - name: Run migration tests
        run: FLINK_HOME=./flink-1.18.1/ ruby run_migration_test.rb
        working-directory: ./tools/mig-test
      - name: Stop containers
        if: always()
        run: cd conf && docker compose down
        working-directory: ./tools/mig-test
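
  # Same harness as pipeline_migration_test above, with one extra step that compiles
  # dummy DataStream jobs first and then runs the migration tests from the datastream
  # directory instead.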
  data_stream_migration_test:
    needs: migration_test_ut
    runs-on: ubuntu-latest
    strategy:
      matrix:
        java-version: [ '8', '11' ]

    steps:
      - uses: actions/checkout@v4
      - name: Set up Ruby
        uses: ruby/setup-ruby@v1
        with:
          ruby-version: 3.0
          bundler-cache: true # runs 'bundle install' and caches installed gems automatically
      - uses: actions/setup-java@v4
        with:
          java-version: ${{ matrix.java-version }}
          distribution: temurin
          cache: maven
      - name: Install dependencies
        run: gem install terminal-table
      - name: Prepare CDC versions
        run: CDC_SOURCE_HOME=$PWD ruby tools/mig-test/prepare_libs.rb
      - name: Prepare Flink distro
        run: wget https://dlcdn.apache.org/flink/flink-1.18.1/flink-1.18.1-bin-scala_2.12.tgz && tar -xzvf flink-1.18.1-bin-scala_2.12.tgz
        working-directory: ./tools/mig-test
      - name: Patch Flink configs
        run: FLINK_HOME=./flink-1.18.1/ ruby misc/patch_flink_conf.rb
        working-directory: ./tools/mig-test
      - name: Compile Dummy DataStream Jobs
        run: cd datastream && ruby compile_jobs.rb
        working-directory: ./tools/mig-test
      - name: Start containers
        run: cd conf && docker compose up -d
        working-directory: ./tools/mig-test
      - name: Run migration tests
        run: cd datastream && FLINK_HOME=../flink-1.18.1/ ruby run_migration_test.rb
        working-directory: ./tools/mig-test
      - name: Stop containers
        if: always()
        run: cd conf && docker compose down
        working-directory: ./tools/mig-test