[build][oceanbase] Use self-hosted agent for oceanbase cdc tests
parent 28b1b07661
commit 713746746a

@@ -0,0 +1,121 @@
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

parameters:
  test_pool_definition: # defines the hardware pool for compilation and unit test execution.
  stage_name: # defines a unique identifier for all jobs in a stage (in case the jobs are added multiple times to a stage)
  run_end_to_end: # if set to 'true', the end-to-end tests will be executed
  jdk: # the JDK version to use
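
# A hypothetical usage sketch (not part of this template): a pipeline could
# include these jobs roughly like the following, assuming the template is saved
# as tools/azure-pipelines/jobs-template.yml (the path and values below are
# assumptions for illustration only):
#
#   jobs:
#     - template: tools/azure-pipelines/jobs-template.yml
#       parameters:
#         test_pool_definition:
#           vmImage: 'ubuntu-20.04'
#         stage_name: ci_build
#         run_end_to_end: 'false'
#         jdk: 8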

jobs:
  - job: compile_${{parameters.stage_name}}
    # succeeded() is needed to allow job cancellation
    condition: and(succeeded(), not(eq(variables['MODE'], 'e2e')))
    pool: ${{parameters.test_pool_definition}}
    timeoutInMinutes: 240
    cancelTimeoutInMinutes: 1
    workspace:
      clean: all # this cleans the entire workspace directory before running a new job
      # It is necessary because the custom build machines are reused for tests.
      # See also https://docs.microsoft.com/en-us/azure/devops/pipelines/process/phases?view=azure-devops&tabs=yaml#workspace

    steps:
      # The Cache task persists the .m2 directory between builds, so that
      # we do not have to re-download all dependencies from Maven Central for
      # each build. The hope is that downloading the cache is faster than
      # downloading all dependencies individually.
      # In this configuration, we use a hash over all committed (not generated) .pom files
      # as the key for the build cache (CACHE_KEY). If we have a cache miss on the hash
      # (usually because a pom file has changed), we'll fall back to a key without
      # the pom files (CACHE_FALLBACK_KEY).
      # Official documentation of the Cache task: https://docs.microsoft.com/en-us/azure/devops/pipelines/caching/?view=azure-devops
      - task: Cache@2
        inputs:
          key: $(CACHE_KEY)
          restoreKeys: $(CACHE_FALLBACK_KEY)
          path: $(MAVEN_CACHE_FOLDER)
        continueOnError: true # continue the build even if the cache fails.
        displayName: Cache Maven local repo
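
      # An illustrative assumption (none of these are defined in this file):
      # CACHE_KEY, CACHE_FALLBACK_KEY and MAVEN_CACHE_FOLDER are expected to
      # come from the calling pipeline's variables, for example along these
      # lines (file-pattern segments such as **/pom.xml are hashed by the
      # Cache task):
      #
      #   variables:
      #     MAVEN_CACHE_FOLDER: $(Pipeline.Workspace)/.m2/repository
      #     CACHE_KEY: maven | $(Agent.OS) | **/pom.xml
      #     CACHE_FALLBACK_KEY: maven | $(Agent.OS)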

      - script: |
          echo "##vso[task.setvariable variable=JAVA_HOME]$JAVA_HOME_${{parameters.jdk}}_X64"
          echo "##vso[task.setvariable variable=PATH]$JAVA_HOME_${{parameters.jdk}}_X64/bin:$PATH"
        displayName: "Set JDK"
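
      # Note: the ##vso[task.setvariable ...] lines above are Azure DevOps
      # logging commands; variables set this way only become visible to the
      # steps that follow, e.g. a later step could read them as (illustration):
      #   - script: echo "JAVA_HOME is $JAVA_HOME"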

      # Compile
      - script: |
          ./tools/ci/compile.sh || exit $?
          ./tools/azure-pipelines/create_build_artifact.sh
        displayName: Compile

      # upload artifacts for next stage
      - task: PublishPipelineArtifact@1
        inputs:
          targetPath: $(FLINK_ARTIFACT_DIR)
          artifact: FlinkCompileArtifact-${{parameters.stage_name}}

  - job: test_${{parameters.stage_name}}
    dependsOn: compile_${{parameters.stage_name}}
    condition: and(succeeded(), not(eq(variables['MODE'], 'e2e')))
    pool: ${{parameters.test_pool_definition}}
    timeoutInMinutes: 240
    cancelTimeoutInMinutes: 1
    workspace:
      clean: all
    strategy:
      matrix:
        oceanbase:
          module: oceanbase
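
    # The matrix above produces one test job per entry, with $(module) set
    # accordingly. Additional modules could be covered with more entries,
    # e.g. (hypothetical):
    #   mysql:
    #     module: mysql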
    steps:
      # download artifact from compile stage
      - task: DownloadPipelineArtifact@2
        inputs:
          path: $(FLINK_ARTIFACT_DIR)
          artifact: FlinkCompileArtifact-${{parameters.stage_name}}

      - script: ./tools/azure-pipelines/unpack_build_artifact.sh
        displayName: "Unpack Build artifact"

      - task: Cache@2
        inputs:
          key: $(CACHE_KEY)
          restoreKeys: $(CACHE_FALLBACK_KEY)
          path: $(MAVEN_CACHE_FOLDER)
        continueOnError: true # continue the build even if the cache fails.
        condition: not(eq('${{parameters.test_pool_definition.name}}', 'Default'))
        displayName: Cache Maven local repo
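
      # (An inference, stated as an assumption: 'Default' is the built-in
      # self-hosted agent pool, where a persistent local Maven repository
      # makes the remote cache redundant, so the Cache task above is skipped
      # there.)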

      - script: |
          echo "##vso[task.setvariable variable=JAVA_HOME]$JAVA_HOME_${{parameters.jdk}}_X64"
          echo "##vso[task.setvariable variable=PATH]$JAVA_HOME_${{parameters.jdk}}_X64/bin:$PATH"
        displayName: "Set JDK"

      - script: sudo sysctl -w kernel.core_pattern=core.%p
        displayName: Set coredump pattern
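
      # core.%p makes the kernel write core dumps as core.<pid> relative to the
      # crashing process's working directory; presumably the watchdog below
      # collects them as debug artifacts.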

      # Test
      - script: ./tools/azure-pipelines/uploading_watchdog.sh ./tools/ci/test_controller.sh $(module)
        displayName: Test - $(module)

      - task: PublishTestResults@2
        condition: succeededOrFailed()
        inputs:
          testResultsFormat: 'JUnit'
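
      # (Assumption: with no explicit testResultsFiles pattern, the task falls
      # back to its default of '**/TEST-*.xml', which matches Maven Surefire
      # reports.)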

      # upload debug artifacts
      - task: PublishPipelineArtifact@1
        condition: not(eq('$(DEBUG_FILES_OUTPUT_DIR)', ''))
        displayName: Upload Logs
        inputs:
          targetPath: $(DEBUG_FILES_OUTPUT_DIR)
          artifact: logs-${{parameters.stage_name}}-$(DEBUG_FILES_NAME)