@@ -66,14 +66,12 @@ jobs:
  dependsOn: compile_${{parameters.stage_name}}
  condition: and(succeeded(), not(eq(variables['MODE'], 'e2e')))
  pool: ${{parameters.test_pool_definition}}
  timeoutInMinutes: 70
  timeoutInMinutes: 60
  cancelTimeoutInMinutes: 1
  workspace:
    clean: all
  strategy:
    matrix:
      mysql:
        module: mysql
      postgres:
        module: postgres
      oracle:
@@ -86,8 +84,6 @@ jobs:
        module: tidb
      db2:
        module: db2
      e2e:
        module: e2e
      misc:
        module: misc
  steps:
@@ -126,6 +122,64 @@ jobs:
    inputs:
      testResultsFormat: 'JUnit'

  # upload debug artifacts
  - task: PublishPipelineArtifact@1
    condition: not(eq('$(DEBUG_FILES_OUTPUT_DIR)', ''))
    displayName: Upload Logs
    inputs:
      targetPath: $(DEBUG_FILES_OUTPUT_DIR)
      artifact: logs-${{parameters.stage_name}}-$(DEBUG_FILES_NAME)

- job: test_${{parameters.stage_name}}
  dependsOn: compile_${{parameters.stage_name}}
  condition: and(succeeded(), not(eq(variables['MODE'], 'e2e')))
  pool: ${{parameters.test_pool_definition}}
  timeoutInMinutes: 90
  cancelTimeoutInMinutes: 1
  workspace:
    clean: all
  strategy:
    matrix:
      mysql:
        module: mysql
      e2e:
        module: e2e
  steps:
  # download artifact from compile stage
  - task: DownloadPipelineArtifact@2
    inputs:
      path: $(FLINK_ARTIFACT_DIR)
      artifact: FlinkCompileArtifact-${{parameters.stage_name}}

  - script: ./tools/azure-pipelines/unpack_build_artifact.sh
    displayName: "Unpack Build artifact"

  - task: Cache@2
    inputs:
      key: $(CACHE_KEY)
      restoreKeys: $(CACHE_FALLBACK_KEY)
      path: $(MAVEN_CACHE_FOLDER)
    continueOnError: true # continue the build even if the cache fails.
    condition: not(eq('${{parameters.test_pool_definition.name}}', 'Default'))
    displayName: Cache Maven local repo

  - script: |
      echo "##vso[task.setvariable variable=JAVA_HOME]$JAVA_HOME_${{parameters.jdk}}_X64"
      echo "##vso[task.setvariable variable=PATH]$JAVA_HOME_${{parameters.jdk}}_X64/bin:$PATH"
    displayName: "Set JDK"

  - script: sudo sysctl -w kernel.core_pattern=core.%p
    displayName: Set coredump pattern

  # Test
  - script: ./tools/azure-pipelines/uploading_watchdog.sh ./tools/ci/test_controller.sh $(module)
    displayName: Test - $(module)

  - task: PublishTestResults@2
    condition: succeededOrFailed()
    inputs:
      testResultsFormat: 'JUnit'

  # upload debug artifacts
  - task: PublishPipelineArtifact@1
    condition: not(eq('$(DEBUG_FILES_OUTPUT_DIR)', ''))