# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Workflow template for triggering Flink's test suite.
name: "Apache Flink Test Workflow Template"
on:
  workflow_call:
    inputs:
      workflow-caller-id:
        description: "The calling job's ID that can be used for build artifact names (that need to be different between different jobs of the same workflow)."
        default: ""
        type: string
      environment:
        description: "Defines environment variables for downstream scripts."
        required: true
        type: string
      jdk_version:
        description: "The Java version to use."
        default: 8
        type: number
    secrets:
      s3_bucket:
        required: false
      s3_access_key:
        required: false
      s3_secret_key:
        required: false

permissions: read-all
# Running logic within a container comes with challenges around file permissions (e.g. when trying
# to generate the hash for a certain set of files; see https://github.com/actions/cache/issues/753):
#
# The code is cloned into the runner's workspace /home/runner/work/ but outside the container.
# The runner's workspace folder is then mounted to /__w within the container. Files that were generated
# as part of the checkout will have the runner's user as the owner. Any files that are generated during
# the workflow execution will have the container's user as the owner (i.e. root). That can cause issues
# with actions that need to access the files of the checkout.
#
# We can work around this issue by copying the source code to a separate location (i.e. the container
# user's home folder). Note that any file that is subject to caching should still live in the mounted
# folder to ensure that it stays accessible to the cache action.
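#
# A minimal sketch of that workaround, assuming the copy is a plain recursive copy (the actual
# logic lives in .github/actions/job_init, whose source_directory/target_directory inputs are
# wired up in the jobs below):
#
#   cp -a "${MOUNTED_WORKING_DIR}/." "${CONTAINER_LOCAL_WORKING_DIR}"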
env:
  MOUNTED_WORKING_DIR: /__w/flink/flink
  CONTAINER_LOCAL_WORKING_DIR: /root/flink
  # The following two variables are used by the utility scripts for creating
  # and unpacking the build artifacts.
  FLINK_ARTIFACT_DIR: /root/artifact-directory
  FLINK_ARTIFACT_FILENAME: flink_artifacts.tar.gz
  MAVEN_REPO_FOLDER: /root/.m2/repository
  MAVEN_ARGS: -Dmaven.repo.local=/root/.m2/repository
  # required by tools/azure-pipelines/cache_docker_images.sh
  DOCKER_IMAGES_CACHE_FOLDER: /root/.docker-cache
jobs:
  compile:
    name: "Compile"
    runs-on: ubuntu-22.04
    container:
      image: mapohl/flink-ci:FLINK-34194
      # --init starts the container's main process as an init process that cleans up any daemon processes during shutdown
      # --privileged allows writing coredumps in docker (FLINK-16973)
      options: --init --privileged
    env:
      # timeout in minutes - this environment variable is required by uploading_watchdog.sh
      GHA_JOB_TIMEOUT: 240
    outputs:
      stringified-workflow-name: ${{ steps.workflow-prep-step.outputs.stringified-workflow-name }}
    steps:
      - name: "Flink Checkout"
        uses: actions/checkout@v4
        with:
          persist-credentials: false
      - name: "Initialize job"
        uses: "./.github/actions/job_init"
        with:
          jdk_version: ${{ inputs.jdk_version }}
          maven_repo_folder: ${{ env.MAVEN_REPO_FOLDER }}
          source_directory: ${{ env.MOUNTED_WORKING_DIR }}
          target_directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
- name: "Stringify workflow name"
id: workflow-prep-step
run: |
# adds a stringified version of the workflow name that can be used for generating unique build artifact names within a composite workflow
# - replaces any special characters (except for underscores and dots) with dashes
# - makes the entire string lowercase
# - condenses multiple dashes into a single one
# - removes leading and following dashes
stringified_workflow_name=$(echo "${{ github.workflow }}-${{ inputs.workflow-caller-id }}" | tr -C '[:alnum:]._' '-' | tr '[:upper:]' '[:lower:]' | sed -e 's/--*/-/g' -e 's/^-*//g' -e 's/-*$//g')
echo "stringified-workflow-name=${stringified_workflow_name}" >> $GITHUB_OUTPUT
- name: "Compile Flink"
uses: "./.github/actions/run_mvn"
timeout-minutes: ${{ fromJSON(env.GHA_JOB_TIMEOUT) }}
with:
working_directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
maven-parameters: "test-compile -Dflink.markBundledAsOptional=false -Dfast"
env: "${{ inputs.environment }}"
- name: "Collect build artifacts"
working-directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
run: |
./tools/azure-pipelines/create_build_artifact.sh
- name: "Upload artifacts to make them available in downstream jobs"
uses: actions/upload-artifact@v4
with:
name: build-artifacts-${{ steps.workflow-prep-step.outputs.stringified-workflow-name }}-${{ github.run_number }}
path: ${{ env.FLINK_ARTIFACT_DIR }}/${{ env.FLINK_ARTIFACT_FILENAME }}
if-no-files-found: error
# use minimum here because we only need these artifacts to speed up the build
retention-days: 1
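      # The artifact name above is rebuilt verbatim in every downstream job from this job's
      # stringified-workflow-name output, i.e.:
      #
      #   name: build-artifacts-${{ needs.compile.outputs.stringified-workflow-name }}-${{ github.run_number }}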
  packaging:
    name: "Test packaging/licensing"
    needs: compile
    runs-on: ubuntu-22.04
    container:
      image: mapohl/flink-ci:FLINK-34194
      # --init starts the container's main process as an init process that cleans up any daemon processes during shutdown
      # --privileged allows writing coredumps in docker (FLINK-16973)
      options: --init --privileged
    steps:
      - name: "Flink Checkout"
        uses: actions/checkout@v4
        with:
          persist-credentials: false
      - name: "Initialize job"
        uses: "./.github/actions/job_init"
        with:
          jdk_version: ${{ inputs.jdk_version }}
          maven_repo_folder: ${{ env.MAVEN_REPO_FOLDER }}
          source_directory: ${{ env.MOUNTED_WORKING_DIR }}
          target_directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
      - name: "Download build artifacts from compile job"
        uses: actions/download-artifact@v4
        with:
          name: build-artifacts-${{ needs.compile.outputs.stringified-workflow-name }}-${{ github.run_number }}
          path: ${{ env.FLINK_ARTIFACT_DIR }}
      - name: "Unpack build artifact"
        working-directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
        run: |
          ./tools/azure-pipelines/unpack_build_artifact.sh
      - name: "Test"
        working-directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
        run: |
          ${{ inputs.environment }} ./tools/ci/compile_ci.sh || exit $?
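      # ${{ inputs.environment }} is expanded textually before the shell runs, so a caller passing,
      # for example, environment: "PROFILE='-Dhadoop.version=2.10.2'" (a hypothetical value, not
      # taken from this file) makes the step execute:
      #
      #   PROFILE='-Dhadoop.version=2.10.2' ./tools/ci/compile_ci.sh || exit $?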
  test:
    name: "Test (module: ${{ matrix.module }})"
    needs: compile
    runs-on: ubuntu-22.04
    container:
      image: mapohl/flink-ci:FLINK-34194
      # --init starts the container's main process as an init process that cleans up any daemon processes during shutdown
      # --privileged allows writing coredumps in docker (FLINK-16973)
      options: --init --privileged
      # /mnt is a separate disk mounted into the host filesystem that offers more free disk space
      volumes:
        - /mnt:/root
    env:
      # timeout in minutes - this environment variable is required by uploading_watchdog.sh
      GHA_JOB_TIMEOUT: 240
    strategy:
      fail-fast: false
      matrix:
        include:
          - module: core
            stringified-module-name: core
          - module: python
            stringified-module-name: python
          - module: table
            stringified-module-name: table
          - module: connect
            stringified-module-name: connect
          - module: tests
            stringified-module-name: tests
          - module: misc
            stringified-module-name: misc
    steps:
      - name: "Flink Checkout"
        uses: actions/checkout@v4
        with:
          persist-credentials: false
      - name: "Initialize job"
        uses: "./.github/actions/job_init"
        with:
          jdk_version: ${{ inputs.jdk_version }}
          maven_repo_folder: ${{ env.MAVEN_REPO_FOLDER }}
          source_directory: ${{ env.MOUNTED_WORKING_DIR }}
          target_directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
      - name: "Set coredump pattern"
        working-directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
        run: sudo sysctl -w kernel.core_pattern=core.%p
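      # "%p" in the pattern expands to the PID of the crashing process; with a relative pattern
      # like this one, the kernel writes a file named core.<pid> into the working directory of
      # the crashed process, where it can later be collected alongside the other debug files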
- name: "Download build artifacts from compile job"
uses: actions/download-artifact@v4
with:
name: build-artifacts-${{ needs.compile.outputs.stringified-workflow-name }}-${{ github.run_number }}
path: ${{ env.FLINK_ARTIFACT_DIR }}
- name: "Unpack build artifact"
working-directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
run: |
./tools/azure-pipelines/unpack_build_artifact.sh
- name: "Try loading Docker images from Cache"
id: docker-cache
uses: actions/cache@v4
with:
path: ${{ env.DOCKER_IMAGES_CACHE_FOLDER }}
key: ${{ matrix.module }}-docker-${{ runner.os }}-${{ hashFiles('**/cache_docker_images.sh', '**/flink-test-utils-parent/**/DockerImageVersions.java') }}
restore-keys: ${{ matrix.module }}-docker-${{ runner.os }}
- name: "Load Docker images if not present in cache, yet"
if: ${{ !cancelled() && !steps.docker-cache.cache.hit }}
working-directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
run: ./tools/azure-pipelines/cache_docker_images.sh load
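      # actions/cache only sets outputs.cache-hit to 'true' on an exact key match; a restore-keys
      # (prefix) match restores the folder but leaves cache-hit falsy, so the load step above still
      # runs to refresh any images missing from a stale cache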
- name: "Test - ${{ matrix.module }}"
id: test-run
working-directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
env:
IT_CASE_S3_BUCKET: ${{ secrets.s3_bucket }}
IT_CASE_S3_ACCESS_KEY: ${{ secrets.s3_access_key }}
IT_CASE_S3_SECRET_KEY: ${{ secrets.s3_secret_key }}
timeout-minutes: ${{ fromJSON(env.GHA_JOB_TIMEOUT) }}
run: |
${{ inputs.environment }} PROFILE="$PROFILE -Pgithub-actions" ./tools/azure-pipelines/uploading_watchdog.sh \
./tools/ci/test_controller.sh ${{ matrix.module }}
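          # uploading_watchdog.sh wraps the actual test command; judging from the GHA_JOB_TIMEOUT
          # comment above, it is assumed to use that variable to stop the tests in time and expose
          # the debug-files-* step outputs that the post-processing and upload steps below consume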
- name: "Post-build Disk Info"
if: ${{ always() }}
shell: bash
run: df -h
- name: "Top 15 biggest directories in terms of used disk space"
if: ${{ always() }}
shell: bash
run: |
du -ah --exclude="proc" -t100M . | sort -h -r | head -n 15
- name: "Post-process build artifacts"
working-directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
run: find ${{ steps.test-run.outputs.debug-files-output-dir }} -type f -exec rename 's/[:<>|*?]/-/' {} \;
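      # upload-artifact rejects file names containing characters like : < > | * ?, so the rename
      # above replaces the first such character in each file name with a dash
      # (hypothetical example: a log named "jm:debug.log" becomes "jm-debug.log")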
- name: "Upload build artifacts"
uses: actions/upload-artifact@v4
if: ${{ failure() && steps.test-run.outputs.debug-files-output-dir }} != ''
with:
name: logs-test-${{ needs.compile.outputs.stringified-workflow-name }}-${{ github.run_number }}-${{ matrix.stringified-module-name }}-${{ steps.test-run.outputs.debug-files-name }}
path: ${{ steps.test-run.outputs.debug-files-output-dir }}
- name: "Save Docker images to cache"
working-directory: ${{ env.CONTAINER_LOCAL_WORKING_DIR }}
if: ${{ !cancelled() && (failure() || !steps.docker-cache.cache.hit) }}
run: ./tools/azure-pipelines/cache_docker_images.sh save
  e2e:
    name: "E2E (group ${{ matrix.group }})"
    needs: compile
    # the end-to-end tests are not executed in Flink's CI Docker container due to problems when running Docker-in-Docker
    runs-on: ubuntu-22.04
    timeout-minutes: 310
    env:
      # timeout in minutes - this environment variable is required by uploading_watchdog.sh
      GHA_JOB_TIMEOUT: 310
      # required and forwarded as the cache-dir system property to DownloadCacheFactory
      # implementations in flink-end-to-end-tests/flink-end-to-end-tests-common
      E2E_CACHE_FOLDER: ${{ github.workspace }}/.e2e-cache
      # required by common_artifact_download_cacher.sh in flink-end-to-end-tests/test-scripts
      E2E_TARBALL_CACHE: ${{ github.workspace }}/.e2e-tar-cache
      # The following environment variables need to be overwritten here because the e2e tests do not
      # run in containers.
      MAVEN_REPO_FOLDER: ${{ github.workspace }}/.m2/repository
      MAVEN_ARGS: -Dmaven.repo.local=${{ github.workspace }}/.m2/repository
      FLINK_ARTIFACT_DIR: ${{ github.workspace }}
      DOCKER_IMAGES_CACHE_FOLDER: ${{ github.workspace }}/.docker-cache
    strategy:
      fail-fast: false
      matrix:
        group: [1, 2]
    steps:
      - name: "Flink Checkout"
        uses: actions/checkout@v4
        with:
          persist-credentials: false
      - name: "Initialize job"
        uses: "./.github/actions/job_init"
        with:
          jdk_version: ${{ inputs.jdk_version }}
          maven_repo_folder: ${{ env.MAVEN_REPO_FOLDER }}
      - name: "Install missing packages"
        run: sudo apt-get install -y net-tools docker-compose zip
      # netty-tcnative requires OpenSSL v1.0.0
      - name: "Install OpenSSL"
        run: |
          sudo apt-get install -y bc libapr1
          wget -r --no-parent -nd --accept=libssl1.0.0_*ubuntu5.*_amd64.deb http://security.ubuntu.com/ubuntu/pool/main/o/openssl1.0/
          sudo apt install ./libssl1.0.0_*.deb
- name: "Download build artifacts from compile job"
uses: actions/download-artifact@v4
with:
name: build-artifacts-${{ needs.compile.outputs.stringified-workflow-name }}-${{ github.run_number }}
path: ${{ env.FLINK_ARTIFACT_DIR }}
- name: "Unpack build artifact"
run: ./tools/azure-pipelines/unpack_build_artifact.sh
# the cache task does not create directories on a cache miss, and can later fail when trying to tar the directory if the test haven't created it
# this may for example happen if a given directory is only used by a subset of tests, which are run in a different 'group'
- name: "Create cache directories"
run: |
mkdir -p ${{ env.E2E_CACHE_FOLDER }}
mkdir -p ${{ env.E2E_TARBALL_CACHE }}
mkdir -p ${{ env.DOCKER_IMAGES_CACHE_FOLDER }}
- name: "Load E2E files from Cache"
uses: actions/cache@v4
with:
path: ${{ env.E2E_CACHE_FOLDER }}
key: e2e-cache-${{ matrix.group }}-${{ hashFiles('**/flink-end-to-end-tests/**/*.java', '!**/avro/**') }}
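          # hashFiles() accepts multiple comma-separated glob patterns, including '!' negations,
          # so this key changes whenever any Java file under flink-end-to-end-tests changes,
          # except for files under avro/ directories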
- name: "Load E2E artifacts from Cache"
uses: actions/cache@v4
with:
path: ${{ env.E2E_TARBALL_CACHE }}
key: e2e-artifact-cache-${{ matrix.group }}-${{ hashFiles('**/flink-end-to-end-tests/**/*.sh') }}
restore-keys: e2e-artifact-cache-${{ matrix.group }}
- name: "Try loading Docker images from Cache"
id: docker-cache
uses: actions/cache@v4
with:
path: ${{ env.DOCKER_IMAGES_CACHE_FOLDER }}
key: e2e-${{ matrix.group }}-docker-${{ runner.os }}-${{ hashFiles('**/cache_docker_images.sh', '**/flink-test-utils-parent/**/DockerImageVersions.java') }}
- name: "Load Docker images if not present in Cache, yet"
if: ${{ !cancelled() && !steps.docker-cache.cache.hit }}
run: ./tools/azure-pipelines/cache_docker_images.sh load
- name: "Build Flink"
uses: "./.github/actions/run_mvn"
with:
maven-parameters: "install -DskipTests -Dfast $PROFILE -Pskip-webui-build"
env: "${{ inputs.environment }}"
- name: "Run E2E Tests"
id: test-run
env:
IT_CASE_S3_BUCKET: ${{ secrets.s3_bucket }}
IT_CASE_S3_ACCESS_KEY: ${{ secrets.s3_access_key }}
IT_CASE_S3_SECRET_KEY: ${{ secrets.s3_secret_key }}
timeout-minutes: ${{ fromJSON(env.GHA_JOB_TIMEOUT) }}
run: |
${{ inputs.environment }} FLINK_DIR=`pwd`/build-target PROFILE="$PROFILE -Pgithub-actions" ./tools/azure-pipelines/uploading_watchdog.sh \
flink-end-to-end-tests/run-nightly-tests.sh ${{ matrix.group }}
- name: "Upload Logs"
uses: actions/upload-artifact@v4
if: ${{ failure() && steps.test-run.outputs.debug-files-output-dir != '' }}
with:
name: logs-e2e-${{ needs.compile.outputs.stringified-workflow-name }}-${{ github.run_number }}-${{ matrix.group }}-${{ steps.test-run.outputs.debug-files-name }}
path: ${{ steps.test-run.outputs.debug-files-output-dir }}
- name: "Save Docker images to Cache"
if: ${{ !cancelled() && (failure() || !steps.docker-cache.cache.hit) }}
run: ./tools/azure-pipelines/cache_docker_images.sh save