Skip to content

Commit

Permalink
K8S IT in GA
Browse files Browse the repository at this point in the history
  • Loading branch information
Yikun committed Mar 13, 2022
1 parent c032928 commit 72e5f44
Show file tree
Hide file tree
Showing 3 changed files with 66 additions and 1 deletion.
59 changes: 58 additions & 1 deletion .github/workflows/build_and_test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -97,7 +97,7 @@ jobs:
else
echo '::set-output name=java::8'
echo '::set-output name=branch::master' # Default branch to run on. CHANGE here when a branch is cut out.
echo '::set-output name=type::regular'
echo '::set-output name=type::k8s'
echo '::set-output name=envs::{"SPARK_ANSI_SQL_MODE": "${{ inputs.ansi_enabled }}"}'
echo '::set-output name=hadoop::hadoop3'
fi
Expand Down Expand Up @@ -768,6 +768,63 @@ jobs:
name: unit-tests-log-tpcds--8-${{ needs.configure-jobs.outputs.hadoop }}-hive2.3
path: "**/target/unit-tests.log"

# Runs the Spark-on-Kubernetes integration test suite against a local
# minikube cluster. Scheduled only when configure-jobs selected the 'k8s'
# build type AND the precondition job flagged a build as required.
k8s-integration-tests:
  needs: [configure-jobs, precondition]
  if: needs.configure-jobs.outputs.type == 'k8s' && fromJson(needs.precondition.outputs.required).build == 'true'
  name: Run K8S integration tests
  runs-on: ubuntu-20.04
  env:
    SPARK_LOCAL_IP: localhost
  steps:
    - name: Checkout Spark repository
      uses: actions/checkout@v2
      with:
        # Full history so the squash-merge below has common ancestry.
        fetch-depth: 0
        repository: apache/spark
        ref: master
    - name: Sync the current branch with the latest in Apache Spark
      # Forks only: replay the fork's branch on top of apache/spark master.
      if: github.repository != 'apache/spark'
      run: |
        git fetch https://github.com/$GITHUB_REPOSITORY.git ${GITHUB_REF#refs/heads/}
        git -c user.name='Apache Spark Test Account' -c user.email='[email protected]' merge --no-commit --progress --squash FETCH_HEAD
        git -c user.name='Apache Spark Test Account' -c user.email='[email protected]' commit -m "Merged commit"
    - name: Cache Scala, SBT and Maven
      uses: actions/cache@v2
      with:
        path: |
          build/apache-maven-*
          build/scala-*
          build/*.jar
          ~/.sbt
        key: build-${{ hashFiles('**/pom.xml', 'project/build.properties', 'build/mvn', 'build/sbt', 'build/sbt-launch-lib.bash', 'build/spark-build-info') }}
        restore-keys: |
          build-
    - name: Cache Coursier local repository
      uses: actions/cache@v2
      with:
        path: ~/.cache/coursier
        # Job-specific prefix so this cache does not collide with other jobs'.
        key: k8s-integration-coursier-${{ hashFiles('**/pom.xml', '**/plugins.sbt') }}
        restore-keys: |
          k8s-integration-coursier-
    - name: Install Java 8
      uses: actions/setup-java@v1
      with:
        # Quoted: version identifiers are strings, not numbers.
        java-version: "8"
    - name: Start minikube
      run: |
        # NOTE(review): fetches the *latest* minikube release — not reproducible
        # across runs; consider pinning a version if flakiness appears.
        curl -LO https://storage.googleapis.com/minikube/releases/latest/minikube-linux-amd64
        sudo install minikube-linux-amd64 /usr/local/bin/minikube
        minikube start --cpus 2 --memory 6144
    - name: Show all K8S pods and nodes
      # Diagnostics only: dump cluster state into the job log before testing.
      run: |
        kubectl get pods -A
        kubectl get nodes -oyaml
    - name: Run K8S integration test
      run: |
        # '|| true': the binding may already exist from a cached cluster state.
        kubectl create clusterrolebinding serviceaccounts-cluster-admin --clusterrole=cluster-admin --group=system:serviceaccounts || true
        # Build images directly into minikube's Docker daemon.
        eval $(minikube docker-env)
        # Low CPU requests so driver+executor pods fit on a 2-CPU runner;
        # minikube/r/local-tagged tests are excluded in this environment.
        build/sbt -Pkubernetes -Pkubernetes-integration-tests -Dspark.kubernetes.test.driver.cpu=0.5 -Dspark.kubernetes.test.executor.cpu=0.2 -Dtest.exclude.tags=minikube,r,local "kubernetes-integration-tests/test"
docker-integration-tests:
needs: [configure-jobs, precondition]
if: needs.configure-jobs.outputs.type == 'regular' && fromJson(needs.precondition.outputs.required).docker == 'true'
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -192,6 +192,12 @@ class KubernetesSuite extends SparkFunSuite
.set("spark.kubernetes.driver.label.spark-app-locator", appLocator)
.set("spark.kubernetes.executor.label.spark-app-locator", appLocator)
.set(NETWORK_AUTH_ENABLED.key, "true")
// Optionally cap driver/executor CPU requests via system properties so
// resource-constrained CI hosts (e.g. 2-CPU GitHub runners) can run the suite.
// foreach, not map: the Option is traversed purely for its side effect and
// map's discarded result value was misleading.
sys.props.get(CONFIG_DRIVER_REQUEST_CPU).foreach { cpu =>
  sparkAppConf.set("spark.kubernetes.driver.request.cores", cpu)
}
sys.props.get(CONFIG_EXECUTOR_REQUEST_CPU).foreach { cpu =>
  sparkAppConf.set("spark.kubernetes.executor.request.cores", cpu)
}
// Create a dedicated test namespace only when the user did not specify one;
// a user-supplied namespace is presumably pre-existing — verify against caller.
if (!kubernetesTestComponents.hasUserSpecifiedNamespace) {
  kubernetesTestComponents.createNamespace()
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,4 +34,6 @@ object TestConstants {
// System-property keys used to configure the K8s integration test harness.
val CONFIG_KEY_IMAGE_TAG_FILE = "spark.kubernetes.test.imageTagFile"
val CONFIG_KEY_IMAGE_REPO = "spark.kubernetes.test.imageRepo"
val CONFIG_KEY_UNPACK_DIR = "spark.kubernetes.test.unpackSparkDir"
// Optional CPU-request overrides for driver/executor test pods; when set,
// KubernetesSuite maps them onto spark.kubernetes.{driver,executor}.request.cores.
val CONFIG_DRIVER_REQUEST_CPU = "spark.kubernetes.test.driver.cpu"
val CONFIG_EXECUTOR_REQUEST_CPU = "spark.kubernetes.test.executor.cpu"
}

0 comments on commit 72e5f44

Please sign in to comment.