From bf268c1f25c096309db8c86eab27183f437948f8 Mon Sep 17 00:00:00 2001
From: Nick
Date: Mon, 7 Nov 2016 16:58:11 -0800
Subject: [PATCH 1/2] Add support for testing against strict mode clusters

---
 bin/test.sh | 13 ++++++++-----
 1 file changed, 8 insertions(+), 5 deletions(-)

diff --git a/bin/test.sh b/bin/test.sh
index 0f1c7f20eb281..aeea5ec5bc598 100755
--- a/bin/test.sh
+++ b/bin/test.sh
@@ -63,12 +63,15 @@ configure_cli() {
 install_spark() {
     notify_github pending "Installing Spark"
 
-    # with universe server running, there are no longer enough CPUs to
-    # launch spark jobs if we give the dispatcher an entire CPU
-    # TODO: remove this?
-    echo '{"service": {"cpus": 0.1}}' > /tmp/spark.json
+    if [ "$SECURITY" = "strict" ]; then
+        # custom configuration to enable auth stuff:
+        ${REPO_ROOT_DIR}/dcos-commons-tools/setup_permissions.sh nobody "*" # spark's default service.role
+        echo '{ "service": { "user": "nobody", "principal": "service-acct", "secret_name": "secret" } }' > /tmp/spark.json
+        dcos --log-level=INFO package install spark --options=/tmp/spark.json --yes
+    else
+        dcos --log-level=INFO package install spark --yes
+    fi
 
-    dcos --log-level=INFO package install spark --options=/tmp/spark.json --yes
     if [ $? -ne 0 ]; then
         notify_github failure "Spark install failed"
         exit 1

From 26c38ee77a0a27ccd4afd99ae55879ec5ec68b22 Mon Sep 17 00:00:00 2001
From: Nick
Date: Tue, 8 Nov 2016 13:01:30 -0800
Subject: [PATCH 2/2] Fix path, add TODO

---
 bin/test.sh | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/bin/test.sh b/bin/test.sh
index aeea5ec5bc598..2e7e7cf549a79 100755
--- a/bin/test.sh
+++ b/bin/test.sh
@@ -65,7 +65,7 @@ install_spark() {
 
     if [ "$SECURITY" = "strict" ]; then
         # custom configuration to enable auth stuff:
-        ${REPO_ROOT_DIR}/dcos-commons-tools/setup_permissions.sh nobody "*" # spark's default service.role
+        ${COMMONS_TOOLS_DIR}/setup_permissions.sh nobody "*" # spark's default service.role
         echo '{ "service": { "user": "nobody", "principal": "service-acct", "secret_name": "secret" } }' > /tmp/spark.json
         dcos --log-level=INFO package install spark --options=/tmp/spark.json --yes
     else
@@ -111,6 +111,7 @@ run_tests() {
 check_env
 fetch_commons_tools
 start_cluster
+# TODO: Migrate the following three commands to dcos-commons-tools/run-tests.py
 configure_cli
 install_spark
 run_tests
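
For reference, a minimal sketch of how the new strict-mode branch could be exercised, assuming SECURITY is supplied through the environment and that the other variables the script relies on (GitHub notification settings, COMMONS_TOOLS_DIR, cluster credentials) are already set up by CI or the caller; none of that wiring appears in this diff:

    # Hypothetical local invocation; everything other than the SECURITY check is an assumption.
    export SECURITY=strict   # selects the strict-mode install path added by PATCH 1/2
    ./bin/test.sh            # runs check_env, fetch_commons_tools, start_cluster,
                             # configure_cli, install_spark, run_tests in order

With SECURITY unset or set to anything other than "strict", install_spark() falls through to the plain `dcos package install spark --yes` path, so existing permissive-mode test runs keep their current behavior.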