From 022f33232d35937fd5975e6fd9dd984ccd8dbcf1 Mon Sep 17 00:00:00 2001
From: Arthur Rand
Date: Wed, 22 Nov 2017 13:13:05 -0800
Subject: [PATCH] [SPARK-542] Add and document Driver<->Executor TLS support. (#188)

* Add env var decoding and documentation about driver-to-executor TLS support.
* Switched to file-based secrets.
* Reverted spark-env.sh change. Added TLS integration test (currently disabled).
* Updated docs, incorporating suggestions from Suzanne.
* Removed mention of driver labels in docs.
* Consolidated secrets creation code, removed SecretHandler.
* Simplified the options in the run command.
* Simplified the interface by automatically inferring some of the TLS config properties.
* Better --help and error messages, updated docs.
* Make sure user-defined secrets are preserved. Updated test and docs.
* Converted passwords to "magic" options. Updated test and docs.
* Added blurb in limitations docs section, moved setting of password configs.
---
 cli/dcos-spark/submit_builder.go    | 95 ++++++++++++++++++++++++++++-
 docs/limitations.md                 |  5 ++
 docs/security.md                    | 82 +++++++++++++++++--------
 tests/jobs/python/pi_with_secret.py | 48 +++++++++++++++
 tests/resources/server.jks.base64   |  1 +
 tests/resources/trust.jks.base64    |  1 +
 tests/test_spark.py                 | 75 +++++++++++++++++++----
 tests/utils.py                      | 15 -----
 8 files changed, 269 insertions(+), 53 deletions(-)
 create mode 100644 tests/jobs/python/pi_with_secret.py
 create mode 100644 tests/resources/server.jks.base64
 create mode 100644 tests/resources/trust.jks.base64

diff --git a/cli/dcos-spark/submit_builder.go b/cli/dcos-spark/submit_builder.go
index 608152e683f19..1c711da20f43b 100644
--- a/cli/dcos-spark/submit_builder.go
+++ b/cli/dcos-spark/submit_builder.go
@@ -43,6 +43,11 @@ type sparkArgs struct {
 	keytabSecretPath string
 	tgtSecretPath string
 	tgtSecretValue string
+	keystoreSecretPath string
+	keystorePassword string
+	privateKeyPassword string
+	truststoreSecretPath string
+	truststorePassword string

 	propertiesFile string
 	properties map[string]string
@@ -59,6 +64,11 @@ func NewSparkArgs() *sparkArgs {
 	return &sparkArgs{
+		"",
+		"",
+		"",
+		"",
+		"",
 		"",
 		"",
 		"",
@@ -140,12 +150,24 @@ Args:
 		PlaceHolder("PROP=VALUE").StringMapVar(&args.properties)
 	submit.Flag("kerberos-principal", "Principal to be used to login to KDC.").
 		PlaceHolder("user@REALM").Default("").StringVar(&args.kerberosPrincipal)
-	submit.Flag("keytab-secret-path", "path to Keytab in secret store to be used in the Spark drivers").
+	submit.Flag("keytab-secret-path", "Path to Keytab in secret store to be used in the Spark drivers").
 		PlaceHolder("/mykeytab").Default("").StringVar(&args.keytabSecretPath)
 	submit.Flag("tgt-secret-path", "Path to ticket granting ticket (TGT) in secret store to be used " +
 		"in the Spark drivers").PlaceHolder("/mytgt").Default("").StringVar(&args.tgtSecretPath)
 	submit.Flag("tgt-secret-value", "Value of TGT to be used in the drivers, must be base64 encoded").
 		Default("").StringVar(&args.tgtSecretValue)
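+	// TLS flags: the keystore and (optional) truststore are pulled from the
+	// DC/OS secret store as base64-encoded blobs and mounted into the driver
+	// and executor containers; the passwords are forwarded to Spark as
+	// spark.ssl.* properties (see setupTLSArgs below).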
+ Default("").StringVar(&args.privateKeyPassword) + submit.Flag("truststore-secret-path", "Path to truststore in secret store for TLS/SSL. " + + "Make sure to set --truststore-password as well."). + PlaceHolder("__dcos_base64__truststore").Default("").StringVar(&args.truststoreSecretPath) + submit.Flag("truststore-password", "A password to the truststore."). + Default("").StringVar(&args.truststorePassword) submit.Flag("isR", "Force using SparkR").Default("false").BoolVar(&args.isR) submit.Flag("isPython", "Force using Python").Default("false").BoolVar(&args.isPython) @@ -257,6 +279,52 @@ func setupKerberosAuthArgs(args *sparkArgs) error { return errors.New(fmt.Sprintf("Unable to add Kerberos args, got args %s", args)) } +func setupTLSArgs(args *sparkArgs) { + args.properties["spark.mesos.containerizer"] = "mesos" + args.properties["spark.ssl.enabled"] = "true" + + // Keystore and truststore + const keyStoreFileName = "server.jks" + const trustStoreFileName = "trust.jks" + args.properties["spark.ssl.keyStore"] = keyStoreFileName + if args.truststoreSecretPath != "" { + args.properties["spark.ssl.trustStore"] = trustStoreFileName + } + + // Secret paths, filenames, and place holder envvars + paths := []string{args.keystoreSecretPath} + filenames := []string{keyStoreFileName} + envkeys := []string{"DCOS_SPARK_KEYSTORE"} + if args.truststoreSecretPath != "" { + paths = append(paths, args.truststoreSecretPath) + filenames = append(filenames, trustStoreFileName) + envkeys = append(envkeys, "DCOS_SPARK_TRUSTSTORE") + } + joinedPaths := strings.Join(paths, ",") + joinedFilenames := strings.Join(filenames, ",") + joinedEnvkeys := strings.Join(envkeys, ",") + + taskTypes :=[]string{"driver", "executor"} + for _, taskType := range taskTypes { + appendToProperty(fmt.Sprintf("spark.mesos.%s.secret.names", taskType), joinedPaths, args) + appendToProperty(fmt.Sprintf("spark.mesos.%s.secret.filenames", taskType), joinedFilenames, args) + appendToPropertyIfSet(fmt.Sprintf("spark.mesos.%s.secret.envkeys", taskType), joinedEnvkeys, args) + } + + // Passwords + args.properties["spark.ssl.keyStorePassword"] = args.keystorePassword + args.properties["spark.ssl.keyPassword"] = args.privateKeyPassword + + if args.truststoreSecretPath != "" { + args.properties["spark.ssl.trustStorePassword"] = args.truststorePassword + } + + // Protocol + if _, ok := args.properties["spark.ssl.protocol"]; !ok { + args.properties["spark.ssl.protocol"] = "TLS" + } +} + func parseApplicationFile(args *sparkArgs) error { appString := args.app.String() fs := strings.Split(appString, "/") @@ -435,6 +503,13 @@ func appendToProperty(propValue, toAppend string, args *sparkArgs) { } } +func appendToPropertyIfSet(propValue, toAppend string, args *sparkArgs) { + _, contains := args.properties[propValue] + if contains { + args.properties[propValue] += "," + toAppend + } +} + func getBase64Content(path string) string { log.Printf("Opening file %s", path) data, err := ioutil.ReadFile(path) @@ -573,6 +648,8 @@ func buildSubmitJson(cmd *SparkCommand) (string, error) { log.Printf("Setting DCOS_SPACE to %s", cmd.submitDcosSpace) appendToProperty("spark.mesos.driver.labels", fmt.Sprintf("DCOS_SPACE:%s", cmd.submitDcosSpace), args) + appendToProperty("spark.mesos.task.labels", fmt.Sprintf("DCOS_SPACE:%s", cmd.submitDcosSpace), + args) // HDFS config hdfs_config_url, err := getStringFromTree(responseJson, []string{"app", "labels", "SPARK_HDFS_CONFIG_URL"}) @@ -620,6 +697,22 @@ func buildSubmitJson(cmd *SparkCommand) (string, error) { } } + // TLS 
@@ -573,6 +648,8 @@ func buildSubmitJson(cmd *SparkCommand) (string, error) {
 	log.Printf("Setting DCOS_SPACE to %s", cmd.submitDcosSpace)
 	appendToProperty("spark.mesos.driver.labels", fmt.Sprintf("DCOS_SPACE:%s", cmd.submitDcosSpace),
 		args)
+	appendToProperty("spark.mesos.task.labels", fmt.Sprintf("DCOS_SPACE:%s", cmd.submitDcosSpace),
+		args)

 	// HDFS config
 	hdfs_config_url, err := getStringFromTree(responseJson, []string{"app", "labels", "SPARK_HDFS_CONFIG_URL"})
@@ -620,6 +697,22 @@ func buildSubmitJson(cmd *SparkCommand) (string, error) {
 		}
 	}

+	// TLS configuration
+	if args.keystoreSecretPath != "" {
+		// Make sure passwords are set
+		if args.keystorePassword == "" || args.privateKeyPassword == "" {
+			return "", errors.New("--keystore-password and --private-key-password are required when --keystore-secret-path is set")
+		}
+
+		if args.truststoreSecretPath != "" && args.truststorePassword == "" {
+			return "", errors.New("--truststore-password is required when --truststore-secret-path is set")
+		}
+
+		setupTLSArgs(args)
+	}
+
 	jsonMap := map[string]interface{}{
 		"action": "CreateSubmissionRequest",
 		"appArgs": args.appArgs,
diff --git a/docs/limitations.md b/docs/limitations.md
index 111de5a6bff76..600f48e2177c3 100644
--- a/docs/limitations.md
+++ b/docs/limitations.md
@@ -10,3 +10,8 @@ enterprise: 'no'
 * Spark jobs run in Docker containers. The first time you run a Spark job on a node, it might take longer than you expect because of the `docker pull`.

 * DC/OS Apache Spark only supports running the Spark shell from within a DC/OS cluster. See the Spark Shell section for more information. For interactive analytics, we recommend Zeppelin, which supports visualizations and dynamic dependency management.
+
+* With Spark SSL/TLS enabled,
+  if you specify environment-based secrets with `spark.mesos.[driver|executor].secret.envkeys`,
+  the keystore and truststore secrets will also show up as environment-based secrets,
+  due to the way secrets are implemented. You can ignore these extra environment variables.
diff --git a/docs/security.md b/docs/security.md
index 28e8da1d1dd79..20e82fbfe5794 100644
--- a/docs/security.md
+++ b/docs/security.md
@@ -15,48 +15,78 @@ Follow these instructions to [authenticate in strict mode](https://docs.mesosphe

 SSL support in DC/OS Apache Spark encrypts the following channels:

 * From the [DC/OS admin router][11] to the dispatcher.
-* From the dispatcher to the drivers.
 * From the drivers to their executors.

-There are a number of configuration variables relevant to SSL setup. List them with the following command:
-
-    dcos package describe spark --config
-
-Here are the required variables:
-
-| Variable                   | Description                                     |
-|----------------------------|-------------------------------------------------|
-| `spark.ssl.enabled`        | Whether to enable SSL (default: `false`).       |
-| `spark.ssl.keyStoreBase64` | Base64 encoded blob containing a Java keystore. |
+To enable SSL, a Java keystore (and, optionally, truststore) must be provided, along
+with their passwords. The first three settings below are **required** during job
+submission. If using a truststore, the last two are also **required**:

+| Variable                         | Description                                      |
+|----------------------------------|--------------------------------------------------|
+| `--keystore-secret-path`         | Path to keystore in secret store                 |
+| `--keystore-password`            | The password used to access the keystore         |
+| `--private-key-password`         | The password for the private key                 |
+| `--truststore-secret-path`       | Path to truststore in secret store               |
+| `--truststore-password`          | The password used to access the truststore       |

-The Java keystore (and, optionally, truststore) are created using the [Java keytool][12]. The keystore must contain one private key and its signed public key. The truststore is optional and might contain a self-signed root-ca certificate that is explicitly trusted by Java.
+In addition, there are a number of Spark configuration variables relevant to SSL setup.
+These configuration settings are **optional**:
+| Variable                         | Description           | Default Value |
+|----------------------------------|-----------------------|---------------|
+| `spark.ssl.enabledAlgorithms`    | Allowed ciphers       | JVM defaults  |
+| `spark.ssl.protocol`             | Protocol              | TLS           |

-Both stores must be base64 encoded, for example:
-
-    cat keystore | base64
-    /u3+7QAAAAIAAAACAAAAAgA...
+The keystore and truststore are created using the [Java keytool][12]. The keystore
+must contain one private key and its signed public key. The truststore is optional
+and might contain a self-signed root-ca certificate that is explicitly trusted by Java.
+
+Both stores must be base64 encoded without newlines, for example:
+
+```bash
+cat keystore | base64 -w 0 > keystore.base64
+cat keystore.base64
+/u3+7QAAAAIAAAACAAAAAgA...
+```

 **Note:** The base64 string of the keystore will probably be much longer than the
 snippet above, as it is written out as one very long line.

-With this and the password `secret` for the keystore and the private key, your JSON options file will look like this:
-
-    {
-      "security": {
-        "ssl": {
-          "enabled": true,
-          "keyStoreBase64": "/u3+7QAAAAIAAAACAAAAAgA...",
-          "keyStorePassword": "secret",
-          "keyPassword": "secret"
-        }
-      }
-    }
+Add the stores to the DC/OS secret store. For example, if your base64-encoded keystore
+and truststore files are `server.jks.base64` and `trust.jks.base64`, respectively, then use the
+following commands to add them to the secret store:
+
+```bash
+dcos security secrets create /__dcos_base64__keystore --value-file server.jks.base64
+dcos security secrets create /__dcos_base64__truststore --value-file trust.jks.base64
+```
+
+You must add the following configurations to your `dcos spark run` command.
+The ones in parentheses are optional:

-Install Spark with your custom configuration:
-
-    dcos package install --options=options.json spark
+```bash
+dcos spark run --verbose --submit-args="\
+--keystore-secret-path=<path/to/keystore, e.g. __dcos_base64__keystore> \
+--keystore-password=<password to keystore> \
+--private-key-password=<password to private key in keystore> \
+(--truststore-secret-path=<path/to/truststore, e.g. __dcos_base64__truststore> \)
+(--truststore-password=<password to truststore> \)
+(--conf spark.ssl.enabledAlgorithms=<cipher suites> \)
+--class <main class> <application jar> [application args]"
+```

-Make sure to connect the DC/OS cluster only using an SSL connection (i.e., by using `https://`). Use the following command to set your DC/OS URL:
-
-    dcos config set core.dcos_url https://<dcos-url>
+**Note:** If you have specified a space for your secrets other than the default value,
+`/spark`, then you must set `spark.mesos.task.labels=DCOS_SPACE:<dcos_space>`
+in the command above in order to access the secrets.
+See the [Secrets Documentation about spaces][13] for more details about spaces.
+
+**Note:** If you specify environment-based secrets with `spark.mesos.[driver|executor].secret.envkeys`,
+the keystore and truststore secrets will also show up as environment-based secrets,
+due to the way secrets are implemented. You can ignore these extra environment variables.

 [11]: https://docs.mesosphere.com/1.9/overview/architecture/components/
 [12]: http://docs.oracle.com/javase/8/docs/technotes/tools/unix/keytool.html
+[13]: https://docs.mesosphere.com/1.10/security/#spaces
diff --git a/tests/jobs/python/pi_with_secret.py b/tests/jobs/python/pi_with_secret.py
new file mode 100644
index 0000000000000..34e06d41cc383
--- /dev/null
+++ b/tests/jobs/python/pi_with_secret.py
@@ -0,0 +1,48 @@
+import os
+import sys
+from random import random
+from operator import add
+
+from pyspark.sql import SparkSession
+
+
+def check_secret(secret_name, secret_content):
+    '''
+    Make sure the extra secret envvar and secret file show up in the driver.
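+    The secret is expected both in an environment variable and in a file named
+    after the secret; exits with status 1 on any mismatch so the test fails.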
+ ''' + envvar_content = os.environ.get(secret_name) + if envvar_content != secret_content: + print("Unexpected contents in secret envvar, found: {} expected: {}".format(envvar_content, secret_content)) + exit(1) + + file_content = open(secret_name, 'r').read() + if file_content != secret_content: + print("Unexpected contents in secret file, found: {} expected: {}".format(file_content, secret_content)) + exit(1) + + +if __name__ == "__main__": + """ + Usage: pi [partitions] [secret] [secret content] + Checks for the given env-based and file-based driver secret. + Then calculates the value of pi. + """ + + check_secret(sys.argv[2], sys.argv[3]) + + spark = SparkSession \ + .builder \ + .appName("PythonPi") \ + .getOrCreate() + + partitions = int(sys.argv[1]) + n = 100000 * partitions + + def f(_): + x = random() * 2 - 1 + y = random() * 2 - 1 + return 1 if x ** 2 + y ** 2 < 1 else 0 + + count = spark.sparkContext.parallelize(range(1, n + 1), partitions).map(f).reduce(add) + print("Pi is roughly %f" % (4.0 * count / n)) + + spark.stop() diff --git a/tests/resources/server.jks.base64 b/tests/resources/server.jks.base64 new file mode 100644 index 0000000000000..b823a84766d34 --- /dev/null +++ b/tests/resources/server.jks.base64 @@ -0,0 +1 @@ +/u3+7QAAAAIAAAACAAAAAgAGY2Fyb290AAABXxZdYVoABVguNTA5AAACtjCCArIwggIboAMCAQICCQCzNKodBAdsRzANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMCAXDTE3MTAxMzE1Mjg0MloYDzIxMTcwOTE5MTUyODQyWjBFMQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDPdPSuUvDBpt6+1nV6YlTaaNZjd6OsmWm1tRllnpeJfnlXai3HkmQmRuTtbs2Foru5JVtHynPG1vWS/VeOMSBZYeDBHA2s2yLhWcWLFutPKZbNJ3Cf8OjGFf8wqfVBF8xGjUiaQUBTsrHvYlwRkX+bUo7074f5FgLOz15mdomZCQIDAQABo4GnMIGkMB0GA1UdDgQWBBQRjmxtDBg7vrMDMAH4y78lVhPtRjB1BgNVHSMEbjBsgBQRjmxtDBg7vrMDMAH4y78lVhPtRqFJpEcwRTELMAkGA1UEBhMCQVUxEzARBgNVBAgTClNvbWUtU3RhdGUxITAfBgNVBAoTGEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZIIJALM0qh0EB2xHMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADgYEANqti2ozB3tlwTVyiPidrVK7n5RMLZQNsDJ68WMl1p7DhtSi4dU87kkNSAFIwHgndVr10CWhfa6TCFW4eVM/bGCEDK5ay+zhZRp+B+YYAKKQZatiMC/EXTdJ2pc+vEcEDF3vAbZ1hzUBbNEWZKkYNDnHOB9dfeQVv6w21P9wdc4gAAAABAAlsb2NhbGhvc3QAAAFfFl2Z7wAAAY8wggGLMA4GCisGAQQBKgIRAQEFAASCAXf8ccU6jYrFx5kW4S+fBJXuQOcCedY9TzOS2bPyouSwPCs42Zo6p0+CSbvv+JqYfMDTR5YbsjMR4xDwQ6B2iW/ldXX6ud0m1TtWroiQV0WSkmqI2s+yai40zjH8Z6/HdcEPSInoC+aXvTvQeK2bstZvPHUsa8H9ZwvKV2tsLjeg7kgoMFQAiMmSvnHawGO/icPwkJTctKEQnL9Jd6yGR2kc0FAAfJusuxYuIIsSXcw4Zisj7d5uRwPv4ecjOwCut1M8U/qs+Dz+LK8sFqdzVY2DyU0BhK8v7WLd2tAyNdcTnUVAQFrhdF4YX8EeMPA7Suyg5IsMo8/GD6zWtKZMDHDba1GelTnfCj7SQyT0dFRGYL/s0jC0DRQiX/2NYPaiNd43eo0yinxOe63L47XIQdjRjfFuBasrONYO4/ccjM84YB/YvbC6Lt0ppkkgGahFgqxhzG4ZSwAEdo9mRHWK1ORyZG8sH8qWdk7o+37yB8j7zIJcsU5EugIAAAACAAVYLjUwOQAAA0gwggNEMIICrQIJALjUH5yBVyi+MA0GCSqGSIb3DQEBBQUAMEUxCzAJBgNVBAYTAkFVMRMwEQYDVQQIEwpTb21lLVN0YXRlMSEwHwYDVQQKExhJbnRlcm5ldCBXaWRnaXRzIFB0eSBMdGQwIBcNMTcxMDEzMTUzMTMwWhgPMjExNzA5MTkxNTMxMzBaMGwxEDAOBgNVBAYTB1Vua25vd24xEDAOBgNVBAgTB1Vua25vd24xEDAOBgNVBAcTB1Vua25vd24xEDAOBgNVBAoTB1Vua25vd24xEDAOBgNVBAsTB1Vua25vd24xEDAOBgNVBAMTB1Vua25vd24wggG4MIIBLAYHKoZIzjgEATCCAR8CgYEA/X9TgR11EilS30qcLuzk5/YRt1I870QAwx4/gLZRJmlFXUAiUftZPY1Y+r/F9bow9subVWzXgTuAHTRv8mZgt2uZUKWkn5/oBHsQIsJPu6nX/rfGG/g7V+fGqKYVDwT7g/bTxR7DAjVUE1oWkTL2dfOuK2HXKu/yIgMZndFIAccCFQCXYFCPFSMLzLKSuYKi64QL8Fgc9QKBgQD34aCF1ps93su8q1w2uFe5eZSvu/o66oL5V0wLPQeCZ1FZV4661FlP5nEHEIGAtEkWcSPoTCgWE7fPCTKMyKbhPBZ6i1R8jSjgo64eK7OmdZFuo38L+iE1YvH7YnoBJDvMpPG+qFGQiaiD3+Fa5Z8Gkotm
XoB7VSVkAUw7/s9JKgOBhQACgYEAu0U7jSWyAsVcIzCFBt9harR2Ajl9RDB/4nmSYOGGL52uI6kNH9p7PPeKWtgwiCSoIpWypo+GovjVubFKabKM9EbbRRjK8XJyU2TEtXVy3iiEAJM3mVBaSEA8xfmoQz4+ZjfI+fPOvZs3e7xUHwm3Kpu6WgidORSlnKL44MGQfZEwDQYJKoZIhvcNAQEFBQADgYEAbp/wgDQOmdBWKjxtu2N2k2adlY3X8V/sJWG3s/ewBRLdeEiX/qTiBGZQ4S/o3PrkmvqAkIMk4ZYIF+et/O8P0T8YBijA82H8zh6bH9HfT6n/88M0GCTtLqg1hmTeUthRyKKwKkDMzcjVT7NXi3zdf1Upnqzk9IgUNL9VrxO1s1wABVguNTA5AAACtjCCArIwggIboAMCAQICCQCzNKodBAdsRzANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMCAXDTE3MTAxMzE1Mjg0MloYDzIxMTcwOTE5MTUyODQyWjBFMQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDPdPSuUvDBpt6+1nV6YlTaaNZjd6OsmWm1tRllnpeJfnlXai3HkmQmRuTtbs2Foru5JVtHynPG1vWS/VeOMSBZYeDBHA2s2yLhWcWLFutPKZbNJ3Cf8OjGFf8wqfVBF8xGjUiaQUBTsrHvYlwRkX+bUo7074f5FgLOz15mdomZCQIDAQABo4GnMIGkMB0GA1UdDgQWBBQRjmxtDBg7vrMDMAH4y78lVhPtRjB1BgNVHSMEbjBsgBQRjmxtDBg7vrMDMAH4y78lVhPtRqFJpEcwRTELMAkGA1UEBhMCQVUxEzARBgNVBAgTClNvbWUtU3RhdGUxITAfBgNVBAoTGEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZIIJALM0qh0EB2xHMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADgYEANqti2ozB3tlwTVyiPidrVK7n5RMLZQNsDJ68WMl1p7DhtSi4dU87kkNSAFIwHgndVr10CWhfa6TCFW4eVM/bGCEDK5ay+zhZRp+B+YYAKKQZatiMC/EXTdJ2pc+vEcEDF3vAbZ1hzUBbNEWZKkYNDnHOB9dfeQVv6w21P9wdc4imd0svG9i4vIb3CuGKLwgRGWtmYQ== \ No newline at end of file diff --git a/tests/resources/trust.jks.base64 b/tests/resources/trust.jks.base64 new file mode 100644 index 0000000000000..0d0f518cd65df --- /dev/null +++ b/tests/resources/trust.jks.base64 @@ -0,0 +1 @@ +/u3+7QAAAAIAAAABAAAAAgAGY2Fyb290AAABXxZa5REABVguNTA5AAACtjCCArIwggIboAMCAQICCQCzNKodBAdsRzANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMCAXDTE3MTAxMzE1Mjg0MloYDzIxMTcwOTE5MTUyODQyWjBFMQswCQYDVQQGEwJBVTETMBEGA1UECBMKU29tZS1TdGF0ZTEhMB8GA1UEChMYSW50ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDPdPSuUvDBpt6+1nV6YlTaaNZjd6OsmWm1tRllnpeJfnlXai3HkmQmRuTtbs2Foru5JVtHynPG1vWS/VeOMSBZYeDBHA2s2yLhWcWLFutPKZbNJ3Cf8OjGFf8wqfVBF8xGjUiaQUBTsrHvYlwRkX+bUo7074f5FgLOz15mdomZCQIDAQABo4GnMIGkMB0GA1UdDgQWBBQRjmxtDBg7vrMDMAH4y78lVhPtRjB1BgNVHSMEbjBsgBQRjmxtDBg7vrMDMAH4y78lVhPtRqFJpEcwRTELMAkGA1UEBhMCQVUxEzARBgNVBAgTClNvbWUtU3RhdGUxITAfBgNVBAoTGEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZIIJALM0qh0EB2xHMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADgYEANqti2ozB3tlwTVyiPidrVK7n5RMLZQNsDJ68WMl1p7DhtSi4dU87kkNSAFIwHgndVr10CWhfa6TCFW4eVM/bGCEDK5ay+zhZRp+B+YYAKKQZatiMC/EXTdJ2pc+vEcEDF3vAbZ1hzUBbNEWZKkYNDnHOB9dfeQVv6w21P9wdc4iAVWlBXqa86mJGXpe7H0vZZ/++aA== \ No newline at end of file diff --git a/tests/test_spark.py b/tests/test_spark.py index d06855a5453f2..85f8404b78c00 100644 --- a/tests/test_spark.py +++ b/tests/test_spark.py @@ -13,6 +13,8 @@ import json import shakedown +import sdk_utils + from tests import s3 from tests import utils @@ -29,6 +31,7 @@ def setup_module(module): utils.require_spark() utils.upload_file(os.environ["SCALA_TEST_JAR_PATH"]) + shakedown.run_dcos_command('package install --cli dcos-enterprise-cli --yes') def teardown_module(module): @@ -257,23 +260,26 @@ def test_marathon_group(): @pytest.mark.sanity +@pytest.mark.secrets def test_secrets(): properties_file_path = os.path.join(THIS_DIR, "resources", "secrets-opts.txt") - secrets_handler = utils.SecretHandler(SECRET_NAME, SECRET_CONTENTS) - r = secrets_handler.create_secret() - assert r.ok, "Error creating secret, {}".format(r.content) + # Create secret + shakedown.run_dcos_command('security secrets 
create /{} --value {}'.format(SECRET_NAME, SECRET_CONTENTS))

     secret_file_name = "secret_file"
     output = "Contents of file {}: {}".format(secret_file_name, SECRET_CONTENTS)
     args = ["--properties-file", properties_file_path,
             "--class", "SecretsJob"]

-    utils.run_tests(app_url=utils._scala_test_jar_url(),
-                    app_args=secret_file_name,
-                    expected_output=output,
-                    app_name="/spark",
-                    args=args)
-    r = secrets_handler.delete_secret()
-    if not r.ok:
-        LOGGER.warn("Error when deleting secret, {}".format(r.content))
+    try:
+        utils.run_tests(app_url=utils._scala_test_jar_url(),
+                        app_args=secret_file_name,
+                        expected_output=output,
+                        app_name="/spark",
+                        args=args)
+
+    finally:
+        # Delete secret
+        shakedown.run_dcos_command('security secrets delete /{}'.format(SECRET_NAME))


 @pytest.mark.sanity
@@ -286,5 +292,52 @@ def test_cli_multiple_spaces():
                     " --class ", "org.apache.spark.examples.SparkPi"])


+# Skip DC/OS < 1.10, because it doesn't have support for file-based secrets.
+@pytest.mark.skipif('shakedown.dcos_version_less_than("1.10")')
+@sdk_utils.dcos_ee_only
+@pytest.mark.sanity
+def test_driver_executor_tls():
+    '''
+    Put the keystore and truststore as secrets in the DC/OS secret store.
+    Run a SparkPi job with TLS enabled, referencing those secrets.
+    Make sure other user-defined secrets still show up.
+    '''
+    python_script_path = os.path.join(THIS_DIR, 'jobs', 'python', 'pi_with_secret.py')
+    python_script_url = utils.upload_file(python_script_path)
+    resources_folder = os.path.join(
+        os.path.dirname(os.path.realpath(__file__)), 'resources'
+    )
+    keystore_file = 'server.jks'
+    truststore_file = 'trust.jks'
+    keystore_path = os.path.join(resources_folder, '{}.base64'.format(keystore_file))
+    truststore_path = os.path.join(resources_folder, '{}.base64'.format(truststore_file))
+    keystore_secret = '__dcos_base64__keystore'
+    truststore_secret = '__dcos_base64__truststore'
+    my_secret = 'mysecret'
+    my_secret_content = 'secretcontent'
+    shakedown.run_dcos_command('security secrets create /{} --value-file {}'.format(keystore_secret, keystore_path))
+    shakedown.run_dcos_command('security secrets create /{} --value-file {}'.format(truststore_secret, truststore_path))
+    shakedown.run_dcos_command('security secrets create /{} --value {}'.format(my_secret, my_secret_content))
+    # Password used when the checked-in test keystore and truststore were generated.
+    password = 'changeit'
+    try:
+        utils.run_tests(app_url=python_script_url,
+                        app_args="30 {} {}".format(my_secret, my_secret_content),
+                        expected_output="Pi is roughly 3",
+                        app_name="/spark",
+                        args=["--keystore-secret-path", keystore_secret,
+                              "--truststore-secret-path", truststore_secret,
+                              "--private-key-password", password,
+                              "--keystore-password", password,
+                              "--truststore-password", password,
+                              "--conf", "spark.mesos.driver.secret.names={}".format(my_secret),
+                              "--conf", "spark.mesos.driver.secret.filenames={}".format(my_secret),
+                              "--conf", "spark.mesos.driver.secret.envkeys={}".format(my_secret),
+                              ])
+    finally:
+        shakedown.run_dcos_command('security secrets delete /{}'.format(keystore_secret))
+        shakedown.run_dcos_command('security secrets delete /{}'.format(truststore_secret))
+        shakedown.run_dcos_command('security secrets delete /{}'.format(my_secret))
+
+
 def _scala_test_jar_url():
     return s3.http_url(os.path.basename(os.environ["SCALA_TEST_JAR_PATH"]))
diff --git a/tests/utils.py b/tests/utils.py
index 5df51d17e2113..ffaf6f5fe80f5 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -163,21 +163,6 @@ def check_job_output(task_id, expected_output):
         raise Exception("{} not found in stdout".format(expected_output))


-class 
SecretHandler(): - def __init__(self, path, value): - self.payload = json.dumps({"value": value}) - self.api_url = urllib.parse.urljoin(dcos.config.get_config_val("core.dcos_url"), - "secrets/v1/secret/default/{}".format(path)) - self.token = dcos.config.get_config_val("core.dcos_acs_token") - self.headers = {"Content-Type": "application/json", "Authorization": "token={}".format(self.token)} - - def create_secret(self): - return requests.put(self.api_url, data=self.payload, headers=self.headers, verify=False) - - def delete_secret(self): - return requests.delete(self.api_url, headers=self.headers, verify=False) - - def upload_file(file_path): LOGGER.info("Uploading {} to s3://{}/{}".format( file_path,