Commit

[SPARK-486] Secrets integration test (apache#152)
mgummelt authored Jun 19, 2017
1 parent 1ed8829 commit f876c68
Showing 2 changed files with 50 additions and 5 deletions.
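For context, a hypothetical way to drive just the new test from Python. The environment variables are the ones the diff below reads; the values are placeholders, and note the test ships with a skip marker until SPARK-466 lands:

    import os
    import pytest

    # Placeholder values; a real run needs a reachable DC/OS cluster and S3 bucket.
    os.environ.setdefault("SCALA_TEST_JAR_PATH", "/path/to/scala-tests.jar")
    os.environ.setdefault("S3_BUCKET", "my-bucket")
    os.environ.setdefault("S3_PREFIX", "my-prefix")

    pytest.main(["tests/test_spark.py", "-k", "test_secrets"])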
tests/test_spark.py: 36 changes (31 additions & 5 deletions)
@@ -26,6 +26,7 @@ def setup_module(module):
     if utils.hdfs_enabled():
         utils.require_hdfs()
     utils.require_spark()
+    _upload_file(os.environ["SCALA_TEST_JAR_PATH"])
 
 
 def teardown_module(module):
@@ -128,10 +129,10 @@ def test_s3():
             "spark.mesos.driverEnv.AWS_SECRET_ACCESS_KEY={}".format(
                 os.environ["AWS_SECRET_ACCESS_KEY"]),
             "--class", "S3Job"]
-    utils.run_tests(_upload_file(os.environ["SCALA_TEST_JAR_PATH"]),
-                    app_args,
-                    "",
-                    args)
+    utils.run_tests(_scala_test_jar_url(),
+                    app_args,
+                    "",
+                    args)
 
     assert len(list(s3.list("linecount-out"))) > 0
 
@@ -154,6 +155,28 @@ def test_marathon_group():
     dcos.config.unset("spark.app_id")
 
 
+@pytest.mark.skip(reason="Skip until secrets are released in DC/OS Spark: SPARK-466")
+@pytest.mark.sanity
+def test_secrets():
+    try:
+        secret_name = "secret"
+        secret_contents = "mgummelt"
+        utils.create_secret(secret_name, secret_contents)
+
+        secret_file_name = "secret_file"
+        output = "Contents of file {}: {}".format(secret_file_name, secret_contents)
+        args = ["--conf", "spark.mesos.containerizer=mesos",
+                "--conf", "spark.mesos.driver.secret.name={}".format(secret_name),
+                "--conf", "spark.mesos.driver.secret.filename={}".format(secret_file_name),
+                "--class", "SecretsJob"]
+        utils.run_tests(_scala_test_jar_url(),
+                        secret_file_name,
+                        output,
+                        args)
+    finally:
+        utils.delete_secret(secret_name)
+
+
 def _run_janitor(service_name):
     janitor_cmd = (
         'docker run mesosphere/janitor /janitor.py '
@@ -164,7 +187,7 @@ def _run_janitor(service_name):
 
 
 def _upload_file(file_path):
-    print("Uploading {} to s3://{}/{}".format(
+    LOGGER.info("Uploading {} to s3://{}/{}".format(
         file_path,
         os.environ['S3_BUCKET'],
         os.environ['S3_PREFIX']))
@@ -173,3 +196,6 @@ def _upload_file(file_path):
 
     basename = os.path.basename(file_path)
     return s3.http_url(basename)
+
+def _scala_test_jar_url():
+    return s3.http_url(os.path.basename(os.environ["SCALA_TEST_JAR_PATH"]))
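The expected_output check in run_tests implies that SecretsJob reads the mounted secret file and prints its contents. The real job is a Scala class inside the test jar; a minimal Python sketch of equivalent driver logic, under that assumption:

    import sys

    def main():
        # The test passes the secret file name as the sole app argument and
        # expects "Contents of file <name>: <contents>" in the driver stdout.
        secret_file = sys.argv[1]
        with open(secret_file) as f:
            print("Contents of file {}: {}".format(secret_file, f.read()))

    if __name__ == "__main__":
        main()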
tests/utils.py: 19 changes (19 additions & 0 deletions)
@@ -2,9 +2,11 @@
 import dcos.http
 import dcos.package
 
+import json
 import logging
 import os
 import re
+import requests
 import shakedown
 import subprocess
 import urllib
@@ -148,6 +150,23 @@ def run_tests(app_url, app_args, expected_output, args=[]):
         raise Exception("{} not found in stdout".format(expected_output))
 
 
+def delete_secret(name):
+    LOGGER.info("Deleting secret name={}".format(name))
+
+    dcos_url = dcos.config.get_config_val("core.dcos_url")
+    url = dcos_url + "secrets/v1/secret/default/{}".format(name)
+    dcos.http.delete(url)
+
+
+def create_secret(name, value):
+    LOGGER.info("Creating secret name={}".format(name))
+
+    dcos_url = dcos.config.get_config_val("core.dcos_url")
+    url = dcos_url + "secrets/v1/secret/default/{}".format(name)
+    data = {"path": name, "value": value}
+    dcos.http.put(url, data=json.dumps(data))
+
+
 def _submit_job(app_url, app_args, args=[]):
     if is_strict():
         args += ["--conf", 'spark.mesos.driverEnv.MESOS_MODULES=file:///opt/mesosphere/etc/mesos-scheduler-modules/dcos_authenticatee_module.json']
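The new create_secret and delete_secret helpers call the DC/OS secrets API (PUT and DELETE on secrets/v1/secret/default/<name>), with dcos.http supplying cluster auth. A standalone sketch of the same requests using the requests import added in this commit; the token header format is an assumption about DC/OS auth, not something this diff shows:

    import json
    import requests

    def create_secret(dcos_url, auth_token, name, value):
        # PUT stores the secret under the "default" store, as in the helper above.
        url = "{}secrets/v1/secret/default/{}".format(dcos_url, name)
        headers = {"Authorization": "token={}".format(auth_token)}
        resp = requests.put(url, data=json.dumps({"path": name, "value": value}),
                            headers=headers)
        resp.raise_for_status()

    def delete_secret(dcos_url, auth_token, name):
        # DELETE removes the secret created above.
        url = "{}secrets/v1/secret/default/{}".format(dcos_url, name)
        resp = requests.delete(url,
                               headers={"Authorization": "token={}".format(auth_token)})
        resp.raise_for_status()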
