Commit feecd5f
Added "smoke" mark for smoke tests, to be run against development ver…
Browse files Browse the repository at this point in the history
…sions of DC/OS. Includes all tests except HDFS ones. (apache#238)
  • Loading branch information
susanxhuynh authored and Arthur Rand committed Dec 19, 2017
1 parent a52e08e commit feecd5f
Show file tree
Hide file tree
Showing 2 changed files with 10 additions and 0 deletions.
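
With this mark in place, the smoke subset can be selected by a marker expression on the pytest command line. A minimal usage sketch (the exact invocation CI uses against development clusters is not shown in this commit):

    # Run only the tests marked as smoke tests.
    pytest -m smoke tests/

    # Run the sanity suite while deselecting the smoke subset.
    pytest -m "sanity and not smoke" tests/
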
1 change: 1 addition & 0 deletions tests/test_kafka.py
@@ -107,6 +107,7 @@ def setup_spark(kerberized_kafka, configure_security_spark, configure_universe):


@pytest.mark.sanity
+@pytest.mark.smoke
@pytest.mark.skipif(not utils.kafka_enabled(), reason='KAFKA_ENABLED is false')
def test_spark_and_kafka():
    kerberos_flag = "true" if KERBERIZED_KAFKA else "false"  # flag for using kerberized kafka given to app
9 changes: 9 additions & 0 deletions tests/test_spark.py
@@ -46,6 +46,7 @@ def setup_spark(configure_security, configure_universe):

@pytest.mark.xfail(utils.is_strict(), reason="Currently fails in strict mode")
@pytest.mark.sanity
+@pytest.mark.smoke
def test_jar(app_name=utils.SPARK_APP_NAME):
    master_url = ("https" if utils.is_strict() else "http") + "://leader.mesos:5050"
    spark_job_runner_args = '{} dcos \\"*\\" spark:only 2 --auth-token={}'.format(
@@ -60,6 +61,7 @@ def test_jar(app_name=utils.SPARK_APP_NAME):


@pytest.mark.sanity
+@pytest.mark.smoke
def test_rpc_auth():
    secret_name = "sparkauth"

@@ -94,6 +96,7 @@ def test_sparkPi(app_name=utils.SPARK_APP_NAME):


@pytest.mark.sanity
+@pytest.mark.smoke
def test_python():
    python_script_path = os.path.join(THIS_DIR, 'jobs', 'python', 'pi_with_include.py')
    python_script_url = utils.upload_file(python_script_path)
@@ -106,6 +109,7 @@ def test_python():


@pytest.mark.sanity
+@pytest.mark.smoke
def test_r():
    r_script_path = os.path.join(THIS_DIR, 'jobs', 'R', 'dataframe.R')
    r_script_url = utils.upload_file(r_script_path)
@@ -125,6 +129,7 @@ def test_cni():

#@pytest.mark.skip("Enable when SPARK-21694 is merged and released in DC/OS Spark")
@pytest.mark.sanity
+@pytest.mark.smoke
def test_cni_labels():
    driver_task_id = utils.submit_job(app_url=utils.SPARK_EXAMPLES,
                                      app_args="3000",  # Long enough to examine the Driver's & Executor's task infos
@@ -166,6 +171,7 @@ def _check_task_network_info(task):


@pytest.mark.sanity
+@pytest.mark.smoke
def test_s3():
    def make_credential_secret(envvar, secret_path):
        rc, stdout, stderr = sdk_cmd.run_raw_cli("security secrets create {p} -v {e}"
@@ -234,6 +240,7 @@ def make_credential_secret(envvar, secret_path):
# Skip DC/OS < 1.10, because it doesn't have adminrouter support for service groups.
@pytest.mark.skipif('shakedown.dcos_version_less_than("1.10")')
@pytest.mark.sanity
+@pytest.mark.smoke
def test_marathon_group():
    app_id = utils.FOLDERED_SPARK_APP_NAME
    options = {"service": {"name": app_id}}
@@ -243,6 +250,7 @@ def test_marathon_group():
    #shakedown.uninstall_package_and_wait(SPARK_PACKAGE_NAME, app_id)


+
@pytest.mark.sanity
def test_cli_multiple_spaces():
    utils.run_tests(app_url=utils.SPARK_EXAMPLES,
@@ -256,6 +264,7 @@ def test_cli_multiple_spaces():
@pytest.mark.skipif('shakedown.dcos_version_less_than("1.10")')
@sdk_utils.dcos_ee_only
@pytest.mark.sanity
+@pytest.mark.smoke
def test_driver_executor_tls():
    '''
    Put keystore and truststore as secrets in DC/OS secret store.
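
Each test that gains @pytest.mark.smoke here keeps its existing @pytest.mark.sanity mark, so the smoke subset stays within the sanity suite, while tests left unmarked (for example test_cli_multiple_spaces above) are deselected by a smoke run. Recent versions of pytest also warn about custom marks that are not registered in the project's configuration, and this commit does not touch that configuration. A minimal sketch of how the marks could be declared in a hypothetical top-level conftest.py (not part of this diff):

    # conftest.py (hypothetical; not part of this commit)
    def pytest_configure(config):
        # Register the custom marks used by these suites so pytest
        # recognizes them instead of flagging them as unknown.
        config.addinivalue_line("markers", "sanity: full sanity-test suite")
        config.addinivalue_line(
            "markers",
            "smoke: quick subset run against development versions of DC/OS")
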
