feat!: Add job & fleet CLI usability improvements
BREAKING-CHANGE: This change modifies some of the dataclass property
names that the integrated GUI submitters use

NOTE: To get the tests passing in GitHub Actions, I had to comment out the
      PIP_INDEX_URL configuration in hatch.toml.

- Add a defaults.job_id config setting that holds the most recently
  submitted job, because it is set by the job bundle submission functions.
  This means a user can run `deadline job get` right after submitting a job
  to get information about what they just created (see the sketch after
  this list).
- Modify `deadline fleet get` to accept a queue ID and, when one is
  provided, display all of the fleets associated with that queue.
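
A rough usage sketch of the new default job id behavior (hypothetical: it
assumes create_job_from_job_bundle is re-exported from deadline.client.api,
that job_bundle_dir is the name of its bundle-directory argument, and that
get_setting can be called without an explicit config object):

    # Hypothetical sketch, not code from this commit.
    from deadline.client import api
    from deadline.client.config import get_setting

    # Submitting with the default config (no explicit config object) also
    # stores the new job's id in the defaults.job_id setting.
    api.create_job_from_job_bundle(job_bundle_dir="./my_job_bundle")

    # `deadline job get` reads the same setting; a script can read it too.
    job_id = get_setting("defaults.job_id")
    print(f"Most recently submitted job: {job_id}")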

Additional fixes:

- Fix the handle-web-url logic around farm ID and queue ID; it should not
  have been using the default-parameter logic for those values.
- Remove the integ-testing dependency from requirements-testing.txt so that
  it's easier to run the unit tests in a bare-bones setup, and add
  requirements-integ-testing.txt to make up the difference in the integ
  environment. This takes it one step closer to working on Windows.
- Rearrange the unit vs. integ tests, because the previous layout did not
  work on Windows. There are now separate test/unit and test/integ
  directories that can each be selected via a single file path.
- Remove the deprecated uihint. It was removed here because fixing the
  Windows tests led into the path-related code, and removing the deprecated
  uihint was a better use of time than fixing it for the tests.
- Remove empty PATH parameter values as an error case; for an empty value
  to be an error, the parameter definition should set a minLength of 1.
- Slightly adjust the treatment of PATH parameters with NONE dataflow.
- Add types to the fus3 class so that it passes mypy.
- Correct some typos of "a" to "an".
- Rename installation_requirements to rez_packages in the settings
  dataclasses.
- Update max_failed_tasks_count and max_retries_per_task property names
  and GUI field names to match the service API choices.
- Fix asset_sync.py to work on Windows.
- Rename the setting defaults.storage_profile_id to
  settings.storage_profile_id, because it is not a default that you select
  from multiple options; it is the configuration of the machine (see the
  sketch after this list).
- Make settings.storage_profile_id depend on defaults.farm_id instead of
  defaults.queue_id. The storage profile is a child of the farm in the
  resource model, and a workstation uses the same one across multiple
  queues.
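
For the storage profile rename, a minimal sketch of reading the renamed
setting (the get_setting call mirrors the diffs below; calling it without
an explicit config object is assumed to load the configuration from disk):

    # Minimal sketch: the storage profile is a per-workstation setting,
    # so it now lives under "settings." rather than "defaults.".
    from deadline.client.config import get_setting

    storage_profile_id = get_setting("settings.storage_profile_id")
    if storage_profile_id:
        # Submission code passes this value through as storageProfileId
        # on the CreateJob request.
        print(f"Workstation storage profile: {storage_profile_id}")
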
mwiebe committed Aug 29, 2023
1 parent ebc29b8 commit afef801
Showing 96 changed files with 402 additions and 881 deletions.
10 changes: 6 additions & 4 deletions .github/workflows/reuse_python_build.yml
@@ -9,13 +9,14 @@ on:

jobs:
Python:
runs-on: ubuntu-latest
runs-on: ${{ matrix.os }}
permissions:
id-token: write
contents: read
strategy:
matrix:
python-version: ['3.7', '3.8', '3.9', '3.10', '3.11']
os: [ubuntu-latest, windows-latest, macOS-latest]
env:
PYTHON: ${{ matrix.python-version }}
CODEARTIFACT_REGION: "us-west-2"
@@ -25,7 +26,7 @@ jobs:
steps:
- uses: actions/checkout@v3
if: ${{ !inputs.branch }}

- uses: actions/checkout@v3
if: ${{ inputs.branch }}
with:
@@ -36,14 +37,15 @@
uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python-version }}

- name: Configure AWS credentials
uses: aws-actions/configure-aws-credentials@v2
with:
role-to-assume: ${{ secrets.AWS_CODEARTIFACT_ROLE }}
aws-region: us-west-2

- name: Install Hatch
shell: bash
run: |
CODEARTIFACT_AUTH_TOKEN=$(aws codeartifact get-authorization-token --domain ${{ secrets.CODEARTIFACT_DOMAIN }} --domain-owner ${{ secrets.CODEARTIFACT_ACCOUNT_ID }} --query authorizationToken --output text --region us-west-2)
echo "::add-mask::$CODEARTIFACT_AUTH_TOKEN"
@@ -57,4 +59,4 @@ jobs:
run: hatch build

- name: Run Tests
run: hatch run test
run: hatch run test -vv
15 changes: 8 additions & 7 deletions hatch.toml
@@ -5,7 +5,7 @@ pre-install-commands = [

[envs.default.scripts]
sync = "pip install -r requirements-testing.txt"
test = "pytest --cov-config pyproject.toml {args:test/*/unit}"
test = "pytest --cov-config pyproject.toml {args:test/unit}"
typing = "mypy {args:src test}"
style = [
"ruff {args:.}",
@@ -24,7 +24,7 @@ lint = [
python = ["3.7", "3.8", "3.9", "3.10", "3.11"]

[envs.default.env-vars]
PIP_INDEX_URL="https://aws:{env:CODEARTIFACT_AUTH_TOKEN}@{env:CODEARTIFACT_DOMAIN}-{env:CODEARTIFACT_ACCOUNT_ID}.d.codeartifact.{env:CODEARTIFACT_REGION}.amazonaws.com/pypi/{env:CODEARTIFACT_REPOSITORY}/simple/"
# PIP_INDEX_URL="https://aws:{env:CODEARTIFACT_AUTH_TOKEN}@{env:CODEARTIFACT_DOMAIN}-{env:CODEARTIFACT_ACCOUNT_ID}.d.codeartifact.{env:CODEARTIFACT_REGION}.amazonaws.com/pypi/{env:CODEARTIFACT_REPOSITORY}/simple/"
SKIP_BOOTSTRAP_TEST_RESOURCES="True"

[envs.codebuild.scripts]
@@ -47,11 +47,13 @@ SKIP_BOOTSTRAP_TEST_RESOURCES="True"
build = "hatch build"
make_exe = "python scripts/pyinstaller/make_exe.py --output {env:OUT_FILE}"

[envs.integ.scripts]
test = "pytest --no-cov {args:test/*/integ} -vvv --numprocesses=1"
[envs.integ]
pre-install-commands = [
"pip install -r requirements-integ-testing.txt"
]

[envs.e2e.scripts]
test = "pytest --no-cov {args:test/*/e2e}"
[envs.integ.scripts]
test = "pytest --no-cov {args:test/integ} -vvv --numprocesses=1"

[envs.installer]
pre-install-commands = [
@@ -61,4 +63,3 @@ pre-install-commands = [
[envs.installer.scripts]
build = "hatch build"
make_exe = "python scripts/pyinstaller/make_exe.py --output {env:OUT_FILE}"

4 changes: 2 additions & 2 deletions pyproject.toml
@@ -118,10 +118,10 @@ addopts = [
"--cov-report=term-missing",
"--numprocesses=auto"
]
testpaths = [ "test" ]
testpaths = [ "test/unit" ]
looponfailroots = [
"src",
"test",
"test/unit",
]
markers = [
"no_setup: mark that test shouldn't use default setups",
1 change: 1 addition & 0 deletions requirements-integ-testing.txt
@@ -0,0 +1 @@
deadline-cloud-test-fixtures ~= 0.2.0
1 change: 0 additions & 1 deletion requirements-testing.txt
@@ -12,4 +12,3 @@ mypy ~= 1.4
ruff ~= 0.0.282
moto ~= 4.1
jsondiff ~= 2.0
deadline-cloud-test-fixtures ~= 0.2.0
11 changes: 8 additions & 3 deletions src/deadline/client/api/_submit_job_bundle.py
@@ -13,7 +13,7 @@

from deadline.client import api
from deadline.client.exceptions import DeadlineOperationError, CreateJobWaiterCanceled
from deadline.client.config import get_setting
from deadline.client.config import get_setting, set_setting
from deadline.client.job_bundle.loader import read_yaml_or_json, read_yaml_or_json_object
from deadline.client.job_bundle.parameters import apply_job_parameters, read_job_bundle_parameters
from deadline.client.job_bundle.submission import (
@@ -99,7 +99,7 @@ def create_job_from_job_bundle(
"templateType": file_type,
}

storage_profile_id = get_setting("defaults.storage_profile_id", config=config)
storage_profile_id = get_setting("settings.storage_profile_id", config=config)
if storage_profile_id:
create_job_args["storageProfileId"] = storage_profile_id

@@ -171,14 +171,19 @@ def create_job_from_job_bundle(
logger.debug(f"CreateJob Response {create_job_response}")

if create_job_response and "jobId" in create_job_response:
job_id = create_job_response["jobId"]

# If using the default config, set the default job id so it holds the
# most-recently submitted job.
if config is None:
set_setting("defaults.job_id", job_id)

def _default_create_job_result_callback() -> bool:
return True

if not create_job_result_callback:
create_job_result_callback = _default_create_job_result_callback

job_id = create_job_response["jobId"]
success, status_message = wait_for_create_job_to_complete(
create_job_args["farmId"],
create_job_args["queueId"],
15 changes: 12 additions & 3 deletions src/deadline/client/cli/_common.py
@@ -72,11 +72,11 @@ def apply_cli_options_to_config(
*, config: Optional[ConfigParser] = None, required_options: Set[str] = set(), **args
) -> Optional[ConfigParser]:
"""
Modifies a Amazon Deadline Cloud config object to apply standard option names to it, such as
Modifies an Amazon Deadline Cloud config object to apply standard option names to it, such as
the AWS profile, Amazon Deadline Cloud Farm, or Amazon Deadline Cloud Queue to use.
Args:
config (ConfigParser, optional): A Amazon Deadline Cloud config, read by config_file.read_config().
config (ConfigParser, optional): an Amazon Deadline Cloud config, read by config_file.read_config().
If not provided, loads the config from disk.
"""
# Only work with a custom config if there are standard options provided
@@ -96,12 +96,16 @@
if queue_id:
config_file.set_setting("defaults.queue_id", queue_id, config=config)

job_id = args.pop("job_id", None)
if job_id:
config_file.set_setting("defaults.job_id", job_id, config=config)

auto_accept = args.pop("yes", None)
if auto_accept:
config_file.set_setting("settings.auto_accept", "true", config=config)
else:
# Remove the standard option names from the args list
for name in ["profile", "farm_id", "queue_id"]:
for name in ["profile", "farm_id", "queue_id", "job_id"]:
args.pop(name, None)

# Check that the required options have values
@@ -115,6 +119,11 @@
if not config_file.get_setting("defaults.queue_id", config=config):
raise click.UsageError("Missing '--queue-id' or default Queue ID configuration")

if "job_id" in required_options:
required_options.remove("job_id")
if not config_file.get_setting("defaults.job_id", config=config):
raise click.UsageError("Missing '--job-id' or default Job ID configuration")

if required_options:
raise RuntimeError(
f"Unexpected required Amazon Deadline Cloud CLI options: {required_options}"
19 changes: 15 additions & 4 deletions src/deadline/client/cli/groups/bundle_group.py
@@ -17,7 +17,7 @@
from deadline.client import api
from deadline.client.api import get_boto3_client, get_queue_boto3_session
from deadline.client.api._session import _modified_logging_level
from deadline.client.config import config_file, get_setting
from deadline.client.config import config_file, get_setting, set_setting
from deadline.client.job_bundle.loader import read_yaml_or_json, read_yaml_or_json_object
from deadline.client.job_bundle.parameters import apply_job_parameters, read_job_bundle_parameters
from deadline.client.job_bundle.submission import (
@@ -89,7 +89,7 @@ def validate_parameters(ctx, param, value):
"--asset-loading-method",
help="The method to use for loading assets on the server. Options are PRELOAD (load assets onto server first then run the job) or ON_DEMAND (load assets as requested).",
type=click.Choice([e.value for e in AssetLoadingMethod]),
default=AssetLoadingMethod.PRELOAD,
default=AssetLoadingMethod.PRELOAD.value,
)
@click.option(
"--yes",
@@ -102,6 +102,12 @@ def bundle_submit(job_bundle_dir, asset_loading_method, parameter, **args):
"""
Submits an OpenJobIO job bundle to Amazon Deadline Cloud.
"""
# Check whether the CLI options are modifying any of the default settings that affect
# the job id. If not, we'll save the job id submitted as the default job id.
if args.get("profile") is None and args.get("farm_id") is None and args.get("queue_id") is None:
should_save_job_id = True
else:
should_save_job_id = False
# Get a temporary config object with the standard options handled
config = apply_cli_options_to_config(required_options={"farm_id", "queue_id"}, **args)

@@ -123,7 +129,7 @@ def bundle_submit(job_bundle_dir, asset_loading_method, parameter, **args):
"templateType": file_type,
}

storage_profile_id = get_setting("defaults.storage_profile_id", config=config)
storage_profile_id = get_setting("settings.storage_profile_id", config=config)
if storage_profile_id:
create_job_args["storageProfileId"] = storage_profile_id

@@ -202,12 +208,17 @@ def bundle_submit(job_bundle_dir, asset_loading_method, parameter, **args):
logger.debug(f"CreateJob Response {create_job_response}")

if create_job_response and "jobId" in create_job_response:
job_id = create_job_response["jobId"]
click.echo("Waiting for Job to be created...")

# If using the default config, set the default job id so it holds the
# most-recently submitted job.
if should_save_job_id:
set_setting("defaults.job_id", job_id)

def _check_create_job_wait_canceled() -> bool:
return continue_submission

job_id = create_job_response["jobId"]
success, status_message = api.wait_for_create_job_to_complete(
create_job_args["farmId"],
create_job_args["queueId"],
10 changes: 6 additions & 4 deletions src/deadline/client/cli/groups/config_group.py
@@ -27,8 +27,10 @@ def cli_config():
defaults.queue_id:
The default queue ID to use for job submissions or CLI operations.
defaults.storage_profile_id:
The default storage profile ID to use for job submission or CLI operations.
settings.storage_profile_id:
The storage profile that this workstation conforms to. It specifies
where shared file systems are mounted, and where named job attachments
should go.
settings.job_history_dir:
The directory in which to create new job bundles for
@@ -86,7 +88,7 @@ def config_gui():
@handle_error
def config_set(setting_name, value):
"""
Sets a Amazon Deadline Cloud workstation configuration setting.
Sets an Amazon Deadline Cloud workstation configuration setting.
For example `deadline config set defaults.farm_id <farm-id>`.
Run `deadline config --help` to show available settings.
@@ -99,7 +101,7 @@ def config_set(setting_name, value):
@handle_error
def config_get(setting_name):
"""
Gets a Amazon Deadline Cloud workstation configuration setting.
Gets an Amazon Deadline Cloud workstation configuration setting.
For example `deadline config get defaults.farm_id`.
Run `deadline config --help` to show available settings.
2 changes: 1 addition & 1 deletion src/deadline/client/cli/groups/farm_group.py
@@ -50,7 +50,7 @@ def farm_list(**args):
@handle_error
def farm_get(**args):
"""
Get the details of a Amazon Deadline Cloud farm.
Get the details of an Amazon Deadline Cloud farm.
If farm ID is not provided, returns the configured default farm.
"""
49 changes: 43 additions & 6 deletions src/deadline/client/cli/groups/fleet_group.py
@@ -51,19 +51,56 @@ def fleet_list(**args):
@cli_fleet.command(name="get")
@click.option("--profile", help="The AWS profile to use.")
@click.option("--farm-id", help="The Amazon Deadline Cloud Farm to use.")
@click.option("--fleet-id", help="The Amazon Deadline Cloud Fleet to use.", required=True)
@click.option("--fleet-id", help="The Amazon Deadline Cloud Fleet to use.")
@click.option("--queue-id", help="If provided, gets all Fleets associated with the Queue.")
@handle_error
def fleet_get(fleet_id, **args):
def fleet_get(fleet_id, queue_id, **args):
"""
Get the details of a Amazon Deadline Cloud Fleet.
Get the details of an Amazon Deadline Cloud Fleet.
"""
if fleet_id and queue_id:
raise DeadlineOperationError(
"Only one of the --fleet-id and --queue-id options may be provided."
)

# Get a temporary config object with the standard options handled
config = apply_cli_options_to_config(required_options={"farm_id"}, **args)

farm_id = config_file.get_setting("defaults.farm_id", config=config)
if not fleet_id:
queue_id = config_file.get_setting("defaults.queue_id", config=config)
if not queue_id:
raise click.UsageError(
"Missing '--fleet-id', '--queue-id', or default Queue ID configuration"
)

deadline = api.get_boto3_client("deadline", config=config)
response = deadline.get_fleet(farmId=farm_id, fleetId=fleet_id)
response.pop("ResponseMetadata", None)

click.echo(cli_object_repr(response))
if fleet_id:
response = deadline.get_fleet(farmId=farm_id, fleetId=fleet_id)
response.pop("ResponseMetadata", None)

click.echo(cli_object_repr(response))
else:
response = deadline.get_queue(farmId=farm_id, queueId=queue_id)
queue_name = response["displayName"]

response = api._call_paginated_deadline_list_api(
deadline.list_queue_fleet_associations,
"queueFleetAssociations",
farmId=farm_id,
queueId=queue_id,
)
response.pop("ResponseMetadata", None)
qfa_list = response["queueFleetAssociations"]

click.echo(
f"Showing all fleets ({len(qfa_list)} total) associated with queue: {queue_name}"
)
for qfa in qfa_list:
response = deadline.get_fleet(farmId=farm_id, fleetId=qfa["fleetId"])
response.pop("ResponseMetadata", None)
response["queueFleetAssociationStatus"] = qfa["status"]

click.echo("")
click.echo(cli_object_repr(response))