From af7fe8db5c5a0118b1c4f69ac50af94950eeb56f Mon Sep 17 00:00:00 2001 From: Subin Shekhar Date: Wed, 23 Nov 2022 03:19:28 +0530 Subject: [PATCH] feat: add new sdk methods and update existing 1. Add new sdk methods for updating default runtime and for getting resource consumption limit. 2. Update create_application to support runtime option. 3. Update list_applications to accept state filter query parameters. Signed-off-by: Subin Shekhar --- ...st_ibm_analytics_engine_api_v3_examples.py | 79 ++- iaesdk/ibm_analytics_engine_api_v3.py | 607 +++++++++++++++--- .../test_ibm_analytics_engine_api_v3.py | 57 +- test/unit/test_ibm_analytics_engine_api_v3.py | 482 ++++++++++++-- 4 files changed, 1076 insertions(+), 149 deletions(-) diff --git a/examples/test_ibm_analytics_engine_api_v3_examples.py b/examples/test_ibm_analytics_engine_api_v3_examples.py index dc0419e..6237227 100644 --- a/examples/test_ibm_analytics_engine_api_v3_examples.py +++ b/examples/test_ibm_analytics_engine_api_v3_examples.py @@ -166,7 +166,10 @@ def test_replace_instance_default_configs_example(self): instance_default_configs = ibm_analytics_engine_api_service.replace_instance_default_configs( instance_id='e64c907a-e82f-46fd-addc-ccfafbd28b09', - body={'key1': 'testString'} + body={ + "spark.driver.memory": "8G", + "spark.driver.cores": "2", + } ).get_result() print(json.dumps(instance_default_configs, indent=2)) @@ -187,7 +190,10 @@ def test_update_instance_default_configs_example(self): instance_default_configs = ibm_analytics_engine_api_service.update_instance_default_configs( instance_id='e64c907a-e82f-46fd-addc-ccfafbd28b09', - body={'key1': 'testString'} + body={ + "ae.spark.history-server.cores": "1", + "ae.spark.history-server.memory": "4G", + } ).get_result() print(json.dumps(instance_default_configs, indent=2)) @@ -197,6 +203,47 @@ def test_update_instance_default_configs_example(self): except ApiException as e: pytest.fail(str(e)) + @needscredentials + def test_get_instance_default_runtime_example(self): + """ + get_instance_default_runtime request example + """ + try: + print('\nget_instance_default_runtime() result:') + # begin-get_instance_default_runtime + + runtime = ibm_analytics_engine_api_service.get_instance_default_runtime( + instance_id='e64c907a-e82f-46fd-addc-ccfafbd28b09' + ).get_result() + + print(json.dumps(runtime, indent=2)) + + # end-get_instance_default_runtime + + except ApiException as e: + pytest.fail(str(e)) + + @needscredentials + def test_replace_instance_default_runtime_example(self): + """ + replace_instance_default_runtime request example + """ + try: + print('\nreplace_instance_default_runtime() result:') + # begin-replace_instance_default_runtime + + runtime = ibm_analytics_engine_api_service.replace_instance_default_runtime( + instance_id='e64c907a-e82f-46fd-addc-ccfafbd28b09', + spark_version='3.3' + ).get_result() + + print(json.dumps(runtime, indent=2)) + + # end-replace_instance_default_runtime + + except ApiException as e: + pytest.fail(str(e)) + @needscredentials def test_create_application_example(self): """ @@ -206,8 +253,16 @@ def test_create_application_example(self): print('\ncreate_application() result:') # begin-create_application + application_details = ApplicationRequestApplicationDetails( + application='/opt/ibm/spark/examples/src/main/python/wordcount.py', + arguments=['/opt/ibm/spark/examples/src/main/resources/people.txt'], + runtime={ + 'spark_version': '3.3' + } + ) application_response = ibm_analytics_engine_api_service.create_application( 
instance_id='e64c907a-e82f-46fd-addc-ccfafbd28b09', + application_details=application_details, ).get_result() print(json.dumps(application_response, indent=2)) @@ -299,6 +354,26 @@ def test_get_current_resource_consumption_example(self): except ApiException as e: pytest.fail(str(e)) + @needscredentials + def test_get_resource_consumption_limits_example(self): + """ + get_resource_consumption_limits request example + """ + try: + print('\nget_resource_consumption_limits() result:') + # begin-get_resource_consumption_limits + + resource_consumption_limits_response = ibm_analytics_engine_api_service.get_resource_consumption_limits( + instance_id='e64c907a-e82f-46fd-addc-ccfafbd28b09' + ).get_result() + + print(json.dumps(resource_consumption_limits_response, indent=2)) + + # end-get_resource_consumption_limits + + except ApiException as e: + pytest.fail(str(e)) + @needscredentials def test_replace_log_forwarding_config_example(self): """ diff --git a/iaesdk/ibm_analytics_engine_api_v3.py b/iaesdk/ibm_analytics_engine_api_v3.py index 8c7fb58..61c7e7c 100644 --- a/iaesdk/ibm_analytics_engine_api_v3.py +++ b/iaesdk/ibm_analytics_engine_api_v3.py @@ -30,7 +30,7 @@ from ibm_cloud_sdk_core import BaseService, DetailedResponse from ibm_cloud_sdk_core.authenticators.authenticator import Authenticator from ibm_cloud_sdk_core.get_authenticator import get_authenticator_from_environment -from ibm_cloud_sdk_core.utils import convert_model, datetime_to_string, string_to_datetime +from ibm_cloud_sdk_core.utils import convert_list, convert_model, datetime_to_string, string_to_datetime from .common import get_sdk_headers @@ -198,6 +198,8 @@ def set_instance_home(self, Provide the details of the Cloud Object Storage instance to associate with the Analytics Engine instance and use as 'instance home' if 'instance home' has not already been set. + **Note**: You can set 'instance home' again if the instance is in + 'instance_home_creation_failure' state. :param str instance_id: The ID of the Analytics Engine instance for which 'instance home' is to be set. @@ -399,6 +401,100 @@ def update_instance_default_configs(self, return response + def get_instance_default_runtime(self, + instance_id: str, + **kwargs + ) -> DetailedResponse: + """ + Get instance default runtime. + + Get the default runtime environment on which all workloads of the instance will + run. + + :param str instance_id: The ID of the Analytics Engine instance. + :param dict headers: A `dict` containing the request headers + :return: A `DetailedResponse` containing the result, headers and HTTP status code. 
+ :rtype: DetailedResponse with `dict` result representing a `Runtime` object + """ + + if instance_id is None: + raise ValueError('instance_id must be provided') + headers = {} + sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME, + service_version='V3', + operation_id='get_instance_default_runtime') + headers.update(sdk_headers) + + if 'headers' in kwargs: + headers.update(kwargs.get('headers')) + del kwargs['headers'] + headers['Accept'] = 'application/json' + + path_param_keys = ['instance_id'] + path_param_values = self.encode_path_vars(instance_id) + path_param_dict = dict(zip(path_param_keys, path_param_values)) + url = '/v3/analytics_engines/{instance_id}/default_runtime'.format(**path_param_dict) + request = self.prepare_request(method='GET', + url=url, + headers=headers) + + response = self.send(request, **kwargs) + return response + + + def replace_instance_default_runtime(self, + instance_id: str, + *, + spark_version: str = None, + **kwargs + ) -> DetailedResponse: + """ + Replace instance default runtime. + + Replace the default runtime environment on which all workloads of the instance + will run. + + :param str instance_id: The ID of the Analytics Engine instance. + :param str spark_version: (optional) Spark version of the runtime + environment. + :param dict headers: A `dict` containing the request headers + :return: A `DetailedResponse` containing the result, headers and HTTP status code. + :rtype: DetailedResponse with `dict` result representing a `Runtime` object + """ + + if instance_id is None: + raise ValueError('instance_id must be provided') + headers = {} + sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME, + service_version='V3', + operation_id='replace_instance_default_runtime') + headers.update(sdk_headers) + + data = { + 'spark_version': spark_version + } + data = {k: v for (k, v) in data.items() if v is not None} + data = json.dumps(data) + headers['content-type'] = 'application/json' + + if 'headers' in kwargs: + headers.update(kwargs.get('headers')) + del kwargs['headers'] + headers['Accept'] = 'application/json' + + path_param_keys = ['instance_id'] + path_param_values = self.encode_path_vars(instance_id) + path_param_dict = dict(zip(path_param_keys, path_param_values)) + url = '/v3/analytics_engines/{instance_id}/default_runtime'.format(**path_param_dict) + request = self.prepare_request(method='PUT', + url=url, + headers=headers, + data=data) + + response = self.send(request, **kwargs) + return response + + def create_application(self, instance_id: str, *, @@ -410,8 +506,8 @@ def create_application(self, Deploys a Spark application on a given serverless Spark instance. - :param str instance_id: The identifier of the instance where the Spark - application is submitted. + :param str instance_id: The identifier of the Analytics Engine instance + associated with the Spark application(s). :param ApplicationRequestApplicationDetails application_details: (optional) Application details. :param dict headers: A `dict` containing the request headers @@ -456,15 +552,20 @@ def create_application(self, def list_applications(self, instance_id: str, + *, + state: List[str] = None, **kwargs ) -> DetailedResponse: """ - Retrieve all Spark applications. + List all Spark applications. - Gets all applications submitted in an instance with a specified instance-id. + Returns a list of all Spark applications submitted to the specified Analytics + Engine instance. The result can be filtered by specifying query parameters. 
- :param str instance_id: Identifier of the instance where the applications - run. + :param str instance_id: The identifier of the Analytics Engine instance + associated with the Spark application(s). + :param List[str] state: (optional) List of Spark application states that + will be used to filter the response. :param dict headers: A `dict` containing the request headers :return: A `DetailedResponse` containing the result, headers and HTTP status code. :rtype: DetailedResponse with `dict` result representing a `ApplicationCollection` object @@ -478,6 +579,10 @@ def list_applications(self, operation_id='list_applications') headers.update(sdk_headers) + params = { + 'state': convert_list(state) + } + if 'headers' in kwargs: headers.update(kwargs.get('headers')) del kwargs['headers'] @@ -489,7 +594,8 @@ def list_applications(self, url = '/v3/analytics_engines/{instance_id}/spark_applications'.format(**path_param_dict) request = self.prepare_request(method='GET', url=url, - headers=headers) + headers=headers, + params=params) response = self.send(request, **kwargs) return response @@ -680,6 +786,47 @@ def get_current_resource_consumption(self, return response + def get_resource_consumption_limits(self, + instance_id: str, + **kwargs + ) -> DetailedResponse: + """ + Get resource consumption limits. + + Returns the maximum total memory and virtual processor cores that can be allotted + across all the applications running in the service instance at any point in time. + + :param str instance_id: ID of the Analytics Engine instance. + :param dict headers: A `dict` containing the request headers + :return: A `DetailedResponse` containing the result, headers and HTTP status code. + :rtype: DetailedResponse with `dict` result representing a `ResourceConsumptionLimitsResponse` object + """ + + if instance_id is None: + raise ValueError('instance_id must be provided') + headers = {} + sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME, + service_version='V3', + operation_id='get_resource_consumption_limits') + headers.update(sdk_headers) + + if 'headers' in kwargs: + headers.update(kwargs.get('headers')) + del kwargs['headers'] + headers['Accept'] = 'application/json' + + path_param_keys = ['instance_id'] + path_param_values = self.encode_path_vars(instance_id) + path_param_dict = dict(zip(path_param_keys, path_param_values)) + url = '/v3/analytics_engines/{instance_id}/resource_consumption_limits'.format(**path_param_dict) + request = self.prepare_request(method='GET', + url=url, + headers=headers) + + response = self.send(request, **kwargs) + return response + + def replace_log_forwarding_config(self, instance_id: str, *, @@ -875,6 +1022,28 @@ def get_logging_configuration(self, return response +class ListApplicationsEnums: + """ + Enums for list_applications parameters. + """ + + class State(str, Enum): + """ + List of Spark application states that will be used to filter the response. + """ + FINISHED = 'finished' + RUNNING = 'running' + FAILED = 'failed' + ERROR = 'error' + ACCEPTED = 'accepted' + SUBMITTED = 'submitted' + WAITING = 'waiting' + UNKNOWN = 'unknown' + STOPPED = 'stopped' + AUTO_TERMINATED = 'auto_terminated' + OPS_TERMINATED = 'ops_terminated' + + ############################################################################## # Models ############################################################################## @@ -887,10 +1056,12 @@ class Application(): :attr str id: (optional) Identifier provided by Analytics Engine service for the Spark application. 
:attr str href: (optional) Full URL of the resource.
+ :attr Runtime runtime: (optional) Runtime environment for applications and other
+ workloads.
:attr str spark_application_id: (optional) Identifier provided by Apache Spark for the application.
:attr str spark_application_name: (optional) Name of the Spark application.
- :attr str state: (optional) Status of the application.
+ :attr str state: (optional) State of the Spark application.
:attr str start_time: (optional) Time when the application was started.
:attr str end_time: (optional) Time when the application run ended in success, failure or was stopped.
@@ -901,6 +1072,7 @@ def __init__(self,
*,
id: str = None,
href: str = None,
+ runtime: 'Runtime' = None,
spark_application_id: str = None,
spark_application_name: str = None,
state: str = None,
@@ -913,11 +1085,13 @@ def __init__(self,
:param str id: (optional) Identifier provided by Analytics Engine service for the Spark application.
:param str href: (optional) Full URL of the resource.
+ :param Runtime runtime: (optional) Runtime environment for applications and
+ other workloads.
:param str spark_application_id: (optional) Identifier provided by Apache Spark for the application.
:param str spark_application_name: (optional) Name of the Spark application.
- :param str state: (optional) Status of the application.
+ :param str state: (optional) State of the Spark application.
:param str start_time: (optional) Time when the application was started.
:param str end_time: (optional) Time when the application run ended in success, failure or was stopped.
@@ -925,6 +1099,7 @@
"""
self.id = id
self.href = href
+ self.runtime = runtime
self.spark_application_id = spark_application_id
self.spark_application_name = spark_application_name
self.state = state
@@ -940,6 +1115,8 @@ def from_dict(cls, _dict: Dict) -> 'Application':
args['id'] = _dict.get('id')
if 'href' in _dict:
args['href'] = _dict.get('href')
+ if 'runtime' in _dict:
+ args['runtime'] = Runtime.from_dict(_dict.get('runtime'))
if 'spark_application_id' in _dict:
args['spark_application_id'] = _dict.get('spark_application_id')
if 'spark_application_name' in _dict:
@@ -966,6 +1143,8 @@ def to_dict(self) -> Dict:
_dict['id'] = self.id
if hasattr(self, 'href') and self.href is not None:
_dict['href'] = self.href
+ if hasattr(self, 'runtime') and self.runtime is not None:
+ _dict['runtime'] = self.runtime.to_dict()
if hasattr(self, 'spark_application_id') and self.spark_application_id is not None:
_dict['spark_application_id'] = self.spark_application_id
if hasattr(self, 'spark_application_name') and self.spark_application_name is not None:
@@ -998,6 +1177,23 @@ def __ne__(self, other: 'Application') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
+ class StateEnum(str, Enum):
+ """
+ State of the Spark application.
+ """
+ FINISHED = 'finished'
+ RUNNING = 'running'
+ FAILED = 'failed'
+ ERROR = 'error'
+ ACCEPTED = 'accepted'
+ SUBMITTED = 'submitted'
+ WAITING = 'waiting'
+ UNKNOWN = 'unknown'
+ STOPPED = 'stopped'
+ AUTO_TERMINATED = 'auto_terminated'
+ OPS_TERMINATED = 'ops_terminated'
+
+
class ApplicationCollection():
"""
An array of application details.
@@ -1058,6 +1254,8 @@ class ApplicationDetails():
Application details.
:attr str application: (optional) Path of the application to run.
+ :attr Runtime runtime: (optional) Runtime environment for applications and other
+ workloads.
:attr str jars: (optional) Path of the jar files containing the application.
:attr str packages: (optional) Package names.
:attr str repositories: (optional) Repositories names.
@@ -1081,6 +1279,7 @@ class ApplicationDetails():
def __init__(self,
*,
application: str = None,
+ runtime: 'Runtime' = None,
jars: str = None,
packages: str = None,
repositories: str = None,
@@ -1095,6 +1294,8 @@ def __init__(self,
Initialize a ApplicationDetails object.
:param str application: (optional) Path of the application to run.
+ :param Runtime runtime: (optional) Runtime environment for applications and
+ other workloads.
:param str jars: (optional) Path of the jar files containing the
application.
:param str packages: (optional) Package names.
@@ -1116,6 +1317,7 @@ def __init__(self,
for a list of the supported variables.
"""
self.application = application
+ self.runtime = runtime
self.jars = jars
self.packages = packages
self.repositories = repositories
@@ -1133,6 +1335,8 @@ def from_dict(cls, _dict: Dict) -> 'ApplicationDetails':
args = {}
if 'application' in _dict:
args['application'] = _dict.get('application')
+ if 'runtime' in _dict:
+ args['runtime'] = Runtime.from_dict(_dict.get('runtime'))
if 'jars' in _dict:
args['jars'] = _dict.get('jars')
if 'packages' in _dict:
@@ -1165,6 +1369,8 @@ def to_dict(self) -> Dict:
_dict = {}
if hasattr(self, 'application') and self.application is not None:
_dict['application'] = self.application
+ if hasattr(self, 'runtime') and self.runtime is not None:
+ _dict['runtime'] = self.runtime.to_dict()
if hasattr(self, 'jars') and self.jars is not None:
_dict['jars'] = self.jars
if hasattr(self, 'packages') and self.packages is not None:
@@ -1214,7 +1420,9 @@ class ApplicationGetResponse():
:attr str spark_application_id: (optional) Identifier provided by Apache Spark for the application.
:attr str spark_application_name: (optional) Name of the Spark application.
- :attr str state: (optional) Application state.
+ :attr str state: (optional) State of the Spark application.
+ :attr List[ApplicationGetResponseStateDetailsItem] state_details: (optional)
+ List of additional information messages on the current state of the application.
:attr datetime start_time: (optional) Application start time in the format YYYY-MM-DDTHH:mm:ssZ.
:attr datetime end_time: (optional) Application end time in the format
@@ -1230,6 +1438,7 @@ def __init__(self,
spark_application_id: str = None,
spark_application_name: str = None,
state: str = None,
+ state_details: List['ApplicationGetResponseStateDetailsItem'] = None,
start_time: datetime = None,
end_time: datetime = None,
finish_time: datetime = None) -> None:
@@ -1243,7 +1452,10 @@ def __init__(self,
Spark for the application.
:param str spark_application_name: (optional) Name of the Spark
application.
- :param str state: (optional) Application state.
+ :param str state: (optional) State of the Spark application.
+ :param List[ApplicationGetResponseStateDetailsItem] state_details:
+ (optional) List of additional information messages on the current state of
+ the application.
:param datetime start_time: (optional) Application start time in the format YYYY-MM-DDTHH:mm:ssZ.
:param datetime end_time: (optional) Application end time in the format @@ -1256,6 +1468,7 @@ def __init__(self, self.spark_application_id = spark_application_id self.spark_application_name = spark_application_name self.state = state + self.state_details = state_details self.start_time = start_time self.end_time = end_time self.finish_time = finish_time @@ -1274,6 +1487,8 @@ def from_dict(cls, _dict: Dict) -> 'ApplicationGetResponse': args['spark_application_name'] = _dict.get('spark_application_name') if 'state' in _dict: args['state'] = _dict.get('state') + if 'state_details' in _dict: + args['state_details'] = [ApplicationGetResponseStateDetailsItem.from_dict(x) for x in _dict.get('state_details')] if 'start_time' in _dict: args['start_time'] = string_to_datetime(_dict.get('start_time')) if 'end_time' in _dict: @@ -1300,6 +1515,8 @@ def to_dict(self) -> Dict: _dict['spark_application_name'] = self.spark_application_name if hasattr(self, 'state') and self.state is not None: _dict['state'] = self.state + if hasattr(self, 'state_details') and self.state_details is not None: + _dict['state_details'] = [x.to_dict() for x in self.state_details] if hasattr(self, 'start_time') and self.start_time is not None: _dict['start_time'] = datetime_to_string(self.start_time) if hasattr(self, 'end_time') and self.end_time is not None: @@ -1326,12 +1543,111 @@ def __ne__(self, other: 'ApplicationGetResponse') -> bool: """Return `true` when self and other are not equal, false otherwise.""" return not self == other + class StateEnum(str, Enum): + """ + State of the Spark application. + """ + FINISHED = 'finished' + RUNNING = 'running' + FAILED = 'failed' + ERROR = 'error' + ACCEPTED = 'accepted' + SUBMITTED = 'submitted' + WAITING = 'waiting' + UNKNOWN = 'unknown' + STOPPED = 'stopped' + AUTO_TERMINATED = 'auto_terminated' + OPS_TERMINATED = 'ops_terminated' + + +class ApplicationGetResponseStateDetailsItem(): + """ + Additional information message on the current state of the application. + + :attr str type: (optional) Type of the message. + :attr str code: (optional) Fixed code for the message. + :attr str message: (optional) A descriptive message providing additional + information on the current application state. + """ + + def __init__(self, + *, + type: str = None, + code: str = None, + message: str = None) -> None: + """ + Initialize a ApplicationGetResponseStateDetailsItem object. + + :param str type: (optional) Type of the message. + :param str code: (optional) Fixed code for the message. + :param str message: (optional) A descriptive message providing additional + information on the current application state. 
+ """
+ self.type = type
+ self.code = code
+ self.message = message
+
+ @classmethod
+ def from_dict(cls, _dict: Dict) -> 'ApplicationGetResponseStateDetailsItem':
+ """Initialize a ApplicationGetResponseStateDetailsItem object from a json dictionary."""
+ args = {}
+ if 'type' in _dict:
+ args['type'] = _dict.get('type')
+ if 'code' in _dict:
+ args['code'] = _dict.get('code')
+ if 'message' in _dict:
+ args['message'] = _dict.get('message')
+ return cls(**args)
+
+ @classmethod
+ def _from_dict(cls, _dict):
+ """Initialize a ApplicationGetResponseStateDetailsItem object from a json dictionary."""
+ return cls.from_dict(_dict)
+
+ def to_dict(self) -> Dict:
+ """Return a json dictionary representing this model."""
+ _dict = {}
+ if hasattr(self, 'type') and self.type is not None:
+ _dict['type'] = self.type
+ if hasattr(self, 'code') and self.code is not None:
+ _dict['code'] = self.code
+ if hasattr(self, 'message') and self.message is not None:
+ _dict['message'] = self.message
+ return _dict
+
+ def _to_dict(self):
+ """Return a json dictionary representing this model."""
+ return self.to_dict()
+
+ def __str__(self) -> str:
+ """Return a `str` version of this ApplicationGetResponseStateDetailsItem object."""
+ return json.dumps(self.to_dict(), indent=2)
+
+ def __eq__(self, other: 'ApplicationGetResponseStateDetailsItem') -> bool:
+ """Return `true` when self and other are equal, false otherwise."""
+ if not isinstance(other, self.__class__):
+ return False
+ return self.__dict__ == other.__dict__
+
+ def __ne__(self, other: 'ApplicationGetResponseStateDetailsItem') -> bool:
+ """Return `true` when self and other are not equal, false otherwise."""
+ return not self == other
+
+ class TypeEnum(str, Enum):
+ """
+ Type of the message.
+ """
+ USER_ERROR = 'user_error'
+ SERVER_ERROR = 'server_error'
+ INFO = 'info'
+
+
class ApplicationGetStateResponse():
"""
State of a given application.
:attr str id: (optional) Identifier of the application.
- :attr str state: (optional) Status of the application.
+ :attr str state: (optional) State of the Spark application.
:attr str start_time: (optional) Time when the application was started.
:attr str end_time: (optional) Time when the application run ended in success, failure or was stopped.
@@ -1349,7 +1665,7 @@ def __init__(self,
Initialize a ApplicationGetStateResponse object.
:param str id: (optional) Identifier of the application.
- :param str state: (optional) Status of the application.
+ :param str state: (optional) State of the Spark application.
:param str start_time: (optional) Time when the application was started.
:param str end_time: (optional) Time when the application run ended in success, failure or was stopped.
@@ -1415,11 +1731,30 @@ def __ne__(self, other: 'ApplicationGetStateResponse') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
+ class StateEnum(str, Enum):
+ """
+ State of the Spark application.
+ """
+ FINISHED = 'finished'
+ RUNNING = 'running'
+ FAILED = 'failed'
+ ERROR = 'error'
+ ACCEPTED = 'accepted'
+ SUBMITTED = 'submitted'
+ WAITING = 'waiting'
+ UNKNOWN = 'unknown'
+ STOPPED = 'stopped'
+ AUTO_TERMINATED = 'auto_terminated'
+ OPS_TERMINATED = 'ops_terminated'
+
+
class ApplicationRequestApplicationDetails():
"""
Application details.
:attr str application: (optional) Path of the application to run.
+ :attr Runtime runtime: (optional) Runtime environment for applications and other
+ workloads.
:attr str jars: (optional) Path of the jar files containing the application.
:attr str packages: (optional) Package names.
:attr str repositories: (optional) Repositories names.
@@ -1443,6 +1778,7 @@ class ApplicationRequestApplicationDetails():
def __init__(self,
*,
application: str = None,
+ runtime: 'Runtime' = None,
jars: str = None,
packages: str = None,
repositories: str = None,
@@ -1457,6 +1793,8 @@ def __init__(self,
Initialize a ApplicationRequestApplicationDetails object.
:param str application: (optional) Path of the application to run.
+ :param Runtime runtime: (optional) Runtime environment for applications and
+ other workloads.
:param str jars: (optional) Path of the jar files containing the
application.
:param str packages: (optional) Package names.
@@ -1478,6 +1816,7 @@ def __init__(self,
for a list of the supported variables.
"""
self.application = application
+ self.runtime = runtime
self.jars = jars
self.packages = packages
self.repositories = repositories
@@ -1495,6 +1834,8 @@ def from_dict(cls, _dict: Dict) -> 'ApplicationRequestApplicationDetails':
args = {}
if 'application' in _dict:
args['application'] = _dict.get('application')
+ if 'runtime' in _dict:
+ args['runtime'] = Runtime.from_dict(_dict.get('runtime'))
if 'jars' in _dict:
args['jars'] = _dict.get('jars')
if 'packages' in _dict:
@@ -1527,6 +1868,8 @@ def to_dict(self) -> Dict:
_dict = {}
if hasattr(self, 'application') and self.application is not None:
_dict['application'] = self.application
+ if hasattr(self, 'runtime') and self.runtime is not None:
+ _dict['runtime'] = self.runtime.to_dict()
if hasattr(self, 'jars') and self.jars is not None:
_dict['jars'] = self.jars
if hasattr(self, 'packages') and self.packages is not None:
@@ -1572,7 +1915,7 @@ class ApplicationResponse():
Application response details.
:attr str id: (optional) Identifier of the application that was submitted.
- :attr str state: (optional) State of the submitted application.
+ :attr str state: (optional) State of the Spark application.
"""
def __init__(self,
@@ -1583,7 +1926,7 @@ def __init__(self,
Initialize a ApplicationResponse object.
:param str id: (optional) Identifier of the application that was submitted.
- :param str state: (optional) State of the submitted application.
+ :param str state: (optional) State of the Spark application.
"""
self.id = id
self.state = state
@@ -1632,11 +1975,19 @@ def __ne__(self, other: 'ApplicationResponse') -> bool:
class StateEnum(str, Enum):
"""
- State of the submitted application.
+ State of the Spark application.
"""
- ACCEPTED = 'accepted'
+ FINISHED = 'finished'
+ RUNNING = 'running'
FAILED = 'failed'
ERROR = 'error'
+ ACCEPTED = 'accepted'
+ SUBMITTED = 'submitted'
+ WAITING = 'waiting'
+ UNKNOWN = 'unknown'
+ STOPPED = 'stopped'
+ AUTO_TERMINATED = 'auto_terminated'
+ OPS_TERMINATED = 'ops_terminated'
class CurrentResourceConsumptionResponse():
@@ -1708,11 +2059,11 @@ class Instance():
:attr str id: (optional) GUID of the Analytics Engine instance.
:attr str href: (optional) Full URL of the resource.
- :attr str state: (optional) Instance state.
+ :attr str state: (optional) State of the Analytics Engine instance.
:attr datetime state_change_time: (optional) Timestamp when the state of the
instance was changed, in the format YYYY-MM-DDTHH:mm:ssZ.
- :attr InstanceDefaultRuntime default_runtime: (optional) Specifies the default
- runtime to use for all workloads that run in this instance.
+ :attr Runtime default_runtime: (optional) Runtime environment for applications
+ and other workloads.
:attr InstanceHome instance_home: (optional) Object storage instance that acts as the home for custom libraries and Spark events.
:attr InstanceDefaultConfig default_config: (optional) Instance level default
@@ -1725,7 +2076,7 @@ def __init__(self,
href: str = None,
state: str = None,
state_change_time: datetime = None,
- default_runtime: 'InstanceDefaultRuntime' = None,
+ default_runtime: 'Runtime' = None,
instance_home: 'InstanceHome' = None,
default_config: 'InstanceDefaultConfig' = None) -> None:
"""
@@ -1733,11 +2084,11 @@
Initialize a Instance object.
:param str id: (optional) GUID of the Analytics Engine instance.
:param str href: (optional) Full URL of the resource.
- :param str state: (optional) Instance state.
+ :param str state: (optional) State of the Analytics Engine instance.
:param datetime state_change_time: (optional) Timestamp when the state of
the instance was changed, in the format YYYY-MM-DDTHH:mm:ssZ.
- :param InstanceDefaultRuntime default_runtime: (optional) Specifies the
- default runtime to use for all workloads that run in this instance.
+ :param Runtime default_runtime: (optional) Runtime environment for
+ applications and other workloads.
:param InstanceHome instance_home: (optional) Object storage instance that
acts as the home for custom libraries and Spark events.
:param InstanceDefaultConfig default_config: (optional) Instance level
@@ -1764,7 +2115,7 @@ def from_dict(cls, _dict: Dict) -> 'Instance':
if 'state_change_time' in _dict:
args['state_change_time'] = string_to_datetime(_dict.get('state_change_time'))
if 'default_runtime' in _dict:
- args['default_runtime'] = InstanceDefaultRuntime.from_dict(_dict.get('default_runtime'))
+ args['default_runtime'] = Runtime.from_dict(_dict.get('default_runtime'))
if 'instance_home' in _dict:
args['instance_home'] = InstanceHome.from_dict(_dict.get('instance_home'))
if 'default_config' in _dict:
@@ -1815,11 +2166,15 @@ def __ne__(self, other: 'Instance') -> bool:
class StateEnum(str, Enum):
"""
- Instance state.
+ State of the Analytics Engine instance.
"""
- CREATED = 'created'
+ CREATION_ACCEPTED = 'creation_accepted'
+ INITIALIZED = 'initialized'
+ PREPARING = 'preparing'
+ ACTIVE = 'active'
DELETED = 'deleted'
- FAILED = 'failed'
+ DISABLED = 'disabled'
+ CREATION_FAILED = 'creation_failed'
class InstanceDefaultConfig():
"""
@@ -1877,69 +2232,12 @@ def __ne__(self, other: 'InstanceDefaultConfig') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
-class InstanceDefaultRuntime():
- """
- Specifies the default runtime to use for all workloads that run in this instance.
-
- :attr str spark_version: (optional) Version of Spark runtime to use. Currently,
- only 3.1 is supported.
- """
-
- def __init__(self,
- *,
- spark_version: str = None) -> None:
- """
- Initialize a InstanceDefaultRuntime object.
-
- :param str spark_version: (optional) Version of Spark runtime to use.
- Currently, only 3.1 is supported.
- """
- self.spark_version = spark_version
-
- @classmethod
- def from_dict(cls, _dict: Dict) -> 'InstanceDefaultRuntime':
- """Initialize a InstanceDefaultRuntime object from a json dictionary."""
- args = {}
- if 'spark_version' in _dict:
- args['spark_version'] = _dict.get('spark_version')
- return cls(**args)
-
- @classmethod
- def _from_dict(cls, _dict):
- """Initialize a InstanceDefaultRuntime object from a json dictionary."""
- return cls.from_dict(_dict)
-
- def to_dict(self) -> Dict:
- """Return a json dictionary representing this model."""
- _dict = {}
- if hasattr(self, 'spark_version') and self.spark_version is not None:
- _dict['spark_version'] = self.spark_version
- return _dict
-
- def _to_dict(self):
- """Return a json dictionary representing this model."""
- return self.to_dict()
-
- def __str__(self) -> str:
- """Return a `str` version of this InstanceDefaultRuntime object."""
- return json.dumps(self.to_dict(), indent=2)
-
- def __eq__(self, other: 'InstanceDefaultRuntime') -> bool:
- """Return `true` when self and other are equal, false otherwise."""
- if not isinstance(other, self.__class__):
- return False
- return self.__dict__ == other.__dict__
-
- def __ne__(self, other: 'InstanceDefaultRuntime') -> bool:
- """Return `true` when self and other are not equal, false otherwise."""
- return not self == other
-
class InstanceGetStateResponse():
"""
State details of Analytics Engine instance.
:attr str id: (optional) GUID of the Analytics Engine instance.
- :attr str state: (optional) Instance state.
+ :attr str state: (optional) State of the Analytics Engine instance.
"""
def __init__(self,
@@ -1950,7 +2248,7 @@ def __init__(self,
Initialize a InstanceGetStateResponse object.
:param str id: (optional) GUID of the Analytics Engine instance.
- :param str state: (optional) Instance state.
+ :param str state: (optional) State of the Analytics Engine instance.
"""
self.id = id
self.state = state
@@ -1999,11 +2297,15 @@ def __ne__(self, other: 'InstanceGetStateResponse') -> bool:
class StateEnum(str, Enum):
"""
- Instance state.
+ State of the Analytics Engine instance.
"""
- CREATED = 'created'
+ CREATION_ACCEPTED = 'creation_accepted'
+ INITIALIZED = 'initialized'
+ PREPARING = 'preparing'
+ ACTIVE = 'active'
DELETED = 'deleted'
- FAILED = 'failed'
+ DISABLED = 'disabled'
+ CREATION_FAILED = 'creation_failed'
class InstanceHome():
@@ -2507,3 +2809,126 @@ def __eq__(self, other: 'LoggingConfigurationResponseLogServer') -> bool:
def __ne__(self, other: 'LoggingConfigurationResponseLogServer') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
+
+class ResourceConsumptionLimitsResponse():
+ """
+ Resource consumption limits for the instance.
+
+ :attr str max_cores: (optional) Maximum number of virtual processor cores that
+ can be used in the instance.
+ :attr str max_memory: (optional) Maximum memory that can be used in the
+ instance.
+ """
+
+ def __init__(self,
+ *,
+ max_cores: str = None,
+ max_memory: str = None) -> None:
+ """
+ Initialize a ResourceConsumptionLimitsResponse object.
+
+ :param str max_cores: (optional) Maximum number of virtual processor cores
+ that can be used in the instance.
+ :param str max_memory: (optional) Maximum memory that can be used in the
+ instance.
+ """
+ self.max_cores = max_cores
+ self.max_memory = max_memory
+
+ @classmethod
+ def from_dict(cls, _dict: Dict) -> 'ResourceConsumptionLimitsResponse':
+ """Initialize a ResourceConsumptionLimitsResponse object from a json dictionary."""
+ args = {}
+ if 'max_cores' in _dict:
+ args['max_cores'] = _dict.get('max_cores')
+ if 'max_memory' in _dict:
+ args['max_memory'] = _dict.get('max_memory')
+ return cls(**args)
+
+ @classmethod
+ def _from_dict(cls, _dict):
+ """Initialize a ResourceConsumptionLimitsResponse object from a json dictionary."""
+ return cls.from_dict(_dict)
+
+ def to_dict(self) -> Dict:
+ """Return a json dictionary representing this model."""
+ _dict = {}
+ if hasattr(self, 'max_cores') and self.max_cores is not None:
+ _dict['max_cores'] = self.max_cores
+ if hasattr(self, 'max_memory') and self.max_memory is not None:
+ _dict['max_memory'] = self.max_memory
+ return _dict
+
+ def _to_dict(self):
+ """Return a json dictionary representing this model."""
+ return self.to_dict()
+
+ def __str__(self) -> str:
+ """Return a `str` version of this ResourceConsumptionLimitsResponse object."""
+ return json.dumps(self.to_dict(), indent=2)
+
+ def __eq__(self, other: 'ResourceConsumptionLimitsResponse') -> bool:
+ """Return `true` when self and other are equal, false otherwise."""
+ if not isinstance(other, self.__class__):
+ return False
+ return self.__dict__ == other.__dict__
+
+ def __ne__(self, other: 'ResourceConsumptionLimitsResponse') -> bool:
+ """Return `true` when self and other are not equal, false otherwise."""
+ return not self == other
+
+class Runtime():
+ """
+ Runtime environment for applications and other workloads.
+
+ :attr str spark_version: (optional) Spark version of the runtime environment.
+ """
+
+ def __init__(self,
+ *,
+ spark_version: str = None) -> None:
+ """
+ Initialize a Runtime object.
+
+ :param str spark_version: (optional) Spark version of the runtime
+ environment.
+ """ + self.spark_version = spark_version + + @classmethod + def from_dict(cls, _dict: Dict) -> 'Runtime': + """Initialize a Runtime object from a json dictionary.""" + args = {} + if 'spark_version' in _dict: + args['spark_version'] = _dict.get('spark_version') + return cls(**args) + + @classmethod + def _from_dict(cls, _dict): + """Initialize a Runtime object from a json dictionary.""" + return cls.from_dict(_dict) + + def to_dict(self) -> Dict: + """Return a json dictionary representing this model.""" + _dict = {} + if hasattr(self, 'spark_version') and self.spark_version is not None: + _dict['spark_version'] = self.spark_version + return _dict + + def _to_dict(self): + """Return a json dictionary representing this model.""" + return self.to_dict() + + def __str__(self) -> str: + """Return a `str` version of this Runtime object.""" + return json.dumps(self.to_dict(), indent=2) + + def __eq__(self, other: 'Runtime') -> bool: + """Return `true` when self and other are equal, false otherwise.""" + if not isinstance(other, self.__class__): + return False + return self.__dict__ == other.__dict__ + + def __ne__(self, other: 'Runtime') -> bool: + """Return `true` when self and other are not equal, false otherwise.""" + return not self == other diff --git a/test/integration/test_ibm_analytics_engine_api_v3.py b/test/integration/test_ibm_analytics_engine_api_v3.py index e091045..78f2a29 100644 --- a/test/integration/test_ibm_analytics_engine_api_v3.py +++ b/test/integration/test_ibm_analytics_engine_api_v3.py @@ -113,7 +113,10 @@ def test_replace_instance_default_configs(self): replace_instance_default_configs_response = self.ibm_analytics_engine_api_service.replace_instance_default_configs( instance_id=self.instance_id, - body={'key1': 'testString'} + body={ + "spark.driver.memory": "8G", + "spark.driver.cores": "2", + } ) assert replace_instance_default_configs_response.get_status_code() == 200 @@ -125,20 +128,52 @@ def test_update_instance_default_configs(self): update_instance_default_configs_response = self.ibm_analytics_engine_api_service.update_instance_default_configs( instance_id=self.instance_id, - body={'key1': 'testString'} + body={ + "ae.spark.history-server.cores": "1", + "ae.spark.history-server.memory": "4G", + } ) assert update_instance_default_configs_response.get_status_code() == 200 result = update_instance_default_configs_response.get_result() assert result is not None + @needscredentials + def test_get_instance_default_runtime(self): + + get_instance_default_runtime_response = self.ibm_analytics_engine_api_service.get_instance_default_runtime( + instance_id=self.instance_id + ) + + assert get_instance_default_runtime_response.get_status_code() == 200 + runtime = get_instance_default_runtime_response.get_result() + assert runtime is not None + + @needscredentials + def test_replace_instance_default_runtime(self): + + replace_instance_default_runtime_response = self.ibm_analytics_engine_api_service.replace_instance_default_runtime( + instance_id=self.instance_id, + spark_version='3.3' + ) + + assert replace_instance_default_runtime_response.get_status_code() == 200 + runtime = replace_instance_default_runtime_response.get_result() + assert runtime is not None + @needscredentials def test_create_application(self): + # Construct a dict representation of a Runtime model + runtime_model = { + 'spark_version': '3.1', + } + # Construct a dict representation of a ApplicationRequestApplicationDetails model application_request_application_details_model = { 'application': 
'/opt/ibm/spark/examples/src/main/python/wordcount.py', - 'arguments': ['/opt/ibm/spark/examples/src/main/resources/people.txt'] + 'arguments': ['/opt/ibm/spark/examples/src/main/resources/people.txt'], + 'runtime': runtime_model } create_application_response = self.ibm_analytics_engine_api_service.create_application( @@ -149,7 +184,7 @@ def test_create_application(self): assert create_application_response.get_status_code() == 202 application_response = create_application_response.get_result() assert application_response is not None - + global application_id application_id = application_response.get('id') @@ -157,7 +192,8 @@ def test_create_application(self): def test_list_applications(self): list_applications_response = self.ibm_analytics_engine_api_service.list_applications( - instance_id=self.instance_id + instance_id=self.instance_id, + state=['accepted', 'submitted', 'waiting', 'running', 'finished', 'failed'] ) assert list_applications_response.get_status_code() == 200 @@ -199,6 +235,17 @@ def test_get_current_resource_consumption(self): current_resource_consumption_response = get_current_resource_consumption_response.get_result() assert current_resource_consumption_response is not None + @needscredentials + def test_get_resource_consumption_limits(self): + + get_resource_consumption_limits_response = self.ibm_analytics_engine_api_service.get_resource_consumption_limits( + instance_id=self.instance_id + ) + + assert get_resource_consumption_limits_response.get_status_code() == 200 + resource_consumption_limits_response = get_resource_consumption_limits_response.get_result() + assert resource_consumption_limits_response is not None + @needscredentials def test_replace_log_forwarding_config(self): diff --git a/test/unit/test_ibm_analytics_engine_api_v3.py b/test/unit/test_ibm_analytics_engine_api_v3.py index b783d95..66e58ee 100644 --- a/test/unit/test_ibm_analytics_engine_api_v3.py +++ b/test/unit/test_ibm_analytics_engine_api_v3.py @@ -25,6 +25,7 @@ import os import pytest import re +import requests import responses import urllib from iaesdk.ibm_analytics_engine_api_v3 import * @@ -117,7 +118,7 @@ def test_get_instance_all_params(self): """ # Set up mock url = preprocess_url('/v3/analytics_engines/e64c907a-e82f-46fd-addc-ccfafbd28b09') - mock_response = '{"id": "id", "href": "href", "state": "created", "state_change_time": "2021-01-30T08:30:00.000Z", "default_runtime": {"spark_version": "spark_version"}, "instance_home": {"id": "id", "provider": "provider", "type": "type", "region": "region", "endpoint": "endpoint", "bucket": "bucket", "hmac_access_key": "hmac_access_key", "hmac_secret_key": "hmac_secret_key"}, "default_config": {"key": "key"}}' + mock_response = '{"id": "id", "href": "href", "state": "creation_accepted", "state_change_time": "2021-01-30T08:30:00.000Z", "default_runtime": {"spark_version": "3.1"}, "instance_home": {"id": "id", "provider": "provider", "type": "type", "region": "region", "endpoint": "endpoint", "bucket": "bucket", "hmac_access_key": "hmac_access_key", "hmac_secret_key": "hmac_secret_key"}, "default_config": {"key": "key"}}' responses.add(responses.GET, url, body=mock_response, @@ -153,7 +154,7 @@ def test_get_instance_value_error(self): """ # Set up mock url = preprocess_url('/v3/analytics_engines/e64c907a-e82f-46fd-addc-ccfafbd28b09') - mock_response = '{"id": "id", "href": "href", "state": "created", "state_change_time": "2021-01-30T08:30:00.000Z", "default_runtime": {"spark_version": "spark_version"}, "instance_home": {"id": "id", "provider": 
"provider", "type": "type", "region": "region", "endpoint": "endpoint", "bucket": "bucket", "hmac_access_key": "hmac_access_key", "hmac_secret_key": "hmac_secret_key"}, "default_config": {"key": "key"}}' + mock_response = '{"id": "id", "href": "href", "state": "creation_accepted", "state_change_time": "2021-01-30T08:30:00.000Z", "default_runtime": {"spark_version": "3.1"}, "instance_home": {"id": "id", "provider": "provider", "type": "type", "region": "region", "endpoint": "endpoint", "bucket": "bucket", "hmac_access_key": "hmac_access_key", "hmac_secret_key": "hmac_secret_key"}, "default_config": {"key": "key"}}' responses.add(responses.GET, url, body=mock_response, @@ -193,7 +194,7 @@ def test_get_instance_state_all_params(self): """ # Set up mock url = preprocess_url('/v3/analytics_engines/e64c907a-e82f-46fd-addc-ccfafbd28b09/state') - mock_response = '{"id": "id", "state": "created"}' + mock_response = '{"id": "id", "state": "creation_accepted"}' responses.add(responses.GET, url, body=mock_response, @@ -229,7 +230,7 @@ def test_get_instance_state_value_error(self): """ # Set up mock url = preprocess_url('/v3/analytics_engines/e64c907a-e82f-46fd-addc-ccfafbd28b09/state') - mock_response = '{"id": "id", "state": "created"}' + mock_response = '{"id": "id", "state": "creation_accepted"}' responses.add(responses.GET, url, body=mock_response, @@ -605,6 +606,164 @@ def test_update_instance_default_configs_value_error_with_retries(self): _service.disable_retries() self.test_update_instance_default_configs_value_error() +class TestGetInstanceDefaultRuntime(): + """ + Test Class for get_instance_default_runtime + """ + + @responses.activate + def test_get_instance_default_runtime_all_params(self): + """ + get_instance_default_runtime() + """ + # Set up mock + url = preprocess_url('/v3/analytics_engines/e64c907a-e82f-46fd-addc-ccfafbd28b09/default_runtime') + mock_response = '{"spark_version": "3.1"}' + responses.add(responses.GET, + url, + body=mock_response, + content_type='application/json', + status=200) + + # Set up parameter values + instance_id = 'e64c907a-e82f-46fd-addc-ccfafbd28b09' + + # Invoke method + response = _service.get_instance_default_runtime( + instance_id, + headers={} + ) + + # Check for correct operation + assert len(responses.calls) == 1 + assert response.status_code == 200 + + def test_get_instance_default_runtime_all_params_with_retries(self): + # Enable retries and run test_get_instance_default_runtime_all_params. + _service.enable_retries() + self.test_get_instance_default_runtime_all_params() + + # Disable retries and run test_get_instance_default_runtime_all_params. 
+ _service.disable_retries() + self.test_get_instance_default_runtime_all_params() + + @responses.activate + def test_get_instance_default_runtime_value_error(self): + """ + test_get_instance_default_runtime_value_error() + """ + # Set up mock + url = preprocess_url('/v3/analytics_engines/e64c907a-e82f-46fd-addc-ccfafbd28b09/default_runtime') + mock_response = '{"spark_version": "3.1"}' + responses.add(responses.GET, + url, + body=mock_response, + content_type='application/json', + status=200) + + # Set up parameter values + instance_id = 'e64c907a-e82f-46fd-addc-ccfafbd28b09' + + # Pass in all but one required param and check for a ValueError + req_param_dict = { + "instance_id": instance_id, + } + for param in req_param_dict.keys(): + req_copy = {key:val if key is not param else None for (key,val) in req_param_dict.items()} + with pytest.raises(ValueError): + _service.get_instance_default_runtime(**req_copy) + + def test_get_instance_default_runtime_value_error_with_retries(self): + # Enable retries and run test_get_instance_default_runtime_value_error. + _service.enable_retries() + self.test_get_instance_default_runtime_value_error() + + # Disable retries and run test_get_instance_default_runtime_value_error. + _service.disable_retries() + self.test_get_instance_default_runtime_value_error() + +class TestReplaceInstanceDefaultRuntime(): + """ + Test Class for replace_instance_default_runtime + """ + + @responses.activate + def test_replace_instance_default_runtime_all_params(self): + """ + replace_instance_default_runtime() + """ + # Set up mock + url = preprocess_url('/v3/analytics_engines/e64c907a-e82f-46fd-addc-ccfafbd28b09/default_runtime') + mock_response = '{"spark_version": "3.1"}' + responses.add(responses.PUT, + url, + body=mock_response, + content_type='application/json', + status=200) + + # Set up parameter values + instance_id = 'e64c907a-e82f-46fd-addc-ccfafbd28b09' + spark_version = '3.1' + + # Invoke method + response = _service.replace_instance_default_runtime( + instance_id, + spark_version=spark_version, + headers={} + ) + + # Check for correct operation + assert len(responses.calls) == 1 + assert response.status_code == 200 + # Validate body params + req_body = json.loads(str(responses.calls[0].request.body, 'utf-8')) + assert req_body['spark_version'] == '3.1' + + def test_replace_instance_default_runtime_all_params_with_retries(self): + # Enable retries and run test_replace_instance_default_runtime_all_params. + _service.enable_retries() + self.test_replace_instance_default_runtime_all_params() + + # Disable retries and run test_replace_instance_default_runtime_all_params. 
+ _service.disable_retries() + self.test_replace_instance_default_runtime_all_params() + + @responses.activate + def test_replace_instance_default_runtime_value_error(self): + """ + test_replace_instance_default_runtime_value_error() + """ + # Set up mock + url = preprocess_url('/v3/analytics_engines/e64c907a-e82f-46fd-addc-ccfafbd28b09/default_runtime') + mock_response = '{"spark_version": "3.1"}' + responses.add(responses.PUT, + url, + body=mock_response, + content_type='application/json', + status=200) + + # Set up parameter values + instance_id = 'e64c907a-e82f-46fd-addc-ccfafbd28b09' + spark_version = '3.1' + + # Pass in all but one required param and check for a ValueError + req_param_dict = { + "instance_id": instance_id, + } + for param in req_param_dict.keys(): + req_copy = {key:val if key is not param else None for (key,val) in req_param_dict.items()} + with pytest.raises(ValueError): + _service.replace_instance_default_runtime(**req_copy) + + def test_replace_instance_default_runtime_value_error_with_retries(self): + # Enable retries and run test_replace_instance_default_runtime_value_error. + _service.enable_retries() + self.test_replace_instance_default_runtime_value_error() + + # Disable retries and run test_replace_instance_default_runtime_value_error. + _service.disable_retries() + self.test_replace_instance_default_runtime_value_error() + class TestCreateApplication(): """ Test Class for create_application @@ -617,16 +776,21 @@ def test_create_application_all_params(self): """ # Set up mock url = preprocess_url('/v3/analytics_engines/e64c907a-e82f-46fd-addc-ccfafbd28b09/spark_applications') - mock_response = '{"id": "id", "state": "accepted"}' + mock_response = '{"id": "id", "state": "finished"}' responses.add(responses.POST, url, body=mock_response, content_type='application/json', status=202) + # Construct a dict representation of a Runtime model + runtime_model = {} + runtime_model['spark_version'] = '3.1' + # Construct a dict representation of a ApplicationRequestApplicationDetails model application_request_application_details_model = {} application_request_application_details_model['application'] = 'cos://bucket_name.my_cos/my_spark_app.py' + application_request_application_details_model['runtime'] = runtime_model application_request_application_details_model['jars'] = 'cos://cloud-object-storage/jars/tests.jar' application_request_application_details_model['packages'] = 'testString' application_request_application_details_model['repositories'] = 'testString' @@ -672,16 +836,21 @@ def test_create_application_value_error(self): """ # Set up mock url = preprocess_url('/v3/analytics_engines/e64c907a-e82f-46fd-addc-ccfafbd28b09/spark_applications') - mock_response = '{"id": "id", "state": "accepted"}' + mock_response = '{"id": "id", "state": "finished"}' responses.add(responses.POST, url, body=mock_response, content_type='application/json', status=202) + # Construct a dict representation of a Runtime model + runtime_model = {} + runtime_model['spark_version'] = '3.1' + # Construct a dict representation of a ApplicationRequestApplicationDetails model application_request_application_details_model = {} application_request_application_details_model['application'] = 'cos://bucket_name.my_cos/my_spark_app.py' + application_request_application_details_model['runtime'] = runtime_model application_request_application_details_model['jars'] = 'cos://cloud-object-storage/jars/tests.jar' application_request_application_details_model['packages'] = 'testString' 
application_request_application_details_model['repositories'] = 'testString' @@ -727,7 +896,7 @@ def test_list_applications_all_params(self): """ # Set up mock url = preprocess_url('/v3/analytics_engines/e64c907a-e82f-46fd-addc-ccfafbd28b09/spark_applications') - mock_response = '{"applications": [{"id": "id", "href": "href", "spark_application_id": "spark_application_id", "spark_application_name": "spark_application_name", "state": "state", "start_time": "start_time", "end_time": "end_time", "finish_time": "finish_time"}]}' + mock_response = '{"applications": [{"id": "id", "href": "href", "runtime": {"spark_version": "3.1"}, "spark_application_id": "spark_application_id", "spark_application_name": "spark_application_name", "state": "finished", "start_time": "start_time", "end_time": "end_time", "finish_time": "finish_time"}]}' responses.add(responses.GET, url, body=mock_response, @@ -736,16 +905,22 @@ def test_list_applications_all_params(self): # Set up parameter values instance_id = 'e64c907a-e82f-46fd-addc-ccfafbd28b09' + state = ['finished'] # Invoke method response = _service.list_applications( instance_id, + state=state, headers={} ) # Check for correct operation assert len(responses.calls) == 1 assert response.status_code == 200 + # Validate query params + query_string = responses.calls[0].request.url.split('?',1)[1] + query_string = urllib.parse.unquote_plus(query_string) + assert 'state={}'.format(','.join(state)) in query_string def test_list_applications_all_params_with_retries(self): # Enable retries and run test_list_applications_all_params. @@ -756,6 +931,42 @@ def test_list_applications_all_params_with_retries(self): _service.disable_retries() self.test_list_applications_all_params() + @responses.activate + def test_list_applications_required_params(self): + """ + test_list_applications_required_params() + """ + # Set up mock + url = preprocess_url('/v3/analytics_engines/e64c907a-e82f-46fd-addc-ccfafbd28b09/spark_applications') + mock_response = '{"applications": [{"id": "id", "href": "href", "runtime": {"spark_version": "3.1"}, "spark_application_id": "spark_application_id", "spark_application_name": "spark_application_name", "state": "finished", "start_time": "start_time", "end_time": "end_time", "finish_time": "finish_time"}]}' + responses.add(responses.GET, + url, + body=mock_response, + content_type='application/json', + status=200) + + # Set up parameter values + instance_id = 'e64c907a-e82f-46fd-addc-ccfafbd28b09' + + # Invoke method + response = _service.list_applications( + instance_id, + headers={} + ) + + # Check for correct operation + assert len(responses.calls) == 1 + assert response.status_code == 200 + + def test_list_applications_required_params_with_retries(self): + # Enable retries and run test_list_applications_required_params. + _service.enable_retries() + self.test_list_applications_required_params() + + # Disable retries and run test_list_applications_required_params. 
+        _service.disable_retries()
+        self.test_list_applications_required_params()
+
     @responses.activate
     def test_list_applications_value_error(self):
         """
@@ -763,7 +974,7 @@
         """
         # Set up mock
         url = preprocess_url('/v3/analytics_engines/e64c907a-e82f-46fd-addc-ccfafbd28b09/spark_applications')
-        mock_response = '{"applications": [{"id": "id", "href": "href", "spark_application_id": "spark_application_id", "spark_application_name": "spark_application_name", "state": "state", "start_time": "start_time", "end_time": "end_time", "finish_time": "finish_time"}]}'
+        mock_response = '{"applications": [{"id": "id", "href": "href", "runtime": {"spark_version": "3.1"}, "spark_application_id": "spark_application_id", "spark_application_name": "spark_application_name", "state": "finished", "start_time": "start_time", "end_time": "end_time", "finish_time": "finish_time"}]}'
         responses.add(responses.GET,
                       url,
                       body=mock_response,
@@ -803,7 +1014,7 @@ def test_get_application_all_params(self):
         """
         # Set up mock
         url = preprocess_url('/v3/analytics_engines/e64c907a-e82f-46fd-addc-ccfafbd28b09/spark_applications/ff48cc19-0e7e-4627-aac6-0b4ad080397b')
-        mock_response = '{"application_details": {"application": "cos://bucket_name.my_cos/my_spark_app.py", "jars": "cos://cloud-object-storage/jars/tests.jar", "packages": "packages", "repositories": "repositories", "files": "files", "archives": "archives", "name": "spark-app", "class": "com.company.path.ClassName", "arguments": ["[arg1, arg2, arg3]"], "conf": {"mapKey": "anyValue"}, "env": {"mapKey": "anyValue"}}, "id": "2b83d31c-397b-48ad-ad76-b83347c982db", "spark_application_id": "spark_application_id", "spark_application_name": "spark_application_name", "state": "accepted", "start_time": "2021-01-30T08:30:00.000Z", "end_time": "2021-01-30T08:30:00.000Z", "finish_time": "2021-01-30T08:30:00.000Z"}'
+        mock_response = '{"application_details": {"application": "cos://bucket_name.my_cos/my_spark_app.py", "runtime": {"spark_version": "3.1"}, "jars": "cos://cloud-object-storage/jars/tests.jar", "packages": "packages", "repositories": "repositories", "files": "files", "archives": "archives", "name": "spark-app", "class": "com.company.path.ClassName", "arguments": ["[arg1, arg2, arg3]"], "conf": {"mapKey": "anyValue"}, "env": {"mapKey": "anyValue"}}, "id": "2b83d31c-397b-48ad-ad76-b83347c982db", "spark_application_id": "spark_application_id", "spark_application_name": "spark_application_name", "state": "finished", "state_details": [{"type": "server_error", "code": "server_error", "message": "message"}], "start_time": "2021-01-30T08:30:00.000Z", "end_time": "2021-01-30T08:30:00.000Z", "finish_time": "2021-01-30T08:30:00.000Z"}'
         responses.add(responses.GET,
                       url,
                       body=mock_response,
@@ -841,7 +1052,7 @@ def test_get_application_value_error(self):
         """
         # Set up mock
         url = preprocess_url('/v3/analytics_engines/e64c907a-e82f-46fd-addc-ccfafbd28b09/spark_applications/ff48cc19-0e7e-4627-aac6-0b4ad080397b')
-        mock_response = '{"application_details": {"application": "cos://bucket_name.my_cos/my_spark_app.py", "jars": "cos://cloud-object-storage/jars/tests.jar", "packages": "packages", "repositories": "repositories", "files": "files", "archives": "archives", "name": "spark-app", "class": "com.company.path.ClassName", "arguments": ["[arg1, arg2, arg3]"], "conf": {"mapKey": "anyValue"}, "env": {"mapKey": "anyValue"}}, "id": "2b83d31c-397b-48ad-ad76-b83347c982db", "spark_application_id": "spark_application_id", "spark_application_name": "spark_application_name", "state": "accepted", "start_time": "2021-01-30T08:30:00.000Z", "end_time": "2021-01-30T08:30:00.000Z", "finish_time": "2021-01-30T08:30:00.000Z"}'
+        mock_response = '{"application_details": {"application": "cos://bucket_name.my_cos/my_spark_app.py", "runtime": {"spark_version": "3.1"}, "jars": "cos://cloud-object-storage/jars/tests.jar", "packages": "packages", "repositories": "repositories", "files": "files", "archives": "archives", "name": "spark-app", "class": "com.company.path.ClassName", "arguments": ["[arg1, arg2, arg3]"], "conf": {"mapKey": "anyValue"}, "env": {"mapKey": "anyValue"}}, "id": "2b83d31c-397b-48ad-ad76-b83347c982db", "spark_application_id": "spark_application_id", "spark_application_name": "spark_application_name", "state": "finished", "state_details": [{"type": "server_error", "code": "server_error", "message": "message"}], "start_time": "2021-01-30T08:30:00.000Z", "end_time": "2021-01-30T08:30:00.000Z", "finish_time": "2021-01-30T08:30:00.000Z"}'
         responses.add(responses.GET,
                       url,
                       body=mock_response,
@@ -957,7 +1168,7 @@ def test_get_application_state_all_params(self):
         """
         # Set up mock
         url = preprocess_url('/v3/analytics_engines/e64c907a-e82f-46fd-addc-ccfafbd28b09/spark_applications/ff48cc19-0e7e-4627-aac6-0b4ad080397b/state')
-        mock_response = '{"id": "id", "state": "state", "start_time": "start_time", "end_time": "end_time", "finish_time": "finish_time"}'
+        mock_response = '{"id": "id", "state": "finished", "start_time": "start_time", "end_time": "end_time", "finish_time": "finish_time"}'
         responses.add(responses.GET,
                       url,
                       body=mock_response,
@@ -995,7 +1206,7 @@ def test_get_application_state_value_error(self):
         """
         # Set up mock
         url = preprocess_url('/v3/analytics_engines/e64c907a-e82f-46fd-addc-ccfafbd28b09/spark_applications/ff48cc19-0e7e-4627-aac6-0b4ad080397b/state')
-        mock_response = '{"id": "id", "state": "state", "start_time": "start_time", "end_time": "end_time", "finish_time": "finish_time"}'
+        mock_response = '{"id": "id", "state": "finished", "start_time": "start_time", "end_time": "end_time", "finish_time": "finish_time"}'
         responses.add(responses.GET,
                       url,
                       body=mock_response,
@@ -1101,6 +1312,82 @@ def test_get_current_resource_consumption_value_error_with_retries(self):
         _service.disable_retries()
         self.test_get_current_resource_consumption_value_error()

+class TestGetResourceConsumptionLimits():
+    """
+    Test Class for get_resource_consumption_limits
+    """
+
+    @responses.activate
+    def test_get_resource_consumption_limits_all_params(self):
+        """
+        get_resource_consumption_limits()
+        """
+        # Set up mock
+        url = preprocess_url('/v3/analytics_engines/e64c907a-e82f-46fd-addc-ccfafbd28b09/resource_consumption_limits')
+        mock_response = '{"max_cores": "max_cores", "max_memory": "max_memory"}'
+        responses.add(responses.GET,
+                      url,
+                      body=mock_response,
+                      content_type='application/json',
+                      status=200)
+
+        # Set up parameter values
+        instance_id = 'e64c907a-e82f-46fd-addc-ccfafbd28b09'
+
+        # Invoke method
+        response = _service.get_resource_consumption_limits(
+            instance_id,
+            headers={}
+        )
+
+        # Check for correct operation
+        assert len(responses.calls) == 1
+        assert response.status_code == 200
+
+    def test_get_resource_consumption_limits_all_params_with_retries(self):
+        # Enable retries and run test_get_resource_consumption_limits_all_params.
+        _service.enable_retries()
+        self.test_get_resource_consumption_limits_all_params()
+
+        # Disable retries and run test_get_resource_consumption_limits_all_params.
+        _service.disable_retries()
+        self.test_get_resource_consumption_limits_all_params()
+
+    @responses.activate
+    def test_get_resource_consumption_limits_value_error(self):
+        """
+        test_get_resource_consumption_limits_value_error()
+        """
+        # Set up mock
+        url = preprocess_url('/v3/analytics_engines/e64c907a-e82f-46fd-addc-ccfafbd28b09/resource_consumption_limits')
+        mock_response = '{"max_cores": "max_cores", "max_memory": "max_memory"}'
+        responses.add(responses.GET,
+                      url,
+                      body=mock_response,
+                      content_type='application/json',
+                      status=200)
+
+        # Set up parameter values
+        instance_id = 'e64c907a-e82f-46fd-addc-ccfafbd28b09'
+
+        # Pass in all but one required param and check for a ValueError
+        req_param_dict = {
+            "instance_id": instance_id,
+        }
+        for param in req_param_dict.keys():
+            req_copy = {key: val if key != param else None for (key, val) in req_param_dict.items()}
+            with pytest.raises(ValueError):
+                _service.get_resource_consumption_limits(**req_copy)

+    def test_get_resource_consumption_limits_value_error_with_retries(self):
+        # Enable retries and run test_get_resource_consumption_limits_value_error.
+        _service.enable_retries()
+        self.test_get_resource_consumption_limits_value_error()
+
+        # Disable retries and run test_get_resource_consumption_limits_value_error.
+        _service.disable_retries()
+        self.test_get_resource_consumption_limits_value_error()
+
 class TestReplaceLogForwardingConfig():
     """
     Test Class for replace_log_forwarding_config
@@ -1445,13 +1732,19 @@ def test_application_serialization(self):
         Test serialization/deserialization for Application
         """

+        # Construct dict forms of any model objects needed in order to build this model.
+
+        runtime_model = {} # Runtime
+        runtime_model['spark_version'] = '3.1'
+
         # Construct a json representation of a Application model
         application_model_json = {}
         application_model_json['id'] = 'testString'
         application_model_json['href'] = 'testString'
+        application_model_json['runtime'] = runtime_model
         application_model_json['spark_application_id'] = 'testString'
         application_model_json['spark_application_name'] = 'testString'
-        application_model_json['state'] = 'testString'
+        application_model_json['state'] = 'finished'
         application_model_json['start_time'] = 'testString'
         application_model_json['end_time'] = 'testString'
         application_model_json['finish_time'] = 'testString'
@@ -1483,12 +1776,16 @@

         # Construct dict forms of any model objects needed in order to build this model.

+        runtime_model = {} # Runtime
+        runtime_model['spark_version'] = '3.1'
+
         application_model = {} # Application
         application_model['id'] = 'testString'
         application_model['href'] = 'testString'
+        application_model['runtime'] = runtime_model
         application_model['spark_application_id'] = 'testString'
         application_model['spark_application_name'] = 'testString'
-        application_model['state'] = 'testString'
+        application_model['state'] = 'finished'
         application_model['start_time'] = 'testString'
         application_model['end_time'] = 'testString'
         application_model['finish_time'] = 'testString'
@@ -1522,9 +1819,15 @@ def test_application_details_serialization(self):
         Test serialization/deserialization for ApplicationDetails
         """

+        # Construct dict forms of any model objects needed in order to build this model.
+
+        runtime_model = {} # Runtime
+        runtime_model['spark_version'] = '3.1'
+
         # Construct a json representation of a ApplicationDetails model
         application_details_model_json = {}
         application_details_model_json['application'] = 'cos://bucket_name.my_cos/my_spark_app.py'
+        application_details_model_json['runtime'] = runtime_model
         application_details_model_json['jars'] = 'cos://cloud-object-storage/jars/tests.jar'
         application_details_model_json['packages'] = 'testString'
         application_details_model_json['repositories'] = 'testString'
@@ -1563,8 +1866,12 @@

         # Construct dict forms of any model objects needed in order to build this model.

+        runtime_model = {} # Runtime
+        runtime_model['spark_version'] = '3.1'
+
         application_details_model = {} # ApplicationDetails
         application_details_model['application'] = 'cos://bucket_name.my_cos/my_spark_app.py'
+        application_details_model['runtime'] = runtime_model
         application_details_model['jars'] = 'cos://cloud-object-storage/jars/tests.jar'
         application_details_model['packages'] = 'testString'
         application_details_model['repositories'] = 'testString'
@@ -1576,13 +1883,19 @@
         application_details_model['conf'] = {'key1': 'testString'}
         application_details_model['env'] = {'key1': 'testString'}

+        application_get_response_state_details_item_model = {} # ApplicationGetResponseStateDetailsItem
+        application_get_response_state_details_item_model['type'] = 'server_error'
+        application_get_response_state_details_item_model['code'] = 'server_error'
+        application_get_response_state_details_item_model['message'] = 'testString'
+
         # Construct a json representation of a ApplicationGetResponse model
         application_get_response_model_json = {}
         application_get_response_model_json['application_details'] = application_details_model
         application_get_response_model_json['id'] = '2b83d31c-397b-48ad-ad76-b83347c982db'
         application_get_response_model_json['spark_application_id'] = 'testString'
         application_get_response_model_json['spark_application_name'] = 'testString'
-        application_get_response_model_json['state'] = 'accepted'
+        application_get_response_model_json['state'] = 'finished'
+        application_get_response_model_json['state_details'] = [application_get_response_state_details_item_model]
         application_get_response_model_json['start_time'] = '2021-01-30T08:30:00Z'
         application_get_response_model_json['end_time'] = '2021-01-30T08:30:00Z'
         application_get_response_model_json['finish_time'] = '2021-01-30T08:30:00Z'
@@ -1602,6 +1915,37 @@
         application_get_response_model_json2 = application_get_response_model.to_dict()
         assert application_get_response_model_json2 == application_get_response_model_json

+class TestModel_ApplicationGetResponseStateDetailsItem():
+    """
+    Test Class for ApplicationGetResponseStateDetailsItem
+    """
+
+    def test_application_get_response_state_details_item_serialization(self):
+        """
+        Test serialization/deserialization for ApplicationGetResponseStateDetailsItem
+        """
+
+        # Construct a json representation of a ApplicationGetResponseStateDetailsItem model
+        application_get_response_state_details_item_model_json = {}
+        application_get_response_state_details_item_model_json['type'] = 'server_error'
+        application_get_response_state_details_item_model_json['code'] = 'server_error'
+        application_get_response_state_details_item_model_json['message'] = 'testString'
+
+        # Construct a model instance of ApplicationGetResponseStateDetailsItem by calling from_dict on the json representation
+        application_get_response_state_details_item_model = ApplicationGetResponseStateDetailsItem.from_dict(application_get_response_state_details_item_model_json)
+        assert application_get_response_state_details_item_model != False
+
+        # Construct a second model instance of ApplicationGetResponseStateDetailsItem by passing its dict form to the constructor
+        application_get_response_state_details_item_model_dict = ApplicationGetResponseStateDetailsItem.from_dict(application_get_response_state_details_item_model_json).__dict__
+        application_get_response_state_details_item_model2 = ApplicationGetResponseStateDetailsItem(**application_get_response_state_details_item_model_dict)
+
+        # Verify the model instances are equivalent
+        assert application_get_response_state_details_item_model == application_get_response_state_details_item_model2
+
+        # Convert model instance back to dict and verify no loss of data
+        application_get_response_state_details_item_model_json2 = application_get_response_state_details_item_model.to_dict()
+        assert application_get_response_state_details_item_model_json2 == application_get_response_state_details_item_model_json
+
 class TestModel_ApplicationGetStateResponse():
     """
     Test Class for ApplicationGetStateResponse
@@ -1615,7 +1959,7 @@ def test_application_get_state_response_serialization(self):

         # Construct a json representation of a ApplicationGetStateResponse model
         application_get_state_response_model_json = {}
         application_get_state_response_model_json['id'] = 'testString'
-        application_get_state_response_model_json['state'] = 'testString'
+        application_get_state_response_model_json['state'] = 'finished'
         application_get_state_response_model_json['start_time'] = 'testString'
         application_get_state_response_model_json['end_time'] = 'testString'
         application_get_state_response_model_json['finish_time'] = 'testString'
@@ -1645,9 +1989,15 @@
         Test serialization/deserialization for ApplicationRequestApplicationDetails
         """

+        # Construct dict forms of any model objects needed in order to build this model.
+
+        runtime_model = {} # Runtime
+        runtime_model['spark_version'] = '3.1'
+
         # Construct a json representation of a ApplicationRequestApplicationDetails model
         application_request_application_details_model_json = {}
         application_request_application_details_model_json['application'] = 'cos://bucket_name.my_cos/my_spark_app.py'
+        application_request_application_details_model_json['runtime'] = runtime_model
         application_request_application_details_model_json['jars'] = 'cos://cloud-object-storage/jars/tests.jar'
         application_request_application_details_model_json['packages'] = 'testString'
         application_request_application_details_model_json['repositories'] = 'testString'
@@ -1687,7 +2037,7 @@ def test_application_response_serialization(self):

         # Construct a json representation of a ApplicationResponse model
         application_response_model_json = {}
         application_response_model_json['id'] = 'testString'
-        application_response_model_json['state'] = 'accepted'
+        application_response_model_json['state'] = 'finished'

         # Construct a model instance of ApplicationResponse by calling from_dict on the json representation
         application_response_model = ApplicationResponse.from_dict(application_response_model_json)
@@ -1746,8 +2096,8 @@ def test_instance_serialization(self):

         # Construct dict forms of any model objects needed in order to build this model.

-        instance_default_runtime_model = {} # InstanceDefaultRuntime
-        instance_default_runtime_model['spark_version'] = 'testString'
+        runtime_model = {} # Runtime
+        runtime_model['spark_version'] = '3.1'

         instance_home_model = {} # InstanceHome
         instance_home_model['id'] = 'testString'
@@ -1766,9 +2116,9 @@
         instance_model_json = {}
         instance_model_json['id'] = 'testString'
         instance_model_json['href'] = 'testString'
-        instance_model_json['state'] = 'created'
+        instance_model_json['state'] = 'creation_accepted'
         instance_model_json['state_change_time'] = '2021-01-30T08:30:00Z'
-        instance_model_json['default_runtime'] = instance_default_runtime_model
+        instance_model_json['default_runtime'] = runtime_model
         instance_model_json['instance_home'] = instance_home_model
         instance_model_json['default_config'] = instance_default_config_model

@@ -1816,35 +2166,6 @@ def test_instance_default_config_serialization(self):
         instance_default_config_model_json2 = instance_default_config_model.to_dict()
         assert instance_default_config_model_json2 == instance_default_config_model_json

-class TestModel_InstanceDefaultRuntime():
-    """
-    Test Class for InstanceDefaultRuntime
-    """
-
-    def test_instance_default_runtime_serialization(self):
-        """
-        Test serialization/deserialization for InstanceDefaultRuntime
-        """
-
-        # Construct a json representation of a InstanceDefaultRuntime model
-        instance_default_runtime_model_json = {}
-        instance_default_runtime_model_json['spark_version'] = 'testString'
-
-        # Construct a model instance of InstanceDefaultRuntime by calling from_dict on the json representation
-        instance_default_runtime_model = InstanceDefaultRuntime.from_dict(instance_default_runtime_model_json)
-        assert instance_default_runtime_model != False
-
-        # Construct a model instance of InstanceDefaultRuntime by calling from_dict on the json representation
-        instance_default_runtime_model_dict = InstanceDefaultRuntime.from_dict(instance_default_runtime_model_json).__dict__
-        instance_default_runtime_model2 = InstanceDefaultRuntime(**instance_default_runtime_model_dict)
-
-        # Verify the model instances are equivalent
-        assert instance_default_runtime_model == instance_default_runtime_model2
-
-        # Convert model instance back to dict and verify no loss of data
-        instance_default_runtime_model_json2 = instance_default_runtime_model.to_dict()
-        assert instance_default_runtime_model_json2 == instance_default_runtime_model_json
-
 class TestModel_InstanceGetStateResponse():
     """
     Test Class for InstanceGetStateResponse
@@ -1858,7 +2179,7 @@ def test_instance_get_state_response_serialization(self):

         # Construct a json representation of a InstanceGetStateResponse model
         instance_get_state_response_model_json = {}
         instance_get_state_response_model_json['id'] = 'testString'
-        instance_get_state_response_model_json['state'] = 'created'
+        instance_get_state_response_model_json['state'] = 'creation_accepted'

         # Construct a model instance of InstanceGetStateResponse by calling from_dict on the json representation
         instance_get_state_response_model = InstanceGetStateResponse.from_dict(instance_get_state_response_model_json)
@@ -2077,6 +2398,65 @@ def test_logging_configuration_response_log_server_serialization(self):
         logging_configuration_response_log_server_model_json2 = logging_configuration_response_log_server_model.to_dict()
         assert logging_configuration_response_log_server_model_json2 == logging_configuration_response_log_server_model_json

+class TestModel_ResourceConsumptionLimitsResponse():
+    """
+    Test Class for ResourceConsumptionLimitsResponse
+    """
+
+    def test_resource_consumption_limits_response_serialization(self):
+        """
+        Test serialization/deserialization for ResourceConsumptionLimitsResponse
+        """
+
+        # Construct a json representation of a ResourceConsumptionLimitsResponse model
+        resource_consumption_limits_response_model_json = {}
+        resource_consumption_limits_response_model_json['max_cores'] = 'testString'
+        resource_consumption_limits_response_model_json['max_memory'] = 'testString'
+
+        # Construct a model instance of ResourceConsumptionLimitsResponse by calling from_dict on the json representation
+        resource_consumption_limits_response_model = ResourceConsumptionLimitsResponse.from_dict(resource_consumption_limits_response_model_json)
+        assert resource_consumption_limits_response_model != False
+
+        # Construct a second model instance of ResourceConsumptionLimitsResponse by passing its dict form to the constructor
+        resource_consumption_limits_response_model_dict = ResourceConsumptionLimitsResponse.from_dict(resource_consumption_limits_response_model_json).__dict__
+        resource_consumption_limits_response_model2 = ResourceConsumptionLimitsResponse(**resource_consumption_limits_response_model_dict)
+
+        # Verify the model instances are equivalent
+        assert resource_consumption_limits_response_model == resource_consumption_limits_response_model2
+
+        # Convert model instance back to dict and verify no loss of data
+        resource_consumption_limits_response_model_json2 = resource_consumption_limits_response_model.to_dict()
+        assert resource_consumption_limits_response_model_json2 == resource_consumption_limits_response_model_json
+
+class TestModel_Runtime():
+    """
+    Test Class for Runtime
+    """
+
+    def test_runtime_serialization(self):
+        """
+        Test serialization/deserialization for Runtime
+        """
+
+        # Construct a json representation of a Runtime model
+        runtime_model_json = {}
+        runtime_model_json['spark_version'] = '3.1'
+
+        # Construct a model instance of Runtime by calling from_dict on the json representation
+        runtime_model = Runtime.from_dict(runtime_model_json)
+        assert runtime_model != False
+
+        # Construct a second model instance of Runtime by passing its dict form to the constructor
+        runtime_model_dict = Runtime.from_dict(runtime_model_json).__dict__
+        runtime_model2 = Runtime(**runtime_model_dict)
+
+        # Verify the model instances are equivalent
+        assert runtime_model == runtime_model2
+
+        # Convert model instance back to dict and verify no loss of data
+        runtime_model_json2 = runtime_model.to_dict()
+        assert runtime_model_json2 == runtime_model_json
+
 # endregion

##############################################################################
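Taken together, the unit tests above pin down the public surface introduced by this patch. For orientation only (not part of the patch), a minimal usage sketch follows; it assumes `service` is an already-authenticated `IbmAnalyticsEngineApiV3` client and reuses the placeholder instance ID from the tests:

    # Minimal sketch, not part of the patch. Assumes `service` is an
    # authenticated IbmAnalyticsEngineApiV3 client.
    instance_id = 'e64c907a-e82f-46fd-addc-ccfafbd28b09'  # placeholder ID from the tests

    # New in this patch: list_applications accepts a state filter, which the
    # SDK serializes as a comma-separated `state` query parameter.
    finished_apps = service.list_applications(instance_id, state=['finished']).get_result()

    # New in this patch: read the instance's resource consumption limits.
    limits = service.get_resource_consumption_limits(instance_id).get_result()
    print(limits['max_cores'], limits['max_memory'])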