diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 7551b15cf1..e8947045ca 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -19,21 +19,13 @@ repos:
- id: check-yaml
- id: end-of-file-fixer
- id: trailing-whitespace
- - repo: 'https://github.com/asottile/pyupgrade'
- rev: v3.15.0
- hooks:
- - id: pyupgrade
- args:
- - '--py38-plus'
- repo: 'https://github.com/PyCQA/isort'
rev: 5.12.0
hooks:
- id: isort
- - repo: 'https://github.com/psf/black'
- rev: 23.11.0
- hooks:
- - id: black
- - repo: 'https://github.com/pycqa/flake8'
- rev: 6.1.0
+ - repo: https://github.com/astral-sh/ruff-pre-commit
+ rev: v0.4.8
hooks:
- - id: flake8
+ - id: ruff
+ args: [ --fix ]
+ - id: ruff-format
diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst
index c4de13054a..718b29489a 100644
--- a/CONTRIBUTING.rst
+++ b/CONTRIBUTING.rst
@@ -72,10 +72,11 @@ Reporting An Issue/Feature
Codestyle
---------
-This project uses flake8 to enforce codstyle requirements. We've codified this
-process using a tool called `pre-commit `__. pre-commit
-allows us to specify a config file with all tools required for code linting,
-and surfaces either a git commit hook, or single command, for enforcing these.
+This project uses `ruff <https://github.com/astral-sh/ruff>`__ to enforce
+codestyle requirements. We've codified this process using a tool called
+`pre-commit <https://pre-commit.com/>`__. pre-commit allows us to specify a
+config file with all tools required for code linting, and surfaces either a
+git commit hook, or single command, for enforcing these.
To validate your PR prior to publishing, you can use the following
`installation guide `__ to setup pre-commit.
@@ -88,11 +89,7 @@ to automatically perform the codestyle validation:
$ pre-commit run
This will automatically perform simple updates (such as white space clean up)
-and provide a list of any failing flake8 checks. After these are addressed,
+and provide a list of any failing checks. After these are addressed,
you can commit the changes prior to publishing the PR.
These checks are also included in our CI setup under the "Lint" workflow which
will provide output on Github for anything missed locally.
-
-See the `flake8` section of the
-`setup.cfg `__ for the
-currently enforced rules.
diff --git a/botocore/args.py b/botocore/args.py
index dbbcbe8a99..758a3c3c92 100644
--- a/botocore/args.py
+++ b/botocore/args.py
@@ -16,6 +16,7 @@
considered internal, and *not* a public API.
"""
+
import copy
import logging
import socket
@@ -460,7 +461,7 @@ def _get_sts_regional_endpoints_config(self):
def _set_global_sts_endpoint(self, endpoint_config, is_secure):
scheme = 'https' if is_secure else 'http'
- endpoint_config['endpoint_url'] = '%s://sts.amazonaws.com' % scheme
+ endpoint_config['endpoint_url'] = f'{scheme}://sts.amazonaws.com'
endpoint_config['signing_region'] = 'us-east-1'
def _resolve_endpoint(
diff --git a/botocore/auth.py b/botocore/auth.py
index 8389c1579c..6b296cfaaa 100644
--- a/botocore/auth.py
+++ b/botocore/auth.py
@@ -432,12 +432,12 @@ def add_auth(self, request):
self._inject_signature_to_request(request, signature)
def _inject_signature_to_request(self, request, signature):
- auth_str = ['AWS4-HMAC-SHA256 Credential=%s' % self.scope(request)]
+ auth_str = [f'AWS4-HMAC-SHA256 Credential={self.scope(request)}']
headers_to_sign = self.headers_to_sign(request)
auth_str.append(
f"SignedHeaders={self.signed_headers(headers_to_sign)}"
)
- auth_str.append('Signature=%s' % signature)
+ auth_str.append(f'Signature={signature}')
request.headers['Authorization'] = ', '.join(auth_str)
return request
@@ -685,7 +685,7 @@ def _inject_signature_to_request(self, request, signature):
# Rather than calculating an "Authorization" header, for the query
# param quth, we just append an 'X-Amz-Signature' param to the end
# of the query string.
- request.url += '&X-Amz-Signature=%s' % signature
+ request.url += f'&X-Amz-Signature={signature}'
def _normalize_url_path(self, path):
# For S3, we do not normalize the path.
@@ -777,7 +777,7 @@ def _inject_signature_to_request(self, request, signature):
# Rather than calculating an "Authorization" header, for the query
# param quth, we just append an 'X-Amz-Signature' param to the end
# of the query string.
- request.url += '&X-Amz-Signature=%s' % signature
+ request.url += f'&X-Amz-Signature={signature}'
class S3SigV4QueryAuth(SigV4QueryAuth):
@@ -990,7 +990,7 @@ def get_signature(
string_to_sign = self.canonical_string(
method, split, headers, auth_path=auth_path
)
- logger.debug('StringToSign:\n%s', string_to_sign)
+ logger.debug(f'StringToSign:\n{string_to_sign}')
return self.sign_string(string_to_sign)
def add_auth(self, request):
@@ -998,7 +998,7 @@ def add_auth(self, request):
raise NoCredentialsError
logger.debug("Calculating signature using hmacv1 auth.")
split = urlsplit(request.url)
- logger.debug('HTTP request method: %s', request.method)
+ logger.debug(f'HTTP request method: {request.method}')
signature = self.get_signature(
request.method, split, request.headers, auth_path=request.auth_path
)
diff --git a/botocore/awsrequest.py b/botocore/awsrequest.py
index 9123e65c9d..49b4eee0d9 100644
--- a/botocore/awsrequest.py
+++ b/botocore/awsrequest.py
@@ -280,9 +280,9 @@ def prepare_request_dict(
percent_encode_sequence = botocore.utils.percent_encode_sequence
encoded_query_string = percent_encode_sequence(r['query_string'])
if '?' not in url:
- url += '?%s' % encoded_query_string
+ url += f'?{encoded_query_string}'
else:
- url += '&%s' % encoded_query_string
+ url += f'&{encoded_query_string}'
r['url'] = url
r['context'] = context
if context is None:
diff --git a/botocore/client.py b/botocore/client.py
index 1e36232834..e57d1ded31 100644
--- a/botocore/client.py
+++ b/botocore/client.py
@@ -199,7 +199,7 @@ def _create_client_class(self, service_name, service_model):
bases = [BaseClient]
service_id = service_model.service_id.hyphenize()
self._event_emitter.emit(
- 'creating-client-class.%s' % service_id,
+ f'creating-client-class.{service_id}',
class_attributes=class_attributes,
base_classes=bases,
)
@@ -223,10 +223,10 @@ def _normalize_fips_region(self, region_name, client_config):
else:
client_config = config_use_fips_endpoint
logger.warning(
- 'transforming region from %s to %s and setting '
+ f'transforming region from {region_name} to '
+ f'{normalized_region_name} and setting '
'use_fips_endpoint to true. client should not '
'be configured with a fips psuedo region.'
- % (region_name, normalized_region_name)
)
region_name = normalized_region_name
return region_name, client_config
@@ -289,7 +289,7 @@ def _register_legacy_retries(self, client):
handler = self._retry_handler_factory.create_retry_handler(
retry_config, endpoint_prefix
)
- unique_id = 'retry-config-%s' % service_event_name
+ unique_id = f'retry-config-{service_event_name}'
client.meta.events.register(
f"needs-retry.{service_event_name}", handler, unique_id=unique_id
)
@@ -573,7 +573,7 @@ def _api_call(self, *args, **kwargs):
method_name=operation_name,
event_emitter=self._event_emitter,
method_description=operation_model.documentation,
- example_prefix='response = client.%s' % py_operation_name,
+ example_prefix=f'response = client.{py_operation_name}',
include_signature=False,
)
_api_call.__doc__ = docstring
@@ -982,9 +982,7 @@ def _make_api_call(self, operation_name, api_params):
service_id = self._service_model.service_id.hyphenize()
handler, event_response = self.meta.events.emit_until_response(
- 'before-call.{service_id}.{operation_name}'.format(
- service_id=service_id, operation_name=operation_name
- ),
+ f'before-call.{service_id}.{operation_name}',
model=operation_model,
params=request_dict,
request_signer=self._request_signer,
@@ -1003,9 +1001,7 @@ def _make_api_call(self, operation_name, api_params):
)
self.meta.events.emit(
- 'after-call.{service_id}.{operation_name}'.format(
- service_id=service_id, operation_name=operation_name
- ),
+ f'after-call.{service_id}.{operation_name}',
http_response=http,
parsed=parsed_response,
model=operation_model,
@@ -1027,10 +1023,7 @@ def _make_request(self, operation_model, request_dict, request_context):
return self._endpoint.make_request(operation_model, request_dict)
except Exception as e:
self.meta.events.emit(
- 'after-call-error.{service_id}.{operation_name}'.format(
- service_id=self._service_model.service_id.hyphenize(),
- operation_name=operation_model.name,
- ),
+ f'after-call-error.{self._service_model.service_id.hyphenize()}.{operation_model.name}',
exception=e,
context=request_context,
)
@@ -1259,13 +1252,13 @@ def get_waiter(self, waiter_name):
"""
config = self._get_waiter_config()
if not config:
- raise ValueError("Waiter does not exist: %s" % waiter_name)
+ raise ValueError(f"Waiter does not exist: {waiter_name}")
model = waiter.WaiterModel(config)
mapping = {}
for name in model.waiter_names:
mapping[xform_name(name)] = name
if waiter_name not in mapping:
- raise ValueError("Waiter does not exist: %s" % waiter_name)
+ raise ValueError(f"Waiter does not exist: {waiter_name}")
return waiter.create_waiter_with_client(
mapping[waiter_name], model, self
diff --git a/botocore/configprovider.py b/botocore/configprovider.py
index 3b68fca57f..5ed2dc63ce 100644
--- a/botocore/configprovider.py
+++ b/botocore/configprovider.py
@@ -13,6 +13,7 @@
"""This module contains the interface for controlling how configuration
is loaded.
"""
+
import copy
import logging
import os
@@ -697,7 +698,7 @@ def _convert_type(self, value):
return value
def __repr__(self):
- return '[%s]' % ', '.join([str(p) for p in self._providers])
+ return '[{}]'.format(', '.join([str(p) for p in self._providers]))
class InstanceVarProvider(BaseProvider):
@@ -728,10 +729,7 @@ def provide(self):
return value
def __repr__(self):
- return 'InstanceVarProvider(instance_var={}, session={})'.format(
- self._instance_var,
- self._session,
- )
+ return f'InstanceVarProvider(instance_var={self._instance_var}, session={self._session})'
class ScopedConfigProvider(BaseProvider):
@@ -767,10 +765,7 @@ def provide(self):
return scoped_config.get(self._config_var_name)
def __repr__(self):
- return 'ScopedConfigProvider(config_var_name={}, session={})'.format(
- self._config_var_name,
- self._session,
- )
+ return f'ScopedConfigProvider(config_var_name={self._config_var_name}, session={self._session})'
class EnvironmentProvider(BaseProvider):
@@ -878,7 +873,7 @@ def provide(self):
return self._value
def __repr__(self):
- return 'ConstantProvider(value=%s)' % self._value
+ return f'ConstantProvider(value={self._value})'
class ConfiguredEndpointProvider(BaseProvider):
diff --git a/botocore/credentials.py b/botocore/credentials.py
index e48f0dec7c..dd7e718255 100644
--- a/botocore/credentials.py
+++ b/botocore/credentials.py
@@ -753,7 +753,7 @@ def __init__(
super().__init__(cache, expiry_window_seconds)
def _generate_assume_role_name(self):
- self._role_session_name = 'botocore-session-%s' % (int(time.time()))
+ self._role_session_name = f'botocore-session-{int(time.time())}'
self._assume_kwargs['RoleSessionName'] = self._role_session_name
self._using_default_session_name = True
@@ -848,7 +848,7 @@ def _assume_role_kwargs(self):
mfa_serial = assume_role_kwargs.get('SerialNumber')
if mfa_serial is not None:
- prompt = 'Enter MFA code for %s: ' % mfa_serial
+ prompt = f'Enter MFA code for {mfa_serial}: '
token_code = self._mfa_prompter(prompt)
assume_role_kwargs['TokenCode'] = token_code
@@ -1570,8 +1570,8 @@ def _get_role_config(self, profile_name):
if credential_source is not None and source_profile is not None:
raise InvalidConfigError(
error_msg=(
- 'The profile "%s" contains both source_profile and '
- 'credential_source.' % profile_name
+ f'The profile "{profile_name}" contains both '
+ 'source_profile and credential_source.'
)
)
elif credential_source is None and source_profile is None:
@@ -1720,7 +1720,7 @@ def _resolve_credentials_from_source(
provider=credential_source,
error_msg=(
'No credentials found in credential_source referenced '
- 'in profile %s' % profile_name
+ f'in profile {profile_name}'
),
)
return credentials
@@ -2242,8 +2242,8 @@ def _load_sso_config(self):
missing = ', '.join(missing_config_vars)
raise InvalidConfigError(
error_msg=(
- 'The profile "%s" is configured to use SSO but is missing '
- 'required configuration: %s' % (profile_name, missing)
+ f'The profile "{profile_name}" is configured to use SSO '
+ f'but is missing required configuration: {missing}'
)
)
return config
diff --git a/botocore/discovery.py b/botocore/discovery.py
index 9c68001dea..95b51b81ba 100644
--- a/botocore/discovery.py
+++ b/botocore/discovery.py
@@ -185,8 +185,7 @@ def describe_endpoint(self, **kwargs):
if not self._always_discover and not discovery_required:
# Discovery set to only run on required operations
logger.debug(
- 'Optional discovery disabled. Skipping discovery for Operation: %s'
- % operation
+ f'Optional discovery disabled. Skipping discovery for Operation: {operation}'
)
return None
@@ -228,12 +227,12 @@ def __init__(self, manager):
def register(self, events, service_id):
events.register(
- 'before-parameter-build.%s' % service_id, self.gather_identifiers
+ f'before-parameter-build.{service_id}', self.gather_identifiers
)
events.register_first(
- 'request-created.%s' % service_id, self.discover_endpoint
+ f'request-created.{service_id}', self.discover_endpoint
)
- events.register('needs-retry.%s' % service_id, self.handle_retries)
+ events.register(f'needs-retry.{service_id}', self.handle_retries)
def gather_identifiers(self, params, model, context, **kwargs):
endpoint_discovery = model.endpoint_discovery
diff --git a/botocore/docs/bcdoc/docstringparser.py b/botocore/docs/bcdoc/docstringparser.py
index 16e74e7d20..ebe16ba5e5 100644
--- a/botocore/docs/bcdoc/docstringparser.py
+++ b/botocore/docs/bcdoc/docstringparser.py
@@ -83,9 +83,9 @@ def add_tag(self, tag, attrs=None, is_start=True):
def _doc_has_handler(self, tag, is_start):
if is_start:
- handler_name = 'start_%s' % tag
+ handler_name = f'start_{tag}'
else:
- handler_name = 'end_%s' % tag
+ handler_name = f'end_{tag}'
return hasattr(self.doc.style, handler_name)
@@ -226,12 +226,12 @@ def collapse_whitespace(self):
child.collapse_whitespace()
def _write_start(self, doc):
- handler_name = 'start_%s' % self.tag
+ handler_name = f'start_{self.tag}'
if hasattr(doc.style, handler_name):
getattr(doc.style, handler_name)(self.attrs)
def _write_end(self, doc, next_child):
- handler_name = 'end_%s' % self.tag
+ handler_name = f'end_{self.tag}'
if hasattr(doc.style, handler_name):
if handler_name == 'end_a':
# We use lookahead to determine if a space is needed after a link node
@@ -248,7 +248,7 @@ class DataNode(Node):
def __init__(self, data, parent=None):
super().__init__(parent)
if not isinstance(data, str):
- raise ValueError("Expecting string type, %s given." % type(data))
+ raise ValueError(f"Expecting string type, {type(data)} given.")
self._leading_whitespace = ''
self._trailing_whitespace = ''
self._stripped_data = ''
diff --git a/botocore/docs/bcdoc/style.py b/botocore/docs/bcdoc/style.py
index f2a165a932..205d238d7a 100644
--- a/botocore/docs/bcdoc/style.py
+++ b/botocore/docs/bcdoc/style.py
@@ -34,7 +34,7 @@ def indentation(self, value):
self._indent = value
def new_paragraph(self):
- return '\n%s' % self.spaces()
+ return f'\n{self.spaces()}'
def indent(self):
self._indent += 1
@@ -83,10 +83,10 @@ def __init__(self, doc, indent_width=2):
self.list_depth = 0
def new_paragraph(self):
- self.doc.write('\n\n%s' % self.spaces())
+ self.doc.write(f'\n\n{self.spaces()}')
def new_line(self):
- self.doc.write('\n%s' % self.spaces())
+ self.doc.write(f'\n{self.spaces()}')
def _start_inline(self, markup):
# Insert space between any directly adjacent bold and italic inlines to
@@ -165,11 +165,11 @@ def italics(self, s):
def start_p(self, attrs=None):
if self.do_p:
- self.doc.write('\n\n%s' % self.spaces())
+ self.doc.write(f'\n\n{self.spaces()}')
def end_p(self):
if self.do_p:
- self.doc.write('\n\n%s' % self.spaces())
+ self.doc.write(f'\n\n{self.spaces()}')
def start_code(self, attrs=None):
self.doc.do_translation = True
@@ -268,14 +268,14 @@ def end_a(self, next_child=None):
if ':' in last_write:
last_write = last_write.replace(':', r'\:')
self.doc.push_write(last_write)
- self.doc.push_write(' <%s>`__' % self.a_href)
+ self.doc.push_write(f' <{self.a_href}>`__')
elif last_write == '`':
# Look at start_a(). It will do a self.doc.write('`')
# which is the start of the link title. If that is the
# case then there was no link text. We should just
# use an inline link. The syntax of this is
# ``_
- self.doc.push_write('`<%s>`__' % self.a_href)
+ self.doc.push_write(f'`<{self.a_href}>`__')
else:
self.doc.push_write(self.a_href)
self.doc.hrefs[self.a_href] = self.a_href
@@ -375,9 +375,9 @@ def tocitem(self, item, file_name=None):
self.li(item)
else:
if file_name:
- self.doc.writeln(' %s' % file_name)
+ self.doc.writeln(f' {file_name}')
else:
- self.doc.writeln(' %s' % item)
+ self.doc.writeln(f' {item}')
def hidden_toctree(self):
if self.doc.target == 'html':
@@ -394,11 +394,11 @@ def table_of_contents(self, title=None, depth=None):
if title is not None:
self.doc.writeln(title)
if depth is not None:
- self.doc.writeln(' :depth: %s' % depth)
+ self.doc.writeln(f' :depth: {depth}')
def start_sphinx_py_class(self, class_name):
self.new_paragraph()
- self.doc.write('.. py:class:: %s' % class_name)
+ self.doc.write(f'.. py:class:: {class_name}')
self.indent()
self.new_paragraph()
@@ -408,9 +408,9 @@ def end_sphinx_py_class(self):
def start_sphinx_py_method(self, method_name, parameters=None):
self.new_paragraph()
- content = '.. py:method:: %s' % method_name
+ content = f'.. py:method:: {method_name}'
if parameters is not None:
- content += '(%s)' % parameters
+ content += f'({parameters})'
self.doc.write(content)
self.indent()
self.new_paragraph()
@@ -421,7 +421,7 @@ def end_sphinx_py_method(self):
def start_sphinx_py_attr(self, attr_name):
self.new_paragraph()
- self.doc.write('.. py:attribute:: %s' % attr_name)
+ self.doc.write(f'.. py:attribute:: {attr_name}')
self.indent()
self.new_paragraph()
diff --git a/botocore/docs/client.py b/botocore/docs/client.py
index bc9b2658c9..41e37426ec 100644
--- a/botocore/docs/client.py
+++ b/botocore/docs/client.py
@@ -120,9 +120,7 @@ def _add_client_creation_example(self, section):
section.style.start_codeblock()
section.style.new_line()
section.write(
- 'client = session.create_client(\'{service}\')'.format(
- service=self._service_name
- )
+ f'client = session.create_client(\'{self._service_name}\')'
)
section.style.end_codeblock()
@@ -177,14 +175,14 @@ def _add_method_exceptions_list(self, section, operation_model):
class_name = (
f'{self._client_class_name}.Client.exceptions.{error.name}'
)
- error_section.style.li(':py:class:`%s`' % class_name)
+ error_section.style.li(f':py:class:`{class_name}`')
def _add_model_driven_method(self, section, method_name):
service_model = self._client.meta.service_model
operation_name = self._client.meta.method_to_api_mapping[method_name]
operation_model = service_model.operation_model(operation_name)
- example_prefix = 'response = client.%s' % method_name
+ example_prefix = f'response = client.{method_name}'
full_method_name = (
f"{section.context.get('qualifier', '')}{method_name}"
)
@@ -344,7 +342,7 @@ def _add_exception_catch_example(self, section, shape):
section.write('...')
section.style.dedent()
section.style.new_line()
- section.write('except client.exceptions.%s as e:' % shape.name)
+ section.write(f'except client.exceptions.{shape.name} as e:')
section.style.indent()
section.style.new_line()
section.write('print(e.response)')
diff --git a/botocore/docs/example.py b/botocore/docs/example.py
index 9f831bcde1..cb43db5509 100644
--- a/botocore/docs/example.py
+++ b/botocore/docs/example.py
@@ -68,7 +68,7 @@ def document_shape_type_string(
):
if 'enum' in shape.metadata:
for i, enum in enumerate(shape.metadata['enum']):
- section.write('\'%s\'' % enum)
+ section.write(f'\'{enum}\'')
if i < len(shape.metadata['enum']) - 1:
section.write('|')
else:
@@ -107,7 +107,7 @@ def document_shape_type_structure(
if exclude and param in exclude:
continue
param_section = section.add_new_section(param)
- param_section.write('\'%s\': ' % param)
+ param_section.write(f'\'{param}\': ')
param_shape = input_members[param]
param_value_section = param_section.add_new_section(
'member-value', context={'shape': param_shape.name}
diff --git a/botocore/docs/paginator.py b/botocore/docs/paginator.py
index 1ac4dd4848..2c9b30034f 100644
--- a/botocore/docs/paginator.py
+++ b/botocore/docs/paginator.py
@@ -223,9 +223,7 @@ def document_paginate_method(
paginate_description = (
'Creates an iterator that will paginate through responses '
- 'from :py:meth:`{}.Client.{}`.'.format(
- get_service_module_name(service_model), xform_name(paginator_name)
- )
+ f'from :py:meth:`{get_service_module_name(service_model)}.Client.{xform_name(paginator_name)}`.'
)
document_model_driven_method(
diff --git a/botocore/docs/params.py b/botocore/docs/params.py
index cddaf12fc3..74747ec27e 100644
--- a/botocore/docs/params.py
+++ b/botocore/docs/params.py
@@ -163,7 +163,7 @@ def _add_member_documentation(self, section, shape, name=None, **kwargs):
name_section = section.add_new_section('param-name')
name_section.write('- ')
if name is not None:
- name_section.style.bold('%s' % name)
+ name_section.style.bold(f'{name}')
name_section.write(' ')
type_section = section.add_new_section('param-type')
self._document_non_top_level_param_type(type_section, shape)
@@ -186,7 +186,7 @@ def _add_member_documentation(self, section, shape, name=None, **kwargs):
' as follows'
)
tagged_union_members_str = ', '.join(
- ['``%s``' % key for key in shape.members.keys()]
+ [f'``{key}``' for key in shape.members.keys()]
)
unknown_code_example = (
'\'SDK_UNKNOWN_MEMBER\': '
@@ -255,13 +255,13 @@ def _add_member_documentation(
end_type_section = type_section.add_new_section('end-param-type')
end_type_section.style.new_line()
name_section = section.add_new_section('param-name')
- name_section.write(':param %s: ' % name)
+ name_section.write(f':param {name}: ')
else:
name_section = section.add_new_section('param-name')
name_section.write('- ')
if name is not None:
- name_section.style.bold('%s' % name)
+ name_section.style.bold(f'{name}')
name_section.write(' ')
type_section = section.add_new_section('param-type')
self._document_non_top_level_param_type(type_section, shape)
@@ -286,7 +286,7 @@ def _add_member_documentation(
' following top level keys can be set: %s. '
)
tagged_union_members_str = ', '.join(
- ['``%s``' % key for key in shape.members.keys()]
+ [f'``{key}``' for key in shape.members.keys()]
)
tagged_union_docs.write(note % (tagged_union_members_str))
documentation_section.include_doc_string(shape.documentation)
diff --git a/botocore/docs/sharedexample.py b/botocore/docs/sharedexample.py
index 58cdfa594c..29d3df5fc9 100644
--- a/botocore/docs/sharedexample.py
+++ b/botocore/docs/sharedexample.py
@@ -104,14 +104,14 @@ def _document_dict(
dict_section = section.add_new_section('dict-value')
self._start_nested_value(dict_section, '{')
for key, val in value.items():
- path.append('.%s' % key)
+ path.append(f'.{key}')
item_section = dict_section.add_new_section(key)
item_section.style.new_line()
item_comment = self._get_comment(path, comments)
if item_comment:
item_section.write(item_comment)
item_section.style.new_line()
- item_section.write("'%s': " % key)
+ item_section.write(f"'{key}': ")
# Shape could be none if there is no output besides ResponseMetadata
item_shape = None
@@ -131,7 +131,7 @@ def _document_params(self, section, value, comments, path, shape):
param_section = section.add_new_section('param-values')
self._start_nested_value(param_section, '(')
for key, val in value.items():
- path.append('.%s' % key)
+ path.append(f'.{key}')
item_section = param_section.add_new_section(key)
item_section.style.new_line()
item_comment = self._get_comment(path, comments)
@@ -156,7 +156,7 @@ def _document_list(self, section, value, comments, path, shape):
for index, val in enumerate(value):
item_section = list_section.add_new_section(index)
item_section.style.new_line()
- path.append('[%s]' % index)
+ path.append(f'[{index}]')
item_comment = self._get_comment(path, comments)
if item_comment:
item_section.write(item_comment)
@@ -173,14 +173,14 @@ def _document_str(self, section, value, path):
section.write(f"'{safe_value}',")
def _document_number(self, section, value, path):
- section.write("%s," % str(value))
+ section.write(f"{str(value)},")
def _document_datetime(self, section, value, path):
datetime_tuple = parse_timestamp(value).timetuple()
datetime_str = str(datetime_tuple[0])
for i in range(1, len(datetime_tuple)):
datetime_str += ", " + str(datetime_tuple[i])
- section.write("datetime(%s)," % datetime_str)
+ section.write(f"datetime({datetime_str}),")
def _get_comment(self, path, comments):
key = re.sub(r'^\.', '', ''.join(path))
diff --git a/botocore/docs/utils.py b/botocore/docs/utils.py
index eb6cae145c..161e260229 100644
--- a/botocore/docs/utils.py
+++ b/botocore/docs/utils.py
@@ -214,8 +214,11 @@ def append_documentation(self, event_name, section, **kwargs):
}
# Combines all CONTROLS keys into a big or regular expression
_ESCAPE_CONTROLS_RE = re.compile('|'.join(map(re.escape, _CONTROLS)))
+
+
# Based on the match get the appropriate replacement from CONTROLS
-_CONTROLS_MATCH_HANDLER = lambda match: _CONTROLS[match.group(0)]
+def _CONTROLS_MATCH_HANDLER(match):
+ return _CONTROLS[match.group(0)]
def escape_controls(value):
diff --git a/botocore/docs/waiter.py b/botocore/docs/waiter.py
index c5226d460e..0108099565 100644
--- a/botocore/docs/waiter.py
+++ b/botocore/docs/waiter.py
@@ -72,7 +72,7 @@ def _add_single_waiter(self, section, waiter_name):
waiter_section.style.start_codeblock()
waiter_section.style.new_line()
waiter_section.write(
- 'waiter = client.get_waiter(\'%s\')' % xform_name(waiter_name)
+ f'waiter = client.get_waiter(\'{xform_name(waiter_name)}\')'
)
waiter_section.style.end_codeblock()
@@ -135,7 +135,7 @@ def document_wait_method(
type_name='integer',
documentation=(
'The amount of time in seconds to wait between '
-                    'attempts. Default: {}'.format(waiter_model.delay)
+                    f'attempts. Default: {waiter_model.delay}'
),
)
@@ -144,7 +144,7 @@ def document_wait_method(
type_name='integer',
documentation=(
'The maximum number of attempts to be made. '
-                    'Default: {}'.format(waiter_model.max_attempts)
+ f'Default: {waiter_model.max_attempts}'
),
)
@@ -161,14 +161,10 @@ def document_wait_method(
]
wait_description = (
- 'Polls :py:meth:`{}.Client.{}` every {} '
+ f'Polls :py:meth:`{get_service_module_name(service_model)}.Client.'
+ f'{xform_name(waiter_model.operation)}` every {waiter_model.delay} '
'seconds until a successful state is reached. An error is '
- 'returned after {} failed checks.'.format(
- get_service_module_name(service_model),
- xform_name(waiter_model.operation),
- waiter_model.delay,
- waiter_model.max_attempts,
- )
+ f'returned after {waiter_model.max_attempts} failed checks.'
)
document_model_driven_method(
diff --git a/botocore/endpoint.py b/botocore/endpoint.py
index adc622c25a..59f3d86c8e 100644
--- a/botocore/endpoint.py
+++ b/botocore/endpoint.py
@@ -128,9 +128,7 @@ def create_request(self, params, operation_model=None):
]
)
service_id = operation_model.service_model.service_id.hyphenize()
- event_name = 'request-created.{service_id}.{op_name}'.format(
- service_id=service_id, op_name=operation_model.name
- )
+ event_name = f'request-created.{service_id}.{operation_model.name}'
self._event_emitter.emit(
event_name,
request=request,
@@ -224,9 +222,9 @@ def _send_request(self, request_dict, operation_model):
):
# We want to share num retries, not num attempts.
total_retries = attempts - 1
- success_response[1]['ResponseMetadata'][
- 'RetryAttempts'
- ] = total_retries
+ success_response[1]['ResponseMetadata']['RetryAttempts'] = (
+ total_retries
+ )
if exception is not None:
raise exception
else:
@@ -298,9 +296,9 @@ def _do_get_response(self, request, operation_model, context):
)
http_response_record_dict = response_dict.copy()
- http_response_record_dict[
- 'streaming'
- ] = operation_model.has_streaming_output
+ http_response_record_dict['streaming'] = (
+ operation_model.has_streaming_output
+ )
history_recorder.record('HTTP_RESPONSE', http_response_record_dict)
protocol = operation_model.metadata['protocol']
@@ -399,7 +397,7 @@ def create_endpoint(
if not is_valid_endpoint_url(
endpoint_url
) and not is_valid_ipv6_endpoint_url(endpoint_url):
- raise ValueError("Invalid endpoint: %s" % endpoint_url)
+ raise ValueError(f"Invalid endpoint: {endpoint_url}")
if proxies is None:
proxies = self._get_proxies(endpoint_url)
diff --git a/botocore/endpoint_provider.py b/botocore/endpoint_provider.py
index 1be5a25c8d..9439086c53 100644
--- a/botocore/endpoint_provider.py
+++ b/botocore/endpoint_provider.py
@@ -20,7 +20,6 @@
or you can look at the test files in /tests/unit/data/endpoints/valid-rules/
"""
-
import logging
import re
from enum import Enum
diff --git a/botocore/errorfactory.py b/botocore/errorfactory.py
index d9a1e9cd9c..6084e51da4 100644
--- a/botocore/errorfactory.py
+++ b/botocore/errorfactory.py
@@ -49,8 +49,8 @@ def __getattr__(self, name):
for exception_cls in self._code_to_exception.values()
]
raise AttributeError(
- fr"{self} object has no attribute {name}. "
- fr"Valid exceptions are: {', '.join(exception_cls_names)}"
+ rf"{self} object has no attribute {name}. "
+ rf"Valid exceptions are: {', '.join(exception_cls_names)}"
)
diff --git a/botocore/eventstream.py b/botocore/eventstream.py
index 11baf81a32..b7999a6e50 100644
--- a/botocore/eventstream.py
+++ b/botocore/eventstream.py
@@ -10,7 +10,7 @@
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
-"""Binary Event Stream Decoding """
+"""Binary Event Stream Decoding"""
from binascii import crc32
from struct import unpack
@@ -33,7 +33,7 @@ class DuplicateHeader(ParserError):
"""Duplicate header found in the event."""
def __init__(self, header):
- message = 'Duplicate header present: "%s"' % header
+ message = f'Duplicate header present: "{header}"'
super().__init__(message)
@@ -41,10 +41,7 @@ class InvalidHeadersLength(ParserError):
"""Headers length is longer than the maximum."""
def __init__(self, length):
- message = 'Header length of {} exceeded the maximum of {}'.format(
- length,
- _MAX_HEADERS_LENGTH,
- )
+ message = f'Header length of {length} exceeded the maximum of {_MAX_HEADERS_LENGTH}'
super().__init__(message)
@@ -52,10 +49,7 @@ class InvalidPayloadLength(ParserError):
"""Payload length is longer than the maximum."""
def __init__(self, length):
- message = 'Payload length of {} exceeded the maximum of {}'.format(
- length,
- _MAX_PAYLOAD_LENGTH,
- )
+ message = f'Payload length of {length} exceeded the maximum of {_MAX_PAYLOAD_LENGTH}'
super().__init__(message)
@@ -63,12 +57,7 @@ class ChecksumMismatch(ParserError):
"""Calculated checksum did not match the expected checksum."""
def __init__(self, expected, calculated):
- message = (
- 'Checksum mismatch: expected 0x{:08x}, calculated 0x{:08x}'.format(
- expected,
- calculated,
- )
- )
+ message = f'Checksum mismatch: expected 0x{expected:08x}, calculated 0x{calculated:08x}'
super().__init__(message)
diff --git a/botocore/handlers.py b/botocore/handlers.py
index a6e5f7ed8c..211ed0477c 100644
--- a/botocore/handlers.py
+++ b/botocore/handlers.py
@@ -251,8 +251,7 @@ def generate_idempotent_uuid(params, model, **kwargs):
if name not in params:
params[name] = str(uuid.uuid4())
logger.debug(
- "injecting idempotency token (%s) into param '%s'."
- % (params[name], name)
+ f"injecting idempotency token ({params[name]}) into param '{name}'."
)
@@ -454,7 +453,7 @@ def _quote_source_header_from_dict(source_dict):
)
final = percent_encode(final, safe=SAFE_CHARS + '/')
if version_id is not None:
- final += '?versionId=%s' % version_id
+ final += f'?versionId={version_id}'
return final
@@ -632,8 +631,8 @@ def validate_ascii_metadata(params, **kwargs):
except UnicodeEncodeError:
error_msg = (
'Non ascii characters found in S3 metadata '
- 'for key "%s", value: "%s". \nS3 metadata can only '
- 'contain ASCII characters. ' % (key, value)
+ f'for key "{key}", value: "{value}". \nS3 metadata can only '
+ 'contain ASCII characters. '
)
raise ParamValidationError(report=error_msg)
@@ -761,10 +760,10 @@ def check_openssl_supports_tls_version_1_2(**kwargs):
openssl_version_tuple = ssl.OPENSSL_VERSION_INFO
if openssl_version_tuple < (1, 0, 1):
warnings.warn(
- 'Currently installed openssl version: %s does not '
+ f'Currently installed openssl version: {ssl.OPENSSL_VERSION} does not '
'support TLS 1.2, which is required for use of iot-data. '
'Please use python installed with openssl version 1.0.1 or '
- 'higher.' % (ssl.OPENSSL_VERSION),
+ 'higher.',
UnsupportedTLSVersionWarning,
)
# We cannot check the openssl version on python2.6, so we should just
diff --git a/botocore/hooks.py b/botocore/hooks.py
index 01248a1ea9..583cb39c3b 100644
--- a/botocore/hooks.py
+++ b/botocore/hooks.py
@@ -170,7 +170,7 @@ def unregister(
def _verify_is_callable(self, func):
if not callable(func):
- raise ValueError("Event handler %s must be callable." % func)
+ raise ValueError(f"Event handler {func} must be callable.")
def _verify_accept_kwargs(self, func):
"""Verifies a callable accepts kwargs
@@ -314,20 +314,20 @@ def _register_section(
if unique_id_uses_count:
if not count:
raise ValueError(
- "Initial registration of unique id %s was "
+ f"Initial registration of unique id {unique_id} was "
"specified to use a counter. Subsequent register "
"calls to unique id must specify use of a counter "
- "as well." % unique_id
+ "as well."
)
else:
self._unique_id_handlers[unique_id]['count'] += 1
else:
if count:
raise ValueError(
- "Initial registration of unique id %s was "
+ f"Initial registration of unique id {unique_id} was "
"specified to not use a counter. Subsequent "
"register calls to unique id must specify not to "
- "use a counter as well." % unique_id
+ "use a counter as well."
)
return
else:
@@ -364,9 +364,9 @@ def unregister(
if unique_id_uses_count:
if count is None:
raise ValueError(
- "Initial registration of unique id %s was specified to "
+ f"Initial registration of unique id {unique_id} was specified to "
"use a counter. Subsequent unregister calls to unique "
- "id must specify use of a counter as well." % unique_id
+ "id must specify use of a counter as well."
)
elif count == 1:
handler = self._unique_id_handlers.pop(unique_id)[
@@ -378,10 +378,10 @@ def unregister(
else:
if count:
raise ValueError(
- "Initial registration of unique id %s was specified "
+ f"Initial registration of unique id {unique_id} was specified "
"to not use a counter. Subsequent unregister calls "
"to unique id must specify not to use a counter as "
- "well." % unique_id
+ "well."
)
handler = self._unique_id_handlers.pop(unique_id)['handler']
try:
diff --git a/botocore/httpchecksum.py b/botocore/httpchecksum.py
index 3e812c65e7..a97eb430d4 100644
--- a/botocore/httpchecksum.py
+++ b/botocore/httpchecksum.py
@@ -11,13 +11,14 @@
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
-""" The interfaces in this module are not intended for public use.
+"""The interfaces in this module are not intended for public use.
This module defines interfaces for applying checksums to HTTP requests within
the context of botocore. This involves both resolving the checksum to be used
based on client configuration and environment, as well as application of the
checksum to the request.
"""
+
import base64
import io
import logging
@@ -264,7 +265,7 @@ def resolve_request_checksum_algorithm(
)
)
raise FlexibleChecksumError(
- error_msg="Unsupported checksum algorithm: %s" % algorithm_name
+ error_msg=f"Unsupported checksum algorithm: {algorithm_name}"
)
location_type = "header"
@@ -278,7 +279,7 @@ def resolve_request_checksum_algorithm(
algorithm = {
"algorithm": algorithm_name,
"in": location_type,
- "name": "x-amz-checksum-%s" % algorithm_name,
+ "name": f"x-amz-checksum-{algorithm_name}",
}
if algorithm["name"] in request["headers"]:
@@ -313,7 +314,7 @@ def apply_request_checksum(request):
_apply_request_trailer_checksum(request)
else:
raise FlexibleChecksumError(
- error_msg="Unknown checksum variant: %s" % algorithm["in"]
+ error_msg="Unknown checksum variant: {}".format(algorithm["in"])
)
@@ -400,7 +401,7 @@ def handle_checksum_body(http_response, response, context, operation_model):
return
for algorithm in algorithms:
- header_name = "x-amz-checksum-%s" % algorithm
+ header_name = f"x-amz-checksum-{algorithm}"
# If the header is not found, check the next algorithm
if header_name not in headers:
continue
@@ -434,7 +435,7 @@ def handle_checksum_body(http_response, response, context, operation_model):
def _handle_streaming_response(http_response, response, algorithm):
checksum_cls = _CHECKSUM_CLS.get(algorithm)
- header_name = "x-amz-checksum-%s" % algorithm
+ header_name = f"x-amz-checksum-{algorithm}"
return StreamingChecksumBody(
http_response.raw,
response["headers"].get("content-length"),
@@ -445,18 +446,15 @@ def _handle_streaming_response(http_response, response, algorithm):
def _handle_bytes_response(http_response, response, algorithm):
body = http_response.content
- header_name = "x-amz-checksum-%s" % algorithm
+ header_name = f"x-amz-checksum-{algorithm}"
checksum_cls = _CHECKSUM_CLS.get(algorithm)
checksum = checksum_cls()
checksum.update(body)
expected = response["headers"][header_name]
if checksum.digest() != base64.b64decode(expected):
error_msg = (
- "Expected checksum %s did not match calculated checksum: %s"
- % (
- expected,
- checksum.b64digest(),
- )
+ f"Expected checksum {expected} did not match calculated "
+ f"checksum: {checksum.b64digest()}"
)
raise FlexibleChecksumError(error_msg=error_msg)
return body
diff --git a/botocore/loaders.py b/botocore/loaders.py
index 2baf4196fc..f5072a3e5f 100644
--- a/botocore/loaders.py
+++ b/botocore/loaders.py
@@ -101,6 +101,7 @@
for the sdk. For instance, additional operation parameters might be added here
which don't represent the actual service api.
"""
+
import logging
import os
diff --git a/botocore/model.py b/botocore/model.py
index 8aa3d2dcc6..df9159e36e 100644
--- a/botocore/model.py
+++ b/botocore/model.py
@@ -11,6 +11,7 @@
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""Abstractions to interact with service models."""
+
from collections import defaultdict
from typing import NamedTuple, Union
diff --git a/botocore/paginate.py b/botocore/paginate.py
index 42e74d0819..228cdd3cd2 100644
--- a/botocore/paginate.py
+++ b/botocore/paginate.py
@@ -179,7 +179,7 @@ def get_paginator(self, operation_name):
single_paginator_config = self._paginator_config[operation_name]
except KeyError:
raise ValueError(
- "Paginator for operation does not exist: %s" % operation_name
+ f"Paginator for operation does not exist: {operation_name}"
)
return single_paginator_config
@@ -232,7 +232,7 @@ def resume_token(self):
@resume_token.setter
def resume_token(self, value):
if not isinstance(value, dict):
- raise ValueError("Bad starting token: %s" % value)
+ raise ValueError(f"Bad starting token: {value}")
if 'boto_truncate_amount' in value:
token_keys = sorted(self._input_token + ['boto_truncate_amount'])
@@ -243,7 +243,7 @@ def resume_token(self, value):
if token_keys == dict_keys:
self._resume_token = self._token_encoder.encode(value)
else:
- raise ValueError("Bad starting token: %s" % value)
+ raise ValueError(f"Bad starting token: {value}")
@property
def non_aggregate_part(self):
@@ -546,7 +546,7 @@ def _parse_starting_token_deprecated(self):
"""
log.debug(
"Attempting to fall back to old starting token parser. For "
- "token: %s" % self._starting_token
+ f"token: {self._starting_token}"
)
if self._starting_token is None:
return None
@@ -577,7 +577,7 @@ def _convert_deprecated_starting_token(self, deprecated_token):
len_deprecated_token = len(deprecated_token)
len_input_token = len(self._input_token)
if len_deprecated_token > len_input_token:
- raise ValueError("Bad starting token: %s" % self._starting_token)
+ raise ValueError(f"Bad starting token: {self._starting_token}")
elif len_deprecated_token < len_input_token:
log.debug(
"Old format starting token does not contain all input "
diff --git a/botocore/parsers.py b/botocore/parsers.py
index 3905757c85..0c7a34f218 100644
--- a/botocore/parsers.py
+++ b/botocore/parsers.py
@@ -114,6 +114,7 @@
}
"""
+
import base64
import http.client
import json
@@ -315,7 +316,7 @@ def _do_generic_error_parse(self, response):
}
def _do_parse(self, response, shape):
- raise NotImplementedError("%s._do_parse" % self.__class__.__name__)
+ raise NotImplementedError(f"{self.__class__.__name__}._do_parse")
def _do_error_parse(self, response, shape):
raise NotImplementedError(f"{self.__class__.__name__}._do_error_parse")
@@ -398,7 +399,7 @@ def _handle_map(self, shape, node):
elif tag_name == value_location_name:
val_name = self._parse_shape(value_shape, single_pair)
else:
- raise ResponseParserError("Unknown tag: %s" % tag_name)
+ raise ResponseParserError(f"Unknown tag: {tag_name}")
parsed[key_name] = val_name
return parsed
@@ -506,9 +507,8 @@ def _parse_xml_string_to_dom(self, xml_string):
root = parser.close()
except XMLParseError as e:
raise ResponseParserError(
- "Unable to parse response (%s), "
- "invalid XML received. Further retries may succeed:\n%s"
- % (e, xml_string)
+ f"Unable to parse response ({e}), "
+ f"invalid XML received. Further retries may succeed:\n{xml_string}"
)
return root
diff --git a/botocore/regions.py b/botocore/regions.py
index 0fe8f0ee0e..ab20130304 100644
--- a/botocore/regions.py
+++ b/botocore/regions.py
@@ -16,6 +16,7 @@
given service and region and resolving the available endpoints for a service
in a specific AWS partition.
"""
+
import copy
import logging
import re
@@ -261,7 +262,7 @@ def _endpoint_for_partition(
):
error_msg = (
"Dualstack endpoints are currently not supported"
- " for %s partition" % partition_name
+ f" for {partition_name} partition"
)
raise EndpointVariantError(tags=['dualstack'], error_msg=error_msg)
@@ -357,8 +358,7 @@ def _resolve(
if endpoint_data.get('deprecated'):
LOG.warning(
- 'Client is configured with the deprecated endpoint: %s'
- % (endpoint_name)
+ f'Client is configured with the deprecated endpoint: {endpoint_name}'
)
service_defaults = service_data.get('defaults', {})
@@ -496,7 +496,7 @@ def construct_endpoint(
operation_model, call_args, request_context
)
LOG.debug(
- 'Calling endpoint provider with parameters: %s' % provider_params
+ f'Calling endpoint provider with parameters: {provider_params}'
)
try:
provider_result = self._provider.resolve_endpoint(
@@ -510,7 +510,7 @@ def construct_endpoint(
raise
else:
raise botocore_exception from ex
- LOG.debug('Endpoint provider result: %s' % provider_result.url)
+ LOG.debug(f'Endpoint provider result: {provider_result.url}')
# The endpoint provider does not support non-secure transport.
if not self._use_ssl and provider_result.url.startswith('https://'):
@@ -633,7 +633,7 @@ def _get_customized_builtins(
customized_builtins = copy.copy(self._builtins)
# Handlers are expected to modify the builtins dict in place.
self._event_emitter.emit(
- 'before-endpoint-resolution.%s' % service_id,
+ f'before-endpoint-resolution.{service_id}',
builtins=customized_builtins,
model=operation_model,
params=call_args,
diff --git a/botocore/retries/bucket.py b/botocore/retries/bucket.py
index 1818e5d57b..09d33c77d0 100644
--- a/botocore/retries/bucket.py
+++ b/botocore/retries/bucket.py
@@ -1,4 +1,5 @@
"""This module implements token buckets used for client side throttling."""
+
import threading
import time
diff --git a/botocore/retries/quota.py b/botocore/retries/quota.py
index c3e91ae367..f03942912a 100644
--- a/botocore/retries/quota.py
+++ b/botocore/retries/quota.py
@@ -1,7 +1,5 @@
-"""Retry quota implementation.
+"""Retry quota implementation."""
-
-"""
import threading
diff --git a/botocore/retries/special.py b/botocore/retries/special.py
index 9ce18b1fa3..9b782601da 100644
--- a/botocore/retries/special.py
+++ b/botocore/retries/special.py
@@ -5,6 +5,7 @@
module. Ideally we should be able to remove this module.
"""
+
import logging
from binascii import crc32
diff --git a/botocore/retries/standard.py b/botocore/retries/standard.py
index b00cb7aed4..8801530b00 100644
--- a/botocore/retries/standard.py
+++ b/botocore/retries/standard.py
@@ -23,6 +23,7 @@
based API used by botocore.
"""
+
import logging
import random
@@ -57,9 +58,9 @@ def register_retry_handler(client, max_attempts=DEFAULT_MAX_ATTEMPTS):
retry_quota=retry_quota,
)
- unique_id = 'retry-config-%s' % service_event_name
+ unique_id = f'retry-config-{service_event_name}'
client.meta.events.register(
- 'needs-retry.%s' % service_event_name,
+ f'needs-retry.{service_event_name}',
handler.needs_retry,
unique_id=unique_id,
)
diff --git a/botocore/retryhandler.py b/botocore/retryhandler.py
index deef1bfe9e..c2eed1d9d3 100644
--- a/botocore/retryhandler.py
+++ b/botocore/retryhandler.py
@@ -288,9 +288,9 @@ def __call__(
if attempt_number >= self._max_attempts:
# explicitly set MaxAttemptsReached
if response is not None and 'ResponseMetadata' in response[1]:
- response[1]['ResponseMetadata'][
- 'MaxAttemptsReached'
- ] = True
+ response[1]['ResponseMetadata']['MaxAttemptsReached'] = (
+ True
+ )
logger.debug(
"Reached the maximum number of retry attempts: %s",
attempt_number,
diff --git a/botocore/serialize.py b/botocore/serialize.py
index 306441e060..82ed58d09a 100644
--- a/botocore/serialize.py
+++ b/botocore/serialize.py
@@ -37,6 +37,7 @@
with the exception of blob types. Those are assumed to be binary,
and if a str/unicode type is passed in, it will be encoded as utf-8.
"""
+
import base64
import calendar
import datetime
@@ -277,7 +278,7 @@ def _serialize_type_map(self, serialized, value, shape, prefix=''):
if self._is_shape_flattened(shape):
full_prefix = prefix
else:
- full_prefix = '%s.entry' % prefix
+ full_prefix = f'{prefix}.entry'
template = full_prefix + '.{i}.{suffix}'
key_shape = shape.key
value_shape = shape.value
@@ -356,7 +357,7 @@ def serialize_to_request(self, parameters, operation_model):
)
serialized['headers'] = {
'X-Amz-Target': target,
- 'Content-Type': 'application/x-amz-json-%s' % json_version,
+ 'Content-Type': f'application/x-amz-json-{json_version}',
}
body = self.MAP_TYPE()
input_shape = operation_model.input_shape
@@ -373,7 +374,7 @@ def serialize_to_request(self, parameters, operation_model):
def _serialize(self, serialized, value, shape, key=None):
method = getattr(
self,
- '_serialize_type_%s' % shape.type_name,
+ f'_serialize_type_{shape.type_name}',
self._default_serialize,
)
method(serialized, value, shape, key)
@@ -713,7 +714,7 @@ def _serialize_body_params(self, params, shape):
def _serialize(self, shape, params, xmlnode, name):
method = getattr(
self,
- '_serialize_type_%s' % shape.type_name,
+ f'_serialize_type_{shape.type_name}',
self._default_serialize,
)
method(xmlnode, params, shape, name)
@@ -725,7 +726,7 @@ def _serialize_type_structure(self, xmlnode, params, shape, name):
namespace_metadata = shape.serialization['xmlNamespace']
attribute_name = 'xmlns'
if namespace_metadata.get('prefix'):
- attribute_name += ':%s' % namespace_metadata['prefix']
+ attribute_name += f":{namespace_metadata['prefix']}"
structure_node.attrib[attribute_name] = namespace_metadata['uri']
for key, value in params.items():
member_shape = shape.members[key]
diff --git a/botocore/session.py b/botocore/session.py
index 0739286ec6..93d020757a 100644
--- a/botocore/session.py
+++ b/botocore/session.py
@@ -557,11 +557,11 @@ def user_agent(self):
f'{platform.system()}/{platform.release()}'
)
if HAS_CRT:
- base += ' awscrt/%s' % self._get_crt_version()
+ base += f' awscrt/{self._get_crt_version()}'
if os.environ.get('AWS_EXECUTION_ENV') is not None:
- base += ' exec-env/%s' % os.environ.get('AWS_EXECUTION_ENV')
+ base += ' exec-env/{}'.format(os.environ.get('AWS_EXECUTION_ENV'))
if self.user_agent_extra:
- base += ' %s' % self.user_agent_extra
+ base += f' {self.user_agent_extra}'
return base
@@ -615,7 +615,7 @@ def get_service_data(self, service_name, api_version=None):
)
service_id = EVENT_ALIASES.get(service_name, service_name)
self._events.emit(
- 'service-data-loaded.%s' % service_id,
+ f'service-data-loaded.{service_id}',
service_data=service_data,
service_name=service_name,
session=self,
@@ -803,9 +803,9 @@ def get_component(self, name):
except ValueError:
if name in ['endpoint_resolver', 'exceptions_factory']:
warnings.warn(
- 'Fetching the %s component with the get_component() '
+ f'Fetching the {name} component with the get_component() '
'method is deprecated as the component has always been '
- 'considered an internal interface of botocore' % name,
+ 'considered an internal interface of botocore',
DeprecationWarning,
)
return self._internal_components.get_component(name)
@@ -1153,7 +1153,7 @@ def get_component(self, name):
try:
return self._components[name]
except KeyError:
- raise ValueError("Unknown component: %s" % name)
+ raise ValueError(f"Unknown component: {name}")
def register_component(self, name, component):
self._components[name] = component
diff --git a/botocore/signers.py b/botocore/signers.py
index ef51805fe5..89319af10b 100644
--- a/botocore/signers.py
+++ b/botocore/signers.py
@@ -152,9 +152,7 @@ def sign(
# Allow mutating request before signing
self._event_emitter.emit(
- 'before-sign.{}.{}'.format(
- self._service_id.hyphenize(), operation_name
- ),
+ f'before-sign.{self._service_id.hyphenize()}.{operation_name}',
request=request,
signing_name=signing_name,
region_name=self._region_name,
@@ -231,9 +229,7 @@ def _choose_signer(self, operation_name, signing_type, context):
signature_version += suffix
handler, response = self._event_emitter.emit_until_response(
- 'choose-signer.{}.{}'.format(
- self._service_id.hyphenize(), operation_name
- ),
+ f'choose-signer.{self._service_id.hyphenize()}.{operation_name}',
signing_name=signing_name,
region_name=region_name,
signature_version=signature_version,
@@ -428,9 +424,9 @@ def generate_presigned_url(self, url, date_less_than=None, policy=None):
if isinstance(policy, str):
policy = policy.encode('utf8')
if date_less_than is not None:
- params = ['Expires=%s' % int(datetime2timestamp(date_less_than))]
+ params = [f'Expires={int(datetime2timestamp(date_less_than))}']
else:
- params = ['Policy=%s' % self._url_b64encode(policy).decode('utf8')]
+ params = [f"Policy={self._url_b64encode(policy).decode('utf8')}"]
signature = self.rsa_signer(policy)
params.extend(
[
diff --git a/botocore/stub.py b/botocore/stub.py
index 137cfe4288..018fc08706 100644
--- a/botocore/stub.py
+++ b/botocore/stub.py
@@ -239,8 +239,8 @@ def add_response(self, method, service_response, expected_params=None):
def _add_response(self, method, service_response, expected_params):
if not hasattr(self.client, method):
raise ValueError(
- "Client %s does not have method: %s"
- % (self.client.meta.service_model.service_name, method)
+ f"Client {self.client.meta.service_model.service_name} "
+ f"does not have method: {method}"
)
# Create a successful http response
@@ -383,16 +383,20 @@ def _assert_expected_params(self, model, params, context, **kwargs):
if param not in params or expected_params[param] != params[param]:
raise StubAssertionError(
operation_name=model.name,
- reason='Expected parameters:\n%s,\nbut received:\n%s'
- % (pformat(expected_params), pformat(params)),
+ reason=(
+ f'Expected parameters:\n{pformat(expected_params)},\n'
+ f'but received:\n{pformat(params)}'
+ ),
)
# Ensure there are no extra params hanging around
if sorted(expected_params.keys()) != sorted(params.keys()):
raise StubAssertionError(
operation_name=model.name,
- reason='Expected parameters:\n%s,\nbut received:\n%s'
- % (pformat(expected_params), pformat(params)),
+ reason=(
+ f'Expected parameters:\n{pformat(expected_params)},\n'
+ f'but received:\n{pformat(params)}'
+ ),
)
def _should_not_stub(self, context):
diff --git a/botocore/useragent.py b/botocore/useragent.py
index f837fc8699..a9a611910b 100644
--- a/botocore/useragent.py
+++ b/botocore/useragent.py
@@ -22,6 +22,7 @@
* The ``user_agent_extra`` field in the :py:class:`botocore.config.Config`.
"""
+
import os
import platform
from copy import copy
diff --git a/botocore/utils.py b/botocore/utils.py
index cba9a57c02..0efd7ef325 100644
--- a/botocore/utils.py
+++ b/botocore/utils.py
@@ -431,7 +431,7 @@ def _select_base_url(self, base_url, config):
else:
chosen_base_url = METADATA_BASE_URL
- logger.debug("IMDS ENDPOINT: %s" % chosen_base_url)
+ logger.debug(f"IMDS ENDPOINT: {chosen_base_url}")
if not is_valid_uri(chosen_base_url):
raise InvalidIMDSEndpointError(endpoint=chosen_base_url)
@@ -996,7 +996,7 @@ def parse_timestamp(value):
exc_info=e,
)
raise RuntimeError(
- 'Unable to calculate correct timezone offset for "%s"' % value
+ f'Unable to calculate correct timezone offset for "{value}"'
)
@@ -1793,17 +1793,16 @@ def redirect_from_error(self, request_dict, response, operation, **kwargs):
if new_region is None:
logger.debug(
- "S3 client configured for region %s but the bucket %s is not "
- "in that region and the proper region could not be "
- "automatically determined." % (client_region, bucket)
+ f"S3 client configured for region {client_region} but the "
+ f"bucket {bucket} is not in that region and the proper region "
+ "could not be automatically determined."
)
return
logger.debug(
- "S3 client configured for region %s but the bucket %s is in region"
- " %s; Please configure the proper region to avoid multiple "
- "unnecessary redirects and signing attempts."
- % (client_region, bucket, new_region)
+ f"S3 client configured for region {client_region} but the bucket {bucket} "
+ f"is in region {new_region}; Please configure the proper region to "
+            "avoid multiple unnecessary redirects and signing attempts."
)
# Adding the new region to _cache will make construct_endpoint() to
# use the new region as value for the AWS::Region builtin parameter.
@@ -1992,17 +1991,16 @@ def redirect_from_error(self, request_dict, response, operation, **kwargs):
if new_region is None:
logger.debug(
- "S3 client configured for region %s but the bucket %s is not "
+ f"S3 client configured for region {client_region} but the bucket {bucket} is not "
"in that region and the proper region could not be "
- "automatically determined." % (client_region, bucket)
+ "automatically determined."
)
return
logger.debug(
- "S3 client configured for region %s but the bucket %s is in region"
- " %s; Please configure the proper region to avoid multiple "
+ f"S3 client configured for region {client_region} but the bucket {bucket} is in region"
+ f" {new_region}; Please configure the proper region to avoid multiple "
"unnecessary redirects and signing attempts."
- % (client_region, bucket, new_region)
)
endpoint = self._endpoint_resolver.resolve('s3', new_region)
endpoint = endpoint['endpoint_url']
@@ -2087,8 +2085,8 @@ def parse_arn(self, arn):
arn_parts = arn.split(':', 5)
if len(arn_parts) < 6:
raise InvalidArnException(
- 'Provided ARN: %s must be of the format: '
- 'arn:partition:service:region:account:resource' % arn
+ f'Provided ARN: {arn} must be of the format: '
+ 'arn:partition:service:region:account:resource'
)
return {
'partition': arn_parts[1],
@@ -2271,8 +2269,8 @@ def set_endpoint(self, request, **kwargs):
raise UnsupportedS3ConfigurationError(
msg=(
'Client is configured to use the FIPS psuedo region '
- 'for "%s", but S3 Accelerate does not have any FIPS '
- 'compatible endpoints.' % (self._region)
+ f'for "{self._region}", but S3 Accelerate does not have any FIPS '
+ 'compatible endpoints.'
)
)
switch_host_s3_accelerate(request=request, **kwargs)
@@ -2292,9 +2290,8 @@ def _validate_fips_supported(self, request):
if 'outpost_name' in request.context['s3_accesspoint']:
raise UnsupportedS3AccesspointConfigurationError(
msg=(
- 'Client is configured to use the FIPS psuedo-region "%s", '
+ f'Client is configured to use the FIPS psuedo-region "{self._region}", '
'but outpost ARNs do not support FIPS endpoints.'
- % (self._region)
)
)
# Transforming psuedo region to actual region
@@ -2306,11 +2303,10 @@ def _validate_fips_supported(self, request):
raise UnsupportedS3AccesspointConfigurationError(
msg=(
'Client is configured to use the FIPS psuedo-region '
- 'for "%s", but the access-point ARN provided is for '
- 'the "%s" region. For clients using a FIPS '
+ f'for "{self._region}", but the access-point ARN provided is for '
+ f'the "{accesspoint_region}" region. For clients using a FIPS '
'psuedo-region calls to access-point ARNs in another '
'region are not allowed.'
- % (self._region, accesspoint_region)
)
)
@@ -2321,8 +2317,8 @@ def _validate_global_regions(self, request):
raise UnsupportedS3AccesspointConfigurationError(
msg=(
'Client is configured to use the global psuedo-region '
- '"%s". When providing access-point ARNs a regional '
- 'endpoint must be specified.' % self._region
+ f'"{self._region}". When providing access-point ARNs a regional '
+ 'endpoint must be specified.'
)
)
@@ -2338,10 +2334,9 @@ def _validate_accesspoint_supported(self, request):
if request_partition != self._partition:
raise UnsupportedS3AccesspointConfigurationError(
msg=(
- 'Client is configured for "%s" partition, but access-point'
- ' ARN provided is for "%s" partition. The client and '
+ f'Client is configured for "{self._partition}" partition, but access-point'
+ f' ARN provided is for "{request_partition}" partition. The client and '
' access-point partition must be the same.'
- % (self._partition, request_partition)
)
)
s3_service = request.context['s3_accesspoint'].get('service')
@@ -2486,7 +2481,7 @@ def _get_accesspoint_netloc(self, request_context, region_name):
def _inject_fips_if_needed(self, component, request_context):
if self._use_fips_endpoint:
- return '%s-fips' % component
+ return f'{component}-fips'
return component
def _get_accesspoint_path(self, original_path, request_context):
@@ -2663,18 +2658,17 @@ def _validate_endpoint_from_arn_details_supported(self, request):
if arn_region != self._region:
error_msg = (
'The use_arn_region configuration is disabled but '
- 'received arn for "%s" when the client is configured '
- 'to use "%s"'
- ) % (arn_region, self._region)
+ f'received arn for "{arn_region}" when the client is configured '
+ f'to use "{self._region}"'
+ )
raise UnsupportedS3ControlConfigurationError(msg=error_msg)
request_partion = request.context['arn_details']['partition']
if request_partion != self._partition:
raise UnsupportedS3ControlConfigurationError(
msg=(
- 'Client is configured for "%s" partition, but arn '
- 'provided is for "%s" partition. The client and '
+ f'Client is configured for "{self._partition}" partition, but arn '
+ f'provided is for "{request_partion}" partition. The client and '
'arn partition must be the same.'
- % (self._partition, request_partion)
)
)
if self._s3_config.get('use_accelerate_endpoint'):
@@ -2874,8 +2868,8 @@ def _override_account_id_param(self, params, arn_details):
if 'AccountId' in params and params['AccountId'] != account_id:
error_msg = (
'Account ID in arn does not match the AccountId parameter '
- 'provided: "%s"'
- ) % params['AccountId']
+ 'provided: "{}"'
+ ).format(params['AccountId'])
raise UnsupportedS3ControlArnError(
arn=arn_details['original'],
msg=error_msg,
diff --git a/botocore/validate.py b/botocore/validate.py
index dfcca3daa8..82aabd66e4 100644
--- a/botocore/validate.py
+++ b/botocore/validate.py
@@ -210,7 +210,7 @@ def _validate(self, params, shape, errors, name):
if special_validator:
special_validator(params, shape, errors, name)
else:
- getattr(self, '_validate_%s' % shape.type_name)(
+ getattr(self, f'_validate_{shape.type_name}')(
params, shape, errors, name
)
diff --git a/botocore/waiter.py b/botocore/waiter.py
index 47f71886d4..ebac2c9f82 100644
--- a/botocore/waiter.py
+++ b/botocore/waiter.py
@@ -64,8 +64,7 @@ def wait(self, **kwargs):
# Rename the waiter class based on the type of waiter.
waiter_class_name = str(
- '%s.Waiter.%s'
- % (get_service_module_name(client.meta.service_model), waiter_name)
+ f'{get_service_module_name(client.meta.service_model)}.Waiter.{waiter_name}'
)
# Create the new waiter class
@@ -127,8 +126,8 @@ def _verify_supported_version(self, version):
raise WaiterConfigError(
error_msg=(
"Unsupported waiter version, supported version "
- "must be: %s, but version of waiter config "
- "is: %s" % (self.SUPPORTED_VERSION, version)
+ f"must be: {self.SUPPORTED_VERSION}, but version "
+ f"of waiter config is: {version}"
)
)
@@ -136,7 +135,7 @@ def get_waiter(self, waiter_name):
try:
single_waiter_config = self._waiter_config[waiter_name]
except KeyError:
- raise ValueError("Waiter does not exist: %s" % waiter_name)
+ raise ValueError(f"Waiter does not exist: {waiter_name}")
return SingleWaiterConfig(single_waiter_config)
@@ -178,28 +177,23 @@ def __init__(self, config):
@property
def explanation(self):
if self.matcher == 'path':
- return 'For expression "{}" we matched expected path: "{}"'.format(
- self.argument,
- self.expected,
- )
+ return f'For expression "{self.argument}" we matched expected path: "{self.expected}"'
elif self.matcher == 'pathAll':
return (
- 'For expression "%s" all members matched excepted path: "%s"'
- % (self.argument, self.expected)
+ f'For expression "{self.argument}" all members matched '
+ f'expected path: "{self.expected}"'
)
elif self.matcher == 'pathAny':
return (
- 'For expression "%s" we matched expected path: "%s" at least once'
- % (self.argument, self.expected)
+ f'For expression "{self.argument}" we matched expected '
+ f'path: "{self.expected}" at least once'
)
elif self.matcher == 'status':
- return 'Matched expected HTTP status code: %s' % self.expected
+ return f'Matched expected HTTP status code: {self.expected}'
elif self.matcher == 'error':
- return 'Matched expected service error code: %s' % self.expected
+ return f'Matched expected service error code: {self.expected}'
else:
- return (
- 'No explanation for unknown waiter type: "%s"' % self.matcher
- )
+ return f'No explanation for unknown waiter type: "{self.matcher}"'
def _create_matcher_func(self):
# An acceptor function is a callable that takes a single value. The
@@ -222,7 +216,7 @@ def _create_matcher_func(self):
return self._create_error_matcher()
else:
raise WaiterConfigError(
- error_msg="Unknown acceptor: %s" % self.matcher
+ error_msg=f"Unknown acceptor: {self.matcher}"
)
def _create_path_matcher(self):
@@ -364,8 +358,7 @@ def wait(self, **kwargs):
# can just handle here by raising an exception.
raise WaiterError(
name=self.name,
- reason='An error occurred (%s): %s'
- % (
+ reason='An error occurred ({}): {}'.format(
response['Error'].get('Code', 'Unknown'),
response['Error'].get('Message', 'Unknown'),
),
@@ -377,9 +370,7 @@ def wait(self, **kwargs):
)
return
if current_state == 'failure':
- reason = 'Waiter encountered a terminal failure state: %s' % (
- acceptor.explanation
- )
+ reason = f'Waiter encountered a terminal failure state: {acceptor.explanation}'
raise WaiterError(
name=self.name,
reason=reason,
@@ -390,8 +381,8 @@ def wait(self, **kwargs):
reason = 'Max attempts exceeded'
else:
reason = (
- 'Max attempts exceeded. Previously accepted state: %s'
- % (acceptor.explanation)
+                    'Max attempts exceeded. Previously accepted state: '
+ f'{acceptor.explanation}'
)
raise WaiterError(
name=self.name,
diff --git a/pyproject.toml b/pyproject.toml
index 5b2146b409..456a848967 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -10,6 +10,63 @@ line_length = 79
honor_noqa = true
src_paths = ["botocore", "tests"]
-[tool.black]
+[tool.ruff]
+exclude = [
+ ".bzr",
+ ".direnv",
+ ".eggs",
+ ".git",
+ ".git-rewrite",
+ ".hg",
+ ".ipynb_checkpoints",
+ ".mypy_cache",
+ ".nox",
+ ".pants.d",
+ ".pyenv",
+ ".pytest_cache",
+ ".pytype",
+ ".ruff_cache",
+ ".svn",
+ ".tox",
+ ".venv",
+ ".vscode",
+ "__pypackages__",
+ "_build",
+ "buck-out",
+ "build",
+ "dist",
+ "node_modules",
+ "site-packages",
+ "venv",
+]
+
+# Format same as Black.
line-length = 79
-skip_string_normalization = true
+indent-width = 4
+
+target-version = "py38"
+
+[tool.ruff.lint]
+# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
+# Unlike Flake8, Ruff doesn't enable pycodestyle warnings (`W`) or
+# McCabe complexity (`C901`) by default.
+select = ["E4", "E7", "E9", "F", "UP"]
+ignore = []
+
+# Allow fix for all enabled rules (when `--fix` is provided).
+fixable = ["ALL"]
+unfixable = []
+
+# Allow unused variables when underscore-prefixed.
+dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
+
+[tool.ruff.format]
+# Like Black, use double quotes for strings, spaces for indents
+# and trailing commas.
+quote-style = "preserve"
+indent-style = "space"
+skip-magic-trailing-comma = false
+line-ending = "auto"
+
+docstring-code-format = false
+docstring-code-line-length = "dynamic"
diff --git a/scripts/ci/install b/scripts/ci/install
index ed813995e5..0f29217840 100755
--- a/scripts/ci/install
+++ b/scripts/ci/install
@@ -43,4 +43,4 @@ if __name__ == "__main__":
package = os.path.join('dist', wheel_dist)
if args.extras:
package = f"\"{package}[{args.extras}]\""
- run('pip install %s' % package)
+ run(f'pip install {package}')
diff --git a/scripts/get-model-filename b/scripts/get-model-filename
index fa81a5315e..a490e366e8 100755
--- a/scripts/get-model-filename
+++ b/scripts/get-model-filename
@@ -21,6 +21,7 @@ will be created if it does not exist.
Copied: /tmp/myfile.json -> /Users/foo/botocore/data/aws/cloudwatch/2010-08-01.normal.json
"""
+
import json
# Note we're using optparse for 2.6 compat.
@@ -127,12 +128,14 @@ class TestDeterminePath(unittest.TestCase):
)
# The special casing of elasticloadbalancing -> elb.
- self.given_metadata(
- {
- 'apiVersion': '2015-01-01',
- 'endpointPrefix': 'elasticloadbalancing',
- }
- ),
+ (
+ self.given_metadata(
+ {
+ 'apiVersion': '2015-01-01',
+ 'endpointPrefix': 'elasticloadbalancing',
+ }
+ ),
+ )
self.assert_filename_is('botocore/data/aws/elb/2015-01-01.normal.json')
diff --git a/scripts/new-change b/scripts/new-change
index 716ad9c201..b902c63364 100755
--- a/scripts/new-change
+++ b/scripts/new-change
@@ -36,6 +36,7 @@ You can then use the ``scripts/gen-changelog`` to generate the
CHANGELOG.rst file.
"""
+
import argparse
import json
import os
@@ -132,9 +133,7 @@ def replace_issue_references(parsed, repo_name):
def linkify(match):
number = match.group()[1:]
- return '`{} <https://github.com/{}/issues/{}>`__'.format(
- match.group(), repo_name, number
- )
+ return f'`{match.group()} <https://github.com/{repo_name}/issues/{number}>`__'
new_description = re.sub(r'#\d+', linkify, description)
parsed['description'] = new_description
diff --git a/setup.cfg b/setup.cfg
index 2bf03be6e7..d27c8e2cd9 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -10,6 +10,3 @@ requires_dist =
[options.extras_require]
crt = awscrt==0.20.11
-
-[flake8]
-ignore = E203,E226,E501,E731,W503,W504
diff --git a/tests/__init__.py b/tests/__init__.py
index 33307c0081..0a647aef94 100644
--- a/tests/__init__.py
+++ b/tests/__init__.py
@@ -121,9 +121,7 @@ def temporary_file(mode):
"""
temporary_directory = tempfile.mkdtemp()
- basename = 'tmpfile-{}-{}'.format(
- int(time.time()), random.randint(1, 1000)
- )
+ basename = f'tmpfile-{int(time.time())}-{random.randint(1, 1000)}'
full_filename = os.path.join(temporary_directory, basename)
open(full_filename, 'w').close()
try:
@@ -523,8 +521,9 @@ def wait(self, check, *args, **kwargs):
raise ConsistencyWaiterException(fail_msg)
def _fail_message(self, attempts, successes):
- format_args = (attempts, successes)
- return 'Failed after %s attempts, only had %s successes' % format_args
+ return (
+ f'Failed after {attempts} attempts, only had {successes} successes'
+ )
class StubbedSession(botocore.session.Session):
diff --git a/tests/acceptance/features/environment.py b/tests/acceptance/features/environment.py
index 302e51abcb..140cbcb7c9 100644
--- a/tests/acceptance/features/environment.py
+++ b/tests/acceptance/features/environment.py
@@ -39,9 +39,7 @@ def before_feature(context, feature):
service_name = tag
break
else:
- raise RuntimeError(
- "Unable to create a client for " "feature: %s" % feature
- )
+ raise RuntimeError(f"Unable to create a client for feature: {feature}")
if service_name in SKIP_SERVICES:
feature.mark_skipped()
diff --git a/tests/acceptance/features/steps/base.py b/tests/acceptance/features/steps/base.py
index 7ab58ae672..41d15099c2 100644
--- a/tests/acceptance/features/steps/base.py
+++ b/tests/acceptance/features/steps/base.py
@@ -84,7 +84,7 @@ def then_expected_type_is_list(context, expression):
# the response is a dict to ensure it made it through
# our response parser properly.
if not isinstance(context.response, dict):
- raise AssertionError("Response is not a dict: %s" % context.response)
+ raise AssertionError(f"Response is not a dict: {context.response}")
@then('the response should contain a "{}"')
@@ -93,13 +93,12 @@ def then_should_contain_key(context, key):
# We really just care that the request succeeded for these
# smoke tests.
if not isinstance(context.response, dict):
- raise AssertionError("Response is not a dict: %s" % context.response)
+ raise AssertionError(f"Response is not a dict: {context.response}")
@then('I expect the response error to contain a message')
def then_error_has_message(context):
if 'Message' not in context.error_response.response['Error']:
raise AssertionError(
- "Message key missing from error response: %s"
- % context.error_response.response
+ f"Message key missing from error response: {context.error_response.response}"
)
diff --git a/tests/functional/docs/__init__.py b/tests/functional/docs/__init__.py
index 87e0a76691..2fca0fd5c2 100644
--- a/tests/functional/docs/__init__.py
+++ b/tests/functional/docs/__init__.py
@@ -89,7 +89,7 @@ def get_method_document_block(self, operation_name, contents):
def get_parameter_document_block(self, param_name, contents):
contents = contents.decode('utf-8')
- start_param_document = ' :type %s:' % param_name
+ start_param_document = f' :type {param_name}:'
start_index = contents.find(start_param_document)
self.assertNotEqual(start_index, -1, 'Param is not found in contents')
contents = contents[start_index:]
@@ -126,7 +126,7 @@ def assert_is_documented_as_autopopulated_param(
# Ensure it is not in the example.
self.assert_not_contains_line(
- '%s=\'string\'' % param_name, method_contents
+ f'{param_name}=\'string\'', method_contents
)
# Ensure it is in the params.
diff --git a/tests/functional/docs/test_lex.py b/tests/functional/docs/test_lex.py
index d7a792021b..4059461a1f 100644
--- a/tests/functional/docs/test_lex.py
+++ b/tests/functional/docs/test_lex.py
@@ -21,11 +21,11 @@ def test_jsonheader_docs(self):
self.assert_contains_lines_in_order(
[
'**Request Syntax**',
- 'sessionAttributes=%s,' % self.TYPE_STRING,
+ f'sessionAttributes={self.TYPE_STRING},',
':type sessionAttributes: JSON serializable',
'**Response Syntax**',
- '\'slots\': %s,' % self.TYPE_STRING,
- '\'sessionAttributes\': %s' % self.TYPE_STRING,
+ f'\'slots\': {self.TYPE_STRING},',
+ f'\'sessionAttributes\': {self.TYPE_STRING}',
'**slots** (JSON serializable)',
'**sessionAttributes** (JSON serializable)',
],
diff --git a/tests/functional/docs/test_s3.py b/tests/functional/docs/test_s3.py
index fc9f125b6f..513d0793c1 100644
--- a/tests/functional/docs/test_s3.py
+++ b/tests/functional/docs/test_s3.py
@@ -69,7 +69,7 @@ def test_copy_source_documented_as_union_type(self):
"{'Bucket': 'string', 'Key': 'string', 'VersionId': 'string'}"
)
self.assert_contains_line(
- "CopySource='string' or %s" % dict_form, content
+ f"CopySource='string' or {dict_form}", content
)
def test_copy_source_param_docs_also_modified(self):
diff --git a/tests/functional/docs/test_shared_example_config.py b/tests/functional/docs/test_shared_example_config.py
index 3211e562e6..57d9d50bf4 100644
--- a/tests/functional/docs/test_shared_example_config.py
+++ b/tests/functional/docs/test_shared_example_config.py
@@ -53,9 +53,8 @@ def assert_valid_values(service_name, operation_model, example_config):
if input_shape is None and example_input:
raise AssertionError(
- "Input found in example for %s from %s with id %s, but no input "
- "shape is defined."
- % (operation_model.name, service_name, example_id)
+ f"Input found in example for {operation_model.name} from {service_name} "
+ f"with id {example_id}, but no input shape is defined."
)
example_output = example_config.get('output')
@@ -63,9 +62,8 @@ def assert_valid_values(service_name, operation_model, example_config):
if output_shape is None and example_output:
raise AssertionError(
- "Output found in example for %s from %s with id %s, but no output "
- "shape is defined."
- % (operation_model.name, service_name, example_id)
+ f"Output found in example for {operation_model.name} from {service_name} "
+ f"with id {example_id}, but no output shape is defined."
)
try:
@@ -80,9 +78,8 @@ def assert_valid_values(service_name, operation_model, example_config):
)
except AssertionError as e:
raise AssertionError(
- "Invalid value in example for {} from {} with id {}: {}".format(
- operation_model.name, service_name, example_id, e
- )
+ f"Invalid value in example for {operation_model.name} from "
+ f"{service_name} with id {example_id}: {e}"
)
@@ -104,9 +101,7 @@ def _assert_valid_structure_values(shape, example_dict, path):
if invalid_members:
dotted_path = '.'.join(path)
raise AssertionError(
- "Invalid members found for {}: {}".format(
- dotted_path, invalid_members
- )
+ f"Invalid members found for {dotted_path}: {invalid_members}"
)
for member_name, example_value in example_dict.items():
@@ -133,9 +128,7 @@ def _assert_valid_timestamp(timestamp, path):
except Exception as e:
dotted_path = '.'.join(path)
raise AssertionError(
- 'Failed to parse timestamp {} for {}: {}'.format(
- timestamp, dotted_path, e
- )
+ f'Failed to parse timestamp {timestamp} for {dotted_path}: {e}'
)
@@ -144,7 +137,6 @@ def assert_operation_exists(service_model, operation_name):
service_model.operation_model(operation_name)
except OperationNotFoundError:
raise AssertionError(
- "Examples found in {} for operation {} that does not exist.".format(
- service_model.service_name, operation_name
- )
+ f"Examples found in {service_model.service_name} for operation "
+ f"{operation_name} that does not exist."
)
diff --git a/tests/functional/test_alias.py b/tests/functional/test_alias.py
index 750cb933e9..0812c85b0e 100644
--- a/tests/functional/test_alias.py
+++ b/tests/functional/test_alias.py
@@ -82,6 +82,8 @@ def _can_use_parameter_in_client_call(session, case, use_alias=True):
getattr(client, operation)(**params)
except ParamValidationError as e:
raise AssertionError(
- 'Expecting %s to be valid parameter for %s.%s but received '
- '%s.' % (case['new_name'], case['service'], case['operation'], e)
+ 'Expecting {} to be valid parameter for {}.{} but received '
+ '{}.'.format(
+ case['new_name'], case['service'], case['operation'], e
+ )
)
diff --git a/tests/functional/test_credentials.py b/tests/functional/test_credentials.py
index ddd5f2beea..a6ba1f8b4b 100644
--- a/tests/functional/test_credentials.py
+++ b/tests/functional/test_credentials.py
@@ -127,9 +127,8 @@ def _run_in_thread(collected):
max_calls_allowed = math.ceil((end - start) / 2.0) + 1
self.assertTrue(
creds.refresh_counter <= max_calls_allowed,
- "Too many cred refreshes, max: %s, actual: %s, "
- "time_delta: %.4f"
- % (max_calls_allowed, creds.refresh_counter, (end - start)),
+ f"Too many cred refreshes, max: {max_calls_allowed}, actual: "
+ f"{creds.refresh_counter}, time_delta: {end - start:.4f}",
)
def test_no_race_for_immediate_advisory_expiration(self):
@@ -206,9 +205,9 @@ def create_assume_role_response(self, credentials, expiration=None):
def create_random_credentials(self):
return Credentials(
- 'fake-%s' % random_chars(15),
- 'fake-%s' % random_chars(35),
- 'fake-%s' % random_chars(45),
+ f'fake-{random_chars(15)}',
+ f'fake-{random_chars(35)}',
+ f'fake-{random_chars(45)}',
)
def assert_creds_equal(self, c1, c2):
@@ -241,9 +240,7 @@ def setUp(self):
credential_process = os.path.join(
current_dir, 'utils', 'credentialprocess.py'
)
- self.credential_process = '{} {}'.format(
- sys.executable, credential_process
- )
+ self.credential_process = f'{sys.executable} {credential_process}'
def mock_provider(self, provider_cls):
mock_instance = mock.Mock(spec=provider_cls)
@@ -487,7 +484,7 @@ def test_process_source_profile(self):
'role_arn = arn:aws:iam::123456789:role/RoleA\n'
'source_profile = B\n'
'[profile B]\n'
- 'credential_process = %s\n' % self.credential_process
+ f'credential_process = {self.credential_process}\n'
)
self.write_config(config)
@@ -520,7 +517,7 @@ def test_web_identity_source_profile(self):
'source_profile = B\n'
'[profile B]\n'
'role_arn = arn:aws:iam::123456789:role/RoleB\n'
- 'web_identity_token_file = %s\n' % token_path
+ f'web_identity_token_file = {token_path}\n'
)
self.write_config(config)
@@ -561,7 +558,7 @@ def test_web_identity_source_profile_ignores_env_vars(self):
'role_arn = arn:aws:iam::123456789:role/RoleA\n'
'source_profile = B\n'
'[profile B]\n'
- 'web_identity_token_file = %s\n' % token_path
+ f'web_identity_token_file = {token_path}\n'
)
self.write_config(config)
@@ -803,8 +800,8 @@ def test_assume_role(self):
'[profile A]\n'
'role_arn = arn:aws:iam::123456789:role/RoleA\n'
'role_session_name = sname\n'
- 'web_identity_token_file = %s\n'
- ) % self.token_file
+ f'web_identity_token_file = {self.token_file}\n'
+ )
self.write_config(config)
expected_params = {
'RoleArn': 'arn:aws:iam::123456789:role/RoleA',
@@ -832,8 +829,8 @@ def test_assume_role_env_vars_do_not_take_precedence(self):
'[profile A]\n'
'role_arn = arn:aws:iam::123456789:role/RoleA\n'
'role_session_name = aname\n'
- 'web_identity_token_file = %s\n'
- ) % self.token_file
+ f'web_identity_token_file = {self.token_file}\n'
+ )
self.write_config(config)
different_token = os.path.join(self.tempdir, str(uuid.uuid4()))
@@ -856,9 +853,7 @@ def setUp(self):
credential_process = os.path.join(
current_dir, 'utils', 'credentialprocess.py'
)
- self.credential_process = '{} {}'.format(
- sys.executable, credential_process
- )
+ self.credential_process = f'{sys.executable} {credential_process}'
self.environ = os.environ.copy()
self.environ_patch = mock.patch('os.environ', self.environ)
self.environ_patch.start()
@@ -921,8 +916,8 @@ def add_assume_role_with_web_identity_http_response(self, stubber):
def _get_assume_role_body(self, method_name):
expiration = self.some_future_time()
body = (
- '<{method_name}Response>'
- ' <{method_name}Result>'
+ f'<{method_name}Response>'
+ f' <{method_name}Result>'
'    <AssumedRoleUser>'
'      <Arn>arn:aws:sts::0123456:user</Arn>'
'      <AssumedRoleId>AKID:mysession-1567020004</AssumedRoleId>'
@@ -931,11 +926,11 @@ def _get_assume_role_body(self, method_name):
'      <AccessKeyId>AccessKey</AccessKeyId>'
'      <SecretAccessKey>SecretKey</SecretAccessKey>'
'      <SessionToken>SessionToken</SessionToken>'
- '      <Expiration>{expiration}</Expiration>'
+ f'      <Expiration>{expiration}</Expiration>'
'    </Credentials>'
- '  </{method_name}Result>'
- '</{method_name}Response>'
- ).format(method_name=method_name, expiration=expiration)
+ f'  </{method_name}Result>'
+ f'</{method_name}Response>'
+ )
return body.encode('utf-8')
def make_stubbed_client_call_to_region(self, session, stubber, region):
@@ -976,11 +971,11 @@ def test_assume_role_web_identity_uses_same_region_as_client(self):
'[profile A]\n'
'sts_regional_endpoints = regional\n'
'role_arn = arn:aws:iam::123456789:role/RoleA\n'
- 'web_identity_token_file = %s\n'
+ f'web_identity_token_file = {token_file}\n'
'source_profile = B\n\n'
'[profile B]\n'
'aws_access_key_id = abc123\n'
- 'aws_secret_access_key = def456\n' % token_file
+ 'aws_secret_access_key = def456\n'
)
self.write_config(config)
# Make an arbitrary client and API call as we are really only
diff --git a/tests/functional/test_discovery.py b/tests/functional/test_discovery.py
index c6c8b0383d..8842d5b086 100644
--- a/tests/functional/test_discovery.py
+++ b/tests/functional/test_discovery.py
@@ -57,7 +57,7 @@ def add_describe_endpoints_response(self, stubber, discovered_endpoint):
def set_endpoint_discovery_config_file(self, fileobj, config_val):
fileobj.write(
- '[default]\n' 'endpoint_discovery_enabled=%s\n' % config_val
+ '[default]\n' f'endpoint_discovery_enabled={config_val}\n'
)
fileobj.flush()
self.environ['AWS_CONFIG_FILE'] = fileobj.name
diff --git a/tests/functional/test_event_alias.py b/tests/functional/test_event_alias.py
index 77ae231923..a9839ad414 100644
--- a/tests/functional/test_event_alias.py
+++ b/tests/functional/test_event_alias.py
@@ -381,7 +381,7 @@ def _hook(**kwargs):
hook_calls.append(kwargs['event_name'])
session = _get_session()
- session.register('creating-client-class.%s' % event_part, _hook)
+ session.register(f'creating-client-class.{event_part}', _hook)
session.create_client(client_name)
assert len(hook_calls) == 1
diff --git a/tests/functional/test_h2_required.py b/tests/functional/test_h2_required.py
index 6362a95c18..8754cf2744 100644
--- a/tests/functional/test_h2_required.py
+++ b/tests/functional/test_h2_required.py
@@ -54,7 +54,7 @@ def _all_test_cases():
@pytest.mark.parametrize("h2_service", H2_SERVICES)
def test_all_uses_of_h2_are_known(h2_service):
# Validates that a service that requires HTTP 2 for all operations is known
- message = 'Found unknown HTTP 2 service: %s' % h2_service
+ message = f'Found unknown HTTP 2 service: {h2_service}'
assert _KNOWN_SERVICES.get(h2_service) is _H2_REQUIRED, message
@@ -63,7 +63,5 @@ def test_all_uses_of_h2_are_known(h2_service):
def test_all_h2_operations_are_known(h2_service, operation):
# Validates that an operation that requires HTTP 2 is known
known_operations = _KNOWN_SERVICES.get(h2_service, [])
- message = 'Found unknown HTTP 2 operation: {}.{}'.format(
- h2_service, operation
- )
+ message = f'Found unknown HTTP 2 operation: {h2_service}.{operation}'
assert operation in known_operations, message
diff --git a/tests/functional/test_paginator_config.py b/tests/functional/test_paginator_config.py
index b6a9885904..1d2155b370 100644
--- a/tests/functional/test_paginator_config.py
+++ b/tests/functional/test_paginator_config.py
@@ -162,8 +162,7 @@ def _validate_known_pagination_keys(page_config):
for key in page_config:
if key not in KNOWN_PAGE_KEYS:
raise AssertionError(
- "Unknown key '%s' in pagination config: %s"
- % (key, page_config)
+ f"Unknown key '{key}' in pagination config: {page_config}"
)
@@ -171,7 +170,7 @@ def _valiate_result_key_exists(page_config):
if 'result_key' not in page_config:
raise AssertionError(
"Required key 'result_key' is missing "
- "from pagination config: %s" % page_config
+ f"from pagination config: {page_config}"
)
@@ -179,7 +178,7 @@ def _validate_referenced_operation_exists(operation_name, service_model):
if operation_name not in service_model.operation_names:
raise AssertionError(
"Pagination config refers to operation that "
- "does not exist: %s" % operation_name
+ f"does not exist: {operation_name}"
)
@@ -189,7 +188,7 @@ def _validate_operation_has_output(operation_name, service_model):
if output is None or not output.members:
raise AssertionError(
"Pagination config refers to operation "
- "that does not have any output: %s" % operation_name
+ f"that does not have any output: {operation_name}"
)
@@ -203,17 +202,16 @@ def _validate_input_keys_match(operation_name, page_config, service_model):
for token in input_tokens:
if token not in valid_input_names:
raise AssertionError(
- "input_token '%s' refers to a non existent "
- "input member for operation: %s" % (token, operation_name)
+ f"input_token '{token}' refers to a non existent "
+ f"input member for operation: {operation_name}"
)
if 'limit_key' in page_config:
limit_key = page_config['limit_key']
if limit_key not in valid_input_names:
raise AssertionError(
- "limit_key '%s' refers to a non existent "
- "input member for operation: %s, valid keys: "
- "%s"
- % (
+ "limit_key '{}' refers to a non existent "
+ "input member for operation: {}, valid keys: "
+ "{}".format(
limit_key,
operation_name,
', '.join(list(valid_input_names)),
@@ -237,24 +235,21 @@ def _validate_output_keys_match(operation_name, page_config, service_model):
else:
if output_key not in output_members:
raise AssertionError(
- "Pagination key '%s' refers to an output "
- "member that does not exist: %s" % (key_name, output_key)
+ f"Pagination key '{key_name}' refers to an output "
+ f"member that does not exist: {output_key}"
)
output_members.remove(output_key)
for member in list(output_members):
- key = "{}.{}.{}".format(
- service_model.service_name, operation_name, member
- )
+ key = f"{service_model.service_name}.{operation_name}.{member}"
if key in KNOWN_EXTRA_OUTPUT_KEYS:
output_members.remove(member)
if output_members:
raise AssertionError(
"There are member names in the output shape of "
- "%s that are not accounted for in the pagination "
- "config for service %s: %s"
- % (
+ "{} that are not accounted for in the pagination "
+ "config for service {}: {}".format(
operation_name,
service_model.service_name,
', '.join(output_members),
@@ -274,7 +269,7 @@ def _validate_jmespath_compiles(expression):
except JMESPathError as e:
raise AssertionError(
"Invalid JMESPath expression used "
- "in pagination config: %s\nerror: %s" % (expression, e)
+ f"in pagination config: {expression}\nerror: {e}"
)
diff --git a/tests/functional/test_regions.py b/tests/functional/test_regions.py
index 74aaeb7738..11a882f91f 100644
--- a/tests/functional/test_regions.py
+++ b/tests/functional/test_regions.py
@@ -447,7 +447,7 @@ def test_single_service_region_endpoint(
resolver = patched_session._get_internal_component('endpoint_resolver')
bridge = ClientEndpointBridge(resolver, None, None)
result = bridge.resolve(service_name, region_name)
- expected = 'https://%s' % expected_endpoint
+ expected = f'https://{expected_endpoint}'
assert result['endpoint_url'] == expected
diff --git a/tests/functional/test_retry.py b/tests/functional/test_retry.py
index 27dafb0909..da7460313c 100644
--- a/tests/functional/test_retry.py
+++ b/tests/functional/test_retry.py
@@ -44,7 +44,7 @@ def assert_will_retry_n_times(
for _ in range(num_responses):
http_stubber.add_response(status=status, body=body)
with self.assertRaisesRegex(
- ClientError, 'reached max retries: %s' % num_retries
+ ClientError, f'reached max retries: {num_retries}'
):
yield
self.assertEqual(len(http_stubber.requests), num_responses)
diff --git a/tests/functional/test_s3.py b/tests/functional/test_s3.py
index e4f99ac97f..0c04f858e0 100644
--- a/tests/functional/test_s3.py
+++ b/tests/functional/test_s3.py
@@ -1363,7 +1363,7 @@ def test_endpoint_redirection(self):
self.assert_signing_region(request, region)
expected_endpoint = (
"endpoint-io.a1c1d5c7.s3-object-lambda."
- "%s.amazonaws.com" % region
+ f"{region}.amazonaws.com"
)
self.assert_endpoint(request, expected_endpoint)
@@ -3596,7 +3596,7 @@ def _verify_presigned_url_addressing(
# We're not trying to verify the params for URL presigning,
# those are tested elsewhere. We just care about the hostname/path.
parts = urlsplit(url)
- actual = "%s://%s%s" % parts[:3]
+ actual = "{}://{}{}".format(*parts[:3])
assert actual == expected_url
diff --git a/tests/functional/test_s3_control_redirects.py b/tests/functional/test_s3_control_redirects.py
index 1aff27a5ed..4ac00123d4 100644
--- a/tests/functional/test_s3_control_redirects.py
+++ b/tests/functional/test_s3_control_redirects.py
@@ -375,12 +375,9 @@ def _assert_test_case(test_case, client, stubber):
exception_cls = getattr(exceptions, assertions['exception'])
if exception_raised is None:
raise RuntimeError(
- 'Expected exception "%s" was not raised' % exception_cls
+ f'Expected exception "{exception_cls}" was not raised'
)
- error_msg = ('Expected exception "%s", got "%s"') % (
- exception_cls,
- type(exception_raised),
- )
+ error_msg = f'Expected exception "{exception_cls}", got "{type(exception_raised)}"'
assert isinstance(exception_raised, exception_cls), error_msg
else:
assert len(stubber.requests) == 1
diff --git a/tests/functional/test_six_imports.py b/tests/functional/test_six_imports.py
index f7ee511341..41086f56f7 100644
--- a/tests/functional/test_six_imports.py
+++ b/tests/functional/test_six_imports.py
@@ -36,20 +36,18 @@ def visit_Import(self, node):
if getattr(alias, 'name', '') == 'six':
line = self._get_line_content(self.filename, node.lineno)
raise AssertionError(
- "A bare 'import six' was found in %s:\n"
- "\n%s: %s\n"
+ f"A bare 'import six' was found in {self.filename}:\n"
+ f"\n{node.lineno}: {line}\n"
"Please use 'from botocore.compat import six' instead"
- % (self.filename, node.lineno, line)
)
def visit_ImportFrom(self, node):
if node.module == 'six':
line = self._get_line_content(self.filename, node.lineno)
raise AssertionError(
- "A bare 'from six import ...' was found in %s:\n"
- "\n%s:%s\n"
+ f"A bare 'from six import ...' was found in {self.filename}:\n"
+ f"\n{node.lineno}:{line}\n"
"Please use 'from botocore.compat import six' instead"
- % (self.filename, node.lineno, line)
)
def _get_line_content(self, filename, lineno):
diff --git a/tests/functional/test_six_threading.py b/tests/functional/test_six_threading.py
index 7321e5cbd6..c4460f7205 100644
--- a/tests/functional/test_six_threading.py
+++ b/tests/functional/test_six_threading.py
@@ -1,6 +1,7 @@
"""
Regression test for six issue #98 (https://github.com/benjaminp/six/issues/98)
"""
+
import sys
import threading
import time
diff --git a/tests/functional/test_sts.py b/tests/functional/test_sts.py
index 2040fb3017..42fac96711 100644
--- a/tests/functional/test_sts.py
+++ b/tests/functional/test_sts.py
@@ -67,7 +67,7 @@ def create_sts_client(
)
def set_sts_regional_for_config_file(self, fileobj, config_val):
- fileobj.write('[default]\n' 'sts_regional_endpoints=%s\n' % config_val)
+ fileobj.write('[default]\n' f'sts_regional_endpoints={config_val}\n')
fileobj.flush()
self.environ['AWS_CONFIG_FILE'] = fileobj.name
diff --git a/tests/functional/test_useragent.py b/tests/functional/test_useragent.py
index d69451253c..79290459ab 100644
--- a/tests/functional/test_useragent.py
+++ b/tests/functional/test_useragent.py
@@ -285,10 +285,7 @@ def test_s3transfer_user_agent(patched_session):
def test_chalice_user_agent(patched_session):
# emulate behavior from chalice's cli.factory._add_chalice_user_agent
- suffix = '{}/{}'.format(
- patched_session.user_agent_name,
- patched_session.user_agent_version,
- )
+ suffix = f'{patched_session.user_agent_name}/{patched_session.user_agent_version}'
patched_session.user_agent_name = 'aws-chalice'
patched_session.user_agent_version = '0.1.2'
patched_session.user_agent_extra = suffix
diff --git a/tests/functional/test_waiter_config.py b/tests/functional/test_waiter_config.py
index e7c8e9b876..fc1ab619fb 100644
--- a/tests/functional/test_waiter_config.py
+++ b/tests/functional/test_waiter_config.py
@@ -118,23 +118,20 @@ def _lint_single_waiter(client, waiter_name, service_model):
# Needs to reference an existing operation name.
if operation_name not in service_model.operation_names:
raise AssertionError(
- "Waiter config references unknown "
- "operation: %s" % operation_name
+ "Waiter config references unknown " f"operation: {operation_name}"
)
# Needs to have at least one acceptor.
if not waiter.config.acceptors:
raise AssertionError(
"Waiter config must have at least "
- "one acceptor state: %s" % waiter.name
+ f"one acceptor state: {waiter.name}"
)
op_model = service_model.operation_model(operation_name)
for acceptor in acceptors:
_validate_acceptor(acceptor, op_model, waiter.name)
if not waiter.name.isalnum():
- raise AssertionError(
- "Waiter name %s is not alphanumeric." % waiter_name
- )
+ raise AssertionError(f"Waiter name {waiter_name} is not alphanumeric.")
def _validate_schema(validator, waiter_json):
@@ -151,10 +148,7 @@ def _validate_acceptor(acceptor, op_model, waiter_name):
output_shape = op_model.output_shape
assert (
output_shape is not None
- ), "Waiter '{}' has JMESPath expression with no output shape: {}".format(
- waiter_name,
- op_model,
- )
+ ), f"Waiter '{waiter_name}' has JMESPath expression with no output shape: {op_model}"
# We want to check if the JMESPath expression makes sense.
# To do this, we'll generate sample output and evaluate the
# JMESPath expression against the output. We'll then
diff --git a/tests/functional/utils/credentialprocess.py b/tests/functional/utils/credentialprocess.py
index 6529c7f493..12c8068cd3 100644
--- a/tests/functional/utils/credentialprocess.py
+++ b/tests/functional/utils/credentialprocess.py
@@ -11,6 +11,7 @@
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
"""This is a dummy implementation of a credential provider process."""
+
import argparse
import json
diff --git a/tests/integration/test_client_http.py b/tests/integration/test_client_http.py
index c2ee605552..7a2b3d94b6 100644
--- a/tests/integration/test_client_http.py
+++ b/tests/integration/test_client_http.py
@@ -23,7 +23,7 @@
class TestClientHTTPBehavior(unittest.TestCase):
def setUp(self):
self.port = unused_port()
- self.localhost = 'http://localhost:%s/' % self.port
+ self.localhost = f'http://localhost:{self.port}/'
self.session = botocore.session.get_session()
# We need to set fake credentials to ensure credentials aren't searched
# for which might make additional API calls (assume role, etc).
@@ -31,7 +31,7 @@ def setUp(self):
@unittest.skip('Test has suddenly become extremely flakey.')
def test_can_proxy_https_request_with_auth(self):
- proxy_url = 'http://user:pass@localhost:%s/' % self.port
+ proxy_url = f'http://user:pass@localhost:{self.port}/'
config = Config(proxies={'https': proxy_url}, region_name='us-west-1')
client = self.session.create_client('ec2', config=config)
@@ -51,7 +51,7 @@ def validate_auth(self):
@unittest.skip('Proxy cannot connect to service when run in CodeBuild.')
def test_proxy_request_includes_host_header(self):
- proxy_url = 'http://user:pass@localhost:%s/' % self.port
+ proxy_url = f'http://user:pass@localhost:{self.port}/'
config = Config(
proxies={'https': proxy_url},
proxies_config={'proxy_use_forwarding_for_https': True},
diff --git a/tests/integration/test_cloudformation.py b/tests/integration/test_cloudformation.py
index fe6c9a7b4e..fdc354de17 100644
--- a/tests/integration/test_cloudformation.py
+++ b/tests/integration/test_cloudformation.py
@@ -25,7 +25,7 @@ def test_handles_errors_with_template_body(self):
# it handles the case when a stack does not exist.
with self.assertRaises(ClientError):
self.client.get_template(
- StackName='does-not-exist-%s' % random_chars(10)
+ StackName=f'does-not-exist-{random_chars(10)}'
)
diff --git a/tests/integration/test_cognito_identity.py b/tests/integration/test_cognito_identity.py
index 046e87e76a..bce7b2d7b7 100644
--- a/tests/integration/test_cognito_identity.py
+++ b/tests/integration/test_cognito_identity.py
@@ -22,7 +22,7 @@ def setUp(self):
)
def test_can_create_and_delete_identity_pool(self):
- pool_name = 'test%s' % random_chars(10)
+ pool_name = f'test{random_chars(10)}'
response = self.client.create_identity_pool(
IdentityPoolName=pool_name, AllowUnauthenticatedIdentities=True
)
diff --git a/tests/integration/test_credentials.py b/tests/integration/test_credentials.py
index 239c206a70..d49c266e2e 100644
--- a/tests/integration/test_credentials.py
+++ b/tests/integration/test_credentials.py
@@ -157,7 +157,7 @@ def setUp(self):
"Statement": [
{
"Effect": "Allow",
- "Principal": {"AWS": "arn:aws:iam::%s:root" % account_id},
+ "Principal": {"AWS": f"arn:aws:iam::{account_id}:root"},
"Action": "sts:AssumeRole",
}
],
@@ -254,7 +254,7 @@ def _wait_for_assume_role(
else:
raise
- raise Exception("Unable to assume role %s" % role_arn)
+ raise Exception(f"Unable to assume role {role_arn}")
def create_assume_policy(self, role_arn):
policy_document = {
diff --git a/tests/integration/test_elastictranscoder.py b/tests/integration/test_elastictranscoder.py
index 4d58056d60..a72e723621 100644
--- a/tests/integration/test_elastictranscoder.py
+++ b/tests/integration/test_elastictranscoder.py
@@ -38,7 +38,7 @@ def setUp(self):
self.iam_client = self.session.create_client('iam', 'us-east-1')
def create_bucket(self):
- bucket_name = 'ets-bucket-1-%s' % random_chars(50)
+ bucket_name = f'ets-bucket-1-{random_chars(50)}'
self.s3_client.create_bucket(Bucket=bucket_name)
waiter = self.s3_client.get_waiter('bucket_exists')
waiter.wait(Bucket=bucket_name)
@@ -46,7 +46,7 @@ def create_bucket(self):
return bucket_name
def create_iam_role(self):
- role_name = 'ets-role-name-1-%s' % random_chars(10)
+ role_name = f'ets-role-name-1-{random_chars(10)}'
parsed = self.iam_client.create_role(
RoleName=role_name, AssumeRolePolicyDocument=DEFAULT_ROLE_POLICY
)
@@ -68,7 +68,7 @@ def test_create_pipeline(self):
input_bucket = self.create_bucket()
output_bucket = self.create_bucket()
role = self.create_iam_role()
- pipeline_name = 'botocore-test-create-%s' % random_chars(10)
+ pipeline_name = f'botocore-test-create-{random_chars(10)}'
parsed = self.client.create_pipeline(
InputBucket=input_bucket,
diff --git a/tests/integration/test_s3.py b/tests/integration/test_s3.py
index 88ba79a481..712f9f357c 100644
--- a/tests/integration/test_s3.py
+++ b/tests/integration/test_s3.py
@@ -288,9 +288,7 @@ def assert_num_uploads_found(
# Sleep and try again.
time.sleep(2)
self.fail(
- "Expected to see {} uploads, instead saw: {}".format(
- num_uploads, amount_seen
- )
+ f"Expected to see {num_uploads} uploads, instead saw: {amount_seen}"
)
def create_client(self):
@@ -359,7 +357,7 @@ def test_can_delete_urlencoded_object(self):
@pytest.mark.slow
def test_can_paginate(self):
for i in range(5):
- key_name = 'key%s' % i
+ key_name = f'key{i}'
self.create_object(key_name)
# Eventual consistency.
time.sleep(3)
@@ -373,7 +371,7 @@ def test_can_paginate(self):
@pytest.mark.slow
def test_can_paginate_with_page_size(self):
for i in range(5):
- key_name = 'key%s' % i
+ key_name = f'key{i}'
self.create_object(key_name)
# Eventual consistency.
time.sleep(3)
@@ -392,7 +390,7 @@ def test_result_key_iters(self):
for i in range(5):
key_name = f'key/{i}/{i}'
self.create_object(key_name)
- key_name2 = 'key/%s' % i
+ key_name2 = f'key/{i}'
self.create_object(key_name2)
time.sleep(3)
paginator = self.client.get_paginator('list_objects')
@@ -576,7 +574,7 @@ def test_thread_safe_auth(self):
threads = []
for i in range(10):
t = threading.Thread(
- target=self.create_object_catch_exceptions, args=('foo%s' % i,)
+ target=self.create_object_catch_exceptions, args=(f'foo{i}',)
)
t.daemon = True
threads.append(t)
@@ -587,13 +585,12 @@ def test_thread_safe_auth(self):
self.assertEqual(
self.caught_exceptions,
[],
- "Unexpectedly caught exceptions: %s" % self.caught_exceptions,
+ f"Unexpectedly caught exceptions: {self.caught_exceptions}",
)
self.assertEqual(
len(set(self.auth_paths)),
10,
- "Expected 10 unique auth paths, instead received: %s"
- % (self.auth_paths),
+ f"Expected 10 unique auth paths, instead received: {self.auth_paths}",
)
def test_non_normalized_key_paths(self):
@@ -714,12 +711,10 @@ def test_presign_sigv2(self):
)
self.assertTrue(
presigned_url.startswith(
- 'https://{}.s3.amazonaws.com/{}'.format(
- self.bucket_name, self.key
- )
+ f'https://{self.bucket_name}.s3.amazonaws.com/{self.key}'
),
"Host was suppose to use DNS style, instead "
- "got: %s" % presigned_url,
+ f"got: {presigned_url}",
)
# Try to retrieve the object using the presigned url.
self.assertEqual(http_get(presigned_url).data, b'foo')
@@ -750,12 +745,10 @@ def test_presign_sigv4(self):
)
self.assertTrue(
presigned_url.startswith(
- 'https://{}.s3.amazonaws.com/{}'.format(
- self.bucket_name, self.key
- )
+ f'https://{self.bucket_name}.s3.amazonaws.com/{self.key}'
),
"Host was suppose to be the us-east-1 endpoint, instead "
- "got: %s" % presigned_url,
+ f"got: {presigned_url}",
)
# Try to retrieve the object using the presigned url.
self.assertEqual(http_get(presigned_url).data, b'foo')
@@ -787,10 +780,11 @@ def test_presign_post_sigv2(self):
# Make sure the correct endpoint is being used
self.assertTrue(
post_args['url'].startswith(
- 'https://%s.s3.amazonaws.com' % self.bucket_name
+ f'https://{self.bucket_name}.s3.amazonaws.com'
+ ),
+ "Host was suppose to use DNS style, instead " "got: {}".format(
+ post_args['url']
),
- "Host was suppose to use DNS style, instead "
- "got: %s" % post_args['url'],
)
# Try to retrieve the object using the presigned url.
@@ -824,10 +818,10 @@ def test_presign_post_sigv4(self):
# Make sure the correct endpoint is being used
self.assertTrue(
post_args['url'].startswith(
- 'https://%s.s3.amazonaws.com/' % self.bucket_name
+ f'https://{self.bucket_name}.s3.amazonaws.com/'
),
"Host was suppose to use us-east-1 endpoint, instead "
- "got: %s" % post_args['url'],
+ "got: {}".format(post_args['url']),
)
r = http_post(post_args['url'], data=post_args['fields'], files=files)
@@ -854,12 +848,10 @@ def test_presign_sigv2(self):
)
self.assertTrue(
presigned_url.startswith(
- 'https://{}.s3.amazonaws.com/{}'.format(
- self.bucket_name, self.key
- )
+ f'https://{self.bucket_name}.s3.amazonaws.com/{self.key}'
),
"Host was suppose to use DNS style, instead "
- "got: %s" % presigned_url,
+ f"got: {presigned_url}",
)
# Try to retrieve the object using the presigned url.
self.assertEqual(http_get(presigned_url).data, b'foo')
@@ -882,12 +874,10 @@ def test_presign_sigv4(self):
self.assertTrue(
presigned_url.startswith(
- 'https://s3.us-west-2.amazonaws.com/{}/{}'.format(
- self.bucket_name, self.key
- )
+ f'https://s3.us-west-2.amazonaws.com/{self.bucket_name}/{self.key}'
),
"Host was suppose to be the us-west-2 endpoint, instead "
- "got: %s" % presigned_url,
+ f"got: {presigned_url}",
)
# Try to retrieve the object using the presigned url.
self.assertEqual(http_get(presigned_url).data, b'foo')
@@ -919,10 +909,11 @@ def test_presign_post_sigv2(self):
# Make sure the correct endpoint is being used
self.assertTrue(
post_args['url'].startswith(
- 'https://%s.s3.amazonaws.com' % self.bucket_name
+ f'https://{self.bucket_name}.s3.amazonaws.com'
+ ),
+ "Host was suppose to use DNS style, instead " "got: {}".format(
+ post_args['url']
),
- "Host was suppose to use DNS style, instead "
- "got: %s" % post_args['url'],
)
r = http_post(post_args['url'], data=post_args['fields'], files=files)
@@ -955,10 +946,11 @@ def test_presign_post_sigv4(self):
# Make sure the correct endpoint is being used
self.assertTrue(
post_args['url'].startswith(
- 'https://%s.s3.amazonaws.com/' % self.bucket_name
+ f'https://{self.bucket_name}.s3.amazonaws.com/'
+ ),
+ "Host was suppose to use DNS style, instead " "got: {}".format(
+ post_args['url']
),
- "Host was suppose to use DNS style, instead "
- "got: %s" % post_args['url'],
)
r = http_post(post_args['url'], data=post_args['fields'], files=files)
@@ -1421,7 +1413,7 @@ def test_redirects_head_object(self):
)
self.assertEqual(response.get('ContentLength'), len(key))
except ClientError as e:
- self.fail("S3 Client failed to redirect Head Object: %s" % e)
+ self.fail(f"S3 Client failed to redirect Head Object: {e}")
class TestBucketWithVersions(BaseS3ClientTest):
diff --git a/tests/integration/test_session.py b/tests/integration/test_session.py
index 924dfd9cf0..95d2ec1626 100644
--- a/tests/integration/test_session.py
+++ b/tests/integration/test_session.py
@@ -30,7 +30,7 @@ def test_can_change_timestamp_with_clients(self):
dates = [bucket['CreationDate'] for bucket in parsed['Buckets']]
self.assertTrue(
all(isinstance(date, str) for date in dates),
- "Expected all str types but instead got: %s" % dates,
+ f"Expected all str types but instead got: {dates}",
)
def test_maps_service_name_when_overriden(self):
diff --git a/tests/integration/test_smoke.py b/tests/integration/test_smoke.py
index 12240cc125..e31a5c9287 100644
--- a/tests/integration/test_smoke.py
+++ b/tests/integration/test_smoke.py
@@ -10,6 +10,7 @@
to use and all the services in SMOKE_TESTS/ERROR_TESTS will be tested.
"""
+
import logging
import os
import warnings
@@ -317,7 +318,7 @@ def test_client_can_retry_request_properly(
except ClientError as e:
assert False, (
'Request was not retried properly, '
- 'received error:\n%s' % pformat(e)
+ f'received error:\n{pformat(e)}'
)
# Ensure we used the stubber as we're not using it in strict mode
assert len(http_stubber.responses) == 0, 'Stubber was not used!'
diff --git a/tests/integration/test_waiters.py b/tests/integration/test_waiters.py
index 5631d9184b..a627cec0de 100644
--- a/tests/integration/test_waiters.py
+++ b/tests/integration/test_waiters.py
@@ -24,7 +24,7 @@ def setUp(self):
self.client = self.session.create_client('dynamodb', 'us-west-2')
def test_create_table_and_wait(self):
- table_name = 'botocoretest-%s' % random_chars(10)
+ table_name = f'botocoretest-{random_chars(10)}'
self.client.create_table(
TableName=table_name,
ProvisionedThroughput={
diff --git a/tests/unit/auth/test_signers.py b/tests/unit/auth/test_signers.py
index 9e92654228..d1b301e35d 100644
--- a/tests/unit/auth/test_signers.py
+++ b/tests/unit/auth/test_signers.py
@@ -115,10 +115,10 @@ def test_bucket_operations(self):
'object-lock',
)
for operation in operations:
- url = '/quotes?%s' % operation
+ url = f'/quotes?{operation}'
split = urlsplit(url)
cr = self.hmacv1.canonical_resource(split)
- self.assertEqual(cr, '/quotes?%s' % operation)
+ self.assertEqual(cr, f'/quotes?{operation}')
def test_sign_with_token(self):
credentials = botocore.credentials.Credentials(
@@ -357,7 +357,7 @@ def test_signature_is_not_normalized(self):
def test_query_string_params_in_urls(self):
if not hasattr(self.AuthClass, 'canonical_query_string'):
raise unittest.SkipTest(
- '%s does not expose interim steps' % self.AuthClass.__name__
+ f'{self.AuthClass.__name__} does not expose interim steps'
)
request = AWSRequest()
@@ -704,9 +704,7 @@ def setUp(self):
self.request = AWSRequest()
self.bucket = 'mybucket'
self.key = 'myobject'
- self.path = 'https://s3.amazonaws.com/{}/{}'.format(
- self.bucket, self.key
- )
+ self.path = f'https://s3.amazonaws.com/{self.bucket}/{self.key}'
self.request.url = self.path
self.request.method = 'GET'
@@ -986,9 +984,9 @@ def test_presign_content_type_form_encoded_not_signed(self):
request = AWSRequest()
request.method = 'GET'
request.url = 'https://myservice.us-east-1.amazonaws.com/'
- request.headers[
- 'Content-Type'
- ] = 'application/x-www-form-urlencoded; charset=utf-8'
+ request.headers['Content-Type'] = (
+ 'application/x-www-form-urlencoded; charset=utf-8'
+ )
self.auth.add_auth(request)
query_string = self.get_parsed_query_string(request)
signed_headers = query_string.get('X-Amz-SignedHeaders')
@@ -1022,7 +1020,7 @@ def setUp(self):
}
self.request = AWSRequest()
- self.request.url = 'https://s3.amazonaws.com/%s' % self.bucket
+ self.request.url = f'https://s3.amazonaws.com/{self.bucket}'
self.request.method = 'POST'
self.request.context['s3-presign-post-fields'] = self.fields
@@ -1075,7 +1073,7 @@ def test_presign_post_with_security_token(self):
def test_empty_fields_and_policy(self):
self.request = AWSRequest()
- self.request.url = 'https://s3.amazonaws.com/%s' % self.bucket
+ self.request.url = f'https://s3.amazonaws.com/{self.bucket}'
self.request.method = 'POST'
self.auth.add_auth(self.request)
@@ -1152,7 +1150,7 @@ def test_presign_post_with_security_token(self):
def test_empty_fields_and_policy(self):
self.request = AWSRequest()
- self.request.url = 'https://s3.amazonaws.com/%s' % self.bucket
+ self.request.url = f'https://s3.amazonaws.com/{self.bucket}'
self.request.method = 'POST'
self.auth.add_auth(self.request)
diff --git a/tests/unit/auth/test_sigv4.py b/tests/unit/auth/test_sigv4.py
index 86c37ceca9..e09220f11e 100644
--- a/tests/unit/auth/test_sigv4.py
+++ b/tests/unit/auth/test_sigv4.py
@@ -22,6 +22,7 @@
generate testcases based on these files.
"""
+
import datetime
import io
import logging
@@ -164,9 +165,9 @@ def _test_signature_version_4(test_case):
def assert_equal(actual, expected, raw_request, part):
if actual != expected:
- message = "The %s did not match" % part
+ message = f"The {part} did not match"
message += f"\nACTUAL:{actual!r} !=\nEXPECT:{expected!r}"
- message += '\nThe raw request was:\n%s' % raw_request
+ message += f'\nThe raw request was:\n{raw_request}'
raise AssertionError(message)
diff --git a/tests/unit/docs/test_method.py b/tests/unit/docs/test_method.py
index a23fb98817..0c08d11738 100644
--- a/tests/unit/docs/test_method.py
+++ b/tests/unit/docs/test_method.py
@@ -352,9 +352,9 @@ def test_exclude_output(self):
def test_streaming_body_in_output(self):
self.add_shape_to_params('Body', 'Blob')
self.json_model['shapes']['Blob'] = {'type': 'blob'}
- self.json_model['shapes']['SampleOperationInputOutput'][
- 'payload'
- ] = 'Body'
+ self.json_model['shapes']['SampleOperationInputOutput']['payload'] = (
+ 'Body'
+ )
document_model_driven_method(
self.doc_structure,
'foo',
@@ -367,9 +367,9 @@ def test_streaming_body_in_output(self):
def test_event_stream_body_in_output(self):
self.add_shape_to_params('Payload', 'EventStream')
- self.json_model['shapes']['SampleOperationInputOutput'][
- 'payload'
- ] = 'Payload'
+ self.json_model['shapes']['SampleOperationInputOutput']['payload'] = (
+ 'Payload'
+ )
self.json_model['shapes']['EventStream'] = {
'type': 'structure',
'eventstream': True,
@@ -416,9 +416,9 @@ def test_streaming_body_in_input(self):
del self.json_model['operations']['SampleOperation']['output']
self.add_shape_to_params('Body', 'Blob')
self.json_model['shapes']['Blob'] = {'type': 'blob'}
- self.json_model['shapes']['SampleOperationInputOutput'][
- 'payload'
- ] = 'Body'
+ self.json_model['shapes']['SampleOperationInputOutput']['payload'] = (
+ 'Body'
+ )
document_model_driven_method(
self.doc_structure,
'foo',
diff --git a/tests/unit/docs/test_utils.py b/tests/unit/docs/test_utils.py
index 25fb6aa46a..fd8cdb37aa 100644
--- a/tests/unit/docs/test_utils.py
+++ b/tests/unit/docs/test_utils.py
@@ -196,7 +196,7 @@ def setUp(self):
def test_hides_params_from_doc_string(self):
section = self.doc_structure.add_new_section(self.name)
- param_signature = ':param %s: ' % self.name
+ param_signature = f':param {self.name}: '
section.write(param_signature)
self.assert_contains_line(param_signature)
self.param.hide_param(
@@ -208,7 +208,7 @@ def test_hides_params_from_doc_string(self):
def test_hides_param_from_example(self):
structure = self.doc_structure.add_new_section('structure-value')
section = structure.add_new_section(self.name)
- example = '%s: \'string\'' % self.name
+ example = f'{self.name}: \'string\''
section.write(example)
self.assert_contains_line(example)
self.param.hide_param(
diff --git a/tests/unit/response_parsing/test_response_parsing.py b/tests/unit/response_parsing/test_response_parsing.py
index f5ecefd9a9..3e3b9838cf 100644
--- a/tests/unit/response_parsing/test_response_parsing.py
+++ b/tests/unit/response_parsing/test_response_parsing.py
@@ -70,7 +70,7 @@ def _test_parsed_response(xmlfile, operation_model, expected):
response = {'body': response_body, 'status_code': 200, 'headers': {}}
for case in SPECIAL_CASES:
if case in xmlfile:
- print("SKIP: %s" % xmlfile)
+ print(f"SKIP: {xmlfile}")
return
if 'errors' in xmlfile:
response['status_code'] = 400
@@ -109,7 +109,7 @@ def _test_parsed_response(xmlfile, operation_model, expected):
pretty_d1 = pprint.pformat(d1, width=1).splitlines()
pretty_d2 = pprint.pformat(d2, width=1).splitlines()
diff = '\n' + '\n'.join(difflib.ndiff(pretty_d1, pretty_d2))
- raise AssertionError("Dicts are not equal:\n%s" % diff)
+ raise AssertionError(f"Dicts are not equal:\n{diff}")
def _convert_bytes_to_str(parsed):
@@ -135,7 +135,7 @@ def _xml_test_cases():
for dp in ['responses', 'errors']:
data_path = os.path.join(os.path.dirname(__file__), 'xml')
data_path = os.path.join(data_path, dp)
- xml_files = glob.glob('%s/*.xml' % data_path)
+ xml_files = glob.glob(f'{data_path}/*.xml')
service_names = set()
for fn in xml_files:
service_names.add(os.path.split(fn)[1].split('-')[0])
diff --git a/tests/unit/test_client.py b/tests/unit/test_client.py
index 962dab01a7..86974fd553 100644
--- a/tests/unit/test_client.py
+++ b/tests/unit/test_client.py
@@ -179,9 +179,9 @@ def create_client_creator(
if retry_config_translator is None:
retry_config_translator = botocore.translate
if endpoint_prefix is not None:
- self.service_description['metadata'][
- 'endpointPrefix'
- ] = endpoint_prefix
+ self.service_description['metadata']['endpointPrefix'] = (
+ endpoint_prefix
+ )
if endpoint_creator is not None:
self.endpoint_creator_cls.return_value = endpoint_creator
@@ -232,9 +232,9 @@ def test_client_name(self):
self.assertTrue(service_client.__class__.__name__, 'MyService')
def test_client_name_with_amazon(self):
- self.service_description['metadata'][
- 'serviceFullName'
- ] = 'Amazon MyService'
+ self.service_description['metadata']['serviceFullName'] = (
+ 'Amazon MyService'
+ )
creator = self.create_client_creator()
service_client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials
@@ -242,9 +242,9 @@ def test_client_name_with_amazon(self):
self.assertTrue(service_client.__class__.__name__, 'MyService')
def test_client_name_using_abreviation(self):
- self.service_description['metadata'][
- 'serviceAbbreviation'
- ] = 'Abbreviation'
+ self.service_description['metadata']['serviceAbbreviation'] = (
+ 'Abbreviation'
+ )
creator = self.create_client_creator()
service_client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials
@@ -252,9 +252,9 @@ def test_client_name_using_abreviation(self):
self.assertTrue(service_client.__class__.__name__, 'Abbreviation')
def test_client_name_with_non_alphabet_characters(self):
- self.service_description['metadata'][
- 'serviceFullName'
- ] = 'Amazon My-Service'
+ self.service_description['metadata']['serviceFullName'] = (
+ 'Amazon My-Service'
+ )
creator = self.create_client_creator()
service_client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials
@@ -1284,7 +1284,10 @@ def test_event_emitted_when_invoked(self):
creator = self.create_client_creator(event_emitter=event_emitter)
calls = []
- handler = lambda **kwargs: calls.append(kwargs)
+
+ def handler(**kwargs):
+ return calls.append(kwargs)
+
event_emitter.register('before-call', handler)
service_client = creator.create_client(
@@ -1298,10 +1301,14 @@ def test_events_are_per_client(self):
creator = self.create_client_creator(event_emitter=event_emitter)
first_calls = []
- first_handler = lambda **kwargs: first_calls.append(kwargs)
+
+ def first_handler(**kwargs):
+ return first_calls.append(kwargs)
second_calls = []
- second_handler = lambda **kwargs: second_calls.append(kwargs)
+
+ def second_handler(**kwargs):
+ return second_calls.append(kwargs)
first_client = creator.create_client(
'myservice', 'us-west-2', credentials=self.credentials
@@ -1339,7 +1346,10 @@ def test_clients_inherit_handlers_from_session(self):
# So if an event handler is registered before any clients are created:
base_calls = []
- base_handler = lambda **kwargs: base_calls.append(kwargs)
+
+ def base_handler(**kwargs):
+ return base_calls.append(kwargs)
+
event_emitter.register('before-call', base_handler)
# Then any client created from this point forward from the
@@ -1371,7 +1381,10 @@ def test_clients_inherit_only_at_create_time(self):
# 2. Now register an event handler from the originating event emitter.
base_calls = []
- base_handler = lambda **kwargs: base_calls.append(kwargs)
+
+ def base_handler(**kwargs):
+ return base_calls.append(kwargs)
+
event_emitter.register('before-call', base_handler)
# 3. The client will _not_ see this because it already has its
diff --git a/tests/unit/test_credentials.py b/tests/unit/test_credentials.py
index c0480c856c..3017444a6f 100644
--- a/tests/unit/test_credentials.py
+++ b/tests/unit/test_credentials.py
@@ -2111,9 +2111,9 @@ def test_cache_key_is_windows_safe(self):
},
}
cache = {}
- self.fake_config['profiles']['development'][
- 'role_arn'
- ] = 'arn:aws:iam::foo-role'
+ self.fake_config['profiles']['development']['role_arn'] = (
+ 'arn:aws:iam::foo-role'
+ )
client_creator = self.create_client_creator(with_response=response)
provider = credentials.AssumeRoleProvider(
@@ -2140,12 +2140,12 @@ def test_cache_key_with_role_session_name(self):
},
}
cache = {}
- self.fake_config['profiles']['development'][
- 'role_arn'
- ] = 'arn:aws:iam::foo-role'
- self.fake_config['profiles']['development'][
- 'role_session_name'
- ] = 'foo_role_session_name'
+ self.fake_config['profiles']['development']['role_arn'] = (
+ 'arn:aws:iam::foo-role'
+ )
+ self.fake_config['profiles']['development']['role_session_name'] = (
+ 'foo_role_session_name'
+ )
client_creator = self.create_client_creator(with_response=response)
provider = credentials.AssumeRoleProvider(
@@ -2277,9 +2277,9 @@ def test_assume_role_with_duration(self):
)
def test_assume_role_with_bad_duration(self):
- self.fake_config['profiles']['development'][
- 'duration_seconds'
- ] = 'garbage value'
+ self.fake_config['profiles']['development']['duration_seconds'] = (
+ 'garbage value'
+ )
response = {
'Credentials': {
'AccessKeyId': 'foo',
@@ -2780,9 +2780,9 @@ def __init__(self, profile_name):
def load(self):
return Credentials(
- '%s-access-key' % self._profile_name,
- '%s-secret-key' % self._profile_name,
- '%s-token' % self._profile_name,
+ f'{self._profile_name}-access-key',
+ f'{self._profile_name}-secret-key',
+ f'{self._profile_name}-token',
self.METHOD,
)
diff --git a/tests/unit/test_endpoint_provider.py b/tests/unit/test_endpoint_provider.py
index 51a07079bc..c1f82ace2e 100644
--- a/tests/unit/test_endpoint_provider.py
+++ b/tests/unit/test_endpoint_provider.py
@@ -249,7 +249,6 @@ def test_invalid_arn_returns_none(rule_lib):
"service": "s3",
"region": "",
"accountId": "",
- "region": "",
"resourceId": ["myBucket"],
},
),
diff --git a/tests/unit/test_eventstream.py b/tests/unit/test_eventstream.py
index c83811b3bb..a683e37e7f 100644
--- a/tests/unit/test_eventstream.py
+++ b/tests/unit/test_eventstream.py
@@ -10,7 +10,8 @@
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
-"""Unit tests for the binary event stream decoder. """
+"""Unit tests for the binary event stream decoder."""
+
import pytest
from botocore.eventstream import (
@@ -214,7 +215,7 @@
CORRUPTED_HEADER_LENGTH = (
(
- b"\x00\x00\x00=\xFF\x00\x01\x02\x07\xfd\x83\x96\x0ccontent-type\x07\x00"
+ b"\x00\x00\x00=\xff\x00\x01\x02\x07\xfd\x83\x96\x0ccontent-type\x07\x00"
b"\x10application/json{'foo':'bar'}\x8d\x9c\x08\xb1"
),
InvalidHeadersLength,
@@ -375,37 +376,37 @@ def test_message_to_response_dict_error():
def test_unpack_uint8():
- (value, bytes_consumed) = DecodeUtils.unpack_uint8(b'\xDE')
+ (value, bytes_consumed) = DecodeUtils.unpack_uint8(b'\xde')
assert bytes_consumed == 1
assert value == 0xDE
def test_unpack_uint32():
- (value, bytes_consumed) = DecodeUtils.unpack_uint32(b'\xDE\xAD\xBE\xEF')
+ (value, bytes_consumed) = DecodeUtils.unpack_uint32(b'\xde\xad\xbe\xef')
assert bytes_consumed == 4
assert value == 0xDEADBEEF
def test_unpack_int8():
- (value, bytes_consumed) = DecodeUtils.unpack_int8(b'\xFE')
+ (value, bytes_consumed) = DecodeUtils.unpack_int8(b'\xfe')
assert bytes_consumed == 1
assert value == -2
def test_unpack_int16():
- (value, bytes_consumed) = DecodeUtils.unpack_int16(b'\xFF\xFE')
+ (value, bytes_consumed) = DecodeUtils.unpack_int16(b'\xff\xfe')
assert bytes_consumed == 2
assert value == -2
def test_unpack_int32():
- (value, bytes_consumed) = DecodeUtils.unpack_int32(b'\xFF\xFF\xFF\xFE')
+ (value, bytes_consumed) = DecodeUtils.unpack_int32(b'\xff\xff\xff\xfe')
assert bytes_consumed == 4
assert value == -2
def test_unpack_int64():
- test_bytes = b'\xFF\xFF\xFF\xFF\xFF\xFF\xFF\xFE'
+ test_bytes = b'\xff\xff\xff\xff\xff\xff\xff\xfe'
(value, bytes_consumed) = DecodeUtils.unpack_int64(test_bytes)
assert bytes_consumed == 8
assert value == -2
diff --git a/tests/unit/test_exceptions.py b/tests/unit/test_exceptions.py
index ad0223dac9..4d1c84c802 100644
--- a/tests/unit/test_exceptions.py
+++ b/tests/unit/test_exceptions.py
@@ -48,8 +48,7 @@ def test_retry_info_added_when_present():
error_msg = str(exceptions.ClientError(response, 'operation'))
if '(reached max retries: 3)' not in error_msg:
raise AssertionError(
- "retry information not inject into error "
- "message: %s" % error_msg
+ "retry information not inject into error " f"message: {error_msg}"
)
@@ -67,7 +66,7 @@ def test_retry_info_not_added_if_retry_attempts_not_present():
raise AssertionError(
"Retry information should not be in exception "
"message when retry attempts not in response "
- "metadata: %s" % error_msg
+ f"metadata: {error_msg}"
)
@@ -84,7 +83,7 @@ def test_can_handle_when_response_missing_error_key():
if 'An error occurred (Unknown)' not in str(e):
raise AssertionError(
"Error code should default to 'Unknown' "
- "when missing error response, instead got: %s" % str(e)
+ f"when missing error response, instead got: {str(e)}"
)
diff --git a/tests/unit/test_handlers.py b/tests/unit/test_handlers.py
index 2701c32efe..44ebbecc3f 100644
--- a/tests/unit/test_handlers.py
+++ b/tests/unit/test_handlers.py
@@ -767,7 +767,7 @@ def test_switch_host_with_param(self):
request = AWSRequest()
url = 'https://machinelearning.us-east-1.amazonaws.com'
new_endpoint = 'https://my-custom-endpoint.amazonaws.com'
- data = '{"PredictEndpoint":"%s"}' % new_endpoint
+ data = f'{{"PredictEndpoint":"{new_endpoint}"}}'
request.data = data.encode('utf-8')
request.url = url
handlers.switch_host_with_param(request, 'PredictEndpoint')
@@ -822,7 +822,7 @@ def test_validation_is_s3_accesspoint_arn(self):
arn = 'arn:aws:s3:us-west-2:123456789012:accesspoint:endpoint'
handlers.validate_bucket_name({'Bucket': arn})
except ParamValidationError:
- self.fail('The s3 arn: %s should pass validation' % arn)
+ self.fail(f'The s3 arn: {arn} should pass validation')
def test_validation_is_s3_outpost_arn(self):
try:
@@ -832,7 +832,7 @@ def test_validation_is_s3_outpost_arn(self):
)
handlers.validate_bucket_name({'Bucket': arn})
except ParamValidationError:
- self.fail('The s3 arn: %s should pass validation' % arn)
+ self.fail(f'The s3 arn: {arn} should pass validation')
def test_validation_is_global_s3_bucket_arn(self):
with self.assertRaises(ParamValidationError):
@@ -1213,7 +1213,7 @@ def test_sse_params(self):
'UploadPartCopy',
'SelectObjectContent',
):
- event = 'before-parameter-build.s3.%s' % op
+ event = f'before-parameter-build.s3.{op}'
params = {
'SSECustomerKey': b'bar',
'SSECustomerAlgorithm': 'AES256',
@@ -1235,7 +1235,7 @@ def test_sse_params_as_str(self):
def test_copy_source_sse_params(self):
for op in ['CopyObject', 'UploadPartCopy']:
- event = 'before-parameter-build.s3.%s' % op
+ event = f'before-parameter-build.s3.{op}'
params = {
'CopySourceSSECustomerKey': b'bar',
'CopySourceSSECustomerAlgorithm': 'AES256',
@@ -1619,10 +1619,10 @@ def test_does_validate_host_with_illegal_char(self):
(
{
'AWS_LAMBDA_FUNCTION_NAME': 'foo',
- '_X_AMZN_TRACE_ID': 'test123-=;:+&[]{}\"\'',
+ '_X_AMZN_TRACE_ID': 'test123-=;:+&[]{}"\'',
},
{},
- {'X-Amzn-Trace-Id': 'test123-=;:+&[]{}\"\''},
+ {'X-Amzn-Trace-Id': 'test123-=;:+&[]{}"\''},
),
],
)
diff --git a/tests/unit/test_hooks.py b/tests/unit/test_hooks.py
index ea46ae33f5..5b085e5b22 100644
--- a/tests/unit/test_hooks.py
+++ b/tests/unit/test_hooks.py
@@ -262,7 +262,7 @@ def assert_hook_is_called_given_event(self, event):
self.emitter.emit(event)
after = len(self.hook_calls)
if not after > starting:
- self.fail("Handler was not called for event: %s" % event)
+ self.fail(f"Handler was not called for event: {event}")
self.assertEqual(self.hook_calls[-1]['event_name'], event)
def assert_hook_is_not_called_given_event(self, event):
@@ -272,8 +272,7 @@ def assert_hook_is_not_called_given_event(self, event):
if not after == starting:
self.fail(
"Handler was called for event but was not "
- "suppose to be called: %s, last_event: %s"
- % (event, self.hook_calls[-1])
+ f"suppose to be called: {event}, last_event: {self.hook_calls[-1]}"
)
def test_one_level_wildcard_handler(self):
@@ -399,7 +398,9 @@ def test_register_with_unique_id(self):
self.assertEqual(len(self.hook_calls), 0)
def test_remove_handler_with_unique_id(self):
- hook2 = lambda **kwargs: self.hook_calls.append(kwargs)
+ def hook2(**kwargs):
+ return self.hook_calls.append(kwargs)
+
self.emitter.register('foo.bar.baz', self.hook, unique_id='foo')
self.emitter.register('foo.bar.baz', hook2)
self.emitter.emit('foo.bar.baz')
diff --git a/tests/unit/test_http_session.py b/tests/unit/test_http_session.py
index 9e059ad5a8..8fb081e1ab 100644
--- a/tests/unit/test_http_session.py
+++ b/tests/unit/test_http_session.py
@@ -505,7 +505,7 @@ def test_close_proxied(self):
proxies = {'https': 'http://proxy.com', 'http': 'http://proxy2.com'}
session = URLLib3Session(proxies=proxies)
for proxy, proxy_url in proxies.items():
- self.request.url = '%s://example.com/' % proxy
+ self.request.url = f'{proxy}://example.com/'
session.send(self.request.prepare())
session.close()
diff --git a/tests/unit/test_protocols.py b/tests/unit/test_protocols.py
index a2ff077faa..253ec5f7a5 100644
--- a/tests/unit/test_protocols.py
+++ b/tests/unit/test_protocols.py
@@ -50,6 +50,7 @@
BOTOCORE_TEST_ID=5:1 pytest test/unit/test_protocols.py
"""
+
import copy
import os
from base64 import b64decode
@@ -136,7 +137,7 @@ def test_input_compliance(json_description, case, basename):
try:
protocol_serializer = PROTOCOL_SERIALIZERS[protocol_type]
except KeyError:
- raise RuntimeError("Unknown protocol: %s" % protocol_type)
+ raise RuntimeError(f"Unknown protocol: {protocol_type}")
serializer = protocol_serializer()
serializer.MAP_TYPE = OrderedDict
operation_model = OperationModel(case['given'], model)
@@ -155,7 +156,7 @@ def _assert_request_body_is_bytes(body):
if not isinstance(body, bytes):
raise AssertionError(
"Expected body to be serialized as type "
- "bytes(), instead got: %s" % type(body)
+ f"bytes(), instead got: {type(body)}"
)
@@ -217,10 +218,9 @@ def test_output_compliance(json_description, case, basename):
parsed = _fixup_parsed_result(parsed)
except Exception as e:
msg = (
- "\nFailed to run test : %s\n"
- "Protocol : %s\n"
- "Description : %s (%s:%s)\n"
- % (
+ "\nFailed to run test : {}\n"
+ "Protocol : {}\n"
+ "Description : {} ({}:{})\n".format(
e,
model.metadata['protocol'],
case['description'],
@@ -312,14 +312,13 @@ def _output_failure_message(
):
j = _try_json_dump
error_message = (
- "\nDescription : %s (%s:%s)\n"
- "Protocol: : %s\n"
- "Given : %s\n"
- "Response : %s\n"
- "Expected serialization: %s\n"
- "Actual serialization : %s\n"
- "Assertion message : %s\n"
- % (
+ "\nDescription : {} ({}:{})\n"
+ "Protocol: : {}\n"
+ "Given : {}\n"
+ "Response : {}\n"
+ "Expected serialization: {}\n"
+ "Actual serialization : {}\n"
+ "Assertion message : {}\n".format(
case['description'],
case['suite_id'],
case['test_id'],
@@ -337,14 +336,13 @@ def _output_failure_message(
def _input_failure_message(protocol_type, case, actual_request, error):
j = _try_json_dump
error_message = (
- "\nDescription : %s (%s:%s)\n"
- "Protocol: : %s\n"
- "Given : %s\n"
- "Params : %s\n"
- "Expected serialization: %s\n"
- "Actual serialization : %s\n"
- "Assertion message : %s\n"
- % (
+ "\nDescription : {} ({}:{})\n"
+ "Protocol: : {}\n"
+ "Given : {}\n"
+ "Params : {}\n"
+ "Expected serialization: {}\n"
+ "Actual serialization : {}\n"
+ "Assertion message : {}\n".format(
case['description'],
case['suite_id'],
case['test_id'],
@@ -373,15 +371,9 @@ def assert_equal(first, second, prefix):
assert first == second
except Exception:
try:
- better = "{} (actual != expected)\n{} !=\n{}".format(
- prefix,
- json.dumps(first, indent=2),
- json.dumps(second, indent=2),
- )
+ better = f"{prefix} (actual != expected)\n{json.dumps(first, indent=2)} !=\n{json.dumps(second, indent=2)}"
except (ValueError, TypeError):
- better = "{} (actual != expected)\n{} !=\n{}".format(
- prefix, first, second
- )
+ better = f"{prefix} (actual != expected)\n{first} !=\n{second}"
raise AssertionError(better)
@@ -397,9 +389,9 @@ def _serialize_request_description(request_dict):
# test runner we need to handle the case where the url_path
# already has query params.
if '?' not in request_dict['url_path']:
- request_dict['url_path'] += '?%s' % encoded
+ request_dict['url_path'] += f'?{encoded}'
else:
- request_dict['url_path'] += '&%s' % encoded
+ request_dict['url_path'] += f'&{encoded}'
def _assert_requests_equal(actual, expected):
diff --git a/tests/unit/test_serialize.py b/tests/unit/test_serialize.py
index ca57d01af2..639523fd59 100644
--- a/tests/unit/test_serialize.py
+++ b/tests/unit/test_serialize.py
@@ -11,6 +11,7 @@
may result in a a coverage gap that would otherwise be untested.
"""
+
import base64
import datetime
import io
@@ -430,7 +431,7 @@ def test_instantiate_without_validation(self):
except ParamValidationError as e:
self.fail(
"Shouldn't fail serializing valid parameter without "
- "validation: {}".format(e)
+ f"validation: {e}"
)
try:
@@ -438,7 +439,7 @@ def test_instantiate_without_validation(self):
except ParamValidationError as e:
self.fail(
"Shouldn't fail serializing invalid parameter without "
- "validation: {}".format(e)
+ f"validation: {e}"
)
def test_instantiate_with_validation(self):
@@ -450,7 +451,7 @@ def test_instantiate_with_validation(self):
except ParamValidationError as e:
self.fail(
"Shouldn't fail serializing invalid parameter without "
- "validation: {}".format(e)
+ f"validation: {e}"
)
with self.assertRaises(ParamValidationError):
diff --git a/tests/unit/test_session.py b/tests/unit/test_session.py
index 597cd15dc6..d3eabbdf08 100644
--- a/tests/unit/test_session.py
+++ b/tests/unit/test_session.py
@@ -245,7 +245,10 @@ def test_path_not_in_available_profiles(self):
def test_emit_delegates_to_emitter(self):
calls = []
- handler = lambda **kwargs: calls.append(kwargs)
+
+ def handler(**kwargs):
+ return calls.append(kwargs)
+
self.session.register('foo', handler)
self.session.emit('foo')
self.assertEqual(len(calls), 1)
@@ -255,7 +258,10 @@ def test_emitter_can_be_passed_in(self):
events = HierarchicalEmitter()
session = create_session(event_hooks=events)
calls = []
- handler = lambda **kwargs: calls.append(kwargs)
+
+ def handler(**kwargs):
+ return calls.append(kwargs)
+
events.register('foo', handler)
session.emit('foo')
@@ -289,7 +295,10 @@ def test_general_purpose_logger(self, formatter, file_handler, get_logger):
def test_register_with_unique_id(self):
calls = []
- handler = lambda **kwargs: calls.append(kwargs)
+
+ def handler(**kwargs):
+ return calls.append(kwargs)
+
self.session.register('foo', handler, unique_id='bar')
self.session.emit('foo')
self.assertEqual(calls[0]['event_name'], 'foo')
@@ -711,7 +720,7 @@ def test_create_client_uses_api_version_from_config(self, client_creator):
f.write('[default]\n')
f.write(
'foo_api_versions =\n'
- ' myservice = %s\n' % config_api_version
+ f' myservice = {config_api_version}\n'
)
f.flush()
@@ -760,7 +769,7 @@ def test_param_api_version_overrides_config_value(self, client_creator):
f.write('[default]\n')
f.write(
'foo_api_versions =\n'
- ' myservice = %s\n' % config_api_version
+ f' myservice = {config_api_version}\n'
)
f.flush()
@@ -901,14 +910,20 @@ def test_last_registration_wins(self):
def test_can_lazy_register_a_component(self):
component = object()
- lazy = lambda: component
+
+ def lazy():
+ return component
+
self.components.lazy_register_component('foo', lazy)
self.assertIs(self.components.get_component('foo'), component)
def test_latest_registration_wins_even_if_lazy(self):
first = object()
second = object()
- lazy_second = lambda: second
+
+ def lazy_second():
+ return second
+
self.components.register_component('foo', first)
self.components.lazy_register_component('foo', lazy_second)
self.assertIs(self.components.get_component('foo'), second)
@@ -916,7 +931,10 @@ def test_latest_registration_wins_even_if_lazy(self):
def test_latest_registration_overrides_lazy(self):
first = object()
second = object()
- lazy_first = lambda: first
+
+ def lazy_first():
+ return first
+
self.components.lazy_register_component('foo', lazy_first)
self.components.register_component('foo', second)
self.assertIs(self.components.get_component('foo'), second)
@@ -996,6 +1014,8 @@ def init_hook(session):
self.assertEqual(call_args, [])
def test_unregister_hook_raises_value_error(self):
- not_registered = lambda session: None
+ def not_registered(session):
+ return None
+
with self.assertRaises(ValueError):
self.assertRaises(unregister_initializer(not_registered))
diff --git a/tests/unit/test_session_legacy.py b/tests/unit/test_session_legacy.py
index 5388ffa2d4..4ace6c8905 100644
--- a/tests/unit/test_session_legacy.py
+++ b/tests/unit/test_session_legacy.py
@@ -236,7 +236,10 @@ def test_path_not_in_available_profiles(self):
def test_emit_delegates_to_emitter(self):
calls = []
- handler = lambda **kwargs: calls.append(kwargs)
+
+ def handler(**kwargs):
+ return calls.append(kwargs)
+
self.session.register('foo', handler)
self.session.emit('foo')
self.assertEqual(len(calls), 1)
@@ -248,7 +251,10 @@ def test_emitter_can_be_passed_in(self):
session_vars=self.env_vars, event_hooks=events
)
calls = []
- handler = lambda **kwargs: calls.append(kwargs)
+
+ def handler(**kwargs):
+ return calls.append(kwargs)
+
events.register('foo', handler)
session.emit('foo')
@@ -283,7 +289,10 @@ def test_general_purpose_logger(self, formatter, file_handler, get_logger):
def test_register_with_unique_id(self):
calls = []
- handler = lambda **kwargs: calls.append(kwargs)
+
+ def handler(**kwargs):
+ return calls.append(kwargs)
+
self.session.register('foo', handler, unique_id='bar')
self.session.emit('foo')
self.assertEqual(calls[0]['event_name'], 'foo')
@@ -723,7 +732,7 @@ def test_create_client_uses_api_version_from_config(self, client_creator):
f.write('[default]\n')
f.write(
'foo_api_versions =\n'
- ' myservice = %s\n' % config_api_version
+ f' myservice = {config_api_version}\n'
)
f.flush()
@@ -774,7 +783,7 @@ def test_param_api_version_overrides_config_value(self, client_creator):
f.write('[default]\n')
f.write(
'foo_api_versions =\n'
- ' myservice = %s\n' % config_api_version
+ f' myservice = {config_api_version}\n'
)
f.flush()
@@ -842,14 +851,20 @@ def test_last_registration_wins(self):
def test_can_lazy_register_a_component(self):
component = object()
- lazy = lambda: component
+
+ def lazy():
+ return component
+
self.components.lazy_register_component('foo', lazy)
self.assertIs(self.components.get_component('foo'), component)
def test_latest_registration_wins_even_if_lazy(self):
first = object()
second = object()
- lazy_second = lambda: second
+
+ def lazy_second():
+ return second
+
self.components.register_component('foo', first)
self.components.lazy_register_component('foo', lazy_second)
self.assertIs(self.components.get_component('foo'), second)
@@ -857,7 +872,10 @@ def test_latest_registration_wins_even_if_lazy(self):
def test_latest_registration_overrides_lazy(self):
first = object()
second = object()
- lazy_first = lambda: first
+
+ def lazy_first():
+ return first
+
self.components.lazy_register_component('foo', lazy_first)
self.components.register_component('foo', second)
self.assertIs(self.components.get_component('foo'), second)
diff --git a/tests/unit/test_signers.py b/tests/unit/test_signers.py
index 92a3d5da3f..d538934593 100644
--- a/tests/unit/test_signers.py
+++ b/tests/unit/test_signers.py
@@ -996,7 +996,7 @@ def test_generate_presign_url_emits_is_presign_in_context(self):
self.assertTrue(
kwargs.get('context', {}).get('is_presign_request'),
'The context did not have is_presign_request set to True for '
- 'the following kwargs emitted: %s' % kwargs,
+ f'the following kwargs emitted: {kwargs}',
)
def test_context_param_from_event_handler_sent_to_endpoint_resolver(self):
diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py
index 32c9f336c3..240dd6b535 100644
--- a/tests/unit/test_utils.py
+++ b/tests/unit/test_utils.py
@@ -1308,25 +1308,25 @@ def test_default(self):
)
def test_client_name_with_amazon(self):
- self.service_description['metadata'][
- 'serviceFullName'
- ] = 'Amazon MyService'
+ self.service_description['metadata']['serviceFullName'] = (
+ 'Amazon MyService'
+ )
self.assertEqual(
get_service_module_name(self.service_model), 'MyService'
)
def test_client_name_using_abreviation(self):
- self.service_description['metadata'][
- 'serviceAbbreviation'
- ] = 'Abbreviation'
+ self.service_description['metadata']['serviceAbbreviation'] = (
+ 'Abbreviation'
+ )
self.assertEqual(
get_service_module_name(self.service_model), 'Abbreviation'
)
def test_client_name_with_non_alphabet_characters(self):
- self.service_description['metadata'][
- 'serviceFullName'
- ] = 'Amazon My-Service'
+ self.service_description['metadata']['serviceFullName'] = (
+ 'Amazon My-Service'
+ )
self.assertEqual(
get_service_module_name(self.service_model), 'MyService'
)
@@ -2199,9 +2199,9 @@ def get_s3_request(
if bucket:
url += bucket
if key:
- url += '/%s' % key
+ url += f'/{key}'
if querystring:
- url += '?%s' % querystring
+ url += f'?{querystring}'
return AWSRequest(method='GET', headers={}, url=url)
def get_s3_outpost_request(self, **s3_request_kwargs):
@@ -2268,65 +2268,60 @@ def test_register(self):
def test_outpost_endpoint(self):
request = self.get_s3_outpost_request()
self.call_set_endpoint(self.endpoint_setter, request=request)
- expected_url = 'https://{}-{}.{}.s3-outposts.{}.amazonaws.com/'.format(
- self.accesspoint_name,
- self.account,
- self.outpost_name,
- self.region_name,
+ outpost_prefix = (
+ f'{self.accesspoint_name}-{self.account}.{self.outpost_name}'
)
+ dns_suffix = f'{self.region_name}.amazonaws.com'
+ expected_url = f'https://{outpost_prefix}.s3-outposts.{dns_suffix}/'
self.assertEqual(request.url, expected_url)
def test_outpost_endpoint_preserves_key_in_path(self):
request = self.get_s3_outpost_request(key=self.key)
self.call_set_endpoint(self.endpoint_setter, request=request)
+ outpost_prefix = (
+ f'{self.accesspoint_name}-{self.account}.{self.outpost_name}'
+ )
+ dns_suffix = f'{self.region_name}.amazonaws.com'
expected_url = (
- 'https://{}-{}.{}.s3-outposts.{}.amazonaws.com/{}'.format(
- self.accesspoint_name,
- self.account,
- self.outpost_name,
- self.region_name,
- self.key,
- )
+ f'https://{outpost_prefix}.s3-outposts.{dns_suffix}/{self.key}'
)
self.assertEqual(request.url, expected_url)
def test_accesspoint_endpoint(self):
request = self.get_s3_accesspoint_request()
self.call_set_endpoint(self.endpoint_setter, request=request)
- expected_url = 'https://{}-{}.s3-accesspoint.{}.amazonaws.com/'.format(
- self.accesspoint_name, self.account, self.region_name
+ accesspoint_prefix = f'{self.accesspoint_name}-{self.account}'
+ dns_suffix = f'{self.region_name}.amazonaws.com'
+ expected_url = (
+ f'https://{accesspoint_prefix}.s3-accesspoint.{dns_suffix}/'
)
self.assertEqual(request.url, expected_url)
def test_accesspoint_preserves_key_in_path(self):
request = self.get_s3_accesspoint_request(key=self.key)
self.call_set_endpoint(self.endpoint_setter, request=request)
- expected_url = (
- 'https://{}-{}.s3-accesspoint.{}.amazonaws.com/{}'.format(
- self.accesspoint_name, self.account, self.region_name, self.key
- )
- )
+ accesspoint_prefix = f'{self.accesspoint_name}-{self.account}'
+ dns_suffix = f'{self.region_name}.amazonaws.com'
+ expected_url = f'https://{accesspoint_prefix}.s3-accesspoint.{dns_suffix}/{self.key}'
self.assertEqual(request.url, expected_url)
def test_accesspoint_preserves_scheme(self):
request = self.get_s3_accesspoint_request(scheme='http://')
self.call_set_endpoint(self.endpoint_setter, request=request)
- expected_url = 'http://{}-{}.s3-accesspoint.{}.amazonaws.com/'.format(
- self.accesspoint_name,
- self.account,
- self.region_name,
+ accesspoint_prefix = f'{self.accesspoint_name}-{self.account}'
+ dns_suffix = f'{self.region_name}.amazonaws.com'
+ expected_url = (
+ f'http://{accesspoint_prefix}.s3-accesspoint.{dns_suffix}/'
)
self.assertEqual(request.url, expected_url)
def test_accesspoint_preserves_query_string(self):
request = self.get_s3_accesspoint_request(querystring='acl')
self.call_set_endpoint(self.endpoint_setter, request=request)
+ accesspoint_prefix = f'{self.accesspoint_name}-{self.account}'
+ dns_suffix = f'{self.region_name}.amazonaws.com'
expected_url = (
- 'https://{}-{}.s3-accesspoint.{}.amazonaws.com/?acl'.format(
- self.accesspoint_name,
- self.account,
- self.region_name,
- )
+ f'https://{accesspoint_prefix}.s3-accesspoint.{dns_suffix}/?acl'
)
self.assertEqual(request.url, expected_url)
@@ -2336,10 +2331,10 @@ def test_uses_resolved_dns_suffix(self):
}
request = self.get_s3_accesspoint_request()
self.call_set_endpoint(self.endpoint_setter, request=request)
- expected_url = 'https://{}-{}.s3-accesspoint.{}.mysuffix.com/'.format(
- self.accesspoint_name,
- self.account,
- self.region_name,
+ accesspoint_prefix = f'{self.accesspoint_name}-{self.account}'
+ dns_suffix = f'{self.region_name}.mysuffix.com'
+ expected_url = (
+ f'https://{accesspoint_prefix}.s3-accesspoint.{dns_suffix}/'
)
self.assertEqual(request.url, expected_url)
@@ -2350,10 +2345,10 @@ def test_uses_region_of_client_if_use_arn_disabled(self):
)
request = self.get_s3_accesspoint_request()
self.call_set_endpoint(self.endpoint_setter, request=request)
- expected_url = 'https://{}-{}.s3-accesspoint.{}.amazonaws.com/'.format(
- self.accesspoint_name,
- self.account,
- client_region,
+ accesspoint_prefix = f'{self.accesspoint_name}-{self.account}'
+ dns_suffix = f'{client_region}.amazonaws.com'
+ expected_url = (
+ f'https://{accesspoint_prefix}.s3-accesspoint.{dns_suffix}/'
)
self.assertEqual(request.url, expected_url)
@@ -2363,9 +2358,8 @@ def test_accesspoint_supports_custom_endpoint(self):
)
request = self.get_s3_accesspoint_request()
self.call_set_endpoint(endpoint_setter, request=request)
- expected_url = 'https://{}-{}.custom.com/'.format(
- self.accesspoint_name,
- self.account,
+ expected_url = (
+ f'https://{self.accesspoint_name}-{self.account}.custom.com/'
)
self.assertEqual(request.url, expected_url)
@@ -2395,8 +2389,8 @@ def test_set_endpoint_for_auto(self):
)
request = self.get_s3_request(self.bucket, self.key)
self.call_set_endpoint(endpoint_setter, request)
- expected_url = 'https://{}.s3.us-west-2.amazonaws.com/{}'.format(
- self.bucket, self.key
+ expected_url = (
+ f'https://{self.bucket}.s3.us-west-2.amazonaws.com/{self.key}'
)
self.assertEqual(request.url, expected_url)
@@ -2406,8 +2400,8 @@ def test_set_endpoint_for_virtual(self):
)
request = self.get_s3_request(self.bucket, self.key)
self.call_set_endpoint(endpoint_setter, request)
- expected_url = 'https://{}.s3.us-west-2.amazonaws.com/{}'.format(
- self.bucket, self.key
+ expected_url = (
+ f'https://{self.bucket}.s3.us-west-2.amazonaws.com/{self.key}'
)
self.assertEqual(request.url, expected_url)
@@ -2417,8 +2411,8 @@ def test_set_endpoint_for_path(self):
)
request = self.get_s3_request(self.bucket, self.key)
self.call_set_endpoint(endpoint_setter, request)
- expected_url = 'https://s3.us-west-2.amazonaws.com/{}/{}'.format(
- self.bucket, self.key
+ expected_url = (
+ f'https://s3.us-west-2.amazonaws.com/{self.bucket}/{self.key}'
)
self.assertEqual(request.url, expected_url)
@@ -2428,8 +2422,8 @@ def test_set_endpoint_for_accelerate(self):
)
request = self.get_s3_request(self.bucket, self.key)
self.call_set_endpoint(endpoint_setter, request)
- expected_url = 'https://{}.s3-accelerate.amazonaws.com/{}'.format(
- self.bucket, self.key
+ expected_url = (
+ f'https://{self.bucket}.s3-accelerate.amazonaws.com/{self.key}'
)
self.assertEqual(request.url, expected_url)