Fixup string concatenations (#19099)
blag authored Oct 21, 2021
1 parent 9ad8d2e commit f5ad26d
Showing 16 changed files with 121 additions and 114 deletions.
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -563,7 +563,7 @@ repos:
additional_dependencies:
- 'PyYAML==5.3.1'
- 'jsonschema==3.2.0'
- - 'tabulate==0.8.7'
+ - 'tabulate==0.8.8'
- 'jsonpath-ng==1.5.3'
- 'rich==10.9.0'
- id: mermaid
7 changes: 4 additions & 3 deletions airflow/models/dagcode.py
@@ -94,10 +94,11 @@ def bulk_sync_to_db(cls, filelocs: Iterable[str], session=None):
hashes_to_filelocs = {DagCode.dag_fileloc_hash(fileloc): fileloc for fileloc in filelocs}
message = ""
for fileloc in conflicting_filelocs:
+ filename = hashes_to_filelocs[DagCode.dag_fileloc_hash(fileloc)]
message += (
"Filename '{}' causes a hash collision in the "
+ "database with '{}'. Please rename the file."
).format(hashes_to_filelocs[DagCode.dag_fileloc_hash(fileloc)], fileloc)
f"Filename '{filename}' causes a hash collision in the "
f"database with '{fileloc}'. Please rename the file."
)
raise AirflowException(message)

existing_filelocs = {dag_code.fileloc for dag_code in existing_orm_dag_codes}
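Note that the rewritten hunk leans on Python's implicit concatenation of adjacent string literals: the parser joins the pieces, so no runtime `+` is needed, and each piece can carry its own `f` prefix while the placeholder interpolation still happens at run time. A minimal sketch of the pattern, with made-up values standing in for the real filelocs:

    fileloc = "/dags/example_a.py"   # illustrative value, not from the Airflow code
    filename = "example_b.py"        # illustrative value

    # Adjacent literals inside the parentheses are fused into one string by the
    # parser; the f-string placeholders are filled in at run time.
    message = (
        f"Filename '{filename}' causes a hash collision in the "
        f"database with '{fileloc}'. Please rename the file."
    )
    assert message == (
        "Filename 'example_b.py' causes a hash collision in the "
        "database with '/dags/example_a.py'. Please rename the file."
    )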
4 changes: 2 additions & 2 deletions airflow/models/taskinstance.py
@@ -1267,8 +1267,8 @@ def _date_or_empty(self, attr: str):
def _log_state(self, lead_msg: str = ''):
self.log.info(
'%sMarking task as %s.'
- + ' dag_id=%s, task_id=%s,'
- + ' execution_date=%s, start_date=%s, end_date=%s',
+ ' dag_id=%s, task_id=%s,'
+ ' execution_date=%s, start_date=%s, end_date=%s',
lead_msg,
self.state.upper(),
self.dag_id,
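Here the format string stays a single %-style template split across adjacent literals, and the values remain separate arguments, so the logging call still defers interpolation until a handler actually emits the record. A standalone sketch of the same call shape, with illustrative values in place of the task-instance fields:

    import logging

    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger("example")

    # Adjacent literals form one format string; logging fills the %s placeholders
    # lazily, only when the record is emitted.
    log.info(
        '%sMarking task as %s.'
        ' dag_id=%s, task_id=%s,'
        ' execution_date=%s, start_date=%s, end_date=%s',
        '',                            # lead_msg (illustrative)
        'SUCCESS',                     # state (illustrative)
        'example_dag',                 # dag_id (illustrative)
        'example_task',                # task_id (illustrative)
        '2021-10-21T00:00:00+00:00',   # execution_date (illustrative)
        '2021-10-21T00:00:00+00:00',   # start_date (illustrative)
        '2021-10-21T00:00:05+00:00',   # end_date (illustrative)
    )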
8 changes: 4 additions & 4 deletions airflow/operators/google_api_to_s3_transfer.py
@@ -40,10 +40,10 @@ class GoogleApiToS3Transfer(GoogleApiToS3Operator):

def __init__(self, **kwargs):
warnings.warn(
"""This class is deprecated.
Please use
`airflow.providers.amazon.aws.transfers."""
+ "google_api_to_s3_transfer.GoogleApiToS3Operator`.",
"This class is deprecated. "
"Please use "
"`airflow.providers.amazon.aws.transfers."
"google_api_to_s3_transfer.GoogleApiToS3Operator`.",
DeprecationWarning,
stacklevel=3,
)
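The deprecation shim keeps the message as one string built from adjacent literals and still passes `stacklevel=3` so the warning is attributed to the user's call site rather than to the shim itself. A rough sketch of the idiom with placeholder class and module names (not the real provider paths); `stacklevel=2` is used here because the sketch has one less layer of indirection:

    import warnings


    class OldOperator:
        """Hypothetical deprecated alias kept only for backwards compatibility."""

        def __init__(self, **kwargs):
            warnings.warn(
                "This class is deprecated. "
                "Please use "
                "`new_package.new_module.NewOperator`.",
                DeprecationWarning,
                stacklevel=2,  # attribute the warning to the line that instantiated OldOperator
            )


    OldOperator()  # should emit a DeprecationWarning pointing at this line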
4 changes: 2 additions & 2 deletions airflow/providers/alibaba/cloud/sensors/oss_key.py
@@ -76,8 +76,8 @@ def poke(self, context):
if parsed_url.scheme != '' or parsed_url.netloc != '':
raise AirflowException(
'If bucket_name is provided, bucket_key'
- + ' should be relative path from root'
- + ' level, rather than a full oss:// url'
+ ' should be relative path from root'
+ ' level, rather than a full oss:// url'
)

self.log.info('Poking for key : oss://%s/%s', self.bucket_name, self.bucket_key)
8 changes: 4 additions & 4 deletions airflow/providers/amazon/aws/hooks/s3.py
@@ -716,8 +716,8 @@ def copy_object(
if parsed_url.scheme != '' or parsed_url.netloc != '':
raise AirflowException(
'If dest_bucket_name is provided, '
- + 'dest_bucket_key should be relative path '
- + 'from root level, rather than a full s3:// url'
+ 'dest_bucket_key should be relative path '
+ 'from root level, rather than a full s3:// url'
)

if source_bucket_name is None:
@@ -727,8 +727,8 @@
if parsed_url.scheme != '' or parsed_url.netloc != '':
raise AirflowException(
'If source_bucket_name is provided, '
- + 'source_bucket_key should be relative path '
- + 'from root level, rather than a full s3:// url'
+ 'source_bucket_key should be relative path '
+ 'from root level, rather than a full s3:// url'
)

copy_source = {'Bucket': source_bucket_name, 'Key': source_bucket_key, 'VersionId': source_version_id}
4 changes: 2 additions & 2 deletions airflow/providers/amazon/aws/sensors/s3_key.py
@@ -89,8 +89,8 @@ def poke(self, context):
if parsed_url.scheme != '' or parsed_url.netloc != '':
raise AirflowException(
'If bucket_name is provided, bucket_key'
- + ' should be relative path from root'
- + ' level, rather than a full s3:// url'
+ ' should be relative path from root'
+ ' level, rather than a full s3:// url'
)

self.log.info('Poking for key : s3://%s/%s', self.bucket_name, self.bucket_key)
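The surrounding check (the context lines above) uses `urllib.parse.urlparse` to reject a full `s3://` URL when `bucket_name` is already supplied: a relative key parses with empty `scheme` and `netloc`, a full URL does not. A small sketch of that distinction with invented paths:

    from urllib.parse import urlparse

    relative_key = urlparse("data/year=2021/part-0000.parquet")
    full_url = urlparse("s3://example-bucket/data/year=2021/part-0000.parquet")

    # A relative key carries neither a scheme nor a network location...
    assert relative_key.scheme == "" and relative_key.netloc == ""
    # ...while a full s3:// URL carries both, which is what the sensor rejects
    # when bucket_name was passed separately.
    assert full_url.scheme == "s3" and full_url.netloc == "example-bucket"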
2 changes: 1 addition & 1 deletion airflow/providers/databricks/hooks/databricks.py
@@ -216,7 +216,7 @@ def _do_api_call(self, endpoint_info, json):

if attempt_num == self.retry_limit:
raise AirflowException(
- ('API requests to Databricks failed {} times. ' + 'Giving up.').format(self.retry_limit)
+ f'API requests to Databricks failed {self.retry_limit} times. Giving up.'
)

attempt_num += 1
@@ -75,7 +75,7 @@
"/bin/bash",
"-c",
"/bin/sleep 30; "
"/bin/mv {{ params.source_location }}/" + f"{t_view.output}" + " {{ params.target_location }};"
"/bin/mv {{ params.source_location }}/" + str(t_view.output) + " {{ params.target_location }};"
"/bin/echo '{{ params.target_location }}/" + f"{t_view.output}';",
],
task_id="move_data",
2 changes: 1 addition & 1 deletion airflow/providers/yandex/hooks/yandex.py
@@ -119,7 +119,7 @@ def _get_credentials(self) -> Dict[str, Any]:
if not (service_account_json or oauth_token or service_account_json_path):
raise AirflowException(
'No credentials are found in connection. Specify either service account '
- + 'authentication JSON or user OAuth token in Yandex.Cloud connection'
+ 'authentication JSON or user OAuth token in Yandex.Cloud connection'
)
if service_account_json_path:
with open(service_account_json_path) as infile:
2 changes: 1 addition & 1 deletion airflow/sensors/smart_sensor.py
@@ -125,7 +125,7 @@ def _get_sensor_logger(self, si):
log_id = "-".join(
[si.dag_id, si.task_id, si.execution_date.strftime("%Y_%m_%dT%H_%M_%S_%f"), str(si.try_number)]
)
- logger = logging.getLogger('airflow.task' + '.' + log_id)
+ logger = logging.getLogger(f'airflow.task.{log_id}')

if len(logger.handlers) == 0:
handler = self.create_new_task_handler()
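The f-string simply builds a dotted logger name; since `logging` treats dots as hierarchy separators, `airflow.task.<log_id>` becomes a child of `airflow.task` and inherits its configuration unless, as in the following lines, a dedicated handler is attached. A minimal illustration with a made-up log id:

    import logging

    log_id = "example_dag-example_task-2021_10_21T00_00_00_000000-1"  # illustrative

    task_logger = logging.getLogger('airflow.task')        # parent logger created first
    logger = logging.getLogger(f'airflow.task.{log_id}')   # dotted name -> child logger

    # Records propagate up the dotted hierarchy unless propagation is disabled
    # or the child gets its own handler.
    assert logger.parent is task_logger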
2 changes: 1 addition & 1 deletion dev/provider_packages/prepare_provider_packages.py
@@ -1238,7 +1238,7 @@ def validate_provider_info_with_runtime_schema(provider_info: Dict[str, Any]) ->
console.print("[red]Provider info not validated against runtime schema[/]")
raise Exception(
"Error when validating schema. The schema must be compatible with "
+ "airflow/provider_info.schema.json.",
"airflow/provider_info.schema.json.",
ex,
)

@@ -71,7 +71,7 @@ def check_all_providers_are_listed_in_setup_py() -> None:
if provider_name not in ALL_PROVIDERS:
errors.append(
f"The provider {provider_name} is missing in setup.py "
+ "[bold]PROVIDERS_REQUIREMENTS[/]: [red]NOK[/]"
"[bold]PROVIDERS_REQUIREMENTS[/]: [red]NOK[/]"
)
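The appended error string mixes an f-string with Rich console markup (`[bold]`, `[red]`, `[/]`), which the calling script presumably prints through a Rich `Console` (the pre-commit config above pins `rich==10.9.0`). A sketch of how such a string renders, assuming `rich` is installed:

    from rich.console import Console

    console = Console()
    provider_name = "example.provider"  # illustrative value

    # Rich styles the [bold]...[/] and [red]...[/] spans; [/] closes the most
    # recently opened tag.
    console.print(
        f"The provider {provider_name} is missing in setup.py "
        "[bold]PROVIDERS_REQUIREMENTS[/]: [red]NOK[/]"
    )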


61 changes: 32 additions & 29 deletions tests/providers/amazon/aws/utils/eks_test_constants.py
@@ -187,49 +187,52 @@ class PageCount:


FARGATE_PROFILE_UUID_PATTERN: str = (
"(?P<fargate_uuid>[-0-9a-z]{8}-[-0-9a-z]{4}-[-0-9a-z]{4}-[-0-9a-z]{4}-[-0-9a-z]{12})"
r"(?P<fargate_uuid>[-0-9a-z]{8}-[-0-9a-z]{4}-[-0-9a-z]{4}-[-0-9a-z]{4}-[-0-9a-z]{12})"
)
NODEGROUP_UUID_PATTERN: str = (
"(?P<nodegroup_uuid>[-0-9a-z]{8}-[-0-9a-z]{4}-[-0-9a-z]{4}-[-0-9a-z]{4}-[-0-9a-z]{12})"
r"(?P<nodegroup_uuid>[-0-9a-z]{8}-[-0-9a-z]{4}-[-0-9a-z]{4}-[-0-9a-z]{4}-[-0-9a-z]{12})"
)


class RegExTemplates:
"""The compiled RegEx patterns used in testing."""

CLUSTER_ARN: Pattern = re.compile(
"arn:"
+ "(?P<partition>.+):"
+ "eks:"
+ "(?P<region>[-0-9a-zA-Z]+):"
+ "(?P<account_id>[0-9]{12}):"
+ "cluster/"
+ "(?P<cluster_name>.+)"
r"""arn:
(?P<partition>.+):
eks:
(?P<region>[-0-9a-zA-Z]+):
(?P<account_id>[0-9]{12}):
cluster/
(?P<cluster_name>.+)""",
re.VERBOSE,
)
FARGATE_PROFILE_ARN: Pattern = re.compile(
"arn:"
+ "(?P<partition>.+):"
+ "eks:"
+ "(?P<region>[-0-9a-zA-Z]+):"
+ "(?P<account_id>[0-9]{12}):"
+ "fargateprofile/"
+ "(?P<cluster_name>.+)/"
+ "(?P<fargate_name>.+)/"
+ FARGATE_PROFILE_UUID_PATTERN
r"""arn:
(?P<partition>.+):
eks:
(?P<region>[-0-9a-zA-Z]+):
(?P<account_id>[0-9]{12}):
fargateprofile/
(?P<cluster_name>.+)/
(?P<fargate_name>.+)/"""
+ FARGATE_PROFILE_UUID_PATTERN,
re.VERBOSE,
)
NODEGROUP_ARN: Pattern = re.compile(
"arn:"
+ "(?P<partition>.+):"
+ "eks:"
+ "(?P<region>[-0-9a-zA-Z]+):"
+ "(?P<account_id>[0-9]{12}):"
+ "nodegroup/"
+ "(?P<cluster_name>.+)/"
+ "(?P<nodegroup_name>.+)/"
+ NODEGROUP_UUID_PATTERN
r"""arn:
(?P<partition>.+):
eks:
(?P<region>[-0-9a-zA-Z]+):
(?P<account_id>[0-9]{12}):
nodegroup/
(?P<cluster_name>.+)/
(?P<nodegroup_name>.+)/"""
+ NODEGROUP_UUID_PATTERN,
re.VERBOSE,
)
- NODEGROUP_ASG_NAME_PATTERN: Pattern = re.compile("eks-" + NODEGROUP_UUID_PATTERN)
- NODEGROUP_SECURITY_GROUP_NAME_PATTERN: Pattern = re.compile("sg-" + "([-0-9a-z]{17})")
+ NODEGROUP_ASG_NAME_PATTERN: Pattern = re.compile(f"eks-{NODEGROUP_UUID_PATTERN}")
+ NODEGROUP_SECURITY_GROUP_NAME_PATTERN: Pattern = re.compile(r"sg-([-0-9a-z]{17})")


class MethodNames:
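The triple-quoted replacements rely on `re.VERBOSE`: unescaped whitespace (including the newlines and indentation inside the raw literal) is ignored when the pattern is compiled, so the multi-line pattern matches the same compact ARN text as the old `+`-concatenated one. A self-contained check against an invented ARN (the account id and names are made up):

    import re

    CLUSTER_ARN = re.compile(
        r"""arn:
        (?P<partition>.+):
        eks:
        (?P<region>[-0-9a-zA-Z]+):
        (?P<account_id>[0-9]{12}):
        cluster/
        (?P<cluster_name>.+)""",
        re.VERBOSE,
    )

    # Whitespace in the pattern is insignificant under re.VERBOSE, so a normal,
    # unspaced ARN string still matches.
    match = CLUSTER_ARN.match("arn:aws:eks:us-east-1:123456789012:cluster/example-cluster")
    assert match is not None
    assert match.group("region") == "us-east-1"
    assert match.group("cluster_name") == "example-cluster"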
16 changes: 9 additions & 7 deletions tests/providers/apache/hive/operators/test_hive_stats.py
@@ -308,17 +308,19 @@ def test_runs_for_hive_stats(self, mock_hive_metastore_hook):
op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)

select_count_query = (
"SELECT COUNT(*) AS __count FROM airflow."
+ "static_babynames_partitioned WHERE ds = '2015-01-01';"
"SELECT COUNT(*) AS __count "
"FROM airflow.static_babynames_partitioned "
"WHERE ds = '2015-01-01';"
)
mock_presto_hook.get_first.assert_called_with(hql=select_count_query)

expected_stats_select_query = (
"SELECT 1 FROM hive_stats WHERE table_name='airflow."
+ "static_babynames_partitioned' AND "
+ "partition_repr='{\"ds\": \"2015-01-01\"}' AND "
+ "dttm='2015-01-01T00:00:00+00:00' "
+ "LIMIT 1;"
"SELECT 1 "
"FROM hive_stats "
"WHERE table_name='airflow.static_babynames_partitioned' "
" AND partition_repr='{\"ds\": \"2015-01-01\"}' "
" AND dttm='2015-01-01T00:00:00+00:00' "
"LIMIT 1;"
)

raw_stats_select_query = mock_mysql_hook.get_records.call_args_list[0][0][0]
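The assertions around these queries use `unittest.mock`: `assert_called_with` checks the exact keyword argument the mocked hook received, and `call_args_list` pulls arguments back out for manual comparison. A stripped-down sketch of that verification style, with a fabricated hook and query:

    from unittest import mock

    presto_hook = mock.MagicMock()

    # The code under test would invoke the hook roughly like this:
    presto_hook.get_first(hql="SELECT COUNT(*) AS __count FROM example WHERE ds = '2015-01-01';")

    # Verify the exact keyword argument the mock received...
    presto_hook.get_first.assert_called_with(
        hql="SELECT COUNT(*) AS __count FROM example WHERE ds = '2015-01-01';"
    )
    # ...or dig the arguments back out of call_args_list for a manual check.
    assert presto_hook.get_first.call_args_list[0][1]["hql"].startswith("SELECT COUNT(*)")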