diff --git a/.circleci/config.yml b/.circleci/config.yml index 0ccc4a6d..159035c2 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -16,7 +16,7 @@ jobs: pip-dependency-file: dev_requirements.txt - run: tox -- -v test/unit integration-sqlserver: &sqlserver - docker: &msodbc_py_&_sqlserver + docker: - image: *docker_image - image: mcr.microsoft.com/mssql/server:2019-latest environment: @@ -47,7 +47,7 @@ jobs: - run: name: wait for SQL Server container to set up command: sleep 30 - - run: &prep=connect + - run: &prep-connect name: prep for connecting command: | mkdir -p ~/.dbt @@ -79,7 +79,7 @@ jobs: - python/install-packages: *install_dev - run: *install-dbt-sqlserver - azure-cli/install - - run: *prep=connect + - run: *prep-connect - azure-cli/login-with-service-principal: azure-sp: DBT_AZURE_SP_NAME azure-sp-password: DBT_AZURE_SP_SECRET diff --git a/.circleci/wakeup_azure.py b/.circleci/wakeup_azure.py index 4977a8bb..6cade4d6 100755 --- a/.circleci/wakeup_azure.py +++ b/.circleci/wakeup_azure.py @@ -16,11 +16,17 @@ async def resume_azsql() -> bool: try: client = SqlManagementClient(credential=credential, subscription_id=subscription_id) - db = await client.databases.get(resource_group_name=resource_group_name, server_name=sql_server_name, - database_name=database_name) + db = await client.databases.get( + resource_group_name=resource_group_name, + server_name=sql_server_name, + database_name=database_name, + ) if db.status == "Paused": - res = await client.databases.begin_resume(resource_group_name=resource_group_name, - server_name=sql_server_name, database_name=database_name) + res = await client.databases.begin_resume( + resource_group_name=resource_group_name, + server_name=sql_server_name, + database_name=database_name, + ) print("Resuming SQL Database") await res.wait() elif db.status in ("Pausing", "Resuming"): @@ -40,7 +46,7 @@ async def main(): await main() -if __name__ == '__main__': +if __name__ == "__main__": loop = asyncio.get_event_loop() loop.run_until_complete(main()) loop.close() diff --git a/.github/workflows/publish-docker.yml b/.github/workflows/publish-docker.yml index e857eb32..fe5b36d4 100644 --- a/.github/workflows/publish-docker.yml +++ b/.github/workflows/publish-docker.yml @@ -1,6 +1,6 @@ --- name: Publish Docker image for CI/CD -on: +on: # yamllint disable-line rule:truthy push: tags: - 'docker-*' diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..6c59896b --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,96 @@ +default_language_version: + python: python3.9 +repos: + - repo: 'https://github.com/pre-commit/pre-commit-hooks' + rev: v4.2.0 + hooks: + - id: check-yaml + args: + - '--unsafe' + - id: check-json + - id: end-of-file-fixer + - id: trailing-whitespace + exclude_types: + - markdown + - id: check-case-conflict + - id: check-ast + - id: check-builtin-literals + - id: check-merge-conflict + - id: no-commit-to-branch + - id: fix-byte-order-marker + - id: mixed-line-ending + - id: check-docstring-first + - repo: 'https://github.com/adrienverge/yamllint' + rev: v1.26.3 + hooks: + - id: yamllint + args: + - '-d {extends: default, rules: {line-length: disable, document-start: disable}}' + - '-s' + - repo: 'https://github.com/MarcoGorelli/absolufy-imports' + rev: v0.3.1 + hooks: + - id: absolufy-imports + - repo: 'https://github.com/hadialqattan/pycln' + rev: v1.2.5 + hooks: + - id: pycln + args: + - '--all' + - repo: 'https://github.com/pycqa/isort' + rev: 5.10.1 + hooks: + - id: isort + args: + 
- '--profile'
+          - black
+          - '--atomic'
+          - '--line-length'
+          - '99'
+          - '--python-version'
+          - '39'
+  - repo: 'https://github.com/psf/black'
+    rev: 22.3.0
+    hooks:
+      - id: black
+        args:
+          - '--line-length=99'
+          - '--target-version=py39'
+      - id: black
+        alias: black-check
+        stages:
+          - manual
+        args:
+          - '--line-length=99'
+          - '--target-version=py39'
+          - '--check'
+          - '--diff'
+  - repo: 'https://gitlab.com/pycqa/flake8'
+    rev: 3.9.2
+    hooks:
+      - id: flake8
+        args:
+          - '--max-line-length=99'
+      - id: flake8
+        args:
+          - '--max-line-length=99'
+        alias: flake8-check
+        stages:
+          - manual
+  - repo: 'https://github.com/pre-commit/mirrors-mypy'
+    rev: v0.950
+    hooks:
+      - id: mypy
+        args:
+          - '--show-error-codes'
+          - '--ignore-missing-imports'
+        files: '^dbt/adapters'
+      - id: mypy
+        alias: mypy-check
+        stages:
+          - manual
+        args:
+          - '--show-error-codes'
+          - '--pretty'
+          - '--ignore-missing-imports'
+        files: '^dbt/adapters'
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 4fbbce76..9a4090b4 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -10,11 +10,11 @@ pyenv virtualenv 3.9.12 dbt-sqlserver
 pyenv activate dbt-sqlserver
 ```
 
-Install the development dependencies:
+Install the development dependencies and pre-commit, and get information about possible make commands:
 
 ```shell
-pip install -r dev_requirements.txt
-pip install -e .
+make dev
+make help
 ```
 
 ## Testing
diff --git a/MANIFEST.in b/MANIFEST.in
index 78412d5b..cfbc714e 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1 +1 @@
-recursive-include dbt/include *.sql *.yml *.md
\ No newline at end of file
+recursive-include dbt/include *.sql *.yml *.md
diff --git a/Makefile b/Makefile
new file mode 100644
index 00000000..c2c0a297
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,57 @@
+.DEFAULT_GOAL:=help
+
+.PHONY: dev
+dev: ## Installs adapter in develop mode along with development dependencies
+	@\
+	pip install -r dev_requirements.txt && pre-commit install
+
+.PHONY: mypy
+mypy: ## Runs mypy against staged changes for static type checking.
+	@\
+	pre-commit run --hook-stage manual mypy-check | grep -v "INFO"
+
+.PHONY: flake8
+flake8: ## Runs flake8 against staged changes to enforce style guide.
+	@\
+	pre-commit run --hook-stage manual flake8-check | grep -v "INFO"
+
+.PHONY: black
+black: ## Runs black against staged changes to enforce style guide.
+	@\
+	pre-commit run --hook-stage manual black-check -v | grep -v "INFO"
+
+.PHONY: lint
+lint: ## Runs flake8 and mypy code checks against staged changes.
+	@\
+	pre-commit run flake8-check --hook-stage manual | grep -v "INFO"; \
+	pre-commit run mypy-check --hook-stage manual | grep -v "INFO"
+
+.PHONY: linecheck
+linecheck: ## Checks for Python lines of 100 characters or more
+	@\
+	find dbt -type f -name "*.py" -exec grep -I -r -n '.\{100\}' {} \;
+
+.PHONY: unit
+unit: ## Runs unit tests.
+	@\
+	tox -- -v test/unit
+
+.PHONY: test
+test: ## Runs unit tests and code checks against staged changes.
+	@\
+	tox -- -v test/unit; \
+	pre-commit run black-check --hook-stage manual | grep -v "INFO"; \
+	pre-commit run flake8-check --hook-stage manual | grep -v "INFO"; \
+	pre-commit run mypy-check --hook-stage manual | grep -v "INFO"
+
+.PHONY: clean
+clean: ## Cleans up the repo by removing files ignored by git (git clean -f -X).
+	@echo "cleaning repo"
+	@git clean -f -X
+
+.PHONY: help
+help: ## Show this help message.
+	@echo 'usage: make [target]'
+	@echo
+	@echo 'targets:'
+	@grep -E '^[a-zA-Z_-]+:.*?## .*$$' $(MAKEFILE_LIST) | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
diff --git a/__init__.py b/__init__.py
deleted file mode 100644
index e69de29b..00000000
diff --git a/dbt/__init__.py b/dbt/__init__.py
index 07acae52..8db66d3d 100644
--- a/dbt/__init__.py
+++ b/dbt/__init__.py
@@ -1 +1 @@
-__path__ = __import__("pkgutil").extend_path(__path__, __name__)
\ No newline at end of file
+__path__ = __import__("pkgutil").extend_path(__path__, __name__)
diff --git a/dbt/adapters/__init__.py b/dbt/adapters/__init__.py
index 07acae52..8db66d3d 100644
--- a/dbt/adapters/__init__.py
+++ b/dbt/adapters/__init__.py
@@ -1 +1 @@
-__path__ = __import__("pkgutil").extend_path(__path__, __name__)
\ No newline at end of file
+__path__ = __import__("pkgutil").extend_path(__path__, __name__)
diff --git a/dbt/adapters/sqlserver/__init__.py b/dbt/adapters/sqlserver/__init__.py
index c1b2c56b..d7ed4593 100644
--- a/dbt/adapters/sqlserver/__init__.py
+++ b/dbt/adapters/sqlserver/__init__.py
@@ -1,13 +1,13 @@
-from dbt.adapters.sqlserver.connections import SQLServerConnectionManager
-from dbt.adapters.sqlserver.connections import SQLServerCredentials
-from dbt.adapters.sqlserver.impl import SQLServerAdapter
-
 from dbt.adapters.base import AdapterPlugin
-from dbt.include import sqlserver
+from dbt.adapters.sqlserver.connections import SQLServerConnectionManager, SQLServerCredentials
+from dbt.adapters.sqlserver.impl import SQLServerAdapter
+from dbt.include import sqlserver
 
 Plugin = AdapterPlugin(
     adapter=SQLServerAdapter,
     credentials=SQLServerCredentials,
     include_path=sqlserver.PACKAGE_PATH,
 )
+
+__all__ = ["Plugin", "SQLServerConnectionManager", "SQLServerAdapter", "SQLServerCredentials"]
diff --git a/dbt/adapters/sqlserver/__version__.py b/dbt/adapters/sqlserver/__version__.py
index 6c4856f0..a3b01520 100644
--- a/dbt/adapters/sqlserver/__version__.py
+++ b/dbt/adapters/sqlserver/__version__.py
@@ -1 +1 @@
-version = '1.0.0.dev0'
+version = "1.0.0.dev0"
diff --git a/dbt/adapters/sqlserver/connections.py b/dbt/adapters/sqlserver/connections.py
index a66152a2..926394af 100644
--- a/dbt/adapters/sqlserver/connections.py
+++ b/dbt/adapters/sqlserver/connections.py
@@ -3,18 +3,17 @@
 from contextlib import contextmanager
 from dataclasses import dataclass
 from itertools import chain, repeat
-from typing import Callable, Dict, Mapping
-from typing import Optional
+from typing import Callable, Dict, Mapping, Optional
 
 import dbt.exceptions
 import pyodbc
 from azure.core.credentials import AccessToken
 from azure.identity import (
     AzureCliCredential,
-    ManagedIdentityCredential,
     ClientSecretCredential,
     DefaultAzureCredential,
     EnvironmentCredential,
+    ManagedIdentityCredential,
 )
 from dbt.adapters.base import Credentials
 from dbt.adapters.sql import SQLConnectionManager
@@ -216,7 +215,7 @@ def get_sp_access_token(credentials: SQLServerCredentials) -> AccessToken:
         The access token.
""" token = ClientSecretCredential( - credentials.tenant_id, credentials.client_id, credentials.client_secret + str(credentials.tenant_id), str(credentials.client_id), str(credentials.client_secret) ).get_token(AZURE_CREDENTIAL_SCOPE) return token @@ -253,11 +252,9 @@ def get_pyodbc_attrs_before(credentials: SQLServerCredentials) -> Dict: "environment": get_environment_access_token, } - authentication = credentials.authentication.lower() + authentication = str(credentials.authentication).lower() if authentication in azure_auth_functions: - time_remaining = ( - (_TOKEN.expires_on - time.time()) if _TOKEN else MAX_REMAINING_TIME - ) + time_remaining = (_TOKEN.expires_on - time.time()) if _TOKEN else MAX_REMAINING_TIME if _TOKEN is None or (time_remaining < MAX_REMAINING_TIME): azure_auth_function = azure_auth_functions[authentication] @@ -318,8 +315,9 @@ def open(cls, connection): con_str.append(f"DRIVER={{{credentials.driver}}}") if "\\" in credentials.host: - # if there is a backslash \ in the host name the host is a sql-server named instance - # in this case then port number has to be omitted + + # If there is a backslash \ in the host name, the host is a + # SQL Server named instance. In this case then port number has to be omitted. con_str.append(f"SERVER={credentials.host}") else: con_str.append(f"SERVER={credentials.host},{credentials.port}") @@ -338,7 +336,7 @@ def open(cls, connection): con_str.append(f"UID={{{credentials.UID}}}") elif getattr(credentials, "windows_login", False): - con_str.append(f"trusted_connection=yes") + con_str.append("trusted_connection=yes") elif type_auth == "sql": con_str.append(f"UID={{{credentials.UID}}}") con_str.append(f"PWD={{{credentials.PWD}}}") @@ -347,9 +345,9 @@ def open(cls, connection): # to learn more visit # https://docs.microsoft.com/en-us/sql/relational-databases/native-client/features/using-encryption-without-validation?view=sql-server-ver15 if getattr(credentials, "encrypt", False) is True: - con_str.append(f"Encrypt=Yes") + con_str.append("Encrypt=Yes") if getattr(credentials, "trust_cert", False) is True: - con_str.append(f"TrustServerCertificate=Yes") + con_str.append("TrustServerCertificate=Yes") plugin_version = __version__.version application_name = f"dbt-{credentials.type}/{plugin_version}" diff --git a/dbt/adapters/sqlserver/impl.py b/dbt/adapters/sqlserver/impl.py index 21acb806..1211d534 100644 --- a/dbt/adapters/sqlserver/impl.py +++ b/dbt/adapters/sqlserver/impl.py @@ -1,11 +1,10 @@ +from typing import List, Optional + +import agate +from dbt.adapters.base.relation import BaseRelation from dbt.adapters.sql import SQLAdapter + from dbt.adapters.sqlserver import SQLServerConnectionManager -from dbt.adapters.base.relation import BaseRelation -import agate -from typing import ( - Optional, Tuple, Callable, Iterable, Type, Dict, Any, List, Mapping, - Iterator, Union, Set -) class SQLServerAdapter(SQLAdapter): @@ -42,9 +41,7 @@ def convert_time_type(cls, agate_table, col_idx): return "datetime" # Methods used in adapter tests - def timestamp_add_sql( - self, add_to: str, number: int = 1, interval: str = "hour" - ) -> str: + def timestamp_add_sql(self, add_to: str, number: int = 1, interval: str = "hour") -> str: # note: 'interval' is not supported for T-SQL # for backwards compatibility, we're compelled to set some sort of # default. 
A lot of searching has lead me to believe that the @@ -53,19 +50,20 @@ def timestamp_add_sql( return f"DATEADD({interval},{number},{add_to})" def string_add_sql( - self, add_to: str, value: str, location='append', + self, + add_to: str, + value: str, + location="append", ) -> str: """ `+` is T-SQL's string concatenation operator """ - if location == 'append': + if location == "append": return f"{add_to} + '{value}'" - elif location == 'prepend': + elif location == "prepend": return f"'{value}' + {add_to}" else: - raise RuntimeException( - f'Got an unexpected location value of "{location}"' - ) + raise ValueError(f'Got an unexpected location value of "{location}"') def get_rows_different_sql( self, diff --git a/dbt/include/__init__.py b/dbt/include/__init__.py index 07acae52..8db66d3d 100644 --- a/dbt/include/__init__.py +++ b/dbt/include/__init__.py @@ -1 +1 @@ -__path__ = __import__("pkgutil").extend_path(__path__, __name__) \ No newline at end of file +__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/dbt/include/sqlserver/dbt_project.yml b/dbt/include/sqlserver/dbt_project.yml index 08aa128f..8952ba41 100644 --- a/dbt/include/sqlserver/dbt_project.yml +++ b/dbt/include/sqlserver/dbt_project.yml @@ -1,4 +1,3 @@ - name: dbt_sqlserver version: 1.0 diff --git a/dbt/include/sqlserver/macros/adapters/columns.sql b/dbt/include/sqlserver/macros/adapters/columns.sql index df0f15e9..05d4ef25 100644 --- a/dbt/include/sqlserver/macros/adapters/columns.sql +++ b/dbt/include/sqlserver/macros/adapters/columns.sql @@ -60,4 +60,4 @@ {%- endcall -%} -{% endmacro %} \ No newline at end of file +{% endmacro %} diff --git a/dbt/include/sqlserver/macros/adapters/freshness.sql b/dbt/include/sqlserver/macros/adapters/freshness.sql index 60268d6e..84519e5b 100644 --- a/dbt/include/sqlserver/macros/adapters/freshness.sql +++ b/dbt/include/sqlserver/macros/adapters/freshness.sql @@ -1,3 +1,3 @@ {% macro sqlserver__current_timestamp() -%} SYSDATETIME() -{%- endmacro %} \ No newline at end of file +{%- endmacro %} diff --git a/dbt/include/sqlserver/macros/adapters/indexes.sql b/dbt/include/sqlserver/macros/adapters/indexes.sql index 9f385490..528f6f5c 100644 --- a/dbt/include/sqlserver/macros/adapters/indexes.sql +++ b/dbt/include/sqlserver/macros/adapters/indexes.sql @@ -132,9 +132,9 @@ select @drop_remaining_indexes_last = ( {% set idx_name = this.table + '__clustered_index_on_' + columns|join('_') %} -if not exists(select * from sys.indexes - where - name = '{{ idx_name }}' and +if not exists(select * from sys.indexes + where + name = '{{ idx_name }}' and object_id = OBJECT_ID('{{ this }}') ) begin @@ -156,9 +156,9 @@ end {% set idx_name = this.table + '__index_on_' + columns|join('_')|replace(" ", "_") %} -if not exists(select * from sys.indexes - where - name = '{{ idx_name }}' and +if not exists(select * from sys.indexes + where + name = '{{ idx_name }}' and object_id = OBJECT_ID('{{ this }}') ) begin diff --git a/dbt/include/sqlserver/macros/adapters/metadata.sql b/dbt/include/sqlserver/macros/adapters/metadata.sql index 8dee2beb..463e8199 100644 --- a/dbt/include/sqlserver/macros/adapters/metadata.sql +++ b/dbt/include/sqlserver/macros/adapters/metadata.sql @@ -87,4 +87,4 @@ where table_schema like '{{ schema_relation.schema }}' {% endcall %} {{ return(load_result('list_relations_without_caching').table) }} -{% endmacro %} \ No newline at end of file +{% endmacro %} diff --git a/dbt/include/sqlserver/macros/adapters/persist_docs.sql 
b/dbt/include/sqlserver/macros/adapters/persist_docs.sql index fb3f4fe8..8b3e4f90 100644 --- a/dbt/include/sqlserver/macros/adapters/persist_docs.sql +++ b/dbt/include/sqlserver/macros/adapters/persist_docs.sql @@ -1,4 +1,4 @@ {# we don't support "persist docs" today, but we'd like to! https://github.com/dbt-msft/dbt-sqlserver/issues/134 - #} \ No newline at end of file + #} diff --git a/dbt/include/sqlserver/macros/adapters/relation.sql b/dbt/include/sqlserver/macros/adapters/relation.sql index e327b670..de043c22 100644 --- a/dbt/include/sqlserver/macros/adapters/relation.sql +++ b/dbt/include/sqlserver/macros/adapters/relation.sql @@ -36,4 +36,4 @@ WHERE name='{{ from_relation.schema }}_{{ from_relation.identifier }}_cci' and object_id = OBJECT_ID('{{ from_relation.schema }}.{{ to_relation.identifier }}')) EXEC sp_rename N'{{ from_relation.schema }}.{{ to_relation.identifier }}.{{ from_relation.schema }}_{{ from_relation.identifier }}_cci', N'{{ from_relation.schema }}_{{ to_relation.identifier }}_cci', N'INDEX' {%- endcall %} -{% endmacro %} \ No newline at end of file +{% endmacro %} diff --git a/dbt/include/sqlserver/macros/adapters/schema.sql b/dbt/include/sqlserver/macros/adapters/schema.sql index d18d3a00..5403320e 100644 --- a/dbt/include/sqlserver/macros/adapters/schema.sql +++ b/dbt/include/sqlserver/macros/adapters/schema.sql @@ -29,4 +29,4 @@ {% endmacro %} -{# there is no drop_schema... why? #} \ No newline at end of file +{# there is no drop_schema... why? #} diff --git a/dbt/include/sqlserver/macros/materializations/models/table/create_table_as.sql b/dbt/include/sqlserver/macros/materializations/models/table/create_table_as.sql index dbf99531..53bf7221 100644 --- a/dbt/include/sqlserver/macros/materializations/models/table/create_table_as.sql +++ b/dbt/include/sqlserver/macros/materializations/models/table/create_table_as.sql @@ -18,7 +18,7 @@ {{ tmp_relation }} {{ sqlserver__drop_relation_script(tmp_relation) }} - + {% if not temporary and as_columnstore -%} {{ sqlserver__create_clustered_columnstore_index(relation) }} {% endif %} diff --git a/dbt/include/sqlserver/macros/materializations/seeds/helpers.sql b/dbt/include/sqlserver/macros/materializations/seeds/helpers.sql index 23035890..66bf829e 100644 --- a/dbt/include/sqlserver/macros/materializations/seeds/helpers.sql +++ b/dbt/include/sqlserver/macros/materializations/seeds/helpers.sql @@ -61,6 +61,6 @@ {% set max_batch_size = get_batch_size() %} {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %} {% set batch_size = calc_batch_size(cols_sql|length, max_batch_size) %} - + {{ return(basic_load_csv_rows(model, batch_size, agate_table) )}} {% endmacro %} diff --git a/dbt/include/sqlserver/macros/materializations/snapshots/snapshot.sql b/dbt/include/sqlserver/macros/materializations/snapshots/snapshot.sql index 9a94d7a5..a9faa8ba 100644 --- a/dbt/include/sqlserver/macros/materializations/snapshots/snapshot.sql +++ b/dbt/include/sqlserver/macros/materializations/snapshots/snapshot.sql @@ -12,4 +12,4 @@ alter table {{ relation }} add "{{ column.name }}" {{ column.data_type }}; {% endcall %} {% endfor %} -{% endmacro %} \ No newline at end of file +{% endmacro %} diff --git a/dbt/include/sqlserver/macros/materializations/snapshots/snapshot_merge.sql b/dbt/include/sqlserver/macros/materializations/snapshots/snapshot_merge.sql index 5c006f9f..ff27ae31 100644 --- a/dbt/include/sqlserver/macros/materializations/snapshots/snapshot_merge.sql +++ 
b/dbt/include/sqlserver/macros/materializations/snapshots/snapshot_merge.sql @@ -1,3 +1,3 @@ {% macro sqlserver__snapshot_merge_sql(target, source, insert_cols) %} {{ default__snapshot_merge_sql(target, source, insert_cols) }}; -{% endmacro %} \ No newline at end of file +{% endmacro %} diff --git a/dev_requirements.txt b/dev_requirements.txt index a65a3fe7..15d834b2 100644 --- a/dev_requirements.txt +++ b/dev_requirements.txt @@ -1,4 +1,3 @@ -black==22.3.0 pytest-dbt-adapter~=0.6.0 pytest==6.2.5 tox==3.25.0 @@ -7,3 +6,4 @@ azure-mgmt-sql==3.0.1 aiohttp==3.8.1 twine==4.0.0 wheel==0.37.1 +pre-commit==2.19.0 diff --git a/setup.py b/setup.py index c1417bfd..58f5b25c 100644 --- a/setup.py +++ b/setup.py @@ -1,13 +1,13 @@ #!/usr/bin/env python -from setuptools import find_namespace_packages, setup import os import re import sys +from setuptools import find_namespace_packages, setup from setuptools.command.install import install this_directory = os.path.abspath(os.path.dirname(__file__)) -with open(os.path.join(this_directory, 'README.md')) as f: +with open(os.path.join(this_directory, "README.md")) as f: long_description = f.read() package_name = "dbt-sqlserver" @@ -15,36 +15,35 @@ # get this from a separate file def _dbt_sqlserver_version(): - _version_path = os.path.join( - this_directory, 'dbt', 'adapters', 'sqlserver', '__version__.py' - ) - _version_pattern = r'''version\s*=\s*["'](.+)["']''' + _version_path = os.path.join(this_directory, "dbt", "adapters", "sqlserver", "__version__.py") + _version_pattern = r"""version\s*=\s*["'](.+)["']""" with open(_version_path) as f: match = re.search(_version_pattern, f.read().strip()) if match is None: - raise ValueError(f'invalid version at {_version_path}') + raise ValueError(f"invalid version at {_version_path}") return match.group(1) package_version = _dbt_sqlserver_version() description = """A sqlserver adapter plugin for dbt (data build tool)""" -dbt_version = '1.0' +dbt_version = "1.0" # the package version should be the dbt version, with maybe some things on the # ends of it. (0.18.1 vs 0.18.1a1, 0.18.1.1, ...) 
if not package_version.startswith(dbt_version): raise ValueError( - f'Invalid setup.py: package_version={package_version} must start with ' - f'dbt_version={dbt_version}' + f"Invalid setup.py: package_version={package_version} must start with " + f"dbt_version={dbt_version}" ) class VerifyVersionCommand(install): """Custom command to verify that the git tag matches our version""" - description = 'Verify that the git tag matches our version' + + description = "Verify that the git tag matches our version" def run(self): - tag = os.getenv('CIRCLE_TAG') + tag = os.getenv("CIRCLE_TAG") tag_without_prefix = tag[1:] if tag_without_prefix != package_version: @@ -63,7 +62,7 @@ def run(self): license="MIT", author="Mikael Ene, Anders Swanson, Sam Debruyn, Cor Zuurmond", url="https://github.com/dbt-msft/dbt-sqlserver", - packages=find_namespace_packages(include=['dbt', 'dbt.*']), + packages=find_namespace_packages(include=["dbt", "dbt.*"]), include_package_data=True, install_requires=[ "dbt-core~=1.0.0", @@ -71,7 +70,7 @@ def run(self): "azure-identity>=1.7.0", ], cmdclass={ - 'verify': VerifyVersionCommand, + "verify": VerifyVersionCommand, }, classifiers=[ "Development Status :: 5 - Production/Stable", diff --git a/test/integration/azuresql.dbtspec b/test/integration/azuresql.dbtspec index 7618751c..234197e3 100644 --- a/test/integration/azuresql.dbtspec +++ b/test/integration/azuresql.dbtspec @@ -19,7 +19,7 @@ projects: version: '1.0.0' models: dbt_test_project: - +as_columnstore: false + +as_columnstore: false - overrides: ephemeral dbt_project_yml: *override-project - overrides: incremental diff --git a/test/integration/dbt_project.yml b/test/integration/dbt_project.yml index f02f89c5..9ff304d2 100644 --- a/test/integration/dbt_project.yml +++ b/test/integration/dbt_project.yml @@ -1,6 +1,5 @@ - name: 'sqlserver_integration_tests' version: '1.0' config-version: 2 -profile: 'integration_tests' \ No newline at end of file +profile: 'integration_tests' diff --git a/test/integration/models/test.sql b/test/integration/models/test.sql index 90664768..d56da9a4 100644 --- a/test/integration/models/test.sql +++ b/test/integration/models/test.sql @@ -1,3 +1,3 @@ {# inane comment #} {% set col_name = 'foo' %} -SELECT 1 as {{ col_name }} \ No newline at end of file +SELECT 1 as {{ col_name }} diff --git a/test/integration/sample.profiles.yml b/test/integration/sample.profiles.yml index 426805f1..1bb97fc6 100644 --- a/test/integration/sample.profiles.yml +++ b/test/integration/sample.profiles.yml @@ -2,8 +2,8 @@ # You should __NEVER__ check credentials into version control. 
Thanks for reading :) config: - send_anonymous_usage_stats: False - use_colors: True + send_anonymous_usage_stats: false + use_colors: true defaults: basic: &basic @@ -22,8 +22,8 @@ defaults: <<: *basic host: "{{ env_var('DBT_AZURESQL_SERVER') }}" database: "{{ env_var('DBT_AZURESQL_DB') }}" - encrypt: yes - trust_cert: yes + encrypt: true + trust_cert: true integration_tests: target: sqlserver_local_userpass @@ -31,8 +31,8 @@ integration_tests: sqlserver_local_userpass: *basic-sqlserver sqlserver_local_encrypt: <<: *basic-sqlserver - encrypt: yes - trust_cert: yes + encrypt: true + trust_cert: true azuresql_sqlcred: <<: *azuresql-basic username: "{{ env_var('DBT_AZURESQL_UID') }}" diff --git a/test/unit/adapters/sqlserver/test_connections.py b/test/unit/adapters/sqlserver/test_connections.py index 3580581d..e6e042d1 100644 --- a/test/unit/adapters/sqlserver/test_connections.py +++ b/test/unit/adapters/sqlserver/test_connections.py @@ -7,8 +7,8 @@ from dbt.adapters.sqlserver import SQLServerCredentials, connections - -# See https://github.com/Azure/azure-sdk-for-python/blob/azure-identity_1.5.0/sdk/identity/azure-identity/tests/test_cli_credential.py +# See +# https://github.com/Azure/azure-sdk-for-python/blob/azure-identity_1.5.0/sdk/identity/azure-identity/tests/test_cli_credential.py CHECK_OUTPUT = AzureCliCredential.__module__ + ".subprocess.check_output" @@ -29,9 +29,9 @@ def mock_cli_access_token() -> str: expected_expires_on = 1602015811 successful_output = json.dumps( { - "expiresOn": dt.datetime.fromtimestamp( - expected_expires_on - ).strftime("%Y-%m-%d %H:%M:%S.%f"), + "expiresOn": dt.datetime.fromtimestamp(expected_expires_on).strftime( + "%Y-%m-%d %H:%M:%S.%f" + ), "accessToken": access_token, "subscription": "some-guid", "tenant": "some-guid", @@ -48,7 +48,7 @@ def test_get_pyodbc_attrs_before_empty_dict_when_service_principal( When the authentication is set to sql we expect an empty attrs before. """ attrs_before = connections.get_pyodbc_attrs_before(credentials) - assert attrs_before == dict() + assert attrs_before == {} @pytest.mark.parametrize("authentication", ["CLI", "cli", "cLi"]) @@ -62,8 +62,6 @@ def test_get_pyodbc_attrs_before_contains_access_token_key_for_cli_authenticatio access token key. """ credentials.authentication = authentication - with mock.patch( - CHECK_OUTPUT, mock.Mock(return_value=mock_cli_access_token) - ): + with mock.patch(CHECK_OUTPUT, mock.Mock(return_value=mock_cli_access_token)): attrs_before = connections.get_pyodbc_attrs_before(credentials) assert 1256 in attrs_before.keys()