Update mypy to latest and turn on everywhere (dbt-labs#5171)
iknox-fa authored and Axel Goblet committed May 20, 2022
1 parent b82beb6 commit 197050d
Showing 26 changed files with 127 additions and 579 deletions.
7 changes: 7 additions & 0 deletions .changes/unreleased/Under the Hood-20220427-112127.yaml
@@ -0,0 +1,7 @@
kind: Under the Hood
body: Mypy -> 0.942 + fixed import logic to allow for full mypy coverage
time: 2022-04-27T11:21:27.499359-05:00
custom:
Author: iknox-fa
Issue: "4805"
PR: "5171"
5 changes: 3 additions & 2 deletions .github/workflows/main.yml
@@ -52,9 +52,10 @@ jobs:
pip --version
pip install pre-commit
pre-commit --version
pip install mypy==0.782
pip install mypy==0.942
mypy --version
pip install -r editable-requirements.txt
pip install -r requirements.txt
pip install -r dev-requirements.txt
dbt --version
- name: Run pre-commit hooks
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
@@ -43,7 +43,7 @@ repos:
alias: flake8-check
stages: [manual]
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v0.782
rev: v0.942
hooks:
- id: mypy
# N.B.: Mypy is... a bit fragile.
7 changes: 7 additions & 0 deletions core/dbt/__init__.py
@@ -0,0 +1,7 @@
# N.B.
# This will add to the package’s __path__ all subdirectories of directories on sys.path named after the package, which effectively combines both modules into a single namespace (dbt.adapters)
# The matching statement is in plugins/postgres/dbt/__init__.py

from pkgutil import extend_path

__path__ = extend_path(__path__, __name__)
7 changes: 7 additions & 0 deletions core/dbt/adapters/__init__.py
@@ -0,0 +1,7 @@
# N.B.
# This will add to the package’s __path__ all subdirectories of directories on sys.path named after the package, which effectively combines both modules into a single namespace (dbt.adapters)
# The matching statement is in plugins/postgres/dbt/adapters/__init__.py

from pkgutil import extend_path

__path__ = extend_path(__path__, __name__)
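Note: pkgutil.extend_path is what lets dbt-core and the postgres plugin both contribute modules under the same dbt / dbt.adapters namespace. As an illustration only (not part of this diff), the matching plugin-side file referenced in the comment above would presumably carry the same boilerplate:

# plugins/postgres/dbt/adapters/__init__.py (illustrative sketch, assumed to mirror the core file above)
from pkgutil import extend_path

__path__ = extend_path(__path__, __name__)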
2 changes: 0 additions & 2 deletions core/dbt/adapters/factory.py
@@ -140,8 +140,6 @@ def get_adapter_plugins(self, name: Optional[str]) -> List[AdapterPlugin]:
raise InternalException(f"No plugin found for {plugin_name}") from None
plugins.append(plugin)
seen.add(plugin_name)
if plugin.dependencies is None:
continue
for dep in plugin.dependencies:
if dep not in seen:
plugin_names.append(dep)
12 changes: 7 additions & 5 deletions core/dbt/adapters/protocol.py
@@ -7,7 +7,6 @@
List,
Generic,
TypeVar,
ClassVar,
Tuple,
Union,
Dict,
@@ -88,10 +87,13 @@ class AdapterProtocol( # type: ignore[misc]
Compiler_T,
],
):
AdapterSpecificConfigs: ClassVar[Type[AdapterConfig_T]]
Column: ClassVar[Type[Column_T]]
Relation: ClassVar[Type[Relation_T]]
ConnectionManager: ClassVar[Type[ConnectionManager_T]]
# N.B. Technically these are ClassVars, but mypy doesn't support putting type vars in a
# ClassVar due to the restrictiveness of PEP-526
# See: https://github.com/python/mypy/issues/5144
AdapterSpecificConfigs: Type[AdapterConfig_T]
Column: Type[Column_T]
Relation: Type[Relation_T]
ConnectionManager: Type[ConnectionManager_T]
connections: ConnectionManager_T

def __init__(self, config: AdapterRequiredConfig):
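For context on the ClassVar note above: PEP 526 does not allow type variables inside ClassVar, so mypy rejects annotations such as ClassVar[Type[AdapterConfig_T]]. A minimal sketch of the failure and the workaround the diff adopts (not from this commit; the names are invented):

from typing import ClassVar, Generic, Type, TypeVar

T = TypeVar("T")

class Holder(Generic[T]):
    # impl: ClassVar[Type[T]]   # rejected by mypy: "ClassVar cannot contain type variables"
    impl: Type[T]               # workaround used above: drop ClassVar, keep the annotation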
4 changes: 2 additions & 2 deletions core/dbt/adapters/reference_keys.py
@@ -1,7 +1,7 @@
# this module exists to resolve circular imports with the events module

from collections import namedtuple
from typing import Optional
from typing import Any, Optional


_ReferenceKey = namedtuple("_ReferenceKey", "database schema identifier")
@@ -14,7 +14,7 @@ def lowercase(value: Optional[str]) -> Optional[str]:
return value.lower()


def _make_key(relation) -> _ReferenceKey:
def _make_key(relation: Any) -> _ReferenceKey:
"""Make _ReferenceKeys with lowercase values for the cache so we don't have
to keep track of quoting
"""
27 changes: 16 additions & 11 deletions core/dbt/clients/system.py
@@ -246,16 +246,17 @@ def _supports_long_paths() -> bool:
# https://stackoverflow.com/a/35097999/11262881
# I don't know exactly what he means, but I am inclined to believe him as
# he's pretty active on Python windows bugs!
try:
dll = WinDLL("ntdll")
except OSError: # I don't think this happens? you need ntdll to run python
return False
# not all windows versions have it at all
if not hasattr(dll, "RtlAreLongPathsEnabled"):
return False
# tell windows we want to get back a single unsigned byte (a bool).
dll.RtlAreLongPathsEnabled.restype = c_bool
return dll.RtlAreLongPathsEnabled()
else:
try:
dll = WinDLL("ntdll")
except OSError: # I don't think this happens? you need ntdll to run python
return False
# not all windows versions have it at all
if not hasattr(dll, "RtlAreLongPathsEnabled"):
return False
# tell windows we want to get back a single unsigned byte (a bool).
dll.RtlAreLongPathsEnabled.restype = c_bool
return dll.RtlAreLongPathsEnabled()


def convert_path(path: str) -> str:
@@ -443,7 +444,7 @@ def download_with_retries(
connection_exception_retry(download_fn, 5)


def download(url: str, path: str, timeout: Optional[Union[float, tuple]] = None) -> None:
def download(
url: str,
path: str,
timeout: Optional[Union[float, Tuple[float, float], Tuple[float, None]]] = None,
) -> None:
path = convert_path(path)
connection_timeout = timeout or float(os.getenv("DBT_HTTP_TIMEOUT", 10))
response = requests.get(url, timeout=connection_timeout)
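The widened timeout annotation mirrors what requests accepts: either a single float or a (connect, read) tuple. A short illustrative call, with placeholder URL and values that are not part of this commit:

import requests

# A single float covers both connect and read timeouts; a tuple splits them.
requests.get("https://example.com/package.tar.gz", timeout=10.0)
requests.get("https://example.com/package.tar.gz", timeout=(5.0, 30.0))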
5 changes: 1 addition & 4 deletions core/dbt/contracts/graph/parsed.py
@@ -586,10 +586,7 @@ def quote_columns(self) -> Optional[bool]:

@property
def columns(self) -> Sequence[UnparsedColumn]:
if self.table.columns is None:
return []
else:
return self.table.columns
return [] if self.table.columns is None else self.table.columns

def get_tests(self) -> Iterator[Tuple[Dict[str, Any], Optional[UnparsedColumn]]]:
for test in self.tests:
8 changes: 2 additions & 6 deletions core/dbt/events/types.py
@@ -2421,9 +2421,7 @@ class GeneralWarningMsg(WarnLevel):
code: str = "Z046"

def message(self) -> str:
if self.log_fmt is not None:
return self.log_fmt.format(self.msg)
return self.msg
return self.log_fmt.format(self.msg) if self.log_fmt is not None else self.msg


@dataclass
@@ -2433,9 +2431,7 @@ class GeneralWarningException(WarnLevel):
code: str = "Z047"

def message(self) -> str:
if self.log_fmt is not None:
return self.log_fmt.format(str(self.exc))
return str(self.exc)
return self.log_fmt.format(str(self.exc)) if self.log_fmt is not None else str(self.exc)


@dataclass
4 changes: 2 additions & 2 deletions core/dbt/graph/selector_methods.py
@@ -540,7 +540,7 @@ def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[Uniqu
)

current_state_sources = {
result.unique_id: getattr(result, "max_loaded_at", None)
result.unique_id: getattr(result, "max_loaded_at", 0)
for result in self.previous_state.sources_current.results
if hasattr(result, "max_loaded_at")
}
@@ -552,7 +552,7 @@ def search(self, included_nodes: Set[UniqueId], selector: str) -> Iterator[Uniqu
}

previous_state_sources = {
result.unique_id: getattr(result, "max_loaded_at", None)
result.unique_id: getattr(result, "max_loaded_at", 0)
for result in self.previous_state.sources.results
if hasattr(result, "max_loaded_at")
}
2 changes: 0 additions & 2 deletions core/dbt/parser/manifest.py
@@ -946,8 +946,6 @@ def _check_resource_uniqueness(
for resource, node in manifest.nodes.items():
if not node.is_relational:
continue
# appease mypy - sources aren't refable!
assert not isinstance(node, ParsedSourceDefinition)

name = node.name
# the full node name is really defined by the adapter's relation
6 changes: 3 additions & 3 deletions core/dbt/parser/sources.py
@@ -63,7 +63,7 @@ def construct_sources(self) -> None:
self.sources[unpatched.unique_id] = unpatched
continue
# returns None if there is no patch
patch = self.get_patch_for(unpatched)
patch = self.get_patch_for(unpatched) # type: ignore[unreachable] # CT-564 / GH 5169

# returns unpatched if there is no patch
patched = self.patch_source(unpatched, patch)
@@ -213,8 +213,8 @@ def get_patch_for(
self,
unpatched: UnpatchedSourceDefinition,
) -> Optional[SourcePatch]:
if isinstance(unpatched, ParsedSourceDefinition):
return None
if isinstance(unpatched, ParsedSourceDefinition): # type: ignore[unreachable] # CT-564 / GH 5169
return None # type: ignore[unreachable] # CT-564 / GH 5169
key = (unpatched.package_name, unpatched.source.name)
patch: Optional[SourcePatch] = self.manifest.source_patches.get(key)
if patch is None:
5 changes: 3 additions & 2 deletions core/dbt/profiler.py
@@ -1,10 +1,11 @@
from contextlib import contextmanager
from cProfile import Profile
from pstats import Stats
from typing import Any, Generator


@contextmanager
def profiler(enable, outfile):
def profiler(enable: bool, outfile: str) -> Generator[Any, None, None]:
try:
if enable:
profiler = Profile()
@@ -16,4 +17,4 @@ def profiler(enable, outfile):
profiler.disable()
stats = Stats(profiler)
stats.sort_stats("tottime")
stats.dump_stats(outfile)
stats.dump_stats(str(outfile))
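With the annotations added above, the contract of this context manager is explicit: it yields nothing of interest and, when enabled, dumps pstats output to the given file on exit. A hypothetical usage sketch (do_work is a placeholder, not a real dbt function):

from dbt.profiler import profiler

with profiler(enable=True, outfile="dbt.pstats"):
    do_work()  # placeholder for the code being profiled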
4 changes: 3 additions & 1 deletion core/dbt/selected_resources.py
@@ -1,6 +1,8 @@
from typing import Set, Any

SELECTED_RESOURCES = []


def set_selected_resources(selected_resources):
def set_selected_resources(selected_resources: Set[Any]) -> None:
global SELECTED_RESOURCES
SELECTED_RESOURCES = list(selected_resources)
10 changes: 5 additions & 5 deletions core/dbt/ui.py
@@ -18,28 +18,28 @@
COLOR_RESET_ALL = COLORS["reset_all"]


def color(text: str, color_code: str):
def color(text: str, color_code: str) -> str:
if flags.USE_COLORS:
return "{}{}{}".format(color_code, text, COLOR_RESET_ALL)
else:
return text


def printer_width():
def printer_width() -> int:
if flags.PRINTER_WIDTH:
return flags.PRINTER_WIDTH
return 80


def green(text: str):
def green(text: str) -> str:
return color(text, COLOR_FG_GREEN)


def yellow(text: str):
def yellow(text: str) -> str:
return color(text, COLOR_FG_YELLOW)


def red(text: str):
def red(text: str) -> str:
return color(text, COLOR_FG_RED)


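These helpers now declare their str/int return types. Illustrative usage only (not part of this commit):

from dbt.ui import green, printer_width, red, yellow

# Color codes are applied only when flags.USE_COLORS is set.
print(green("PASS"), yellow("WARN"), red("FAIL"))
print("-" * printer_width())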
9 changes: 9 additions & 0 deletions core/dbt/version.py
@@ -3,6 +3,7 @@
import os
import glob
import json
from pathlib import Path
from typing import Iterator, List, Optional, Tuple

import requests
@@ -224,6 +225,14 @@ def _get_adapter_plugin_names() -> Iterator[str]:
# not be reporting plugin versions today
if spec is None or spec.submodule_search_locations is None:
return

# https://github.com/dbt-labs/dbt-core/pull/5171 changes how importing adapters works a bit and renders the previous discovery method useless for postgres.
# To solve this we manually add that path to the search path below.
# I don't like this solution. Not one bit.
# This can go away when we move the postgres adapter to its own repo.
postgres_path = Path(__file__ + "/../../../plugins/postgres/dbt/adapters").resolve()
spec.submodule_search_locations.append(str(postgres_path))

for adapters_path in spec.submodule_search_locations:
version_glob = os.path.join(adapters_path, "*", "__version__.py")
for version_path in glob.glob(version_glob):
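The loop above discovers adapter plugins by globbing for __version__.py one level below each search location and taking that directory's name as the plugin name. A hedged sketch of that discovery step (adapters_path is a placeholder value, not a real install path):

import glob
import os

adapters_path = "/site-packages/dbt/adapters"  # placeholder for a real search location
pattern = os.path.join(adapters_path, "*", "__version__.py")
for version_path in glob.glob(pattern):
    plugin_name = os.path.basename(os.path.dirname(version_path))  # e.g. "postgres"
    print(plugin_name)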