Merge branch 'dev/louisa-may-alcott' of https://github.com/fishtown-analytics/dbt into kms-encryption
kconvey committed Nov 7, 2019
2 parents 0d8e0cb + 6198584 commit 563dfc1
Showing 100 changed files with 2,782 additions and 740 deletions.
2 changes: 1 addition & 1 deletion .bumpversion.cfg
@@ -1,5 +1,5 @@
[bumpversion]
current_version = 0.15.0b2
current_version = 0.15.0rc1
parse = (?P<major>\d+)
\.(?P<minor>\d+)
\.(?P<patch>\d+)
56 changes: 53 additions & 3 deletions .circleci/config.yml
@@ -2,7 +2,7 @@ version: 2
jobs:
unit:
docker: &test_and_postgres
- image: fishtownjacob/test-container
- image: fishtownjacob/test-container:2
- image: postgres
name: database
environment: &pgenv
@@ -19,7 +19,7 @@ jobs:
PGUSER: root
PGPASSWORD: password
PGDATABASE: postgres
- run: tox -e flake8,mypy,unit-py36
- run: tox -e flake8,mypy,unit-py36,unit-py38
integration-postgres-py36:
docker: *test_and_postgres
steps:
@@ -32,7 +32,7 @@ jobs:
path: ./logs
integration-snowflake-py36:
docker: &test_only
- image: fishtownjacob/test-container
- image: fishtownjacob/test-container:2
steps:
- checkout
- run:
@@ -59,6 +59,44 @@ jobs:
command: tox -e integration-bigquery-py36
- store_artifacts:
path: ./logs
integration-postgres-py38:
docker: *test_and_postgres
steps:
- checkout
- run: *setupdb
- run:
name: Run tests
command: tox -e integration-postgres-py38
- store_artifacts:
path: ./logs
integration-snowflake-py38:
docker: *test_only
steps:
- checkout
- run:
name: Run tests
command: tox -e integration-snowflake-py38
no_output_timeout: 1h
- store_artifacts:
path: ./logs
integration-redshift-py38:
docker: *test_only
steps:
- checkout
- run:
name: Run tests
command: tox -e integration-redshift-py38
- store_artifacts:
path: ./logs
integration-bigquery-py38:
docker: *test_only
steps:
- checkout
- run:
name: Run tests
command: tox -e integration-bigquery-py38
- store_artifacts:
path: ./logs

workflows:
version: 2
@@ -77,3 +115,15 @@ workflows:
- integration-snowflake-py36:
requires:
- integration-postgres-py36
- integration-postgres-py38:
requires:
- unit
- integration-redshift-py38:
requires:
- integration-postgres-py38
- integration-bigquery-py38:
requires:
- integration-postgres-py38
- integration-snowflake-py38:
requires:
- integration-postgres-py38
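
Note on the config above: the new py38 jobs reuse the same docker and setup blocks as the py36 ones through YAML anchors and aliases (&test_and_postgres / *test_and_postgres, &test_only, &pgenv, *setupdb). If the aliasing is hard to follow, one way to inspect the resolved configuration is to load it with PyYAML, which expands anchors on load. This is a hypothetical helper snippet, not part of the commit; it assumes PyYAML is installed and that it is run from the repository root:

import yaml  # PyYAML resolves &anchor / *alias references when loading

with open(".circleci/config.yml") as fh:
    config = yaml.safe_load(fh)

# The *test_and_postgres alias in integration-postgres-py38 expands to the
# same two-image docker block that the `unit` job defines with the anchor.
print(config["jobs"]["integration-postgres-py38"]["docker"])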
64 changes: 28 additions & 36 deletions Dockerfile
@@ -3,52 +3,44 @@ FROM ubuntu:18.04
ENV DEBIAN_FRONTEND noninteractive

RUN apt-get update && \
apt-get dist-upgrade -y && \
apt-get install -y --no-install-recommends \
netcat postgresql make build-essential libssl-dev zlib1g-dev \
libbz2-dev libreadline-dev libsqlite3-dev wget curl llvm libncurses5-dev \
xz-utils tk-dev libxml2-dev libxmlsec1-dev libffi-dev git ca-certificates \
curl git ssh && \
netcat postgresql curl git ssh software-properties-common \
make build-essential ca-certificates libpq-dev && \
add-apt-repository ppa:deadsnakes/ppa && \
apt-get install -y \
python python-dev python-pip \
python3.6 python3.6-dev python3-pip python3.6-venv \
python3.7 python3.7-dev python3.7-venv \
python3.8 python3.8-dev python3.8-venv && \
apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*

RUN useradd -mU dbt_test_user
RUN mkdir /usr/app && chown dbt_test_user /usr/app
RUN mkdir /home/tox && chown dbt_test_user /home/tox
USER dbt_test_user

WORKDIR /usr/app
VOLUME /usr/app

RUN curl -L https://github.com/pyenv/pyenv-installer/raw/master/bin/pyenv-installer | bash

ENV PYENV_ROOT="/home/dbt_test_user/.pyenv" \
PATH="/home/dbt_test_user/.pyenv/bin:/home/dbt_test_user/.pyenv/shims:$PATH"

RUN pyenv update && \
echo "2.7.16 3.6.8 3.7.3" | xargs -P 4 -n 1 pyenv install && \
pyenv global $(pyenv versions --bare)

RUN pyenv virtualenv 3.6.8 dbt36 && \
pyenv virtualenv 3.7.3 dbt37 && \
pyenv virtualenv 2.7.16 dbt27

RUN cd /usr/app && \
python -m pip install -U pip && \
python -m pip install tox && \
pyenv local dbt37 && \
python -m pip install -U pip && \
python -m pip install tox && \
pyenv local --unset && \
pyenv local dbt36 && \
python -m pip install -U pip && \
python -m pip install tox && \
pyenv local --unset && \
pyenv local dbt27 && \
python -m pip install -U pip && \
python -m pip install tox && \
pyenv local --unset && \
pyenv rehash

RUN pyenv local dbt36 dbt37 dbt27
RUN pip3 install tox wheel

RUN python2.7 -m pip install virtualenv wheel && \
python2.7 -m virtualenv /home/tox/venv2.7 && \
/home/tox/venv2.7/bin/python -m pip install -U pip tox

RUN python3.6 -m pip install -U pip wheel && \
python3.6 -m venv /home/tox/venv3.6 && \
/home/tox/venv3.6/bin/python -m pip install -U pip tox

RUN python3.7 -m pip install -U pip wheel && \
python3.7 -m venv /home/tox/venv3.7 && \
/home/tox/venv3.7/bin/python -m pip install -U pip tox

RUN python3.8 -m pip install -U pip wheel && \
python3.8 -m venv /home/tox/venv3.8 && \
/home/tox/venv3.8/bin/python -m pip install -U pip tox

USER dbt_test_user

ENV PYTHONIOENCODING=utf-8
ENV LANG C.UTF-8
33 changes: 28 additions & 5 deletions core/dbt/adapters/base/connections.py
@@ -1,6 +1,6 @@
import abc
from multiprocessing import RLock
import os
from multiprocessing import RLock
from threading import get_ident
from typing import (
Dict, Tuple, Hashable, Optional, ContextManager, List
@@ -11,7 +11,10 @@
import dbt.exceptions
import dbt.flags
from dbt.contracts.connection import (
Connection, Identifier, ConnectionState, HasCredentials
Connection, Identifier, ConnectionState, AdapterRequiredConfig
)
from dbt.adapters.base.query_headers import (
QueryStringSetter, MacroQueryStringSetter,
)
from dbt.logger import GLOBAL_LOGGER as logger

@@ -31,10 +34,17 @@ class BaseConnectionManager(metaclass=abc.ABCMeta):
"""
TYPE: str = NotImplemented

def __init__(self, profile: HasCredentials):
def __init__(self, profile: AdapterRequiredConfig):
self.profile = profile
self.thread_connections: Dict[Hashable, Connection] = {}
self.lock: RLock = dbt.flags.MP_CONTEXT.RLock()
self.query_header = QueryStringSetter(self.profile)

def set_query_header(self, manifest=None) -> None:
if manifest is not None:
self.query_header = MacroQueryStringSetter(self.profile, manifest)
else:
self.query_header = QueryStringSetter(self.profile)

@staticmethod
def get_thread_identifier() -> Hashable:
@@ -91,6 +101,10 @@ def set_connection_name(self, name: Optional[str] = None) -> Connection:
# named 'master'
conn_name = 'master'
else:
if not isinstance(name, str):
raise dbt.exceptions.CompilerException(
f'For connection name, got {name} - not a string!'
)
assert isinstance(name, str)
conn_name = name

@@ -221,7 +235,10 @@ def _close_handle(cls, connection: Connection) -> None:
def _rollback(cls, connection: Connection) -> None:
"""Roll back the given connection."""
if dbt.flags.STRICT_MODE:
assert isinstance(connection, Connection)
if not isinstance(connection, Connection):
raise dbt.exceptions.CompilerException(
f'In _rollback, got {connection} - not a Connection!'
)

if connection.transaction_open is False:
raise dbt.exceptions.InternalException(
@@ -236,7 +253,10 @@ def _rollback(cls, connection: Connection) -> None:
@classmethod
def close(cls, connection: Connection) -> Connection:
if dbt.flags.STRICT_MODE:
assert isinstance(connection, Connection)
if not isinstance(connection, Connection):
raise dbt.exceptions.CompilerException(
f'In close, got {connection} - not a Connection!'
)

# if the connection is in closed or init, there's nothing to do
if connection.state in {ConnectionState.CLOSED, ConnectionState.INIT}:
@@ -257,6 +277,9 @@ def commit_if_has_connection(self) -> None:
if connection:
self.commit()

def _add_query_comment(self, sql: str) -> str:
return self.query_header.add(sql)

@abc.abstractmethod
def execute(
self, sql: str, auto_begin: bool = False, fetch: bool = False
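
To make the query-header plumbing above easier to follow: __init__ now starts with a plain QueryStringSetter, set_query_header() upgrades it to a MacroQueryStringSetter once a manifest is available, and _add_query_comment() is the single point where the comment is prepended to outgoing SQL, so subclasses can call it just before executing a query. The sketch below is a self-contained, hypothetical mock of that control flow in plain Python; the Toy* classes and their comment formats are invented for illustration and are not the classes imported from dbt.adapters.base.query_headers.

from typing import Optional


class ToyQueryStringSetter:
    # Stand-in for QueryStringSetter: a fixed comment derived from config.
    def __init__(self, profile_name: str) -> None:
        self.profile_name = profile_name

    def add(self, sql: str) -> str:
        return f"/* profile: {self.profile_name} */\n{sql}"


class ToyMacroQueryStringSetter(ToyQueryStringSetter):
    # Stand-in for MacroQueryStringSetter: the comment is rendered from
    # manifest/project context instead of a static default.
    def __init__(self, profile_name: str, manifest: dict) -> None:
        super().__init__(profile_name)
        self.manifest = manifest

    def add(self, sql: str) -> str:
        comment = self.manifest.get("query_comment", "dbt")
        return f"/* {comment} */\n{sql}"


class ToyConnectionManager:
    def __init__(self, profile_name: str) -> None:
        self.profile_name = profile_name
        # As in the new __init__: start with the plain setter.
        self.query_header = ToyQueryStringSetter(profile_name)

    def set_query_header(self, manifest: Optional[dict] = None) -> None:
        # As in the new set_query_header(): prefer the macro-backed setter
        # once a manifest exists, otherwise keep the plain one.
        if manifest is not None:
            self.query_header = ToyMacroQueryStringSetter(
                self.profile_name, manifest
            )
        else:
            self.query_header = ToyQueryStringSetter(self.profile_name)

    def _add_query_comment(self, sql: str) -> str:
        # As in the new _add_query_comment(): called just before SQL is sent.
        return self.query_header.add(sql)


mgr = ToyConnectionManager("dev")
mgr.set_query_header({"query_comment": "invoked by dbt run"})
print(mgr._add_query_comment("select 1"))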
39 changes: 26 additions & 13 deletions core/dbt/adapters/base/impl.py
@@ -3,7 +3,7 @@
from datetime import datetime
from typing import (
Optional, Tuple, Callable, Container, FrozenSet, Type, Dict, Any, List,
Mapping
Mapping, Iterator,
)

import agate
@@ -13,12 +13,13 @@
import dbt.flags

from dbt.clients.agate_helper import empty_table
from dbt.contracts.graph.compiled import CompileResultNode
from dbt.contracts.graph.manifest import Manifest
from dbt.node_types import NodeType
from dbt.logger import GLOBAL_LOGGER as logger
from dbt.utils import filter_null_values

from dbt.adapters.base.connections import BaseConnectionManager
from dbt.adapters.base.connections import BaseConnectionManager, Connection
from dbt.adapters.base.meta import AdapterMeta, available
from dbt.adapters.base.relation import ComponentName, BaseRelation
from dbt.adapters.base import Column as BaseColumn
@@ -203,20 +204,20 @@ def __init__(self, config):
###
# Methods that pass through to the connection manager
###
def acquire_connection(self, name=None):
def acquire_connection(self, name=None) -> Connection:
return self.connections.set_connection_name(name)

def release_connection(self):
return self.connections.release()
def release_connection(self) -> None:
self.connections.release()

def cleanup_connections(self):
return self.connections.cleanup_all()
def cleanup_connections(self) -> None:
self.connections.cleanup_all()

def clear_transaction(self):
def clear_transaction(self) -> None:
self.connections.clear_transaction()

def commit_if_has_connection(self):
return self.connections.commit_if_has_connection()
def commit_if_has_connection(self) -> None:
self.connections.commit_if_has_connection()

def nice_connection_name(self):
conn = self.connections.get_if_exists()
@@ -225,11 +226,23 @@ def nice_connection_name(self):
return conn.name

@contextmanager
def connection_named(self, name):
def connection_named(
self, name: str, node: Optional[CompileResultNode] = None
):
try:
yield self.acquire_connection(name)
self.connections.query_header.set(name, node)
conn = self.acquire_connection(name)
yield conn
finally:
self.release_connection()
self.connections.query_header.reset()

@contextmanager
def connection_for(
self, node: CompileResultNode
) -> Iterator[Connection]:
with self.connection_named(node.unique_id, node) as conn:
yield conn

@available.parse(lambda *a, **k: ('', empty_table()))
def execute(
@@ -321,7 +334,7 @@ def _get_cache_schemas(
# result is a map whose keys are information_schema Relations without
# identifiers that have appropriate database prefixes, and whose values
# are sets of lowercase schema names that are valid members of those
# schemas
# databases
return info_schema_name_map

def _relations_cache_for_schemas(self, manifest: Manifest) -> None:
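
Tying the two files together: connection_named() in impl.py now sets the query header (from the connection name and, optionally, the compiled node) before yielding the connection and resets it in the finally block, and the new connection_for(node) helper simply names the connection after node.unique_id. A minimal, self-contained sketch of that lifecycle follows; ToyAdapter and ToyQueryHeader are hypothetical stand-ins for illustration, not dbt's classes.

from contextlib import contextmanager
from typing import Iterator, Optional


class ToyQueryHeader:
    def __init__(self) -> None:
        self.comment: Optional[str] = None

    def set(self, name: str, node: Optional[object]) -> None:
        self.comment = f"/* connection: {name} */"

    def reset(self) -> None:
        self.comment = None


class ToyAdapter:
    def __init__(self) -> None:
        self.query_header = ToyQueryHeader()

    def acquire_connection(self, name: str) -> str:
        return f"connection:{name}"  # stand-in for a real Connection object

    def release_connection(self) -> None:
        pass

    @contextmanager
    def connection_named(
        self, name: str, node: Optional[object] = None
    ) -> Iterator[str]:
        # The header is set before the connection is handed out and reset in
        # the finally block, so it is cleared even if the body raises.
        try:
            self.query_header.set(name, node)
            yield self.acquire_connection(name)
        finally:
            self.release_connection()
            self.query_header.reset()

    @contextmanager
    def connection_for(self, node: object) -> Iterator[str]:
        # Name the connection after the node, as connection_for() does with
        # node.unique_id in the diff above.
        with self.connection_named(
            getattr(node, "unique_id", str(node)), node
        ) as conn:
            yield conn


adapter = ToyAdapter()
with adapter.connection_named("master") as conn:
    print(conn, adapter.query_header.comment)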