Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Feature/alias #800

Merged
merged 29 commits into from
Jul 4, 2018
Merged
Show file tree
Hide file tree
Changes from 26 commits
Commits
Show all changes
29 commits
Select commit Hold shift + click to select a range
2b1378c
tests working on newer postgres image
Feb 6, 2018
3726cd1
rename test
Feb 8, 2018
4fb70c1
passing tests; breaking no other tests. looks good!
Feb 9, 2018
76ced75
cleanup and typos
Feb 9, 2018
81e644a
extra line
Feb 9, 2018
ecb7c86
make alias optional, fix parser tests
Feb 9, 2018
69e6c9a
add requested changes
Feb 12, 2018
75b91a8
fix describe_node
Feb 22, 2018
b11ebb0
cleanup
Feb 22, 2018
9fd89d7
Merge branch 'development' into feature/model-aliasing
abelsonlive Feb 22, 2018
a2b5f73
fix logging line
Feb 28, 2018
64f420d
Merge branch 'feature/model-aliasing' of https://github.com/kickstart…
Feb 28, 2018
5040c88
try to get alias from config
Feb 28, 2018
ca7992b
Merge remote-tracking branch 'kickstarter/feature/model-aliasing' int…
jon-rtr May 12, 2018
874d1b9
Temporarily bumping the library version.
jon-rtr May 12, 2018
06d459a
Adding a `make clean` command.
jon-rtr May 12, 2018
144f453
Temporarily bumping the library version.
jon-rtr May 12, 2018
20b84c4
Merge branch 'development' into feature/alias
drewbanin Jun 18, 2018
ff84f2a
bq/seed updates, add tests, catch dupe aliases
drewbanin Jun 18, 2018
6c6f18a
tests passing
drewbanin Jun 19, 2018
f39b5a6
rm alias from macros
drewbanin Jun 23, 2018
d8a2e57
rip out generate_model_alias macro;
drewbanin Jul 2, 2018
73a474e
remove get_alias macro integration test
drewbanin Jul 3, 2018
f872ed0
fix tests for pg, add tests for snowflake/bq
drewbanin Jul 3, 2018
e5739b3
hopeful fix for circle postgres errors
drewbanin Jul 3, 2018
f2ef85a
actually pass literals thru macro
drewbanin Jul 3, 2018
d274196
revert tox.init change
drewbanin Jul 3, 2018
307a4e1
merge development
drewbanin Jul 3, 2018
ed9f884
fix unit tests
drewbanin Jul 4, 2018
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
15 changes: 14 additions & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,20 @@ test-integration:
@echo "Integration test run starting..."
@time docker-compose run test tox -e integration-postgres-py27,integration-postgres-py36,integration-snowflake-py27,integration-snowflake-py36,integration-bigquery-py27,integration-bigquery-py36


# Run only the fast path: the py36 postgres integration suite,
# stopping at the first failure (-x is passed through to pytest).
test-quick:
	@echo "Integration test run starting..."
	@time docker-compose run test tox -e integration-postgres-py36 -- -x

# Remove build, test, and coverage artifacts from the working tree.
clean:
	rm -f .coverage
	rm -rf .eggs/
	rm -rf .tox/
	rm -rf build/
	rm -rf dbt.egg-info/
	rm -f dbt_project.yml
	rm -rf dist/
	rm -f htmlcov/*.{css,html,js,json,png}
	rm -rf logs/
	rm -rf target/
	find . -type f -name '*.pyc' -delete
	find . -type d -name '__pycache__' -depth -delete
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

this looks really useful

11 changes: 7 additions & 4 deletions dbt/adapters/bigquery/impl.py
Original file line number Diff line number Diff line change
Expand Up @@ -231,12 +231,13 @@ def get_timeout(cls, conn):
@classmethod
def materialize_as_view(cls, profile, project_cfg, dataset, model):
model_name = model.get('name')
model_alias = model.get('alias')
model_sql = model.get('injected_sql')

conn = cls.get_connection(profile, project_cfg, model_name)
client = conn.get('handle')

view_ref = dataset.table(model_name)
view_ref = dataset.table(model_alias)
view = google.cloud.bigquery.Table(view_ref)
view.view_query = model_sql
view.view_use_legacy_sql = False
Expand Down Expand Up @@ -281,14 +282,15 @@ def make_date_partitioned_table(cls, profile, project_cfg, dataset_name,
def materialize_as_table(cls, profile, project_cfg, dataset,
model, model_sql, decorator=None):
model_name = model.get('name')
model_alias = model.get('alias')

conn = cls.get_connection(profile, model_name)
client = conn.get('handle')

if decorator is None:
table_name = model_name
table_name = model_alias
else:
table_name = "{}${}".format(model_name, decorator)
table_name = "{}${}".format(model_alias, decorator)

table_ref = dataset.table(table_name)
job_config = google.cloud.bigquery.QueryJobConfig()
Expand All @@ -299,7 +301,8 @@ def materialize_as_table(cls, profile, project_cfg, dataset,
query_job = client.query(model_sql, job_config=job_config)

# this waits for the job to complete
with cls.exception_handler(profile, model_sql, model_name, model_name):
with cls.exception_handler(profile, model_sql, model_alias,
model_name):
query_job.result(timeout=cls.get_timeout(conn))

return "CREATE TABLE"
Expand Down
2 changes: 1 addition & 1 deletion dbt/adapters/bigquery/relation.py
Original file line number Diff line number Diff line change
Expand Up @@ -89,7 +89,7 @@ def create_from_node(cls, profile, node, **kwargs):
return cls.create(
project=profile.get('project'),
schema=node.get('schema'),
identifier=node.get('name'),
identifier=node.get('alias'),
**kwargs)

@classmethod
Expand Down
2 changes: 1 addition & 1 deletion dbt/adapters/default/relation.py
Original file line number Diff line number Diff line change
Expand Up @@ -177,7 +177,7 @@ def create_from_node(cls, profile, node, table_name=None, **kwargs):
return cls.create(
database=profile.get('dbname'),
schema=node.get('schema'),
identifier=node.get('name'),
identifier=node.get('alias'),
table_name=table_name,
**kwargs)

Expand Down
2 changes: 1 addition & 1 deletion dbt/adapters/snowflake/relation.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,5 +48,5 @@ def create_from_node(cls, profile, node, **kwargs):
return cls.create(
database=profile.get('database'),
schema=node.get('schema'),
identifier=node.get('name'),
identifier=node.get('alias'),
**kwargs)
9 changes: 9 additions & 0 deletions dbt/compilation.py
Original file line number Diff line number Diff line change
Expand Up @@ -275,18 +275,27 @@ def get_all_projects(self):
def _check_resource_uniqueness(cls, flat_graph):
    """Validate that every refable node in the graph has a unique name
    and a unique database representation (schema.alias), raising a
    compiler error on the first collision found.
    """
    seen_names = {}
    seen_aliases = {}

    for unique_id, node in flat_graph['nodes'].items():
        # Only refable resources (models, seeds, ...) compete for
        # names and database relations; skip everything else.
        if node.get('resource_type') not in NodeType.refable():
            continue

        name = node['name']
        # Two nodes may share an alias if they build into different
        # schemas, so uniqueness is keyed on "<schema>.<alias>".
        representation = "{}.{}".format(node['schema'], node['alias'])

        if name in seen_names:
            dbt.exceptions.raise_duplicate_resource_name(
                seen_names[name], node)

        if representation in seen_aliases:
            dbt.exceptions.raise_ambiguous_alias(
                seen_aliases[representation], node)

        seen_names[name] = node
        seen_aliases[representation] = node

def compile(self):
linker = Linker()
Expand Down
9 changes: 6 additions & 3 deletions dbt/context/common.py
Original file line number Diff line number Diff line change
@@ -1,9 +1,8 @@
import json
import os
import pytz

from dbt.adapters.factory import get_adapter
from dbt.compat import basestring, to_string
from dbt.compat import basestring
from dbt.node_types import NodeType
from dbt.contracts.graph.parsed import ParsedMacro, ParsedNode

Expand Down Expand Up @@ -405,10 +404,14 @@ def generate(model, project_cfg, flat_graph, provider=None):
"fromjson": fromjson,
"tojson": tojson,
"target": target,
"this": get_this_relation(db_wrapper, project_cfg, profile, model),
"try_or_compiler_error": try_or_compiler_error(model)
})

# Operations do not represent database relations, so 'this' does not apply
if model.get('resource_type') != NodeType.Operation:
context["this"] = get_this_relation(db_wrapper, project_cfg, profile,
model)

context = _add_tracking(context)
context = _add_validation(context)
context = _add_sql_handlers(context)
Expand Down
8 changes: 7 additions & 1 deletion dbt/contracts/graph/parsed.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,6 +87,12 @@
'The actual database string that this will build into.'
)
},
'alias': {
'type': 'string',
'description': (
'The name of the relation that this will build into'
)
},
'refs': {
'type': 'array',
'items': {
Expand Down Expand Up @@ -148,7 +154,7 @@
},
'required': UNPARSED_NODE_CONTRACT['required'] + [
'unique_id', 'fqn', 'schema', 'refs', 'depends_on', 'empty',
'config', 'tags',
'config', 'tags', 'alias',
]
}
)
Expand Down
17 changes: 17 additions & 0 deletions dbt/exceptions.py
Original file line number Diff line number Diff line change
Expand Up @@ -164,6 +164,10 @@ def ref_bad_context(model, target_model_name, target_model_package):
To fix this, add the following hint to the top of the model "{model_name}":

-- depends_on: {ref_string}"""
# This explicitly references model['name'], instead of model['alias'], for
# better error messages. Ex. If models foo_users and bar_users are aliased
# to 'users', in their respective schemas, then you would want to see
# 'bar_users' in your error message instead of just 'users'.
error_msg = base_error_msg.format(
model_name=model['name'],
model_path=model['path'],
Expand Down Expand Up @@ -339,3 +343,16 @@ def raise_duplicate_resource_name(node_1, node_2):
duped_name,
node_1['unique_id'], node_1['original_file_path'],
node_2['unique_id'], node_2['original_file_path']))


def raise_ambiguous_alias(node_1, node_2):
    """Raise a compiler error because two nodes resolve to the same
    database relation (identical schema + alias).
    """
    # Report the collision as "<schema>.<alias>".
    representation = "{}.{}".format(node_1['schema'], node_1['alias'])

    message = (
        'dbt found two resources with the database representation "{}".\ndbt '
        'cannot create two resources with identical database representations. '
        'To fix this,\nchange the "schema" or "alias" configuration of one of '
        'these resources:\n- {} ({})\n- {} ({})'
    ).format(
        representation,
        node_1['unique_id'], node_1['original_file_path'],
        node_2['unique_id'], node_2['original_file_path'],
    )

    raise_compiler_error(message)
Original file line number Diff line number Diff line change
Expand Up @@ -127,8 +127,8 @@
{% endcall %}
{% endfor %}

{%- set identifier = model['name'] -%}
{%- set tmp_identifier = model['name'] + '__dbt_archival_tmp' -%}
{%- set identifier = model['alias'] -%}
{%- set tmp_identifier = identifier + '__dbt_archival_tmp' -%}
{%- set tmp_relation = api.Relation.create(identifier=tmp_identifier, type='table') -%}

{% call statement() %}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,6 @@
{% macro dbt__incremental_delete(target_relation, tmp_relation) -%}

{%- set unique_key = config.require('unique_key') -%}
{%- set identifier = model['name'] -%}

delete
from {{ target_relation }}
Expand All @@ -16,8 +15,8 @@
{%- set sql_where = config.require('sql_where') -%}
{%- set unique_key = config.get('unique_key') -%}

{%- set identifier = model['name'] -%}
{%- set tmp_identifier = model['name'] + '__dbt_incremental_tmp' -%}
{%- set identifier = model['alias'] -%}
{%- set tmp_identifier = identifier + '__dbt_incremental_tmp' -%}

{%- set existing_relations = adapter.list_relations(schema=schema) -%}
{%- set old_relation = adapter.get_relation(relations_list=existing_relations,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,6 @@
{# Load a seed's parsed CSV rows into BigQuery by handing the agate
   table to the adapter as a dataframe upload, targeting the node's
   schema and alias. Honors per-column type overrides from the seed's
   `column_types` config. #}
{% macro bigquery__load_csv_rows(model) %}

{%- set column_override = model['config'].get('column_types', {}) -%}
{{ adapter.load_dataframe(model['schema'], model['alias'], model['agate_table'], column_override) }}

{% endmacro %}
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,7 @@

{% materialization seed, default %}

{%- set identifier = model['name'] -%}
{%- set identifier = model['alias'] -%}
{%- set full_refresh_mode = (flags.FULL_REFRESH == True) -%}
{%- set existing_relations = adapter.list_relations(schema=schema) -%}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@

{% materialization table, adapter='bigquery' -%}

{%- set identifier = model['name'] -%}
{%- set identifier = model['alias'] -%}
{%- set non_destructive_mode = (flags.NON_DESTRUCTIVE == True) -%}
{%- set existing_relations = adapter.list_relations(schema=schema) -%}
{%- set old_relation = adapter.get_relation(relations_list=existing_relations, identifier=identifier) -%}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
{% materialization table, default %}
{%- set identifier = model['name'] -%}
{%- set identifier = model['alias'] -%}
{%- set tmp_identifier = identifier + '__dbt_tmp' -%}
{%- set non_destructive_mode = (flags.NON_DESTRUCTIVE == True) -%}

Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{% materialization view, adapter='bigquery' -%}

{%- set identifier = model['name'] -%}
{%- set identifier = model['alias'] -%}
{%- set non_destructive_mode = (flags.NON_DESTRUCTIVE == True) -%}

{%- set existing_relations = adapter.list_relations(schema=schema) -%}
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
{%- materialization view, default -%}

{%- set identifier = model['name'] -%}
{%- set identifier = model['alias'] -%}
{%- set tmp_identifier = identifier + '__dbt_tmp' -%}
{%- set non_destructive_mode = (flags.NON_DESTRUCTIVE == True) -%}

Expand Down
1 change: 1 addition & 0 deletions dbt/model.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ class SourceConfig(object):
AppendListFields = ['pre-hook', 'post-hook']
ExtendDictFields = ['vars', 'column_types', 'quoting']
ClobberFields = [
'alias',
'schema',
'enabled',
'materialized',
Expand Down
11 changes: 5 additions & 6 deletions dbt/node_runners.py
Original file line number Diff line number Diff line change
Expand Up @@ -260,17 +260,17 @@ def _node_context(cls, adapter, project, node):
def call_get_columns_in_table(schema_name, table_name):
return adapter.get_columns_in_table(
profile, project, schema_name,
table_name, model_name=node.get('name'))
table_name, model_name=node.get('alias'))

def call_get_missing_columns(from_schema, from_table,
to_schema, to_table):
return adapter.get_missing_columns(
profile, project, from_schema, from_table,
to_schema, to_table, node.get('name'))
to_schema, to_table, node.get('alias'))

def call_already_exists(schema, table):
return adapter.already_exists(
profile, project, schema, table, node.get('name'))
profile, project, schema, table, node.get('alias'))

return {
"run_started_at": dbt.tracking.active_user.run_started_at,
Expand Down Expand Up @@ -388,8 +388,7 @@ def after_hooks(cls, project, adapter, results, flat_graph, elapsed):
def describe_node(self):
materialization = dbt.utils.get_materialization(self.node)
schema_name = self.node.get('schema')
node_name = self.node.get('name')

node_name = self.node.get('alias')
return "{} model {}.{}".format(materialization, schema_name, node_name)

def print_start_line(self):
Expand Down Expand Up @@ -499,7 +498,7 @@ class SeedRunner(ModelRunner):

def describe_node(self):
schema_name = self.node.get('schema')
return "seed file {}.{}".format(schema_name, self.node["name"])
return "seed file {}.{}".format(schema_name, self.node['alias'])

@classmethod
def before_run(cls, project, adapter, flat_graph):
Expand Down
7 changes: 4 additions & 3 deletions dbt/parser.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
import copy
import os
import re
import hashlib
Expand Down Expand Up @@ -228,11 +227,12 @@ def parse_node(node, node_path, root_project_config, package_project_config,
config_dict.update(config.config)
node['config'] = config_dict

# Set this temporarily so get_rendered() below has access to a schema
# Set this temporarily so get_rendered() has access to a schema & alias
profile = dbt.utils.get_profile_from_project(root_project_config)
default_schema = profile.get('schema', 'public')
node['schema'] = default_schema

default_alias = node.get('name')
node['alias'] = default_alias
context = dbt.context.parser.generate(node, root_project_config,
{"macros": macros})

Expand All @@ -250,6 +250,7 @@ def parse_node(node, node_path, root_project_config, package_project_config,
schema_override = config.config.get('schema')
get_schema = context.get('generate_schema_name', lambda x: default_schema)
node['schema'] = get_schema(schema_override)
node['alias'] = config.config.get('alias', default_alias)

# Overwrite node config
config_dict = node.get('config', {})
Expand Down
4 changes: 2 additions & 2 deletions dbt/task/seed.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,9 +32,9 @@ def show_table(self, result):
rand_table = table.order_by(lambda x: random.random())

schema = result.node['schema']
name = result.node['name']
alias = result.node['alias']

header = "Random sample of table: {}.{}".format(schema, name)
header = "Random sample of table: {}.{}".format(schema, alias)
logger.info("")
logger.info(header)
logger.info("-" * len(header))
Expand Down
4 changes: 2 additions & 2 deletions dbt/ui/printer.py
Original file line number Diff line number Diff line change
Expand Up @@ -156,7 +156,7 @@ def print_model_result_line(result, schema_name, index, total):
info=info,
model_type=get_materialization(model),
schema=schema_name,
relation=model.get('name')),
relation=model.get('alias')),
status,
index,
total,
Expand Down Expand Up @@ -187,7 +187,7 @@ def print_seed_result_line(result, schema_name, index, total):
"{info} seed file {schema}.{relation}".format(
info=info,
schema=schema_name,
relation=model.get('name')),
relation=model.get('alias')),
status,
index,
total,
Expand Down
Loading