From 43c475c2e96c99a65f3d038f8aceecd5c715c088 Mon Sep 17 00:00:00 2001
From: Jacob Beck
Date: Wed, 5 Dec 2018 14:30:42 -0700
Subject: [PATCH] run_operation -> execute_macro

---
 dbt/adapters/base/impl.py                      | 26 ++++++++-------
 dbt/adapters/postgres/impl.py                  |  6 ++--
 dbt/clients/jinja.py                           | 28 +---------------
 dbt/context/common.py                          | 32 +++++++------------
 dbt/contracts/graph/manifest.py                |  7 ----
 dbt/contracts/graph/parsed.py                  |  1 -
 dbt/contracts/graph/unparsed.py                |  3 --
 .../macros/operations/catalog/get_catalog.sql  |  4 ---
 .../operations/relations/get_relations.sql     |  4 ---
 dbt/parser/base.py                             |  7 +---
 dbt/parser/macros.py                           |  4 ---
 dbt/utils.py                                   | 12 -------
 test/unit/test_postgres_adapter.py             |  8 ++---
 13 files changed, 36 insertions(+), 106 deletions(-)
 delete mode 100644 dbt/include/global_project/macros/operations/catalog/get_catalog.sql
 delete mode 100644 dbt/include/global_project/macros/operations/relations/get_relations.sql

diff --git a/dbt/adapters/base/impl.py b/dbt/adapters/base/impl.py
index c3eb71c938d..b6a26485c08 100644
--- a/dbt/adapters/base/impl.py
+++ b/dbt/adapters/base/impl.py
@@ -21,7 +21,7 @@
 from dbt.adapters.base import BaseRelation
 from dbt.adapters.cache import RelationsCache
 
-GET_CATALOG_OPERATION_NAME = 'get_catalog_data'
+GET_CATALOG_MACRO_NAME = 'get_catalog'
 
 
 def _expect_row_value(key, row):
@@ -655,25 +655,29 @@ def convert_agate_type(cls, agate_table, col_idx):
     ###
     # Operations involving the manifest
     ###
-    def run_operation(self, manifest, operation_name):
-        """Look the operation identified by operation_name up in the manifest
-        and run it.
+    def execute_macro(self, manifest, macro_name, project=None):
+        """Look macro_name up in the manifest and execute it.
 
         Return an AttrDict with three attributes: 'table', 'data', and
         'status'. 'table' is an agate.Table.
         """
-        operation = manifest.find_operation_by_name(operation_name, 'dbt')
+        macro = manifest.find_macro_by_name(macro_name, project)
+        if macro is None:
+            raise dbt.exceptions.RuntimeException(
+                'Could not find macro with name {} in project {}'
+                .format(macro_name, project)
+            )
 
         # This causes a reference cycle, as dbt.context.runtime.generate()
         # ends up calling get_adapter, so the import has to be here.
         import dbt.context.runtime
         context = dbt.context.runtime.generate(
-            operation,
+            macro,
             self.config,
             manifest,
         )
 
-        result = operation.generator(context)()
+        result = macro.generator(context)()
         return result
 
     @classmethod
@@ -684,13 +688,13 @@ def _catalog_filter_table(cls, table, manifest):
         return table.where(_catalog_filter_schemas(manifest))
 
     def get_catalog(self, manifest):
-        """Get the catalog for this manifest by running the get catalog
-        operation. Returns an agate.Table of catalog information.
+        """Get the catalog for this manifest by running the get catalog macro.
+        Returns an agate.Table of catalog information.
         """
         try:
-            table = self.run_operation(manifest, GET_CATALOG_OPERATION_NAME)
+            table = self.execute_macro(manifest, GET_CATALOG_MACRO_NAME)
         finally:
-            self.release_connection(GET_CATALOG_OPERATION_NAME)
+            self.release_connection(GET_CATALOG_MACRO_NAME)
 
         results = self._catalog_filter_table(table, manifest)
         return results
diff --git a/dbt/adapters/postgres/impl.py b/dbt/adapters/postgres/impl.py
index 673fbf55ebb..6f809dc6827 100644
--- a/dbt/adapters/postgres/impl.py
+++ b/dbt/adapters/postgres/impl.py
@@ -11,7 +11,7 @@
 from dbt.logger import GLOBAL_LOGGER as logger
 
 
-GET_RELATIONS_OPERATION_NAME = 'get_relations_data'
+GET_RELATIONS_MACRO_NAME = 'get_relations'
 
 
 class PostgresAdapter(SQLAdapter):
@@ -24,9 +24,9 @@ def date_function(cls):
     def _link_cached_relations(self, manifest):
         schemas = manifest.get_used_schemas()
         try:
-            table = self.run_operation(manifest, GET_RELATIONS_OPERATION_NAME)
+            table = self.execute_macro(manifest, GET_RELATIONS_MACRO_NAME)
         finally:
-            self.release_connection(GET_RELATIONS_OPERATION_NAME)
+            self.release_connection(GET_RELATIONS_MACRO_NAME)
         table = self._relations_filter_table(table, schemas)
 
         for (refed_schema, refed_name, dep_schema, dep_name) in table:
diff --git a/dbt/clients/jinja.py b/dbt/clients/jinja.py
index 40cf1ae5950..c82180a798f 100644
--- a/dbt/clients/jinja.py
+++ b/dbt/clients/jinja.py
@@ -92,10 +92,7 @@ def call(*args, **kwargs):
         template = template_cache.get_node_template(node)
         module = template.make_module(context, False, context)
 
-        if node['resource_type'] == NodeType.Operation:
-            macro = module.__dict__[dbt.utils.get_dbt_operation_name(name)]
-        else:
-            macro = module.__dict__[dbt.utils.get_dbt_macro_name(name)]
+        macro = module.__dict__[dbt.utils.get_dbt_macro_name(name)]
         module.__dict__.update(context)
 
         try:
@@ -148,28 +145,6 @@ def parse(self, parser):
         return node
 
 
-class OperationExtension(jinja2.ext.Extension):
-    tags = ['operation']
-
-    def parse(self, parser):
-        node = jinja2.nodes.Macro(lineno=next(parser.stream).lineno)
-        operation_name = \
-            parser.parse_assign_target(name_only=True).name
-
-        node.args = []
-        node.defaults = []
-
-        while parser.stream.skip_if('comma'):
-            target = parser.parse_assign_target(name_only=True)
-
-        node.name = dbt.utils.get_operation_macro_name(operation_name)
-
-        node.body = parser.parse_statements(('name:endoperation',),
-                                            drop_needle=True)
-
-        return node
-
-
 class DocumentationExtension(jinja2.ext.Extension):
     tags = ['docs']
 
@@ -253,7 +228,6 @@ def get_environment(node=None, capture_macros=False):
         args['undefined'] = create_macro_capture_env(node)
 
     args['extensions'].append(MaterializationExtension)
-    args['extensions'].append(OperationExtension)
    args['extensions'].append(DocumentationExtension)
 
     return MacroFuzzEnvironment(**args)
diff --git a/dbt/context/common.py b/dbt/context/common.py
index d55e43d049d..e2740e7e8cd 100644
--- a/dbt/context/common.py
+++ b/dbt/context/common.py
@@ -380,22 +380,6 @@ def generate_base(model, model_dict, config, manifest, source_config,
         "try_or_compiler_error": try_or_compiler_error(model)
     })
 
-    # Operations do not represent database relations, so there should be no
-    # 'this' variable in the context for operations. The Operation branch
-    # below should be removed in a future release. The fake relation below
-    # mirrors the historical implementation, without causing errors around
-    # the missing 'alias' attribute for operations
-    #
-    # https://github.com/fishtown-analytics/dbt/issues/878
-    if model.resource_type == NodeType.Operation:
-        this = db_wrapper.adapter.Relation.create(
-            schema=config.credentials.schema,
-            identifier=model.name
-        )
-    else:
-        this = get_this_relation(db_wrapper, config, model_dict)
-
-    context["this"] = this
 
     return context
 
@@ -418,9 +402,13 @@ def modify_generated_context(context, model, model_dict, config, manifest):
     return context
 
 
-def generate_operation_macro(model, config, manifest, provider):
-    """This is an ugly hack to support the fact that the `docs generate`
-    operation ends up in here, and macros are not nodes.
+def generate_execute_macro(model, config, manifest, provider):
+    """Internally, macros can be executed like nodes, with some restrictions:
+
+    - they don't have all values available that nodes do:
+        - 'this', 'pre_hooks', 'post_hooks', and 'sql' are missing
+        - 'schema' does not use any 'model' information
+    - they can't be configured with config() directives
     """
     model_dict = model.serialize()
     context = generate_base(model, model_dict, config, manifest,
@@ -434,6 +422,10 @@ def generate_model(model, config, manifest, source_config, provider):
     model_dict = model.to_dict()
     context = generate_base(model, model_dict, config, manifest,
                             source_config, provider)
+    # operations (hooks) don't get a 'this'
+    if model.resource_type != NodeType.Operation:
+        this = get_this_relation(context['adapter'], config, model_dict)
+        context['this'] = this
     # overwrite schema if we have it, and hooks + sql
     context.update({
         'schema': model.get('schema', context['schema']),
@@ -454,7 +446,7 @@ def generate(model, config, manifest, source_config=None, provider=None):
     dbt.context.runtime.generate
     """
     if isinstance(model, ParsedMacro):
-        return generate_operation_macro(model, config, manifest, provider)
+        return generate_execute_macro(model, config, manifest, provider)
     else:
         return generate_model(model, config, manifest, source_config,
                               provider)
diff --git a/dbt/contracts/graph/manifest.py b/dbt/contracts/graph/manifest.py
index 815f42fc7ac..456c288906e 100644
--- a/dbt/contracts/graph/manifest.py
+++ b/dbt/contracts/graph/manifest.py
@@ -256,13 +256,6 @@ def find_docs_by_name(self, name, package=None):
                 return doc
         return None
 
-    def find_operation_by_name(self, name, package):
-        """Find a macro in the graph by its name and package name, or None for
-        any package.
-        """
-        return self._find_by_name(name, package, 'macros',
-                                  [NodeType.Operation])
-
     def find_macro_by_name(self, name, package):
         """Find a macro in the graph by its name and package name, or None for
         any package.
diff --git a/dbt/contracts/graph/parsed.py b/dbt/contracts/graph/parsed.py
index f59c209c8ff..d0a77d2f795 100644
--- a/dbt/contracts/graph/parsed.py
+++ b/dbt/contracts/graph/parsed.py
@@ -405,7 +405,6 @@ class ParsedNodePatch(APIObject):
         'resource_type': {
             'enum': [
                 NodeType.Macro,
-                NodeType.Operation,
             ],
         },
         'unique_id': {
diff --git a/dbt/contracts/graph/unparsed.py b/dbt/contracts/graph/unparsed.py
index b9abf31c427..788f3359af9 100644
--- a/dbt/contracts/graph/unparsed.py
+++ b/dbt/contracts/graph/unparsed.py
@@ -62,9 +62,6 @@
             NodeType.Model,
             NodeType.Test,
             NodeType.Analysis,
-            # Note: Hooks fail if you remove this, even though it's
-            # also allowed in ParsedMacro, which seems wrong.
-            # Maybe need to move hook operations into macros?
             NodeType.Operation,
             NodeType.Seed,
             # we need this if parse_node is going to handle archives.
diff --git a/dbt/include/global_project/macros/operations/catalog/get_catalog.sql b/dbt/include/global_project/macros/operations/catalog/get_catalog.sql
deleted file mode 100644
index 4371416adc6..00000000000
--- a/dbt/include/global_project/macros/operations/catalog/get_catalog.sql
+++ /dev/null
@@ -1,4 +0,0 @@
-{% operation get_catalog_data %}
-    {% set catalog = dbt.get_catalog() %}
-    {{ return(catalog) }}
-{% endoperation %}
diff --git a/dbt/include/global_project/macros/operations/relations/get_relations.sql b/dbt/include/global_project/macros/operations/relations/get_relations.sql
deleted file mode 100644
index ad943c2a84b..00000000000
--- a/dbt/include/global_project/macros/operations/relations/get_relations.sql
+++ /dev/null
@@ -1,4 +0,0 @@
-{% operation get_relations_data %}
-    {% set relations = dbt.get_relations() %}
-    {{ return(relations) }}
-{% endoperation %}
diff --git a/dbt/parser/base.py b/dbt/parser/base.py
index fbcd3050c98..a29c8e03745 100644
--- a/dbt/parser/base.py
+++ b/dbt/parser/base.py
@@ -82,13 +82,8 @@ def get_schema_func(self):
             def get_schema(_):
                 return self.default_schema
         else:
-            # use the macro itself as the 'parsed node' to pass into
-            # parser.generate() to get a context.
-            macro_node = get_schema_macro.incorporate(
-                resource_type=NodeType.Operation
-            )
             root_context = dbt.context.parser.generate(
-                macro_node, self.root_project_config,
+                get_schema_macro, self.root_project_config,
                 self.macro_manifest, self.root_project_config
             )
             get_schema = get_schema_macro.generator(root_context)
diff --git a/dbt/parser/macros.py b/dbt/parser/macros.py
index 5e7969fccb7..c03713dae03 100644
--- a/dbt/parser/macros.py
+++ b/dbt/parser/macros.py
@@ -53,10 +53,6 @@ def parse_macro_file(self, macro_file_path, macro_file_contents, root_path,
                 node_type = NodeType.Macro
                 name = macro_name.replace(dbt.utils.MACRO_PREFIX, '')
 
-            elif macro_name.startswith(dbt.utils.OPERATION_PREFIX):
-                node_type = NodeType.Operation
-                name = macro_name.replace(dbt.utils.OPERATION_PREFIX, '')
-
             if node_type != resource_type:
                 continue
diff --git a/dbt/utils.py b/dbt/utils.py
index ca7c2611907..75e4274a3e1 100644
--- a/dbt/utils.py
+++ b/dbt/utils.py
@@ -151,7 +151,6 @@ def find_in_list_by_name(haystack, target_name, target_package, nodetype):
 
 
 MACRO_PREFIX = 'dbt_macro__'
-OPERATION_PREFIX = 'dbt_operation__'
 DOCS_PREFIX = 'dbt_docs__'
 
 
@@ -159,10 +158,6 @@ def get_dbt_macro_name(name):
     return '{}{}'.format(MACRO_PREFIX, name)
 
 
-def get_dbt_operation_name(name):
-    return '{}{}'.format(OPERATION_PREFIX, name)
-
-
 def get_dbt_docs_name(name):
     return '{}{}'.format(DOCS_PREFIX, name)
 
 
@@ -180,13 +175,6 @@ def get_materialization_macro_name(materialization_name, adapter_type=None,
     return name
 
 
-def get_operation_macro_name(operation_name, with_prefix=True):
-    if with_prefix:
-        return get_dbt_operation_name(operation_name)
-    else:
-        return operation_name
-
-
 def get_docs_macro_name(docs_name, with_prefix=True):
     if with_prefix:
         return get_dbt_docs_name(docs_name)
diff --git a/test/unit/test_postgres_adapter.py b/test/unit/test_postgres_adapter.py
index db131714f3d..de438f03a7b 100644
--- a/test/unit/test_postgres_adapter.py
+++ b/test/unit/test_postgres_adapter.py
@@ -133,8 +133,8 @@ def test_set_zero_keepalive(self, psycopg2):
             port=5432,
             connect_timeout=10)
 
-    @mock.patch.object(PostgresAdapter, 'run_operation')
-    def test_get_catalog_various_schemas(self, mock_run):
+    @mock.patch.object(PostgresAdapter, 'execute_macro')
+    def test_get_catalog_various_schemas(self, mock_execute):
         column_names = ['table_schema', 'table_name']
         rows = [
             ('foo', 'bar'),
             ('FOO', 'baz'),
             ('quux', 'bar'),
             ('skip', 'bar')
         ]
-        mock_run.return_value = agate.Table(rows=rows,
-                                            column_names=column_names)
+        mock_execute.return_value = agate.Table(rows=rows,
+                                                column_names=column_names)
 
         mock_manifest = mock.MagicMock()
         mock_manifest.get_used_schemas.return_value = {'foo', 'quux'}
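
A minimal sketch of the adapter-side calling convention after this change, assuming an adapter plugin shaped like the PostgresAdapter above. The ExampleAdapter class, the _get_row_counts method, the 'get_row_counts' macro name, and the dbt.adapters.sql import path are illustrative assumptions and not part of this patch; only execute_macro() and release_connection() come from the code changed here.

    # Hypothetical adapter method using the new execute_macro() entry point.
    from dbt.adapters.sql import SQLAdapter  # import path assumed, not shown in this patch

    GET_ROW_COUNTS_MACRO_NAME = 'get_row_counts'  # assumed name of a plain {% macro %} in a project


    class ExampleAdapter(SQLAdapter):
        def _get_row_counts(self, manifest):
            try:
                # execute_macro() replaces run_operation(): it looks the macro up in
                # the manifest (optionally scoped to a project), calls its generator
                # with a runtime context, and returns whatever the macro return()s.
                table = self.execute_macro(manifest, GET_ROW_COUNTS_MACRO_NAME)
            finally:
                # connections are keyed by name, mirroring get_catalog() above
                self.release_connection(GET_ROW_COUNTS_MACRO_NAME)
            return table

Note that the macro being executed is now an ordinary {% macro %} ... {% endmacro %} block found via find_macro_by_name(), rather than an {% operation %} block like the two deleted .sql files above.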