Merge pull request #2056 from fishtown-analytics/fix/partial-parsing-disabled-models

Fix partial parsing with disabled models (#2055)
beckjake authored Jan 19, 2020
2 parents 722d87c + a8b93f6 commit 66a4f76
Showing 2 changed files with 45 additions and 14 deletions.
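
For context, the failing case involves a project that defines the same model name twice, with all but one copy disabled. The sketch below is a rough illustration, not dbt's real classes: the project, file paths, and plain-dict shapes are hypothetical stand-ins for the cached manifest. It shows why the same unique id can appear in both the cached nodes and the cached disabled entries, which is the situation the fix below handles.

# A minimal sketch of the cached state, assuming hypothetical project/file names
# and plain dicts in place of dbt's real manifest structures.

cached_nodes = {
    # the enabled copy of the model is what ends up in manifest.nodes
    'model.my_project.model': {'original_file_path': 'models/model.sql'},
}
cached_disabled = {
    # the disabled duplicate (same unique id, different file) is only tracked here
    'model.my_project.model': [
        {'original_file_path': 'models/disabled/model.sql'},
    ],
}

# The old sanitized_update branch checked `if node_id in old_result.nodes` first,
# without comparing file paths, so a partial parse over the disabled file's cached
# entries could pick up the enabled copy from the other file; the path check added
# in _process_node below avoids that.
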
47 changes: 34 additions & 13 deletions core/dbt/parser/results.py
@@ -117,6 +117,37 @@ def _get_disabled(
            if n.original_file_path == match_file.path.original_file_path
        ]

+    def _process_node(
+        self,
+        node_id: str,
+        source_file: SourceFile,
+        old_file: SourceFile,
+        old_result: 'ParseResult',
+    ) -> None:
+        """Nodes are a special kind of complicated - there can be multiple
+        with the same name, as long as all but one are disabled.
+        """
+        source_path = source_file.path.original_file_path
+        found: bool = False
+        if node_id in old_result.nodes:
+            old_node = old_result.nodes[node_id]
+            if old_node.original_file_path == source_path:
+                self.add_node(source_file, old_node)
+                found = True
+
+        if node_id in old_result.disabled:
+            matches = old_result._get_disabled(node_id, source_file)
+            for match in matches:
+                self.add_disabled(source_file, match)
+                found = True
+
+        if not found:
+            raise CompilationException(
+                'Expected to find "{}" in cached "manifest.nodes" or '
+                '"manifest.disabled" based on cached file information: {}!'
+                .format(node_id, old_file)
+            )
+
    def sanitized_update(
        self, source_file: SourceFile, old_result: 'ParseResult',
    ) -> bool:
@@ -146,20 +177,10 @@ def sanitized_update
        # because we know this is how we _parsed_ the node, we can safely
        # assume if it's disabled it was done by the project or file, and
        # we can keep our old data
+        # the node ID could be in old_result.disabled AND in old_result.nodes.
+        # In that case, we have to make sure the path also matches.
        for node_id in old_file.nodes:
-            if node_id in old_result.nodes:
-                node = old_result.nodes[node_id]
-                self.add_node(source_file, node)
-            elif node_id in old_result.disabled:
-                matches = old_result._get_disabled(node_id, source_file)
-                for match in matches:
-                    self.add_disabled(source_file, match)
-            else:
-                raise CompilationException(
-                    'Expected to find "{}" in cached "manifest.nodes" or '
-                    '"manifest.disabled" based on cached file information: {}!'
-                    .format(node_id, old_file)
-                )
+            self._process_node(node_id, source_file, old_file, old_result)

        for name in old_file.patches:
            patch = _expect_value(
@@ -83,7 +83,17 @@ def test_postgres_duplicate_model_disabled(self):
query = "select value from {schema}.model" \
.format(schema=self.unique_schema())
result = self.run_sql(query, fetch="one")[0]
assert result == 1
self.assertEqual(result, 1)

@use_profile('postgres')
def test_postgres_duplicate_model_disabled_partial_parsing(self):
self.run_dbt(['clean'])
results = self.run_dbt(["--partial-parse", "run"])
self.assertEqual(len(results), 1)
results = self.run_dbt(["--partial-parse", "run"])
self.assertEqual(len(results), 1)
results = self.run_dbt(["--partial-parse", "run"])
self.assertEqual(len(results), 1)


class TestDuplicateModelEnabledAcrossPackages(DBTIntegrationTest):
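
To make the new lookup easier to follow outside dbt's classes, here is a simplified, standalone paraphrase of what _process_node decides for each cached node id. Plain dicts and hypothetical names stand in for ParseResult, SourceFile, and the manifest; the real method calls add_node/add_disabled and folds the path filter into _get_disabled, as the context line in the diff above shows.

from typing import Dict, List


def process_node(node_id: str,
                 changed_path: str,
                 cached_nodes: Dict[str, dict],
                 cached_disabled: Dict[str, List[dict]]) -> str:
    """Sketch of the decision: re-use a cached node only if its file path matches."""
    found = None
    node = cached_nodes.get(node_id)
    if node is not None and node['original_file_path'] == changed_path:
        found = 'reuse enabled node'        # same id, same file: keep the cached node
    for match in cached_disabled.get(node_id, []):
        if match['original_file_path'] == changed_path:
            found = 'reuse disabled node'   # disabled copy that came from this file
    if found is None:
        raise RuntimeError(
            'Expected to find "{}" in the cached nodes or disabled entries '
            'for {}'.format(node_id, changed_path)
        )
    return found


cached_nodes = {
    'model.my_project.model': {'original_file_path': 'models/model.sql'},
}
cached_disabled = {
    'model.my_project.model': [{'original_file_path': 'models/disabled/model.sql'}],
}

# The enabled file still resolves to its cached node...
assert process_node('model.my_project.model', 'models/model.sql',
                    cached_nodes, cached_disabled) == 'reuse enabled node'
# ...and the disabled duplicate now resolves through the disabled entries rather
# than reusing the enabled copy from the other file, which is what the path check
# buys us during partial parsing.
assert process_node('model.my_project.model', 'models/disabled/model.sql',
                    cached_nodes, cached_disabled) == 'reuse disabled node'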
