diff --git a/CHANGELOG.md b/CHANGELOG.md
index 0e56c6d2..bb5f86d2 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,15 @@
+# dbt_zendesk v0.17.0
+
+## Breaking Changes (Full refresh required after upgrading)
+- The `partition_by` logic of incremental models running on BigQuery has been adjusted to use a month granularity. This change impacts only BigQuery warehouses and avoids the common `too many partitions` error some users have experienced when partitioning by day. Partitioning by month decreases the number of partitions created, allowing for more performant querying and incremental loads. This change was applied to the following models:
+  - `int_zendesk__field_calendar_spine`
+  - `int_zendesk__field_history_pivot`
+  - `zendesk__ticket_field_history`
+
+## Under the Hood
+- Updated seed files to reflect a real-world ticket field history update scenario.
+- Modified the `consistency_sla_policy_count` validation test to group by `ticket_id` for more accurate testing.
+
 # dbt_zendesk v0.16.0
 ## 🚨 Minor Upgrade 🚨
 Although this update is not a breaking change, it will likely impact the output of the `zendesk__sla_policies` and `zendesk__sla_metrics` models. [PR #154](https://github.com/fivetran/dbt_zendesk/pull/154) includes the following changes:
diff --git a/README.md b/README.md
index 8c0b9fdf..384a6c8d 100644
--- a/README.md
+++ b/README.md
@@ -64,7 +64,7 @@ Include the following zendesk package version in your `packages.yml` file:
 ```yml
 packages:
   - package: fivetran/zendesk
-    version: [">=0.16.0", "<0.17.0"]
+    version: [">=0.17.0", "<0.18.0"]
 ```
 > **Note**: Do not include the Zendesk Support source package. The Zendesk Support transform package already has a dependency on the source in its own `packages.yml` file.
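For reference, the month-granularity change above corresponds to dbt's BigQuery `partition_by` config. The following is a minimal sketch rather than the package's exact source: the `date_day` partition field and `ticket_day_id` unique key are taken from the affected models' columns in the catalog below, and the `target.type` guard reflects that the adjustment applies only to BigQuery warehouses.

```sql
-- Illustrative sketch: partition the incremental model by month on BigQuery.
-- The granularity previously defaulted to day, which can exceed BigQuery's
-- per-table partition limit and raise the `too many partitions` error.
{{
    config(
        materialized='incremental',
        unique_key='ticket_day_id',
        partition_by={'field': 'date_day', 'data_type': 'date', 'granularity': 'month'}
            if target.type == 'bigquery' else none
    )
}}
```

Because BigQuery cannot change an existing table's partitioning in place, the new granularity takes effect only on a rebuild, hence the full-refresh requirement (for example, `dbt run --full-refresh --select package:zendesk` in a project that installs this package).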
diff --git a/dbt_project.yml b/dbt_project.yml
index a18ac5f2..1439368e 100644
--- a/dbt_project.yml
+++ b/dbt_project.yml
@@ -1,5 +1,5 @@
 name: 'zendesk'
-version: '0.16.0'
+version: '0.17.0'
 config-version: 2
diff --git a/docs/catalog.json b/docs/catalog.json
index ec6e3a04..cd4fa25e 100644
--- a/docs/catalog.json
+++ b/docs/catalog.json
@@ -1 +1 @@
-{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/catalog/v1.json", "dbt_version": "1.7.9", "generated_at": "2024-05-14T15:31:24.360147Z", "invocation_id": "a6607f0c-5bee-4c0f-9bfc-3034194b1b1f", "env": {}}, "nodes": {"seed.zendesk_integration_tests.brand_data": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50", "name": "brand_data", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"id": {"type": "INT64", "index": 1, "name": "id", "comment": null}, "_fivetran_deleted": {"type": "BOOL", "index": 2, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 3, "name": "_fivetran_synced", "comment": null}, "active": {"type": "BOOL", "index": 4, "name": "active", "comment": null}, "brand_url": {"type": "STRING", "index": 5, "name": "brand_url", "comment": null}, "default": {"type": "BOOL", "index": 6, "name": "default", "comment": null}, "has_help_center": {"type": "BOOL", "index": 7, "name": "has_help_center", "comment": null}, "help_center_state": {"type": "STRING", "index": 8, "name": "help_center_state", "comment": null}, "logo_content_type": {"type": "STRING", "index": 9, "name": "logo_content_type", "comment": null}, "logo_content_url": {"type": "STRING", "index": 10, "name": "logo_content_url", "comment": null}, "logo_deleted": {"type": "BOOL", "index": 11, "name": "logo_deleted", "comment": null}, "logo_file_name": {"type": "STRING", "index": 12, "name": "logo_file_name", "comment": null}, "logo_height": {"type": "INT64", "index": 13, "name": "logo_height", "comment": null}, "logo_id": {"type": "INT64", "index": 14, "name": "logo_id", "comment": null}, "logo_inline": {"type": "BOOL", "index": 15, "name": "logo_inline", "comment": null}, "logo_mapped_content_url": {"type": "STRING", "index": 16, "name": "logo_mapped_content_url", "comment": null}, "logo_size": {"type": "INT64", "index": 17, "name": "logo_size", "comment": null}, "logo_url": {"type": "STRING", "index": 18, "name": "logo_url", "comment": null}, "logo_width": {"type": "INT64", "index": 19, "name": "logo_width", "comment": null}, "name": {"type": "STRING", "index": 20, "name": "name", "comment": null}, "subdomain": {"type": "STRING", "index": 21, "name": "subdomain", "comment": null}, "url": {"type": "STRING", "index": 22, "name": "url", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 346, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 1, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.brand_data"}, "seed.zendesk_integration_tests.daylight_time_data": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50", "name": "daylight_time_data", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"time_zone": {"type": "STRING", "index": 1, "name": 
"time_zone", "comment": null}, "year": {"type": "INT64", "index": 2, "name": "year", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 3, "name": "_fivetran_synced", "comment": null}, "daylight_end_utc": {"type": "DATETIME", "index": 4, "name": "daylight_end_utc", "comment": null}, "daylight_offset": {"type": "INT64", "index": 5, "name": "daylight_offset", "comment": null}, "daylight_start_utc": {"type": "DATETIME", "index": 6, "name": "daylight_start_utc", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 99, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 2, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.daylight_time_data"}, "seed.zendesk_integration_tests.domain_name_data": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50", "name": "domain_name_data", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"index": {"type": "INT64", "index": 1, "name": "index", "comment": null}, "organization_id": {"type": "INT64", "index": 2, "name": "organization_id", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 3, "name": "_fivetran_synced", "comment": null}, "domain_name": {"type": "STRING", "index": 4, "name": "domain_name", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 580, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 10, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.domain_name_data"}, "seed.zendesk_integration_tests.group_data": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50", "name": "group_data", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"id": {"type": "INT64", "index": 1, "name": "id", "comment": null}, "_fivetran_deleted": {"type": "BOOL", "index": 2, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 3, "name": "_fivetran_synced", "comment": null}, "created_at": {"type": "TIMESTAMP", "index": 4, "name": "created_at", "comment": null}, "name": {"type": "STRING", "index": 5, "name": "name", "comment": null}, "updated_at": {"type": "TIMESTAMP", "index": 6, "name": "updated_at", "comment": null}, "url": {"type": "STRING", "index": 7, "name": "url", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 879, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 8, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.group_data"}, 
"seed.zendesk_integration_tests.organization_data": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50", "name": "organization_data", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"id": {"type": "INT64", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 2, "name": "_fivetran_synced", "comment": null}, "created_at": {"type": "TIMESTAMP", "index": 3, "name": "created_at", "comment": null}, "details": {"type": "INT64", "index": 4, "name": "details", "comment": null}, "external_id": {"type": "INT64", "index": 5, "name": "external_id", "comment": null}, "group_id": {"type": "INT64", "index": 6, "name": "group_id", "comment": null}, "name": {"type": "STRING", "index": 7, "name": "name", "comment": null}, "notes": {"type": "INT64", "index": 8, "name": "notes", "comment": null}, "shared_comments": {"type": "BOOL", "index": 9, "name": "shared_comments", "comment": null}, "shared_tickets": {"type": "BOOL", "index": 10, "name": "shared_tickets", "comment": null}, "updated_at": {"type": "TIMESTAMP", "index": 11, "name": "updated_at", "comment": null}, "url": {"type": "STRING", "index": 12, "name": "url", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 1011, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 10, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.organization_data"}, "seed.zendesk_integration_tests.organization_tag_data": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50", "name": "organization_tag_data", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"organization_id": {"type": "INT64", "index": 1, "name": "organization_id", "comment": null}, "tag": {"type": "STRING", "index": 2, "name": "tag", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 600, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 12, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.organization_tag_data"}, "seed.zendesk_integration_tests.schedule_data": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50", "name": "schedule_data", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"end_time": {"type": "INT64", "index": 1, "name": "end_time", "comment": null}, "id": {"type": "INT64", "index": 2, "name": "id", "comment": null}, "start_time": {"type": "INT64", "index": 3, "name": "start_time", "comment": null}, "_fivetran_deleted": {"type": "BOOL", "index": 4, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 5, "name": "_fivetran_synced", "comment": null}, "end_time_utc": {"type": "INT64", "index": 6, "name": "end_time_utc", 
"comment": null}, "name": {"type": "STRING", "index": 7, "name": "name", "comment": null}, "start_time_utc": {"type": "INT64", "index": 8, "name": "start_time_utc", "comment": null}, "time_zone": {"type": "STRING", "index": 9, "name": "time_zone", "comment": null}, "created_at": {"type": "TIMESTAMP", "index": 10, "name": "created_at", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 480, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 5, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.schedule_data"}, "seed.zendesk_integration_tests.schedule_holiday_data": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50", "name": "schedule_holiday_data", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"id": {"type": "INT64", "index": 1, "name": "id", "comment": null}, "schedule_id": {"type": "INT64", "index": 2, "name": "schedule_id", "comment": null}, "_fivetran_deleted": {"type": "BOOL", "index": 3, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 4, "name": "_fivetran_synced", "comment": null}, "end_date": {"type": "DATE", "index": 5, "name": "end_date", "comment": null}, "name": {"type": "STRING", "index": 6, "name": "name", "comment": null}, "start_date": {"type": "DATE", "index": 7, "name": "start_date", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 112, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 2, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.schedule_holiday_data"}, "seed.zendesk_integration_tests.ticket_comment_data": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50", "name": "ticket_comment_data", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"id": {"type": "INT64", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 2, "name": "_fivetran_synced", "comment": null}, "body": {"type": "STRING", "index": 3, "name": "body", "comment": null}, "created": {"type": "TIMESTAMP", "index": 4, "name": "created", "comment": null}, "facebook_comment": {"type": "BOOL", "index": 5, "name": "facebook_comment", "comment": null}, "public": {"type": "BOOL", "index": 6, "name": "public", "comment": null}, "ticket_id": {"type": "INT64", "index": 7, "name": "ticket_id", "comment": null}, "tweet": {"type": "BOOL", "index": 8, "name": "tweet", "comment": null}, "user_id": {"type": "INT64", "index": 9, "name": "user_id", "comment": null}, "voice_comment": {"type": "BOOL", "index": 10, "name": "voice_comment", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 1031, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 20, 
"include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.ticket_comment_data"}, "seed.zendesk_integration_tests.ticket_data": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50", "name": "ticket_data", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"id": {"type": "INT64", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 2, "name": "_fivetran_synced", "comment": null}, "allow_channelback": {"type": "BOOL", "index": 3, "name": "allow_channelback", "comment": null}, "assignee_id": {"type": "INT64", "index": 4, "name": "assignee_id", "comment": null}, "brand_id": {"type": "INT64", "index": 5, "name": "brand_id", "comment": null}, "created_at": {"type": "TIMESTAMP", "index": 6, "name": "created_at", "comment": null}, "description": {"type": "STRING", "index": 7, "name": "description", "comment": null}, "due_at": {"type": "TIMESTAMP", "index": 8, "name": "due_at", "comment": null}, "external_id": {"type": "INT64", "index": 9, "name": "external_id", "comment": null}, "forum_topic_id": {"type": "INT64", "index": 10, "name": "forum_topic_id", "comment": null}, "group_id": {"type": "INT64", "index": 11, "name": "group_id", "comment": null}, "has_incidents": {"type": "BOOL", "index": 12, "name": "has_incidents", "comment": null}, "is_public": {"type": "BOOL", "index": 13, "name": "is_public", "comment": null}, "organization_id": {"type": "INT64", "index": 14, "name": "organization_id", "comment": null}, "priority": {"type": "INT64", "index": 15, "name": "priority", "comment": null}, "problem_id": {"type": "INT64", "index": 16, "name": "problem_id", "comment": null}, "recipient": {"type": "STRING", "index": 17, "name": "recipient", "comment": null}, "requester_id": {"type": "INT64", "index": 18, "name": "requester_id", "comment": null}, "status": {"type": "STRING", "index": 19, "name": "status", "comment": null}, "subject": {"type": "STRING", "index": 20, "name": "subject", "comment": null}, "submitter_id": {"type": "INT64", "index": 21, "name": "submitter_id", "comment": null}, "system_client": {"type": "INT64", "index": 22, "name": "system_client", "comment": null}, "ticket_form_id": {"type": "INT64", "index": 23, "name": "ticket_form_id", "comment": null}, "type": {"type": "STRING", "index": 24, "name": "type", "comment": null}, "updated_at": {"type": "TIMESTAMP", "index": 25, "name": "updated_at", "comment": null}, "url": {"type": "STRING", "index": 26, "name": "url", "comment": null}, "via_channel": {"type": "STRING", "index": 27, "name": "via_channel", "comment": null}, "via_source_from_id": {"type": "INT64", "index": 28, "name": "via_source_from_id", "comment": null}, "via_source_from_title": {"type": "INT64", "index": 29, "name": "via_source_from_title", "comment": null}, "via_source_rel": {"type": "INT64", "index": 30, "name": "via_source_rel", "comment": null}, "via_source_to_address": {"type": "STRING", "index": 31, "name": "via_source_to_address", "comment": null}, "via_source_to_name": {"type": "STRING", "index": 32, "name": "via_source_to_name", "comment": null}, "merged_ticket_ids": {"type": "STRING", "index": 33, "name": "merged_ticket_ids", "comment": null}, "via_source_from_address": {"type": "INT64", "index": 34, "name": "via_source_from_address", "comment": 
null}, "followup_ids": {"type": "STRING", "index": 35, "name": "followup_ids", "comment": null}, "via_followup_source_id": {"type": "INT64", "index": 36, "name": "via_followup_source_id", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 2196, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 10, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.ticket_data"}, "seed.zendesk_integration_tests.ticket_field_history_data": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50", "name": "ticket_field_history_data", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"field_name": {"type": "STRING", "index": 1, "name": "field_name", "comment": null}, "ticket_id": {"type": "INT64", "index": 2, "name": "ticket_id", "comment": null}, "updated": {"type": "TIMESTAMP", "index": 3, "name": "updated", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 4, "name": "_fivetran_synced", "comment": null}, "user_id": {"type": "INT64", "index": 5, "name": "user_id", "comment": null}, "value": {"type": "STRING", "index": 6, "name": "value", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 805, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 20, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.ticket_field_history_data"}, "seed.zendesk_integration_tests.ticket_form_history_data": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50", "name": "ticket_form_history_data", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"id": {"type": "INT64", "index": 1, "name": "id", "comment": null}, "updated_at": {"type": "DATETIME", "index": 2, "name": "updated_at", "comment": null}, "_fivetran_deleted": {"type": "BOOL", "index": 3, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 4, "name": "_fivetran_synced", "comment": null}, "active": {"type": "BOOL", "index": 5, "name": "active", "comment": null}, "created_at": {"type": "DATETIME", "index": 6, "name": "created_at", "comment": null}, "display_name": {"type": "STRING", "index": 7, "name": "display_name", "comment": null}, "end_user_visible": {"type": "BOOL", "index": 8, "name": "end_user_visible", "comment": null}, "name": {"type": "STRING", "index": 9, "name": "name", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 1545, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 15, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": 
"seed.zendesk_integration_tests.ticket_form_history_data"}, "seed.zendesk_integration_tests.ticket_schedule_data": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50", "name": "ticket_schedule_data", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"created_at": {"type": "TIMESTAMP", "index": 1, "name": "created_at", "comment": null}, "ticket_id": {"type": "INT64", "index": 2, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 3, "name": "_fivetran_synced", "comment": null}, "schedule_id": {"type": "INT64", "index": 4, "name": "schedule_id", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 320, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 10, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.ticket_schedule_data"}, "seed.zendesk_integration_tests.ticket_tag_data": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50", "name": "ticket_tag_data", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"tag": {"type": "STRING", "index": 1, "name": "tag", "comment": null}, "ticket_id": {"type": "INT64", "index": 2, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 261, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 10, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.ticket_tag_data"}, "seed.zendesk_integration_tests.time_zone_data": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50", "name": "time_zone_data", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"time_zone": {"type": "STRING", "index": 1, "name": "time_zone", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 2, "name": "_fivetran_synced", "comment": null}, "standard_offset": {"type": "STRING", "index": 3, "name": "standard_offset", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 48, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 2, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.time_zone_data"}, "seed.zendesk_integration_tests.user_data": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50", "name": "user_data", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"id": {"type": "INT64", "index": 1, "name": "id", "comment": null}, 
"_fivetran_synced": {"type": "TIMESTAMP", "index": 2, "name": "_fivetran_synced", "comment": null}, "active": {"type": "BOOL", "index": 3, "name": "active", "comment": null}, "alias": {"type": "INT64", "index": 4, "name": "alias", "comment": null}, "authenticity_token": {"type": "INT64", "index": 5, "name": "authenticity_token", "comment": null}, "chat_only": {"type": "BOOL", "index": 6, "name": "chat_only", "comment": null}, "created_at": {"type": "TIMESTAMP", "index": 7, "name": "created_at", "comment": null}, "details": {"type": "INT64", "index": 8, "name": "details", "comment": null}, "email": {"type": "STRING", "index": 9, "name": "email", "comment": null}, "external_id": {"type": "INT64", "index": 10, "name": "external_id", "comment": null}, "last_login_at": {"type": "TIMESTAMP", "index": 11, "name": "last_login_at", "comment": null}, "locale": {"type": "STRING", "index": 12, "name": "locale", "comment": null}, "locale_id": {"type": "INT64", "index": 13, "name": "locale_id", "comment": null}, "moderator": {"type": "BOOL", "index": 14, "name": "moderator", "comment": null}, "name": {"type": "STRING", "index": 15, "name": "name", "comment": null}, "notes": {"type": "INT64", "index": 16, "name": "notes", "comment": null}, "only_private_comments": {"type": "BOOL", "index": 17, "name": "only_private_comments", "comment": null}, "organization_id": {"type": "INT64", "index": 18, "name": "organization_id", "comment": null}, "phone": {"type": "INT64", "index": 19, "name": "phone", "comment": null}, "remote_photo_url": {"type": "INT64", "index": 20, "name": "remote_photo_url", "comment": null}, "restricted_agent": {"type": "BOOL", "index": 21, "name": "restricted_agent", "comment": null}, "role": {"type": "STRING", "index": 22, "name": "role", "comment": null}, "shared": {"type": "BOOL", "index": 23, "name": "shared", "comment": null}, "shared_agent": {"type": "BOOL", "index": 24, "name": "shared_agent", "comment": null}, "signature": {"type": "INT64", "index": 25, "name": "signature", "comment": null}, "suspended": {"type": "BOOL", "index": 26, "name": "suspended", "comment": null}, "ticket_restriction": {"type": "STRING", "index": 27, "name": "ticket_restriction", "comment": null}, "time_zone": {"type": "STRING", "index": 28, "name": "time_zone", "comment": null}, "two_factor_auth_enabled": {"type": "BOOL", "index": 29, "name": "two_factor_auth_enabled", "comment": null}, "updated_at": {"type": "TIMESTAMP", "index": 30, "name": "updated_at", "comment": null}, "url": {"type": "STRING", "index": 31, "name": "url", "comment": null}, "verified": {"type": "BOOL", "index": 32, "name": "verified", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 2152, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 10, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.user_data"}, "seed.zendesk_integration_tests.user_tag_data": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50", "name": "user_tag_data", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"tag": {"type": "STRING", "index": 1, "name": "tag", "comment": null}, "user_id": {"type": "INT64", "index": 2, "name": "user_id", 
"comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 500, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 10, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.user_tag_data"}, "model.zendesk.int_zendesk__agent_work_time_business_hours": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__agent_work_time_business_hours", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"ticket_id": {"type": "INT64", "index": 1, "name": "ticket_id", "comment": null}, "sla_applied_at": {"type": "TIMESTAMP", "index": 2, "name": "sla_applied_at", "comment": null}, "target": {"type": "INT64", "index": 3, "name": "target", "comment": null}, "sla_policy_name": {"type": "STRING", "index": 4, "name": "sla_policy_name", "comment": null}, "valid_starting_at": {"type": "TIMESTAMP", "index": 5, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "TIMESTAMP", "index": 6, "name": "valid_ending_at", "comment": null}, "week_number": {"type": "INT64", "index": 7, "name": "week_number", "comment": null}, "ticket_week_start_time_minute": {"type": "INT64", "index": 8, "name": "ticket_week_start_time_minute", "comment": null}, "ticket_week_end_time_minute": {"type": "INT64", "index": 9, "name": "ticket_week_end_time_minute", "comment": null}, "schedule_start_time": {"type": "INT64", "index": 10, "name": "schedule_start_time", "comment": null}, "schedule_end_time": {"type": "INT64", "index": 11, "name": "schedule_end_time", "comment": null}, "scheduled_minutes": {"type": "INT64", "index": 12, "name": "scheduled_minutes", "comment": null}, "running_total_scheduled_minutes": {"type": "INT64", "index": 13, "name": "running_total_scheduled_minutes", "comment": null}, "remaining_target_minutes": {"type": "INT64", "index": 14, "name": "remaining_target_minutes", "comment": null}, "lag_check": {"type": "INT64", "index": 15, "name": "lag_check", "comment": null}, "is_breached_during_schedule": {"type": "BOOL", "index": 16, "name": "is_breached_during_schedule", "comment": null}, "breach_minutes": {"type": "INT64", "index": 17, "name": "breach_minutes", "comment": null}, "breach_minutes_from_week": {"type": "INT64", "index": 18, "name": "breach_minutes_from_week", "comment": null}, "sla_breach_at": {"type": "TIMESTAMP", "index": 19, "name": "sla_breach_at", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 0, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 0, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__agent_work_time_business_hours"}, "model.zendesk.int_zendesk__agent_work_time_calendar_hours": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": 
"int_zendesk__agent_work_time_calendar_hours", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"ticket_id": {"type": "INT64", "index": 1, "name": "ticket_id", "comment": null}, "valid_starting_at": {"type": "TIMESTAMP", "index": 2, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "TIMESTAMP", "index": 3, "name": "valid_ending_at", "comment": null}, "ticket_status": {"type": "STRING", "index": 4, "name": "ticket_status", "comment": null}, "sla_applied_at": {"type": "TIMESTAMP", "index": 5, "name": "sla_applied_at", "comment": null}, "target": {"type": "INT64", "index": 6, "name": "target", "comment": null}, "sla_policy_name": {"type": "STRING", "index": 7, "name": "sla_policy_name", "comment": null}, "ticket_created_at": {"type": "TIMESTAMP", "index": 8, "name": "ticket_created_at", "comment": null}, "in_business_hours": {"type": "BOOL", "index": 9, "name": "in_business_hours", "comment": null}, "calendar_minutes": {"type": "INT64", "index": 10, "name": "calendar_minutes", "comment": null}, "running_total_calendar_minutes": {"type": "INT64", "index": 11, "name": "running_total_calendar_minutes", "comment": null}, "remaining_target_minutes": {"type": "INT64", "index": 12, "name": "remaining_target_minutes", "comment": null}, "is_breached_during_schedule": {"type": "BOOL", "index": 13, "name": "is_breached_during_schedule", "comment": null}, "breach_minutes": {"type": "INT64", "index": 14, "name": "breach_minutes", "comment": null}, "sla_breach_at": {"type": "TIMESTAMP", "index": 15, "name": "sla_breach_at", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 0, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 0, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__agent_work_time_calendar_hours"}, "model.zendesk.int_zendesk__agent_work_time_filtered_statuses": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__agent_work_time_filtered_statuses", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"ticket_id": {"type": "INT64", "index": 1, "name": "ticket_id", "comment": null}, "valid_starting_at": {"type": "TIMESTAMP", "index": 2, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "TIMESTAMP", "index": 3, "name": "valid_ending_at", "comment": null}, "ticket_status": {"type": "STRING", "index": 4, "name": "ticket_status", "comment": null}, "sla_applied_at": {"type": "TIMESTAMP", "index": 5, "name": "sla_applied_at", "comment": null}, "target": {"type": "INT64", "index": 6, "name": "target", "comment": null}, "sla_policy_name": {"type": "STRING", "index": 7, "name": "sla_policy_name", "comment": null}, "ticket_created_at": {"type": "TIMESTAMP", "index": 8, "name": "ticket_created_at", "comment": null}, "in_business_hours": {"type": "BOOL", "index": 9, "name": "in_business_hours", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 0, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 0, "include": true, "description": 
"Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__agent_work_time_filtered_statuses"}, "model.zendesk.int_zendesk__assignee_updates": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__assignee_updates", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"ticket_id": {"type": "INT64", "index": 1, "name": "ticket_id", "comment": null}, "assignee_id": {"type": "INT64", "index": 2, "name": "assignee_id", "comment": null}, "last_updated": {"type": "TIMESTAMP", "index": 3, "name": "last_updated", "comment": null}, "total_updates": {"type": "INT64", "index": 4, "name": "total_updates", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 224, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 10, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__assignee_updates"}, "model.zendesk.int_zendesk__comment_metrics": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__comment_metrics", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"ticket_id": {"type": "INT64", "index": 1, "name": "ticket_id", "comment": null}, "last_comment_added_at": {"type": "TIMESTAMP", "index": 2, "name": "last_comment_added_at", "comment": null}, "count_public_agent_comments": {"type": "INT64", "index": 3, "name": "count_public_agent_comments", "comment": null}, "count_agent_comments": {"type": "INT64", "index": 4, "name": "count_agent_comments", "comment": null}, "count_end_user_comments": {"type": "INT64", "index": 5, "name": "count_end_user_comments", "comment": null}, "count_public_comments": {"type": "INT64", "index": 6, "name": "count_public_comments", "comment": null}, "count_internal_comments": {"type": "INT64", "index": 7, "name": "count_internal_comments", "comment": null}, "total_comments": {"type": "INT64", "index": 8, "name": "total_comments", "comment": null}, "count_ticket_handoffs": {"type": "INT64", "index": 9, "name": "count_ticket_handoffs", "comment": null}, "count_agent_replies": {"type": "INT64", "index": 10, "name": "count_agent_replies", "comment": null}, "is_one_touch_resolution": {"type": "BOOL", "index": 11, "name": "is_one_touch_resolution", "comment": null}, "is_two_touch_resolution": {"type": "BOOL", "index": 12, "name": "is_two_touch_resolution", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 0, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 0, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__comment_metrics"}, "model.zendesk.int_zendesk__field_calendar_spine": {"metadata": {"type": "table", 
"schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__field_calendar_spine", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"date_day": {"type": "DATE", "index": 1, "name": "date_day", "comment": null}, "ticket_id": {"type": "INT64", "index": 2, "name": "ticket_id", "comment": null}, "ticket_day_id": {"type": "STRING", "index": 3, "name": "ticket_day_id", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 280700, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "partitioning_type": {"id": "partitioning_type", "label": "Partitioned By", "value": "date_day", "include": true, "description": "The partitioning column for this table"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 5614, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__field_calendar_spine"}, "model.zendesk.int_zendesk__field_history_pivot": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__field_history_pivot", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"ticket_id": {"type": "INT64", "index": 1, "name": "ticket_id", "comment": null}, "date_day": {"type": "DATE", "index": 2, "name": "date_day", "comment": null}, "status": {"type": "STRING", "index": 3, "name": "status", "comment": null}, "assignee_id": {"type": "STRING", "index": 4, "name": "assignee_id", "comment": null}, "priority": {"type": "STRING", "index": 5, "name": "priority", "comment": null}, "ticket_day_id": {"type": "STRING", "index": 6, "name": "ticket_day_id", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 984, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "partitioning_type": {"id": "partitioning_type", "label": "Partitioned By", "value": "date_day", "include": true, "description": "The partitioning column for this table"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 17, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__field_history_pivot"}, "model.zendesk.int_zendesk__field_history_scd": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__field_history_scd", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"valid_from": {"type": "DATE", "index": 1, "name": "valid_from", "comment": null}, "ticket_id": {"type": "INT64", "index": 2, "name": "ticket_id", "comment": null}, "ticket_day_id": {"type": "STRING", "index": 3, "name": "ticket_day_id", "comment": null}, "status": {"type": "STRING", "index": 4, "name": "status", "comment": null}, "assignee_id": {"type": "STRING", "index": 5, "name": "assignee_id", "comment": null}, "priority": {"type": "STRING", "index": 6, "name": "priority", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 984, "include": true, "description": "Approximate size of table as reported by BigQuery"}, 
"num_rows": {"id": "num_rows", "label": "# Rows", "value": 17, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__field_history_scd"}, "model.zendesk.int_zendesk__latest_ticket_form": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__latest_ticket_form", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"ticket_form_id": {"type": "INT64", "index": 1, "name": "ticket_form_id", "comment": null}, "created_at": {"type": "TIMESTAMP", "index": 2, "name": "created_at", "comment": null}, "updated_at": {"type": "TIMESTAMP", "index": 3, "name": "updated_at", "comment": null}, "display_name": {"type": "STRING", "index": 4, "name": "display_name", "comment": null}, "is_active": {"type": "BOOL", "index": 5, "name": "is_active", "comment": null}, "name": {"type": "STRING", "index": 6, "name": "name", "comment": null}, "latest_form_index": {"type": "INT64", "index": 7, "name": "latest_form_index", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 303, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 3, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__latest_ticket_form"}, "model.zendesk.int_zendesk__organization_aggregates": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__organization_aggregates", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"organization_id": {"type": "INT64", "index": 1, "name": "organization_id", "comment": null}, "created_at": {"type": "TIMESTAMP", "index": 2, "name": "created_at", "comment": null}, "updated_at": {"type": "TIMESTAMP", "index": 3, "name": "updated_at", "comment": null}, "details": {"type": "INT64", "index": 4, "name": "details", "comment": null}, "name": {"type": "STRING", "index": 5, "name": "name", "comment": null}, "external_id": {"type": "INT64", "index": 6, "name": "external_id", "comment": null}, "organization_tags": {"type": "STRING", "index": 7, "name": "organization_tags", "comment": null}, "domain_names": {"type": "STRING", "index": 8, "name": "domain_names", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 311, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 10, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__organization_aggregates"}, "model.zendesk.int_zendesk__reply_time_business_hours": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__reply_time_business_hours", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": 
{"ticket_id": {"type": "INT64", "index": 1, "name": "ticket_id", "comment": null}, "sla_policy_name": {"type": "STRING", "index": 2, "name": "sla_policy_name", "comment": null}, "metric": {"type": "STRING", "index": 3, "name": "metric", "comment": null}, "ticket_created_at": {"type": "TIMESTAMP", "index": 4, "name": "ticket_created_at", "comment": null}, "sla_applied_at": {"type": "TIMESTAMP", "index": 5, "name": "sla_applied_at", "comment": null}, "sla_schedule_start_at": {"type": "TIMESTAMP", "index": 6, "name": "sla_schedule_start_at", "comment": null}, "sla_schedule_end_at": {"type": "TIMESTAMP", "index": 7, "name": "sla_schedule_end_at", "comment": null}, "target": {"type": "INT64", "index": 8, "name": "target", "comment": null}, "sum_lapsed_business_minutes": {"type": "INT64", "index": 9, "name": "sum_lapsed_business_minutes", "comment": null}, "in_business_hours": {"type": "BOOL", "index": 10, "name": "in_business_hours", "comment": null}, "sla_breach_at": {"type": "TIMESTAMP", "index": 11, "name": "sla_breach_at", "comment": null}, "is_breached_during_schedule": {"type": "BOOL", "index": 12, "name": "is_breached_during_schedule", "comment": null}, "total_schedule_weekly_business_minutes": {"type": "INT64", "index": 13, "name": "total_schedule_weekly_business_minutes", "comment": null}, "sla_breach_exact_time": {"type": "TIMESTAMP", "index": 14, "name": "sla_breach_exact_time", "comment": null}, "week_number": {"type": "INT64", "index": 15, "name": "week_number", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 0, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 0, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__reply_time_business_hours"}, "model.zendesk.int_zendesk__reply_time_calendar_hours": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__reply_time_calendar_hours", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"ticket_id": {"type": "INT64", "index": 1, "name": "ticket_id", "comment": null}, "ticket_created_at": {"type": "TIMESTAMP", "index": 2, "name": "ticket_created_at", "comment": null}, "valid_starting_at": {"type": "TIMESTAMP", "index": 3, "name": "valid_starting_at", "comment": null}, "ticket_current_status": {"type": "STRING", "index": 4, "name": "ticket_current_status", "comment": null}, "metric": {"type": "STRING", "index": 5, "name": "metric", "comment": null}, "latest_sla": {"type": "INT64", "index": 6, "name": "latest_sla", "comment": null}, "sla_applied_at": {"type": "TIMESTAMP", "index": 7, "name": "sla_applied_at", "comment": null}, "target": {"type": "INT64", "index": 8, "name": "target", "comment": null}, "in_business_hours": {"type": "BOOL", "index": 9, "name": "in_business_hours", "comment": null}, "sla_policy_name": {"type": "STRING", "index": 10, "name": "sla_policy_name", "comment": null}, "sla_breach_at": {"type": "TIMESTAMP", "index": 11, "name": "sla_breach_at", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 0, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# 
Rows", "value": 0, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__reply_time_calendar_hours"}, "model.zendesk.int_zendesk__reply_time_combined": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__reply_time_combined", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"ticket_id": {"type": "INT64", "index": 1, "name": "ticket_id", "comment": null}, "sla_policy_name": {"type": "STRING", "index": 2, "name": "sla_policy_name", "comment": null}, "metric": {"type": "STRING", "index": 3, "name": "metric", "comment": null}, "ticket_created_at": {"type": "TIMESTAMP", "index": 4, "name": "ticket_created_at", "comment": null}, "sla_applied_at": {"type": "TIMESTAMP", "index": 5, "name": "sla_applied_at", "comment": null}, "sum_lapsed_business_minutes": {"type": "NUMERIC", "index": 6, "name": "sum_lapsed_business_minutes", "comment": null}, "target": {"type": "INT64", "index": 7, "name": "target", "comment": null}, "in_business_hours": {"type": "BOOL", "index": 8, "name": "in_business_hours", "comment": null}, "sla_breach_at": {"type": "TIMESTAMP", "index": 9, "name": "sla_breach_at", "comment": null}, "week_number": {"type": "NUMERIC", "index": 10, "name": "week_number", "comment": null}, "sla_schedule_start_at": {"type": "TIMESTAMP", "index": 11, "name": "sla_schedule_start_at", "comment": null}, "sla_schedule_end_at": {"type": "TIMESTAMP", "index": 12, "name": "sla_schedule_end_at", "comment": null}, "agent_reply_at": {"type": "TIMESTAMP", "index": 13, "name": "agent_reply_at", "comment": null}, "next_solved_at": {"type": "TIMESTAMP", "index": 14, "name": "next_solved_at", "comment": null}, "day_index": {"type": "INT64", "index": 15, "name": "day_index", "comment": null}, "next_schedule_start": {"type": "TIMESTAMP", "index": 16, "name": "next_schedule_start", "comment": null}, "first_sla_breach_at": {"type": "TIMESTAMP", "index": 17, "name": "first_sla_breach_at", "comment": null}, "sum_lapsed_business_minutes_new": {"type": "NUMERIC", "index": 18, "name": "sum_lapsed_business_minutes_new", "comment": null}, "total_runtime_minutes": {"type": "FLOAT64", "index": 19, "name": "total_runtime_minutes", "comment": null}, "current_time_check": {"type": "TIMESTAMP", "index": 20, "name": "current_time_check", "comment": null}, "updated_sla_policy_starts_at": {"type": "TIMESTAMP", "index": 21, "name": "updated_sla_policy_starts_at", "comment": null}, "is_stale_sla_policy": {"type": "BOOL", "index": 22, "name": "is_stale_sla_policy", "comment": null}, "is_sla_breached": {"type": "BOOL", "index": 23, "name": "is_sla_breached", "comment": null}, "total_new_minutes": {"type": "FLOAT64", "index": 24, "name": "total_new_minutes", "comment": null}, "sla_update_at": {"type": "TIMESTAMP", "index": 25, "name": "sla_update_at", "comment": null}, "sla_elapsed_time": {"type": "FLOAT64", "index": 26, "name": "sla_elapsed_time", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 0, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 0, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", 
"value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__reply_time_combined"}, "model.zendesk.int_zendesk__requester_updates": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__requester_updates", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"ticket_id": {"type": "INT64", "index": 1, "name": "ticket_id", "comment": null}, "requester_id": {"type": "INT64", "index": 2, "name": "requester_id", "comment": null}, "last_updated": {"type": "TIMESTAMP", "index": 3, "name": "last_updated", "comment": null}, "total_updates": {"type": "INT64", "index": 4, "name": "total_updates", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 240, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 10, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__requester_updates"}, "model.zendesk.int_zendesk__requester_wait_time_business_hours": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__requester_wait_time_business_hours", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"ticket_id": {"type": "INT64", "index": 1, "name": "ticket_id", "comment": null}, "sla_applied_at": {"type": "TIMESTAMP", "index": 2, "name": "sla_applied_at", "comment": null}, "target": {"type": "INT64", "index": 3, "name": "target", "comment": null}, "sla_policy_name": {"type": "STRING", "index": 4, "name": "sla_policy_name", "comment": null}, "valid_starting_at": {"type": "TIMESTAMP", "index": 5, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "TIMESTAMP", "index": 6, "name": "valid_ending_at", "comment": null}, "week_number": {"type": "INT64", "index": 7, "name": "week_number", "comment": null}, "ticket_week_start_time_minute": {"type": "INT64", "index": 8, "name": "ticket_week_start_time_minute", "comment": null}, "ticket_week_end_time_minute": {"type": "INT64", "index": 9, "name": "ticket_week_end_time_minute", "comment": null}, "schedule_start_time": {"type": "INT64", "index": 10, "name": "schedule_start_time", "comment": null}, "schedule_end_time": {"type": "INT64", "index": 11, "name": "schedule_end_time", "comment": null}, "scheduled_minutes": {"type": "INT64", "index": 12, "name": "scheduled_minutes", "comment": null}, "running_total_scheduled_minutes": {"type": "INT64", "index": 13, "name": "running_total_scheduled_minutes", "comment": null}, "remaining_target_minutes": {"type": "INT64", "index": 14, "name": "remaining_target_minutes", "comment": null}, "lag_check": {"type": "INT64", "index": 15, "name": "lag_check", "comment": null}, "is_breached_during_schedule": {"type": "BOOL", "index": 16, "name": "is_breached_during_schedule", "comment": null}, "breach_minutes": {"type": "INT64", "index": 17, "name": "breach_minutes", "comment": null}, "breach_minutes_from_week": {"type": "INT64", "index": 18, "name": "breach_minutes_from_week", "comment": null}, "sla_breach_at": {"type": "TIMESTAMP", "index": 19, "name": "sla_breach_at", "comment": null}}, "stats": {"num_bytes": {"id": 
"num_bytes", "label": "Approximate Size", "value": 0, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 0, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__requester_wait_time_business_hours"}, "model.zendesk.int_zendesk__requester_wait_time_calendar_hours": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__requester_wait_time_calendar_hours", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"ticket_id": {"type": "INT64", "index": 1, "name": "ticket_id", "comment": null}, "valid_starting_at": {"type": "TIMESTAMP", "index": 2, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "TIMESTAMP", "index": 3, "name": "valid_ending_at", "comment": null}, "ticket_status": {"type": "STRING", "index": 4, "name": "ticket_status", "comment": null}, "sla_applied_at": {"type": "TIMESTAMP", "index": 5, "name": "sla_applied_at", "comment": null}, "target": {"type": "INT64", "index": 6, "name": "target", "comment": null}, "sla_policy_name": {"type": "STRING", "index": 7, "name": "sla_policy_name", "comment": null}, "ticket_created_at": {"type": "TIMESTAMP", "index": 8, "name": "ticket_created_at", "comment": null}, "in_business_hours": {"type": "BOOL", "index": 9, "name": "in_business_hours", "comment": null}, "calendar_minutes": {"type": "INT64", "index": 10, "name": "calendar_minutes", "comment": null}, "running_total_calendar_minutes": {"type": "INT64", "index": 11, "name": "running_total_calendar_minutes", "comment": null}, "remaining_target_minutes": {"type": "INT64", "index": 12, "name": "remaining_target_minutes", "comment": null}, "is_breached_during_schedule": {"type": "BOOL", "index": 13, "name": "is_breached_during_schedule", "comment": null}, "breach_minutes": {"type": "INT64", "index": 14, "name": "breach_minutes", "comment": null}, "sla_breach_at": {"type": "TIMESTAMP", "index": 15, "name": "sla_breach_at", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 0, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 0, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__requester_wait_time_calendar_hours"}, "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__requester_wait_time_filtered_statuses", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"ticket_id": {"type": "INT64", "index": 1, "name": "ticket_id", "comment": null}, "valid_starting_at": {"type": "TIMESTAMP", "index": 2, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "TIMESTAMP", "index": 3, "name": "valid_ending_at", "comment": null}, "ticket_status": {"type": "STRING", "index": 4, "name": "ticket_status", "comment": null}, "sla_applied_at": {"type": "TIMESTAMP", 
"index": 5, "name": "sla_applied_at", "comment": null}, "target": {"type": "INT64", "index": 6, "name": "target", "comment": null}, "sla_policy_name": {"type": "STRING", "index": 7, "name": "sla_policy_name", "comment": null}, "ticket_created_at": {"type": "TIMESTAMP", "index": 8, "name": "ticket_created_at", "comment": null}, "in_business_hours": {"type": "BOOL", "index": 9, "name": "in_business_hours", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 0, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 0, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses"}, "model.zendesk.int_zendesk__schedule_spine": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__schedule_spine", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"schedule_id": {"type": "STRING", "index": 1, "name": "schedule_id", "comment": null}, "valid_from": {"type": "TIMESTAMP", "index": 2, "name": "valid_from", "comment": null}, "valid_until": {"type": "TIMESTAMP", "index": 3, "name": "valid_until", "comment": null}, "start_time_utc": {"type": "INT64", "index": 4, "name": "start_time_utc", "comment": null}, "end_time_utc": {"type": "INT64", "index": 5, "name": "end_time_utc", "comment": null}, "is_holiday_week": {"type": "BOOL", "index": 6, "name": "is_holiday_week", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 155, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 5, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__schedule_spine"}, "model.zendesk.int_zendesk__sla_policy_applied": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__sla_policy_applied", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"ticket_id": {"type": "INT64", "index": 1, "name": "ticket_id", "comment": null}, "ticket_created_at": {"type": "TIMESTAMP", "index": 2, "name": "ticket_created_at", "comment": null}, "valid_starting_at": {"type": "TIMESTAMP", "index": 3, "name": "valid_starting_at", "comment": null}, "ticket_current_status": {"type": "STRING", "index": 4, "name": "ticket_current_status", "comment": null}, "metric": {"type": "STRING", "index": 5, "name": "metric", "comment": null}, "latest_sla": {"type": "INT64", "index": 6, "name": "latest_sla", "comment": null}, "sla_applied_at": {"type": "TIMESTAMP", "index": 7, "name": "sla_applied_at", "comment": null}, "target": {"type": "INT64", "index": 8, "name": "target", "comment": null}, "in_business_hours": {"type": "BOOL", "index": 9, "name": "in_business_hours", "comment": null}, "sla_policy_name": {"type": "STRING", "index": 10, "name": "sla_policy_name", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", 
"value": 0, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 0, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__sla_policy_applied"}, "model.zendesk.int_zendesk__ticket_aggregates": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__ticket_aggregates", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"ticket_id": {"type": "INT64", "index": 1, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 2, "name": "_fivetran_synced", "comment": null}, "assignee_id": {"type": "INT64", "index": 3, "name": "assignee_id", "comment": null}, "brand_id": {"type": "INT64", "index": 4, "name": "brand_id", "comment": null}, "created_at": {"type": "TIMESTAMP", "index": 5, "name": "created_at", "comment": null}, "updated_at": {"type": "TIMESTAMP", "index": 6, "name": "updated_at", "comment": null}, "description": {"type": "STRING", "index": 7, "name": "description", "comment": null}, "due_at": {"type": "TIMESTAMP", "index": 8, "name": "due_at", "comment": null}, "group_id": {"type": "INT64", "index": 9, "name": "group_id", "comment": null}, "external_id": {"type": "INT64", "index": 10, "name": "external_id", "comment": null}, "is_public": {"type": "BOOL", "index": 11, "name": "is_public", "comment": null}, "organization_id": {"type": "INT64", "index": 12, "name": "organization_id", "comment": null}, "priority": {"type": "INT64", "index": 13, "name": "priority", "comment": null}, "recipient": {"type": "STRING", "index": 14, "name": "recipient", "comment": null}, "requester_id": {"type": "INT64", "index": 15, "name": "requester_id", "comment": null}, "status": {"type": "STRING", "index": 16, "name": "status", "comment": null}, "subject": {"type": "STRING", "index": 17, "name": "subject", "comment": null}, "problem_id": {"type": "INT64", "index": 18, "name": "problem_id", "comment": null}, "submitter_id": {"type": "INT64", "index": 19, "name": "submitter_id", "comment": null}, "ticket_form_id": {"type": "INT64", "index": 20, "name": "ticket_form_id", "comment": null}, "type": {"type": "STRING", "index": 21, "name": "type", "comment": null}, "url": {"type": "STRING", "index": 22, "name": "url", "comment": null}, "created_channel": {"type": "STRING", "index": 23, "name": "created_channel", "comment": null}, "source_from_id": {"type": "INT64", "index": 24, "name": "source_from_id", "comment": null}, "source_from_title": {"type": "INT64", "index": 25, "name": "source_from_title", "comment": null}, "source_rel": {"type": "INT64", "index": 26, "name": "source_rel", "comment": null}, "source_to_address": {"type": "STRING", "index": 27, "name": "source_to_address", "comment": null}, "source_to_name": {"type": "STRING", "index": 28, "name": "source_to_name", "comment": null}, "is_incident": {"type": "BOOL", "index": 29, "name": "is_incident", "comment": null}, "ticket_brand_name": {"type": "STRING", "index": 30, "name": "ticket_brand_name", "comment": null}, "ticket_tags": {"type": "STRING", "index": 31, "name": "ticket_tags", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 2142, "include": true, "description": "Approximate 
size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 10, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_aggregates"}, "model.zendesk.int_zendesk__ticket_historical_assignee": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__ticket_historical_assignee", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"ticket_id": {"type": "INT64", "index": 1, "name": "ticket_id", "comment": null}, "first_agent_assignment_date": {"type": "TIMESTAMP", "index": 2, "name": "first_agent_assignment_date", "comment": null}, "first_assignee_id": {"type": "STRING", "index": 3, "name": "first_assignee_id", "comment": null}, "last_agent_assignment_date": {"type": "TIMESTAMP", "index": 4, "name": "last_agent_assignment_date", "comment": null}, "last_assignee_id": {"type": "STRING", "index": 5, "name": "last_assignee_id", "comment": null}, "assignee_stations_count": {"type": "INT64", "index": 6, "name": "assignee_stations_count", "comment": null}, "unique_assignee_count": {"type": "INT64", "index": 7, "name": "unique_assignee_count", "comment": null}, "ticket_unassigned_duration_calendar_minutes": {"type": "FLOAT64", "index": 8, "name": "ticket_unassigned_duration_calendar_minutes", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 52, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 1, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_historical_assignee"}, "model.zendesk.int_zendesk__ticket_historical_group": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__ticket_historical_group", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"ticket_id": {"type": "INT64", "index": 1, "name": "ticket_id", "comment": null}, "group_stations_count": {"type": "INT64", "index": 2, "name": "group_stations_count", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 0, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 0, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_historical_group"}, "model.zendesk.int_zendesk__ticket_historical_satisfaction": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__ticket_historical_satisfaction", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"ticket_id": {"type": "INT64", "index": 1, "name": "ticket_id", "comment": null}, "latest_satisfaction_reason": {"type": "STRING", "index": 2, "name": 
"latest_satisfaction_reason", "comment": null}, "latest_satisfaction_comment": {"type": "STRING", "index": 3, "name": "latest_satisfaction_comment", "comment": null}, "first_satisfaction_score": {"type": "STRING", "index": 4, "name": "first_satisfaction_score", "comment": null}, "latest_satisfaction_score": {"type": "STRING", "index": 5, "name": "latest_satisfaction_score", "comment": null}, "count_satisfaction_scores": {"type": "INT64", "index": 6, "name": "count_satisfaction_scores", "comment": null}, "is_good_to_bad_satisfaction_score": {"type": "BOOL", "index": 7, "name": "is_good_to_bad_satisfaction_score", "comment": null}, "is_bad_to_good_satisfaction_score": {"type": "BOOL", "index": 8, "name": "is_bad_to_good_satisfaction_score", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 0, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 0, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_historical_satisfaction"}, "model.zendesk.int_zendesk__ticket_historical_status": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__ticket_historical_status", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"ticket_id": {"type": "INT64", "index": 1, "name": "ticket_id", "comment": null}, "valid_starting_at": {"type": "TIMESTAMP", "index": 2, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "TIMESTAMP", "index": 3, "name": "valid_ending_at", "comment": null}, "status_duration_calendar_minutes": {"type": "INT64", "index": 4, "name": "status_duration_calendar_minutes", "comment": null}, "status": {"type": "STRING", "index": 5, "name": "status", "comment": null}, "ticket_status_counter": {"type": "INT64", "index": 6, "name": "ticket_status_counter", "comment": null}, "unique_status_counter": {"type": "INT64", "index": 7, "name": "unique_status_counter", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 888, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 18, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_historical_status"}, "model.zendesk.int_zendesk__ticket_schedules": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__ticket_schedules", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"ticket_id": {"type": "INT64", "index": 1, "name": "ticket_id", "comment": null}, "schedule_id": {"type": "STRING", "index": 2, "name": "schedule_id", "comment": null}, "schedule_created_at": {"type": "TIMESTAMP", "index": 3, "name": "schedule_created_at", "comment": null}, "schedule_invalidated_at": {"type": "TIMESTAMP", "index": 4, "name": "schedule_invalidated_at", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 680, 
"include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 20, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_schedules"}, "model.zendesk.int_zendesk__updates": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__updates", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"ticket_id": {"type": "INT64", "index": 1, "name": "ticket_id", "comment": null}, "field_name": {"type": "STRING", "index": 2, "name": "field_name", "comment": null}, "value": {"type": "STRING", "index": 3, "name": "value", "comment": null}, "is_public": {"type": "BOOL", "index": 4, "name": "is_public", "comment": null}, "user_id": {"type": "INT64", "index": 5, "name": "user_id", "comment": null}, "valid_starting_at": {"type": "TIMESTAMP", "index": 6, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "TIMESTAMP", "index": 7, "name": "valid_ending_at", "comment": null}, "ticket_created_date": {"type": "TIMESTAMP", "index": 8, "name": "ticket_created_date", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 1612, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 40, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__updates"}, "model.zendesk.int_zendesk__user_aggregates": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__user_aggregates", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"user_id": {"type": "INT64", "index": 1, "name": "user_id", "comment": null}, "external_id": {"type": "INT64", "index": 2, "name": "external_id", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 3, "name": "_fivetran_synced", "comment": null}, "last_login_at": {"type": "TIMESTAMP", "index": 4, "name": "last_login_at", "comment": null}, "created_at": {"type": "TIMESTAMP", "index": 5, "name": "created_at", "comment": null}, "updated_at": {"type": "TIMESTAMP", "index": 6, "name": "updated_at", "comment": null}, "email": {"type": "STRING", "index": 7, "name": "email", "comment": null}, "name": {"type": "STRING", "index": 8, "name": "name", "comment": null}, "organization_id": {"type": "INT64", "index": 9, "name": "organization_id", "comment": null}, "phone": {"type": "INT64", "index": 10, "name": "phone", "comment": null}, "role": {"type": "STRING", "index": 11, "name": "role", "comment": null}, "ticket_restriction": {"type": "STRING", "index": 12, "name": "ticket_restriction", "comment": null}, "time_zone": {"type": "STRING", "index": 13, "name": "time_zone", "comment": null}, "locale": {"type": "STRING", "index": 14, "name": "locale", "comment": null}, "is_active": {"type": "BOOL", "index": 15, "name": "is_active", "comment": null}, "is_suspended": {"type": "BOOL", "index": 16, "name": "is_suspended", "comment": null}, "user_tags": {"type": 
"STRING", "index": 17, "name": "user_tags", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 1342, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 10, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__user_aggregates"}, "model.zendesk_source.stg_zendesk__brand": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__brand", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"brand_id": {"type": "INT64", "index": 1, "name": "brand_id", "comment": null}, "brand_url": {"type": "STRING", "index": 2, "name": "brand_url", "comment": null}, "name": {"type": "STRING", "index": 3, "name": "name", "comment": null}, "subdomain": {"type": "STRING", "index": 4, "name": "subdomain", "comment": null}, "is_active": {"type": "BOOL", "index": 5, "name": "is_active", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 111, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 1, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__brand"}, "model.zendesk_source.stg_zendesk__brand_tmp": {"metadata": {"type": "view", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__brand_tmp", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"id": {"type": "INT64", "index": 1, "name": "id", "comment": null}, "_fivetran_deleted": {"type": "BOOL", "index": 2, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 3, "name": "_fivetran_synced", "comment": null}, "active": {"type": "BOOL", "index": 4, "name": "active", "comment": null}, "brand_url": {"type": "STRING", "index": 5, "name": "brand_url", "comment": null}, "default": {"type": "BOOL", "index": 6, "name": "default", "comment": null}, "has_help_center": {"type": "BOOL", "index": 7, "name": "has_help_center", "comment": null}, "help_center_state": {"type": "STRING", "index": 8, "name": "help_center_state", "comment": null}, "logo_content_type": {"type": "STRING", "index": 9, "name": "logo_content_type", "comment": null}, "logo_content_url": {"type": "STRING", "index": 10, "name": "logo_content_url", "comment": null}, "logo_deleted": {"type": "BOOL", "index": 11, "name": "logo_deleted", "comment": null}, "logo_file_name": {"type": "STRING", "index": 12, "name": "logo_file_name", "comment": null}, "logo_height": {"type": "INT64", "index": 13, "name": "logo_height", "comment": null}, "logo_id": {"type": "INT64", "index": 14, "name": "logo_id", "comment": null}, "logo_inline": {"type": "BOOL", "index": 15, "name": "logo_inline", "comment": null}, "logo_mapped_content_url": {"type": "STRING", "index": 16, "name": "logo_mapped_content_url", "comment": null}, "logo_size": {"type": "INT64", "index": 17, "name": "logo_size", "comment": null}, "logo_url": {"type": 
"STRING", "index": 18, "name": "logo_url", "comment": null}, "logo_width": {"type": "INT64", "index": 19, "name": "logo_width", "comment": null}, "name": {"type": "STRING", "index": 20, "name": "name", "comment": null}, "subdomain": {"type": "STRING", "index": 21, "name": "subdomain", "comment": null}, "url": {"type": "STRING", "index": 22, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__brand_tmp"}, "model.zendesk_source.stg_zendesk__daylight_time": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__daylight_time", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"daylight_end_utc": {"type": "DATETIME", "index": 1, "name": "daylight_end_utc", "comment": null}, "daylight_offset": {"type": "INT64", "index": 2, "name": "daylight_offset", "comment": null}, "daylight_start_utc": {"type": "DATETIME", "index": 3, "name": "daylight_start_utc", "comment": null}, "time_zone": {"type": "STRING", "index": 4, "name": "time_zone", "comment": null}, "year": {"type": "INT64", "index": 5, "name": "year", "comment": null}, "daylight_offset_minutes": {"type": "INT64", "index": 6, "name": "daylight_offset_minutes", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 99, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 2, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__daylight_time"}, "model.zendesk_source.stg_zendesk__daylight_time_tmp": {"metadata": {"type": "view", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__daylight_time_tmp", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"time_zone": {"type": "STRING", "index": 1, "name": "time_zone", "comment": null}, "year": {"type": "INT64", "index": 2, "name": "year", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 3, "name": "_fivetran_synced", "comment": null}, "daylight_end_utc": {"type": "DATETIME", "index": 4, "name": "daylight_end_utc", "comment": null}, "daylight_offset": {"type": "INT64", "index": 5, "name": "daylight_offset", "comment": null}, "daylight_start_utc": {"type": "DATETIME", "index": 6, "name": "daylight_start_utc", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__daylight_time_tmp"}, "model.zendesk_source.stg_zendesk__domain_name": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__domain_name", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"organization_id": {"type": "INT64", "index": 1, "name": "organization_id", "comment": null}, "domain_name": {"type": "STRING", "index": 2, "name": "domain_name", "comment": null}, "index": {"type": "INT64", "index": 3, "name": "index", "comment": null}}, "stats": {"num_bytes": 
{"id": "num_bytes", "label": "Approximate Size", "value": 500, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 10, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__domain_name"}, "model.zendesk_source.stg_zendesk__domain_name_tmp": {"metadata": {"type": "view", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__domain_name_tmp", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"index": {"type": "INT64", "index": 1, "name": "index", "comment": null}, "organization_id": {"type": "INT64", "index": 2, "name": "organization_id", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 3, "name": "_fivetran_synced", "comment": null}, "domain_name": {"type": "STRING", "index": 4, "name": "domain_name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__domain_name_tmp"}, "model.zendesk_source.stg_zendesk__group": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__group", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"group_id": {"type": "INT64", "index": 1, "name": "group_id", "comment": null}, "name": {"type": "STRING", "index": 2, "name": "name", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 255, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 8, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__group"}, "model.zendesk_source.stg_zendesk__group_tmp": {"metadata": {"type": "view", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__group_tmp", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"id": {"type": "INT64", "index": 1, "name": "id", "comment": null}, "_fivetran_deleted": {"type": "BOOL", "index": 2, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 3, "name": "_fivetran_synced", "comment": null}, "created_at": {"type": "TIMESTAMP", "index": 4, "name": "created_at", "comment": null}, "name": {"type": "STRING", "index": 5, "name": "name", "comment": null}, "updated_at": {"type": "TIMESTAMP", "index": 6, "name": "updated_at", "comment": null}, "url": {"type": "STRING", "index": 7, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__group_tmp"}, "model.zendesk_source.stg_zendesk__organization": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__organization", "database": 
"dbt-package-testing", "comment": null, "owner": null}, "columns": {"organization_id": {"type": "INT64", "index": 1, "name": "organization_id", "comment": null}, "created_at": {"type": "TIMESTAMP", "index": 2, "name": "created_at", "comment": null}, "updated_at": {"type": "TIMESTAMP", "index": 3, "name": "updated_at", "comment": null}, "details": {"type": "INT64", "index": 4, "name": "details", "comment": null}, "name": {"type": "STRING", "index": 5, "name": "name", "comment": null}, "external_id": {"type": "INT64", "index": 6, "name": "external_id", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 311, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 10, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__organization"}, "model.zendesk_source.stg_zendesk__organization_tag": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__organization_tag", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"organization_id": {"type": "INT64", "index": 1, "name": "organization_id", "comment": null}, "tags": {"type": "STRING", "index": 2, "name": "tags", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 504, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 12, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__organization_tag"}, "model.zendesk_source.stg_zendesk__organization_tag_tmp": {"metadata": {"type": "view", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__organization_tag_tmp", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"organization_id": {"type": "INT64", "index": 1, "name": "organization_id", "comment": null}, "tag": {"type": "STRING", "index": 2, "name": "tag", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__organization_tag_tmp"}, "model.zendesk_source.stg_zendesk__organization_tmp": {"metadata": {"type": "view", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__organization_tmp", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"id": {"type": "INT64", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 2, "name": "_fivetran_synced", "comment": null}, "created_at": {"type": "TIMESTAMP", "index": 3, "name": "created_at", "comment": null}, "details": {"type": "INT64", "index": 4, "name": "details", "comment": null}, "external_id": {"type": "INT64", "index": 5, "name": "external_id", "comment": null}, "group_id": 
{"type": "INT64", "index": 6, "name": "group_id", "comment": null}, "name": {"type": "STRING", "index": 7, "name": "name", "comment": null}, "notes": {"type": "INT64", "index": 8, "name": "notes", "comment": null}, "shared_comments": {"type": "BOOL", "index": 9, "name": "shared_comments", "comment": null}, "shared_tickets": {"type": "BOOL", "index": 10, "name": "shared_tickets", "comment": null}, "updated_at": {"type": "TIMESTAMP", "index": 11, "name": "updated_at", "comment": null}, "url": {"type": "STRING", "index": 12, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__organization_tmp"}, "model.zendesk_source.stg_zendesk__schedule": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__schedule", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"schedule_id": {"type": "STRING", "index": 1, "name": "schedule_id", "comment": null}, "end_time": {"type": "INT64", "index": 2, "name": "end_time", "comment": null}, "start_time": {"type": "INT64", "index": 3, "name": "start_time", "comment": null}, "schedule_name": {"type": "STRING", "index": 4, "name": "schedule_name", "comment": null}, "created_at": {"type": "TIMESTAMP", "index": 5, "name": "created_at", "comment": null}, "time_zone": {"type": "STRING", "index": 6, "name": "time_zone", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 385, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 5, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__schedule"}, "model.zendesk_source.stg_zendesk__schedule_holiday": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__schedule_holiday", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"_fivetran_deleted": {"type": "BOOL", "index": 1, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 2, "name": "_fivetran_synced", "comment": null}, "holiday_end_date_at": {"type": "TIMESTAMP", "index": 3, "name": "holiday_end_date_at", "comment": null}, "holiday_id": {"type": "STRING", "index": 4, "name": "holiday_id", "comment": null}, "holiday_name": {"type": "STRING", "index": 5, "name": "holiday_name", "comment": null}, "schedule_id": {"type": "STRING", "index": 6, "name": "schedule_id", "comment": null}, "holiday_start_date_at": {"type": "TIMESTAMP", "index": 7, "name": "holiday_start_date_at", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 114, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 2, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": 
"model.zendesk_source.stg_zendesk__schedule_holiday"}, "model.zendesk_source.stg_zendesk__schedule_holiday_tmp": {"metadata": {"type": "view", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__schedule_holiday_tmp", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"id": {"type": "INT64", "index": 1, "name": "id", "comment": null}, "schedule_id": {"type": "INT64", "index": 2, "name": "schedule_id", "comment": null}, "_fivetran_deleted": {"type": "BOOL", "index": 3, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 4, "name": "_fivetran_synced", "comment": null}, "end_date": {"type": "DATE", "index": 5, "name": "end_date", "comment": null}, "name": {"type": "STRING", "index": 6, "name": "name", "comment": null}, "start_date": {"type": "DATE", "index": 7, "name": "start_date", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__schedule_holiday_tmp"}, "model.zendesk_source.stg_zendesk__schedule_tmp": {"metadata": {"type": "view", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__schedule_tmp", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"end_time": {"type": "INT64", "index": 1, "name": "end_time", "comment": null}, "id": {"type": "INT64", "index": 2, "name": "id", "comment": null}, "start_time": {"type": "INT64", "index": 3, "name": "start_time", "comment": null}, "_fivetran_deleted": {"type": "BOOL", "index": 4, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 5, "name": "_fivetran_synced", "comment": null}, "end_time_utc": {"type": "INT64", "index": 6, "name": "end_time_utc", "comment": null}, "name": {"type": "STRING", "index": 7, "name": "name", "comment": null}, "start_time_utc": {"type": "INT64", "index": 8, "name": "start_time_utc", "comment": null}, "time_zone": {"type": "STRING", "index": 9, "name": "time_zone", "comment": null}, "created_at": {"type": "TIMESTAMP", "index": 10, "name": "created_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__schedule_tmp"}, "model.zendesk_source.stg_zendesk__ticket": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__ticket", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"ticket_id": {"type": "INT64", "index": 1, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 2, "name": "_fivetran_synced", "comment": null}, "assignee_id": {"type": "INT64", "index": 3, "name": "assignee_id", "comment": null}, "brand_id": {"type": "INT64", "index": 4, "name": "brand_id", "comment": null}, "created_at": {"type": "TIMESTAMP", "index": 5, "name": "created_at", "comment": null}, "updated_at": {"type": "TIMESTAMP", "index": 6, "name": "updated_at", "comment": null}, "description": {"type": "STRING", "index": 7, "name": "description", "comment": null}, "due_at": {"type": "TIMESTAMP", "index": 8, "name": "due_at", "comment": null}, "group_id": {"type": "INT64", "index": 9, "name": "group_id", "comment": null}, "external_id": {"type": "INT64", "index": 
10, "name": "external_id", "comment": null}, "is_public": {"type": "BOOL", "index": 11, "name": "is_public", "comment": null}, "organization_id": {"type": "INT64", "index": 12, "name": "organization_id", "comment": null}, "priority": {"type": "INT64", "index": 13, "name": "priority", "comment": null}, "recipient": {"type": "STRING", "index": 14, "name": "recipient", "comment": null}, "requester_id": {"type": "INT64", "index": 15, "name": "requester_id", "comment": null}, "status": {"type": "STRING", "index": 16, "name": "status", "comment": null}, "subject": {"type": "STRING", "index": 17, "name": "subject", "comment": null}, "problem_id": {"type": "INT64", "index": 18, "name": "problem_id", "comment": null}, "submitter_id": {"type": "INT64", "index": 19, "name": "submitter_id", "comment": null}, "ticket_form_id": {"type": "INT64", "index": 20, "name": "ticket_form_id", "comment": null}, "type": {"type": "STRING", "index": 21, "name": "type", "comment": null}, "url": {"type": "STRING", "index": 22, "name": "url", "comment": null}, "created_channel": {"type": "STRING", "index": 23, "name": "created_channel", "comment": null}, "source_from_id": {"type": "INT64", "index": 24, "name": "source_from_id", "comment": null}, "source_from_title": {"type": "INT64", "index": 25, "name": "source_from_title", "comment": null}, "source_rel": {"type": "INT64", "index": 26, "name": "source_rel", "comment": null}, "source_to_address": {"type": "STRING", "index": 27, "name": "source_to_address", "comment": null}, "source_to_name": {"type": "STRING", "index": 28, "name": "source_to_name", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 2132, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 10, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket"}, "model.zendesk_source.stg_zendesk__ticket_comment": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__ticket_comment", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"ticket_comment_id": {"type": "INT64", "index": 1, "name": "ticket_comment_id", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 2, "name": "_fivetran_synced", "comment": null}, "body": {"type": "STRING", "index": 3, "name": "body", "comment": null}, "created_at": {"type": "TIMESTAMP", "index": 4, "name": "created_at", "comment": null}, "is_public": {"type": "BOOL", "index": 5, "name": "is_public", "comment": null}, "ticket_id": {"type": "INT64", "index": 6, "name": "ticket_id", "comment": null}, "user_id": {"type": "INT64", "index": 7, "name": "user_id", "comment": null}, "is_facebook_comment": {"type": "BOOL", "index": 8, "name": "is_facebook_comment", "comment": null}, "is_tweet": {"type": "BOOL", "index": 9, "name": "is_tweet", "comment": null}, "is_voice_comment": {"type": "BOOL", "index": 10, "name": "is_voice_comment", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 1031, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 20, "include": true, "description": 
"Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_comment"}, "model.zendesk_source.stg_zendesk__ticket_comment_tmp": {"metadata": {"type": "view", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__ticket_comment_tmp", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"id": {"type": "INT64", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 2, "name": "_fivetran_synced", "comment": null}, "body": {"type": "STRING", "index": 3, "name": "body", "comment": null}, "created": {"type": "TIMESTAMP", "index": 4, "name": "created", "comment": null}, "facebook_comment": {"type": "BOOL", "index": 5, "name": "facebook_comment", "comment": null}, "public": {"type": "BOOL", "index": 6, "name": "public", "comment": null}, "ticket_id": {"type": "INT64", "index": 7, "name": "ticket_id", "comment": null}, "tweet": {"type": "BOOL", "index": 8, "name": "tweet", "comment": null}, "user_id": {"type": "INT64", "index": 9, "name": "user_id", "comment": null}, "voice_comment": {"type": "BOOL", "index": 10, "name": "voice_comment", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_comment_tmp"}, "model.zendesk_source.stg_zendesk__ticket_field_history": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__ticket_field_history", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"ticket_id": {"type": "INT64", "index": 1, "name": "ticket_id", "comment": null}, "field_name": {"type": "STRING", "index": 2, "name": "field_name", "comment": null}, "valid_starting_at": {"type": "TIMESTAMP", "index": 3, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "TIMESTAMP", "index": 4, "name": "valid_ending_at", "comment": null}, "value": {"type": "STRING", "index": 5, "name": "value", "comment": null}, "user_id": {"type": "INT64", "index": 6, "name": "user_id", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 669, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 20, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_field_history"}, "model.zendesk_source.stg_zendesk__ticket_field_history_tmp": {"metadata": {"type": "view", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__ticket_field_history_tmp", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"field_name": {"type": "STRING", "index": 1, "name": "field_name", "comment": null}, "ticket_id": {"type": "INT64", "index": 2, "name": "ticket_id", "comment": null}, "updated": {"type": "TIMESTAMP", "index": 3, "name": "updated", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 4, "name": "_fivetran_synced", "comment": 
null}, "user_id": {"type": "INT64", "index": 5, "name": "user_id", "comment": null}, "value": {"type": "STRING", "index": 6, "name": "value", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_field_history_tmp"}, "model.zendesk_source.stg_zendesk__ticket_form_history": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__ticket_form_history", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"ticket_form_id": {"type": "INT64", "index": 1, "name": "ticket_form_id", "comment": null}, "created_at": {"type": "TIMESTAMP", "index": 2, "name": "created_at", "comment": null}, "updated_at": {"type": "TIMESTAMP", "index": 3, "name": "updated_at", "comment": null}, "display_name": {"type": "STRING", "index": 4, "name": "display_name", "comment": null}, "is_active": {"type": "BOOL", "index": 5, "name": "is_active", "comment": null}, "name": {"type": "STRING", "index": 6, "name": "name", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 1395, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 15, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_form_history"}, "model.zendesk_source.stg_zendesk__ticket_form_history_tmp": {"metadata": {"type": "view", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__ticket_form_history_tmp", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"id": {"type": "INT64", "index": 1, "name": "id", "comment": null}, "updated_at": {"type": "DATETIME", "index": 2, "name": "updated_at", "comment": null}, "_fivetran_deleted": {"type": "BOOL", "index": 3, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 4, "name": "_fivetran_synced", "comment": null}, "active": {"type": "BOOL", "index": 5, "name": "active", "comment": null}, "created_at": {"type": "DATETIME", "index": 6, "name": "created_at", "comment": null}, "display_name": {"type": "STRING", "index": 7, "name": "display_name", "comment": null}, "end_user_visible": {"type": "BOOL", "index": 8, "name": "end_user_visible", "comment": null}, "name": {"type": "STRING", "index": 9, "name": "name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_form_history_tmp"}, "model.zendesk_source.stg_zendesk__ticket_schedule": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__ticket_schedule", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"ticket_id": {"type": "INT64", "index": 1, "name": "ticket_id", "comment": null}, "created_at": {"type": "TIMESTAMP", "index": 2, "name": "created_at", "comment": null}, "schedule_id": {"type": "STRING", "index": 3, "name": "schedule_id", "comment": null}}, "stats": 
{"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 220, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 10, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_schedule"}, "model.zendesk_source.stg_zendesk__ticket_schedule_tmp": {"metadata": {"type": "view", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__ticket_schedule_tmp", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"created_at": {"type": "TIMESTAMP", "index": 1, "name": "created_at", "comment": null}, "ticket_id": {"type": "INT64", "index": 2, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 3, "name": "_fivetran_synced", "comment": null}, "schedule_id": {"type": "INT64", "index": 4, "name": "schedule_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_schedule_tmp"}, "model.zendesk_source.stg_zendesk__ticket_tag": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__ticket_tag", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"ticket_id": {"type": "INT64", "index": 1, "name": "ticket_id", "comment": null}, "tags": {"type": "STRING", "index": 2, "name": "tags", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 181, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 10, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_tag"}, "model.zendesk_source.stg_zendesk__ticket_tag_tmp": {"metadata": {"type": "view", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__ticket_tag_tmp", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"tag": {"type": "STRING", "index": 1, "name": "tag", "comment": null}, "ticket_id": {"type": "INT64", "index": 2, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_tag_tmp"}, "model.zendesk_source.stg_zendesk__ticket_tmp": {"metadata": {"type": "view", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__ticket_tmp", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"id": {"type": "INT64", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 2, "name": "_fivetran_synced", "comment": null}, "allow_channelback": {"type": "BOOL", "index": 3, 
"name": "allow_channelback", "comment": null}, "assignee_id": {"type": "INT64", "index": 4, "name": "assignee_id", "comment": null}, "brand_id": {"type": "INT64", "index": 5, "name": "brand_id", "comment": null}, "created_at": {"type": "TIMESTAMP", "index": 6, "name": "created_at", "comment": null}, "description": {"type": "STRING", "index": 7, "name": "description", "comment": null}, "due_at": {"type": "TIMESTAMP", "index": 8, "name": "due_at", "comment": null}, "external_id": {"type": "INT64", "index": 9, "name": "external_id", "comment": null}, "forum_topic_id": {"type": "INT64", "index": 10, "name": "forum_topic_id", "comment": null}, "group_id": {"type": "INT64", "index": 11, "name": "group_id", "comment": null}, "has_incidents": {"type": "BOOL", "index": 12, "name": "has_incidents", "comment": null}, "is_public": {"type": "BOOL", "index": 13, "name": "is_public", "comment": null}, "organization_id": {"type": "INT64", "index": 14, "name": "organization_id", "comment": null}, "priority": {"type": "INT64", "index": 15, "name": "priority", "comment": null}, "problem_id": {"type": "INT64", "index": 16, "name": "problem_id", "comment": null}, "recipient": {"type": "STRING", "index": 17, "name": "recipient", "comment": null}, "requester_id": {"type": "INT64", "index": 18, "name": "requester_id", "comment": null}, "status": {"type": "STRING", "index": 19, "name": "status", "comment": null}, "subject": {"type": "STRING", "index": 20, "name": "subject", "comment": null}, "submitter_id": {"type": "INT64", "index": 21, "name": "submitter_id", "comment": null}, "system_client": {"type": "INT64", "index": 22, "name": "system_client", "comment": null}, "ticket_form_id": {"type": "INT64", "index": 23, "name": "ticket_form_id", "comment": null}, "type": {"type": "STRING", "index": 24, "name": "type", "comment": null}, "updated_at": {"type": "TIMESTAMP", "index": 25, "name": "updated_at", "comment": null}, "url": {"type": "STRING", "index": 26, "name": "url", "comment": null}, "via_channel": {"type": "STRING", "index": 27, "name": "via_channel", "comment": null}, "via_source_from_id": {"type": "INT64", "index": 28, "name": "via_source_from_id", "comment": null}, "via_source_from_title": {"type": "INT64", "index": 29, "name": "via_source_from_title", "comment": null}, "via_source_rel": {"type": "INT64", "index": 30, "name": "via_source_rel", "comment": null}, "via_source_to_address": {"type": "STRING", "index": 31, "name": "via_source_to_address", "comment": null}, "via_source_to_name": {"type": "STRING", "index": 32, "name": "via_source_to_name", "comment": null}, "merged_ticket_ids": {"type": "STRING", "index": 33, "name": "merged_ticket_ids", "comment": null}, "via_source_from_address": {"type": "INT64", "index": 34, "name": "via_source_from_address", "comment": null}, "followup_ids": {"type": "STRING", "index": 35, "name": "followup_ids", "comment": null}, "via_followup_source_id": {"type": "INT64", "index": 36, "name": "via_followup_source_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_tmp"}, "model.zendesk_source.stg_zendesk__time_zone": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__time_zone", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"standard_offset": {"type": "STRING", "index": 1, "name": 
"standard_offset", "comment": null}, "time_zone": {"type": "STRING", "index": 2, "name": "time_zone", "comment": null}, "standard_offset_minutes": {"type": "INT64", "index": 3, "name": "standard_offset_minutes", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 48, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 2, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__time_zone"}, "model.zendesk_source.stg_zendesk__time_zone_tmp": {"metadata": {"type": "view", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__time_zone_tmp", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"time_zone": {"type": "STRING", "index": 1, "name": "time_zone", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 2, "name": "_fivetran_synced", "comment": null}, "standard_offset": {"type": "STRING", "index": 3, "name": "standard_offset", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__time_zone_tmp"}, "model.zendesk_source.stg_zendesk__user": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__user", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"user_id": {"type": "INT64", "index": 1, "name": "user_id", "comment": null}, "external_id": {"type": "INT64", "index": 2, "name": "external_id", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 3, "name": "_fivetran_synced", "comment": null}, "last_login_at": {"type": "TIMESTAMP", "index": 4, "name": "last_login_at", "comment": null}, "created_at": {"type": "TIMESTAMP", "index": 5, "name": "created_at", "comment": null}, "updated_at": {"type": "TIMESTAMP", "index": 6, "name": "updated_at", "comment": null}, "email": {"type": "STRING", "index": 7, "name": "email", "comment": null}, "name": {"type": "STRING", "index": 8, "name": "name", "comment": null}, "organization_id": {"type": "INT64", "index": 9, "name": "organization_id", "comment": null}, "phone": {"type": "INT64", "index": 10, "name": "phone", "comment": null}, "role": {"type": "STRING", "index": 11, "name": "role", "comment": null}, "ticket_restriction": {"type": "STRING", "index": 12, "name": "ticket_restriction", "comment": null}, "time_zone": {"type": "STRING", "index": 13, "name": "time_zone", "comment": null}, "locale": {"type": "STRING", "index": 14, "name": "locale", "comment": null}, "is_active": {"type": "BOOL", "index": 15, "name": "is_active", "comment": null}, "is_suspended": {"type": "BOOL", "index": 16, "name": "is_suspended", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 1342, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 10, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": 
"Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__user"}, "model.zendesk_source.stg_zendesk__user_tag": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__user_tag", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"user_id": {"type": "INT64", "index": 1, "name": "user_id", "comment": null}, "tags": {"type": "STRING", "index": 2, "name": "tags", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 420, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 10, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__user_tag"}, "model.zendesk_source.stg_zendesk__user_tag_tmp": {"metadata": {"type": "view", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__user_tag_tmp", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"tag": {"type": "STRING", "index": 1, "name": "tag", "comment": null}, "user_id": {"type": "INT64", "index": 2, "name": "user_id", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__user_tag_tmp"}, "model.zendesk_source.stg_zendesk__user_tmp": {"metadata": {"type": "view", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__user_tmp", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"id": {"type": "INT64", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 2, "name": "_fivetran_synced", "comment": null}, "active": {"type": "BOOL", "index": 3, "name": "active", "comment": null}, "alias": {"type": "INT64", "index": 4, "name": "alias", "comment": null}, "authenticity_token": {"type": "INT64", "index": 5, "name": "authenticity_token", "comment": null}, "chat_only": {"type": "BOOL", "index": 6, "name": "chat_only", "comment": null}, "created_at": {"type": "TIMESTAMP", "index": 7, "name": "created_at", "comment": null}, "details": {"type": "INT64", "index": 8, "name": "details", "comment": null}, "email": {"type": "STRING", "index": 9, "name": "email", "comment": null}, "external_id": {"type": "INT64", "index": 10, "name": "external_id", "comment": null}, "last_login_at": {"type": "TIMESTAMP", "index": 11, "name": "last_login_at", "comment": null}, "locale": {"type": "STRING", "index": 12, "name": "locale", "comment": null}, "locale_id": {"type": "INT64", "index": 13, "name": "locale_id", "comment": null}, "moderator": {"type": "BOOL", "index": 14, "name": "moderator", "comment": null}, "name": {"type": "STRING", "index": 15, "name": "name", "comment": null}, "notes": {"type": "INT64", "index": 16, "name": "notes", "comment": null}, "only_private_comments": {"type": "BOOL", "index": 17, "name": "only_private_comments", "comment": null}, "organization_id": {"type": "INT64", "index": 18, "name": "organization_id", "comment": null}, "phone": 
{"type": "INT64", "index": 19, "name": "phone", "comment": null}, "remote_photo_url": {"type": "INT64", "index": 20, "name": "remote_photo_url", "comment": null}, "restricted_agent": {"type": "BOOL", "index": 21, "name": "restricted_agent", "comment": null}, "role": {"type": "STRING", "index": 22, "name": "role", "comment": null}, "shared": {"type": "BOOL", "index": 23, "name": "shared", "comment": null}, "shared_agent": {"type": "BOOL", "index": 24, "name": "shared_agent", "comment": null}, "signature": {"type": "INT64", "index": 25, "name": "signature", "comment": null}, "suspended": {"type": "BOOL", "index": 26, "name": "suspended", "comment": null}, "ticket_restriction": {"type": "STRING", "index": 27, "name": "ticket_restriction", "comment": null}, "time_zone": {"type": "STRING", "index": 28, "name": "time_zone", "comment": null}, "two_factor_auth_enabled": {"type": "BOOL", "index": 29, "name": "two_factor_auth_enabled", "comment": null}, "updated_at": {"type": "TIMESTAMP", "index": 30, "name": "updated_at", "comment": null}, "url": {"type": "STRING", "index": 31, "name": "url", "comment": null}, "verified": {"type": "BOOL", "index": 32, "name": "verified", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__user_tmp"}, "model.zendesk.zendesk__sla_policies": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "zendesk__sla_policies", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"sla_event_id": {"type": "STRING", "index": 1, "name": "sla_event_id", "comment": null}, "ticket_id": {"type": "INT64", "index": 2, "name": "ticket_id", "comment": null}, "sla_policy_name": {"type": "STRING", "index": 3, "name": "sla_policy_name", "comment": null}, "metric": {"type": "STRING", "index": 4, "name": "metric", "comment": null}, "sla_applied_at": {"type": "TIMESTAMP", "index": 5, "name": "sla_applied_at", "comment": null}, "target": {"type": "INT64", "index": 6, "name": "target", "comment": null}, "in_business_hours": {"type": "BOOL", "index": 7, "name": "in_business_hours", "comment": null}, "sla_breach_at": {"type": "TIMESTAMP", "index": 8, "name": "sla_breach_at", "comment": null}, "sla_elapsed_time": {"type": "FLOAT64", "index": 9, "name": "sla_elapsed_time", "comment": null}, "is_active_sla": {"type": "BOOL", "index": 10, "name": "is_active_sla", "comment": null}, "is_sla_breach": {"type": "BOOL", "index": 11, "name": "is_sla_breach", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 0, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 0, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__sla_policies"}, "model.zendesk.zendesk__ticket_backlog": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "zendesk__ticket_backlog", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"date_day": {"type": "DATE", "index": 1, "name": "date_day", "comment": null}, "ticket_id": {"type": "INT64", "index": 2, "name": 
"ticket_id", "comment": null}, "status": {"type": "STRING", "index": 3, "name": "status", "comment": null}, "created_channel": {"type": "STRING", "index": 4, "name": "created_channel", "comment": null}, "assignee_name": {"type": "STRING", "index": 5, "name": "assignee_name", "comment": null}, "priority": {"type": "STRING", "index": 6, "name": "priority", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 0, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 0, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__ticket_backlog"}, "model.zendesk.zendesk__ticket_enriched": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "zendesk__ticket_enriched", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"ticket_id": {"type": "INT64", "index": 1, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 2, "name": "_fivetran_synced", "comment": null}, "assignee_id": {"type": "INT64", "index": 3, "name": "assignee_id", "comment": null}, "brand_id": {"type": "INT64", "index": 4, "name": "brand_id", "comment": null}, "created_at": {"type": "TIMESTAMP", "index": 5, "name": "created_at", "comment": null}, "updated_at": {"type": "TIMESTAMP", "index": 6, "name": "updated_at", "comment": null}, "description": {"type": "STRING", "index": 7, "name": "description", "comment": null}, "due_at": {"type": "TIMESTAMP", "index": 8, "name": "due_at", "comment": null}, "group_id": {"type": "INT64", "index": 9, "name": "group_id", "comment": null}, "external_id": {"type": "INT64", "index": 10, "name": "external_id", "comment": null}, "is_public": {"type": "BOOL", "index": 11, "name": "is_public", "comment": null}, "organization_id": {"type": "INT64", "index": 12, "name": "organization_id", "comment": null}, "priority": {"type": "INT64", "index": 13, "name": "priority", "comment": null}, "recipient": {"type": "STRING", "index": 14, "name": "recipient", "comment": null}, "requester_id": {"type": "INT64", "index": 15, "name": "requester_id", "comment": null}, "status": {"type": "STRING", "index": 16, "name": "status", "comment": null}, "subject": {"type": "STRING", "index": 17, "name": "subject", "comment": null}, "problem_id": {"type": "INT64", "index": 18, "name": "problem_id", "comment": null}, "submitter_id": {"type": "INT64", "index": 19, "name": "submitter_id", "comment": null}, "ticket_form_id": {"type": "INT64", "index": 20, "name": "ticket_form_id", "comment": null}, "type": {"type": "STRING", "index": 21, "name": "type", "comment": null}, "url": {"type": "STRING", "index": 22, "name": "url", "comment": null}, "created_channel": {"type": "STRING", "index": 23, "name": "created_channel", "comment": null}, "source_from_id": {"type": "INT64", "index": 24, "name": "source_from_id", "comment": null}, "source_from_title": {"type": "INT64", "index": 25, "name": "source_from_title", "comment": null}, "source_rel": {"type": "INT64", "index": 26, "name": "source_rel", "comment": null}, "source_to_address": {"type": "STRING", "index": 27, "name": "source_to_address", "comment": null}, "source_to_name": {"type": "STRING", "index": 28, "name": "source_to_name", 
"comment": null}, "is_incident": {"type": "BOOL", "index": 29, "name": "is_incident", "comment": null}, "ticket_brand_name": {"type": "STRING", "index": 30, "name": "ticket_brand_name", "comment": null}, "ticket_tags": {"type": "STRING", "index": 31, "name": "ticket_tags", "comment": null}, "ticket_form_name": {"type": "STRING", "index": 32, "name": "ticket_form_name", "comment": null}, "ticket_total_satisfaction_scores": {"type": "INT64", "index": 33, "name": "ticket_total_satisfaction_scores", "comment": null}, "ticket_first_satisfaction_score": {"type": "STRING", "index": 34, "name": "ticket_first_satisfaction_score", "comment": null}, "ticket_satisfaction_score": {"type": "STRING", "index": 35, "name": "ticket_satisfaction_score", "comment": null}, "ticket_satisfaction_comment": {"type": "STRING", "index": 36, "name": "ticket_satisfaction_comment", "comment": null}, "ticket_satisfaction_reason": {"type": "STRING", "index": 37, "name": "ticket_satisfaction_reason", "comment": null}, "is_good_to_bad_satisfaction_score": {"type": "BOOL", "index": 38, "name": "is_good_to_bad_satisfaction_score", "comment": null}, "is_bad_to_good_satisfaction_score": {"type": "BOOL", "index": 39, "name": "is_bad_to_good_satisfaction_score", "comment": null}, "ticket_organization_domain_names": {"type": "STRING", "index": 40, "name": "ticket_organization_domain_names", "comment": null}, "requester_organization_domain_names": {"type": "STRING", "index": 41, "name": "requester_organization_domain_names", "comment": null}, "requester_external_id": {"type": "INT64", "index": 42, "name": "requester_external_id", "comment": null}, "requester_created_at": {"type": "TIMESTAMP", "index": 43, "name": "requester_created_at", "comment": null}, "requester_updated_at": {"type": "TIMESTAMP", "index": 44, "name": "requester_updated_at", "comment": null}, "requester_role": {"type": "STRING", "index": 45, "name": "requester_role", "comment": null}, "requester_email": {"type": "STRING", "index": 46, "name": "requester_email", "comment": null}, "requester_name": {"type": "STRING", "index": 47, "name": "requester_name", "comment": null}, "is_requester_active": {"type": "BOOL", "index": 48, "name": "is_requester_active", "comment": null}, "requester_locale": {"type": "STRING", "index": 49, "name": "requester_locale", "comment": null}, "requester_time_zone": {"type": "STRING", "index": 50, "name": "requester_time_zone", "comment": null}, "requester_ticket_update_count": {"type": "INT64", "index": 51, "name": "requester_ticket_update_count", "comment": null}, "requester_ticket_last_update_at": {"type": "TIMESTAMP", "index": 52, "name": "requester_ticket_last_update_at", "comment": null}, "requester_last_login_at": {"type": "TIMESTAMP", "index": 53, "name": "requester_last_login_at", "comment": null}, "requester_organization_id": {"type": "INT64", "index": 54, "name": "requester_organization_id", "comment": null}, "requester_organization_name": {"type": "STRING", "index": 55, "name": "requester_organization_name", "comment": null}, "requester_organization_tags": {"type": "STRING", "index": 56, "name": "requester_organization_tags", "comment": null}, "requester_organization_external_id": {"type": "INT64", "index": 57, "name": "requester_organization_external_id", "comment": null}, "requester_organization_created_at": {"type": "TIMESTAMP", "index": 58, "name": "requester_organization_created_at", "comment": null}, "requester_organization_updated_at": {"type": "TIMESTAMP", "index": 59, "name": "requester_organization_updated_at", 
"comment": null}, "submitter_external_id": {"type": "INT64", "index": 60, "name": "submitter_external_id", "comment": null}, "submitter_role": {"type": "STRING", "index": 61, "name": "submitter_role", "comment": null}, "is_agent_submitted": {"type": "BOOL", "index": 62, "name": "is_agent_submitted", "comment": null}, "submitter_email": {"type": "STRING", "index": 63, "name": "submitter_email", "comment": null}, "submitter_name": {"type": "STRING", "index": 64, "name": "submitter_name", "comment": null}, "is_submitter_active": {"type": "BOOL", "index": 65, "name": "is_submitter_active", "comment": null}, "submitter_locale": {"type": "STRING", "index": 66, "name": "submitter_locale", "comment": null}, "submitter_time_zone": {"type": "STRING", "index": 67, "name": "submitter_time_zone", "comment": null}, "assignee_external_id": {"type": "INT64", "index": 68, "name": "assignee_external_id", "comment": null}, "assignee_role": {"type": "STRING", "index": 69, "name": "assignee_role", "comment": null}, "assignee_email": {"type": "STRING", "index": 70, "name": "assignee_email", "comment": null}, "assignee_name": {"type": "STRING", "index": 71, "name": "assignee_name", "comment": null}, "is_assignee_active": {"type": "BOOL", "index": 72, "name": "is_assignee_active", "comment": null}, "assignee_locale": {"type": "STRING", "index": 73, "name": "assignee_locale", "comment": null}, "assignee_time_zone": {"type": "STRING", "index": 74, "name": "assignee_time_zone", "comment": null}, "assignee_ticket_update_count": {"type": "INT64", "index": 75, "name": "assignee_ticket_update_count", "comment": null}, "assignee_ticket_last_update_at": {"type": "TIMESTAMP", "index": 76, "name": "assignee_ticket_last_update_at", "comment": null}, "assignee_last_login_at": {"type": "TIMESTAMP", "index": 77, "name": "assignee_last_login_at", "comment": null}, "group_name": {"type": "STRING", "index": 78, "name": "group_name", "comment": null}, "organization_name": {"type": "STRING", "index": 79, "name": "organization_name", "comment": null}, "requester_tag": {"type": "STRING", "index": 80, "name": "requester_tag", "comment": null}, "submitter_tag": {"type": "STRING", "index": 81, "name": "submitter_tag", "comment": null}, "assignee_tag": {"type": "STRING", "index": 82, "name": "assignee_tag", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 0, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 0, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__ticket_enriched"}, "model.zendesk.zendesk__ticket_field_history": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "zendesk__ticket_field_history", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"ticket_day_id": {"type": "STRING", "index": 1, "name": "ticket_day_id", "comment": null}, "date_day": {"type": "DATE", "index": 2, "name": "date_day", "comment": null}, "ticket_id": {"type": "INT64", "index": 3, "name": "ticket_id", "comment": null}, "status": {"type": "STRING", "index": 4, "name": "status", "comment": null}, "assignee_id": {"type": "STRING", "index": 5, "name": "assignee_id", "comment": null}, "priority": {"type": "STRING", "index": 
6, "name": "priority", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 291508, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "partitioning_type": {"id": "partitioning_type", "label": "Partitioned By", "value": "date_day", "include": true, "description": "The partitioning column for this table"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 5614, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__ticket_field_history"}, "model.zendesk.zendesk__ticket_metrics": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "zendesk__ticket_metrics", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"ticket_id": {"type": "INT64", "index": 1, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 2, "name": "_fivetran_synced", "comment": null}, "assignee_id": {"type": "INT64", "index": 3, "name": "assignee_id", "comment": null}, "brand_id": {"type": "INT64", "index": 4, "name": "brand_id", "comment": null}, "created_at": {"type": "TIMESTAMP", "index": 5, "name": "created_at", "comment": null}, "updated_at": {"type": "TIMESTAMP", "index": 6, "name": "updated_at", "comment": null}, "description": {"type": "STRING", "index": 7, "name": "description", "comment": null}, "due_at": {"type": "TIMESTAMP", "index": 8, "name": "due_at", "comment": null}, "group_id": {"type": "INT64", "index": 9, "name": "group_id", "comment": null}, "external_id": {"type": "INT64", "index": 10, "name": "external_id", "comment": null}, "is_public": {"type": "BOOL", "index": 11, "name": "is_public", "comment": null}, "organization_id": {"type": "INT64", "index": 12, "name": "organization_id", "comment": null}, "priority": {"type": "INT64", "index": 13, "name": "priority", "comment": null}, "recipient": {"type": "STRING", "index": 14, "name": "recipient", "comment": null}, "requester_id": {"type": "INT64", "index": 15, "name": "requester_id", "comment": null}, "status": {"type": "STRING", "index": 16, "name": "status", "comment": null}, "subject": {"type": "STRING", "index": 17, "name": "subject", "comment": null}, "problem_id": {"type": "INT64", "index": 18, "name": "problem_id", "comment": null}, "submitter_id": {"type": "INT64", "index": 19, "name": "submitter_id", "comment": null}, "ticket_form_id": {"type": "INT64", "index": 20, "name": "ticket_form_id", "comment": null}, "type": {"type": "STRING", "index": 21, "name": "type", "comment": null}, "url": {"type": "STRING", "index": 22, "name": "url", "comment": null}, "created_channel": {"type": "STRING", "index": 23, "name": "created_channel", "comment": null}, "source_from_id": {"type": "INT64", "index": 24, "name": "source_from_id", "comment": null}, "source_from_title": {"type": "INT64", "index": 25, "name": "source_from_title", "comment": null}, "source_rel": {"type": "INT64", "index": 26, "name": "source_rel", "comment": null}, "source_to_address": {"type": "STRING", "index": 27, "name": "source_to_address", "comment": null}, "source_to_name": {"type": "STRING", "index": 28, "name": "source_to_name", "comment": null}, "is_incident": {"type": "BOOL", "index": 29, "name": "is_incident", "comment": null}, "ticket_brand_name": {"type": "STRING", 
"index": 30, "name": "ticket_brand_name", "comment": null}, "ticket_tags": {"type": "STRING", "index": 31, "name": "ticket_tags", "comment": null}, "ticket_form_name": {"type": "STRING", "index": 32, "name": "ticket_form_name", "comment": null}, "ticket_total_satisfaction_scores": {"type": "INT64", "index": 33, "name": "ticket_total_satisfaction_scores", "comment": null}, "ticket_first_satisfaction_score": {"type": "STRING", "index": 34, "name": "ticket_first_satisfaction_score", "comment": null}, "ticket_satisfaction_score": {"type": "STRING", "index": 35, "name": "ticket_satisfaction_score", "comment": null}, "ticket_satisfaction_comment": {"type": "STRING", "index": 36, "name": "ticket_satisfaction_comment", "comment": null}, "ticket_satisfaction_reason": {"type": "STRING", "index": 37, "name": "ticket_satisfaction_reason", "comment": null}, "is_good_to_bad_satisfaction_score": {"type": "BOOL", "index": 38, "name": "is_good_to_bad_satisfaction_score", "comment": null}, "is_bad_to_good_satisfaction_score": {"type": "BOOL", "index": 39, "name": "is_bad_to_good_satisfaction_score", "comment": null}, "ticket_organization_domain_names": {"type": "STRING", "index": 40, "name": "ticket_organization_domain_names", "comment": null}, "requester_organization_domain_names": {"type": "STRING", "index": 41, "name": "requester_organization_domain_names", "comment": null}, "requester_external_id": {"type": "INT64", "index": 42, "name": "requester_external_id", "comment": null}, "requester_created_at": {"type": "TIMESTAMP", "index": 43, "name": "requester_created_at", "comment": null}, "requester_updated_at": {"type": "TIMESTAMP", "index": 44, "name": "requester_updated_at", "comment": null}, "requester_role": {"type": "STRING", "index": 45, "name": "requester_role", "comment": null}, "requester_email": {"type": "STRING", "index": 46, "name": "requester_email", "comment": null}, "requester_name": {"type": "STRING", "index": 47, "name": "requester_name", "comment": null}, "is_requester_active": {"type": "BOOL", "index": 48, "name": "is_requester_active", "comment": null}, "requester_locale": {"type": "STRING", "index": 49, "name": "requester_locale", "comment": null}, "requester_time_zone": {"type": "STRING", "index": 50, "name": "requester_time_zone", "comment": null}, "requester_ticket_update_count": {"type": "INT64", "index": 51, "name": "requester_ticket_update_count", "comment": null}, "requester_ticket_last_update_at": {"type": "TIMESTAMP", "index": 52, "name": "requester_ticket_last_update_at", "comment": null}, "requester_last_login_at": {"type": "TIMESTAMP", "index": 53, "name": "requester_last_login_at", "comment": null}, "requester_organization_id": {"type": "INT64", "index": 54, "name": "requester_organization_id", "comment": null}, "requester_organization_name": {"type": "STRING", "index": 55, "name": "requester_organization_name", "comment": null}, "requester_organization_tags": {"type": "STRING", "index": 56, "name": "requester_organization_tags", "comment": null}, "requester_organization_external_id": {"type": "INT64", "index": 57, "name": "requester_organization_external_id", "comment": null}, "requester_organization_created_at": {"type": "TIMESTAMP", "index": 58, "name": "requester_organization_created_at", "comment": null}, "requester_organization_updated_at": {"type": "TIMESTAMP", "index": 59, "name": "requester_organization_updated_at", "comment": null}, "submitter_external_id": {"type": "INT64", "index": 60, "name": "submitter_external_id", "comment": null}, "submitter_role": 
{"type": "STRING", "index": 61, "name": "submitter_role", "comment": null}, "is_agent_submitted": {"type": "BOOL", "index": 62, "name": "is_agent_submitted", "comment": null}, "submitter_email": {"type": "STRING", "index": 63, "name": "submitter_email", "comment": null}, "submitter_name": {"type": "STRING", "index": 64, "name": "submitter_name", "comment": null}, "is_submitter_active": {"type": "BOOL", "index": 65, "name": "is_submitter_active", "comment": null}, "submitter_locale": {"type": "STRING", "index": 66, "name": "submitter_locale", "comment": null}, "submitter_time_zone": {"type": "STRING", "index": 67, "name": "submitter_time_zone", "comment": null}, "assignee_external_id": {"type": "INT64", "index": 68, "name": "assignee_external_id", "comment": null}, "assignee_role": {"type": "STRING", "index": 69, "name": "assignee_role", "comment": null}, "assignee_email": {"type": "STRING", "index": 70, "name": "assignee_email", "comment": null}, "assignee_name": {"type": "STRING", "index": 71, "name": "assignee_name", "comment": null}, "is_assignee_active": {"type": "BOOL", "index": 72, "name": "is_assignee_active", "comment": null}, "assignee_locale": {"type": "STRING", "index": 73, "name": "assignee_locale", "comment": null}, "assignee_time_zone": {"type": "STRING", "index": 74, "name": "assignee_time_zone", "comment": null}, "assignee_ticket_update_count": {"type": "INT64", "index": 75, "name": "assignee_ticket_update_count", "comment": null}, "assignee_ticket_last_update_at": {"type": "TIMESTAMP", "index": 76, "name": "assignee_ticket_last_update_at", "comment": null}, "assignee_last_login_at": {"type": "TIMESTAMP", "index": 77, "name": "assignee_last_login_at", "comment": null}, "group_name": {"type": "STRING", "index": 78, "name": "group_name", "comment": null}, "organization_name": {"type": "STRING", "index": 79, "name": "organization_name", "comment": null}, "requester_tag": {"type": "STRING", "index": 80, "name": "requester_tag", "comment": null}, "submitter_tag": {"type": "STRING", "index": 81, "name": "submitter_tag", "comment": null}, "assignee_tag": {"type": "STRING", "index": 82, "name": "assignee_tag", "comment": null}, "first_reply_time_calendar_minutes": {"type": "FLOAT64", "index": 83, "name": "first_reply_time_calendar_minutes", "comment": null}, "total_reply_time_calendar_minutes": {"type": "FLOAT64", "index": 84, "name": "total_reply_time_calendar_minutes", "comment": null}, "count_agent_comments": {"type": "INT64", "index": 85, "name": "count_agent_comments", "comment": null}, "count_public_agent_comments": {"type": "INT64", "index": 86, "name": "count_public_agent_comments", "comment": null}, "count_end_user_comments": {"type": "INT64", "index": 87, "name": "count_end_user_comments", "comment": null}, "count_public_comments": {"type": "INT64", "index": 88, "name": "count_public_comments", "comment": null}, "count_internal_comments": {"type": "INT64", "index": 89, "name": "count_internal_comments", "comment": null}, "total_comments": {"type": "INT64", "index": 90, "name": "total_comments", "comment": null}, "count_ticket_handoffs": {"type": "INT64", "index": 91, "name": "count_ticket_handoffs", "comment": null}, "ticket_last_comment_date": {"type": "TIMESTAMP", "index": 92, "name": "ticket_last_comment_date", "comment": null}, "unique_assignee_count": {"type": "INT64", "index": 93, "name": "unique_assignee_count", "comment": null}, "assignee_stations_count": {"type": "INT64", "index": 94, "name": "assignee_stations_count", "comment": null}, "group_stations_count": 
{"type": "INT64", "index": 95, "name": "group_stations_count", "comment": null}, "first_assignee_id": {"type": "STRING", "index": 96, "name": "first_assignee_id", "comment": null}, "last_assignee_id": {"type": "STRING", "index": 97, "name": "last_assignee_id", "comment": null}, "first_agent_assignment_date": {"type": "TIMESTAMP", "index": 98, "name": "first_agent_assignment_date", "comment": null}, "last_agent_assignment_date": {"type": "TIMESTAMP", "index": 99, "name": "last_agent_assignment_date", "comment": null}, "first_solved_at": {"type": "TIMESTAMP", "index": 100, "name": "first_solved_at", "comment": null}, "last_solved_at": {"type": "TIMESTAMP", "index": 101, "name": "last_solved_at", "comment": null}, "first_assignment_to_resolution_calendar_minutes": {"type": "INT64", "index": 102, "name": "first_assignment_to_resolution_calendar_minutes", "comment": null}, "last_assignment_to_resolution_calendar_minutes": {"type": "INT64", "index": 103, "name": "last_assignment_to_resolution_calendar_minutes", "comment": null}, "ticket_unassigned_duration_calendar_minutes": {"type": "FLOAT64", "index": 104, "name": "ticket_unassigned_duration_calendar_minutes", "comment": null}, "first_resolution_calendar_minutes": {"type": "INT64", "index": 105, "name": "first_resolution_calendar_minutes", "comment": null}, "final_resolution_calendar_minutes": {"type": "INT64", "index": 106, "name": "final_resolution_calendar_minutes", "comment": null}, "count_resolutions": {"type": "INT64", "index": 107, "name": "count_resolutions", "comment": null}, "count_reopens": {"type": "INT64", "index": 108, "name": "count_reopens", "comment": null}, "ticket_deleted_count": {"type": "INT64", "index": 109, "name": "ticket_deleted_count", "comment": null}, "total_ticket_recoveries": {"type": "INT64", "index": 110, "name": "total_ticket_recoveries", "comment": null}, "last_status_assignment_date": {"type": "TIMESTAMP", "index": 111, "name": "last_status_assignment_date", "comment": null}, "new_status_duration_in_calendar_minutes": {"type": "INT64", "index": 112, "name": "new_status_duration_in_calendar_minutes", "comment": null}, "open_status_duration_in_calendar_minutes": {"type": "INT64", "index": 113, "name": "open_status_duration_in_calendar_minutes", "comment": null}, "agent_wait_time_in_calendar_minutes": {"type": "INT64", "index": 114, "name": "agent_wait_time_in_calendar_minutes", "comment": null}, "requester_wait_time_in_calendar_minutes": {"type": "INT64", "index": 115, "name": "requester_wait_time_in_calendar_minutes", "comment": null}, "solve_time_in_calendar_minutes": {"type": "INT64", "index": 116, "name": "solve_time_in_calendar_minutes", "comment": null}, "agent_work_time_in_calendar_minutes": {"type": "INT64", "index": 117, "name": "agent_work_time_in_calendar_minutes", "comment": null}, "on_hold_time_in_calendar_minutes": {"type": "INT64", "index": 118, "name": "on_hold_time_in_calendar_minutes", "comment": null}, "total_agent_replies": {"type": "INT64", "index": 119, "name": "total_agent_replies", "comment": null}, "requester_last_login_age_minutes": {"type": "FLOAT64", "index": 120, "name": "requester_last_login_age_minutes", "comment": null}, "assignee_last_login_age_minutes": {"type": "FLOAT64", "index": 121, "name": "assignee_last_login_age_minutes", "comment": null}, "unsolved_ticket_age_minutes": {"type": "FLOAT64", "index": 122, "name": "unsolved_ticket_age_minutes", "comment": null}, "unsolved_ticket_age_since_update_minutes": {"type": "FLOAT64", "index": 123, "name": 
"unsolved_ticket_age_since_update_minutes", "comment": null}, "is_one_touch_resolution": {"type": "BOOL", "index": 124, "name": "is_one_touch_resolution", "comment": null}, "is_two_touch_resolution": {"type": "BOOL", "index": 125, "name": "is_two_touch_resolution", "comment": null}, "is_multi_touch_resolution": {"type": "BOOL", "index": 126, "name": "is_multi_touch_resolution", "comment": null}, "first_resolution_business_minutes": {"type": "INT64", "index": 127, "name": "first_resolution_business_minutes", "comment": null}, "full_resolution_business_minutes": {"type": "INT64", "index": 128, "name": "full_resolution_business_minutes", "comment": null}, "first_reply_time_business_minutes": {"type": "INT64", "index": 129, "name": "first_reply_time_business_minutes", "comment": null}, "agent_wait_time_in_business_minutes": {"type": "INT64", "index": 130, "name": "agent_wait_time_in_business_minutes", "comment": null}, "requester_wait_time_in_business_minutes": {"type": "INT64", "index": 131, "name": "requester_wait_time_in_business_minutes", "comment": null}, "solve_time_in_business_minutes": {"type": "INT64", "index": 132, "name": "solve_time_in_business_minutes", "comment": null}, "agent_work_time_in_business_minutes": {"type": "INT64", "index": 133, "name": "agent_work_time_in_business_minutes", "comment": null}, "on_hold_time_in_business_minutes": {"type": "INT64", "index": 134, "name": "on_hold_time_in_business_minutes", "comment": null}, "new_status_duration_in_business_minutes": {"type": "INT64", "index": 135, "name": "new_status_duration_in_business_minutes", "comment": null}, "open_status_duration_in_business_minutes": {"type": "INT64", "index": 136, "name": "open_status_duration_in_business_minutes", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 0, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 0, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__ticket_metrics"}, "model.zendesk.zendesk__ticket_summary": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "zendesk__ticket_summary", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"user_count": {"type": "INT64", "index": 1, "name": "user_count", "comment": null}, "active_agent_count": {"type": "INT64", "index": 2, "name": "active_agent_count", "comment": null}, "deleted_user_count": {"type": "INT64", "index": 3, "name": "deleted_user_count", "comment": null}, "end_user_count": {"type": "INT64", "index": 4, "name": "end_user_count", "comment": null}, "suspended_user_count": {"type": "INT64", "index": 5, "name": "suspended_user_count", "comment": null}, "new_ticket_count": {"type": "INT64", "index": 6, "name": "new_ticket_count", "comment": null}, "on_hold_ticket_count": {"type": "INT64", "index": 7, "name": "on_hold_ticket_count", "comment": null}, "open_ticket_count": {"type": "INT64", "index": 8, "name": "open_ticket_count", "comment": null}, "pending_ticket_count": {"type": "INT64", "index": 9, "name": "pending_ticket_count", "comment": null}, "solved_ticket_count": {"type": "INT64", "index": 10, "name": "solved_ticket_count", "comment": null}, "problem_ticket_count": {"type": "INT64", 
"index": 11, "name": "problem_ticket_count", "comment": null}, "assigned_ticket_count": {"type": "INT64", "index": 12, "name": "assigned_ticket_count", "comment": null}, "reassigned_ticket_count": {"type": "INT64", "index": 13, "name": "reassigned_ticket_count", "comment": null}, "reopened_ticket_count": {"type": "INT64", "index": 14, "name": "reopened_ticket_count", "comment": null}, "surveyed_satisfaction_ticket_count": {"type": "INT64", "index": 15, "name": "surveyed_satisfaction_ticket_count", "comment": null}, "unassigned_unsolved_ticket_count": {"type": "INT64", "index": 16, "name": "unassigned_unsolved_ticket_count", "comment": null}, "unreplied_ticket_count": {"type": "INT64", "index": 17, "name": "unreplied_ticket_count", "comment": null}, "unreplied_unsolved_ticket_count": {"type": "INT64", "index": 18, "name": "unreplied_unsolved_ticket_count", "comment": null}, "unsolved_ticket_count": {"type": "INT64", "index": 19, "name": "unsolved_ticket_count", "comment": null}, "recovered_ticket_count": {"type": "INT64", "index": 20, "name": "recovered_ticket_count", "comment": null}, "deleted_ticket_count": {"type": "INT64", "index": 21, "name": "deleted_ticket_count", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 40, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 1, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__ticket_summary"}}, "sources": {"source.zendesk_source.zendesk.brand": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50", "name": "brand_data", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"id": {"type": "INT64", "index": 1, "name": "id", "comment": null}, "_fivetran_deleted": {"type": "BOOL", "index": 2, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 3, "name": "_fivetran_synced", "comment": null}, "active": {"type": "BOOL", "index": 4, "name": "active", "comment": null}, "brand_url": {"type": "STRING", "index": 5, "name": "brand_url", "comment": null}, "default": {"type": "BOOL", "index": 6, "name": "default", "comment": null}, "has_help_center": {"type": "BOOL", "index": 7, "name": "has_help_center", "comment": null}, "help_center_state": {"type": "STRING", "index": 8, "name": "help_center_state", "comment": null}, "logo_content_type": {"type": "STRING", "index": 9, "name": "logo_content_type", "comment": null}, "logo_content_url": {"type": "STRING", "index": 10, "name": "logo_content_url", "comment": null}, "logo_deleted": {"type": "BOOL", "index": 11, "name": "logo_deleted", "comment": null}, "logo_file_name": {"type": "STRING", "index": 12, "name": "logo_file_name", "comment": null}, "logo_height": {"type": "INT64", "index": 13, "name": "logo_height", "comment": null}, "logo_id": {"type": "INT64", "index": 14, "name": "logo_id", "comment": null}, "logo_inline": {"type": "BOOL", "index": 15, "name": "logo_inline", "comment": null}, "logo_mapped_content_url": {"type": "STRING", "index": 16, "name": "logo_mapped_content_url", "comment": null}, "logo_size": {"type": "INT64", "index": 17, "name": "logo_size", "comment": null}, "logo_url": {"type": "STRING", "index": 18, "name": "logo_url", "comment": 
null}, "logo_width": {"type": "INT64", "index": 19, "name": "logo_width", "comment": null}, "name": {"type": "STRING", "index": 20, "name": "name", "comment": null}, "subdomain": {"type": "STRING", "index": 21, "name": "subdomain", "comment": null}, "url": {"type": "STRING", "index": 22, "name": "url", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 346, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 1, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.brand"}, "source.zendesk_source.zendesk.daylight_time": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50", "name": "daylight_time_data", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"time_zone": {"type": "STRING", "index": 1, "name": "time_zone", "comment": null}, "year": {"type": "INT64", "index": 2, "name": "year", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 3, "name": "_fivetran_synced", "comment": null}, "daylight_end_utc": {"type": "DATETIME", "index": 4, "name": "daylight_end_utc", "comment": null}, "daylight_offset": {"type": "INT64", "index": 5, "name": "daylight_offset", "comment": null}, "daylight_start_utc": {"type": "DATETIME", "index": 6, "name": "daylight_start_utc", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 99, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 2, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.daylight_time"}, "source.zendesk_source.zendesk.domain_name": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50", "name": "domain_name_data", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"index": {"type": "INT64", "index": 1, "name": "index", "comment": null}, "organization_id": {"type": "INT64", "index": 2, "name": "organization_id", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 3, "name": "_fivetran_synced", "comment": null}, "domain_name": {"type": "STRING", "index": 4, "name": "domain_name", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 580, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 10, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.domain_name"}, "source.zendesk_source.zendesk.group": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50", "name": "group_data", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"id": {"type": "INT64", "index": 1, "name": "id", "comment": 
null}, "_fivetran_deleted": {"type": "BOOL", "index": 2, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 3, "name": "_fivetran_synced", "comment": null}, "created_at": {"type": "TIMESTAMP", "index": 4, "name": "created_at", "comment": null}, "name": {"type": "STRING", "index": 5, "name": "name", "comment": null}, "updated_at": {"type": "TIMESTAMP", "index": 6, "name": "updated_at", "comment": null}, "url": {"type": "STRING", "index": 7, "name": "url", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 879, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 8, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.group"}, "source.zendesk_source.zendesk.organization": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50", "name": "organization_data", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"id": {"type": "INT64", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 2, "name": "_fivetran_synced", "comment": null}, "created_at": {"type": "TIMESTAMP", "index": 3, "name": "created_at", "comment": null}, "details": {"type": "INT64", "index": 4, "name": "details", "comment": null}, "external_id": {"type": "INT64", "index": 5, "name": "external_id", "comment": null}, "group_id": {"type": "INT64", "index": 6, "name": "group_id", "comment": null}, "name": {"type": "STRING", "index": 7, "name": "name", "comment": null}, "notes": {"type": "INT64", "index": 8, "name": "notes", "comment": null}, "shared_comments": {"type": "BOOL", "index": 9, "name": "shared_comments", "comment": null}, "shared_tickets": {"type": "BOOL", "index": 10, "name": "shared_tickets", "comment": null}, "updated_at": {"type": "TIMESTAMP", "index": 11, "name": "updated_at", "comment": null}, "url": {"type": "STRING", "index": 12, "name": "url", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 1011, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 10, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.organization"}, "source.zendesk_source.zendesk.organization_tag": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50", "name": "organization_tag_data", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"organization_id": {"type": "INT64", "index": 1, "name": "organization_id", "comment": null}, "tag": {"type": "STRING", "index": 2, "name": "tag", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 600, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 12, 
"include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.organization_tag"}, "source.zendesk_source.zendesk.schedule": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50", "name": "schedule_data", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"end_time": {"type": "INT64", "index": 1, "name": "end_time", "comment": null}, "id": {"type": "INT64", "index": 2, "name": "id", "comment": null}, "start_time": {"type": "INT64", "index": 3, "name": "start_time", "comment": null}, "_fivetran_deleted": {"type": "BOOL", "index": 4, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 5, "name": "_fivetran_synced", "comment": null}, "end_time_utc": {"type": "INT64", "index": 6, "name": "end_time_utc", "comment": null}, "name": {"type": "STRING", "index": 7, "name": "name", "comment": null}, "start_time_utc": {"type": "INT64", "index": 8, "name": "start_time_utc", "comment": null}, "time_zone": {"type": "STRING", "index": 9, "name": "time_zone", "comment": null}, "created_at": {"type": "TIMESTAMP", "index": 10, "name": "created_at", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 480, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 5, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.schedule"}, "source.zendesk_source.zendesk.schedule_holiday": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50", "name": "schedule_holiday_data", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"id": {"type": "INT64", "index": 1, "name": "id", "comment": null}, "schedule_id": {"type": "INT64", "index": 2, "name": "schedule_id", "comment": null}, "_fivetran_deleted": {"type": "BOOL", "index": 3, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 4, "name": "_fivetran_synced", "comment": null}, "end_date": {"type": "DATE", "index": 5, "name": "end_date", "comment": null}, "name": {"type": "STRING", "index": 6, "name": "name", "comment": null}, "start_date": {"type": "DATE", "index": 7, "name": "start_date", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 112, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 2, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.schedule_holiday"}, "source.zendesk_source.zendesk.ticket_comment": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50", "name": "ticket_comment_data", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"id": {"type": "INT64", "index": 1, "name": "id", 
"comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 2, "name": "_fivetran_synced", "comment": null}, "body": {"type": "STRING", "index": 3, "name": "body", "comment": null}, "created": {"type": "TIMESTAMP", "index": 4, "name": "created", "comment": null}, "facebook_comment": {"type": "BOOL", "index": 5, "name": "facebook_comment", "comment": null}, "public": {"type": "BOOL", "index": 6, "name": "public", "comment": null}, "ticket_id": {"type": "INT64", "index": 7, "name": "ticket_id", "comment": null}, "tweet": {"type": "BOOL", "index": 8, "name": "tweet", "comment": null}, "user_id": {"type": "INT64", "index": 9, "name": "user_id", "comment": null}, "voice_comment": {"type": "BOOL", "index": 10, "name": "voice_comment", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 1031, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 20, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.ticket_comment"}, "source.zendesk_source.zendesk.ticket": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50", "name": "ticket_data", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"id": {"type": "INT64", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 2, "name": "_fivetran_synced", "comment": null}, "allow_channelback": {"type": "BOOL", "index": 3, "name": "allow_channelback", "comment": null}, "assignee_id": {"type": "INT64", "index": 4, "name": "assignee_id", "comment": null}, "brand_id": {"type": "INT64", "index": 5, "name": "brand_id", "comment": null}, "created_at": {"type": "TIMESTAMP", "index": 6, "name": "created_at", "comment": null}, "description": {"type": "STRING", "index": 7, "name": "description", "comment": null}, "due_at": {"type": "TIMESTAMP", "index": 8, "name": "due_at", "comment": null}, "external_id": {"type": "INT64", "index": 9, "name": "external_id", "comment": null}, "forum_topic_id": {"type": "INT64", "index": 10, "name": "forum_topic_id", "comment": null}, "group_id": {"type": "INT64", "index": 11, "name": "group_id", "comment": null}, "has_incidents": {"type": "BOOL", "index": 12, "name": "has_incidents", "comment": null}, "is_public": {"type": "BOOL", "index": 13, "name": "is_public", "comment": null}, "organization_id": {"type": "INT64", "index": 14, "name": "organization_id", "comment": null}, "priority": {"type": "INT64", "index": 15, "name": "priority", "comment": null}, "problem_id": {"type": "INT64", "index": 16, "name": "problem_id", "comment": null}, "recipient": {"type": "STRING", "index": 17, "name": "recipient", "comment": null}, "requester_id": {"type": "INT64", "index": 18, "name": "requester_id", "comment": null}, "status": {"type": "STRING", "index": 19, "name": "status", "comment": null}, "subject": {"type": "STRING", "index": 20, "name": "subject", "comment": null}, "submitter_id": {"type": "INT64", "index": 21, "name": "submitter_id", "comment": null}, "system_client": {"type": "INT64", "index": 22, "name": "system_client", "comment": null}, "ticket_form_id": {"type": "INT64", "index": 23, "name": "ticket_form_id", "comment": null}, "type": {"type": "STRING", "index": 24, "name": 
"type", "comment": null}, "updated_at": {"type": "TIMESTAMP", "index": 25, "name": "updated_at", "comment": null}, "url": {"type": "STRING", "index": 26, "name": "url", "comment": null}, "via_channel": {"type": "STRING", "index": 27, "name": "via_channel", "comment": null}, "via_source_from_id": {"type": "INT64", "index": 28, "name": "via_source_from_id", "comment": null}, "via_source_from_title": {"type": "INT64", "index": 29, "name": "via_source_from_title", "comment": null}, "via_source_rel": {"type": "INT64", "index": 30, "name": "via_source_rel", "comment": null}, "via_source_to_address": {"type": "STRING", "index": 31, "name": "via_source_to_address", "comment": null}, "via_source_to_name": {"type": "STRING", "index": 32, "name": "via_source_to_name", "comment": null}, "merged_ticket_ids": {"type": "STRING", "index": 33, "name": "merged_ticket_ids", "comment": null}, "via_source_from_address": {"type": "INT64", "index": 34, "name": "via_source_from_address", "comment": null}, "followup_ids": {"type": "STRING", "index": 35, "name": "followup_ids", "comment": null}, "via_followup_source_id": {"type": "INT64", "index": 36, "name": "via_followup_source_id", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 2196, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 10, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.ticket"}, "source.zendesk_source.zendesk.ticket_field_history": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50", "name": "ticket_field_history_data", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"field_name": {"type": "STRING", "index": 1, "name": "field_name", "comment": null}, "ticket_id": {"type": "INT64", "index": 2, "name": "ticket_id", "comment": null}, "updated": {"type": "TIMESTAMP", "index": 3, "name": "updated", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 4, "name": "_fivetran_synced", "comment": null}, "user_id": {"type": "INT64", "index": 5, "name": "user_id", "comment": null}, "value": {"type": "STRING", "index": 6, "name": "value", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 805, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 20, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.ticket_field_history"}, "source.zendesk_source.zendesk.ticket_form_history": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50", "name": "ticket_form_history_data", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"id": {"type": "INT64", "index": 1, "name": "id", "comment": null}, "updated_at": {"type": "DATETIME", "index": 2, "name": "updated_at", "comment": null}, "_fivetran_deleted": {"type": "BOOL", "index": 3, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", 
"index": 4, "name": "_fivetran_synced", "comment": null}, "active": {"type": "BOOL", "index": 5, "name": "active", "comment": null}, "created_at": {"type": "DATETIME", "index": 6, "name": "created_at", "comment": null}, "display_name": {"type": "STRING", "index": 7, "name": "display_name", "comment": null}, "end_user_visible": {"type": "BOOL", "index": 8, "name": "end_user_visible", "comment": null}, "name": {"type": "STRING", "index": 9, "name": "name", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 1545, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 15, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.ticket_form_history"}, "source.zendesk_source.zendesk.ticket_schedule": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50", "name": "ticket_schedule_data", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"created_at": {"type": "TIMESTAMP", "index": 1, "name": "created_at", "comment": null}, "ticket_id": {"type": "INT64", "index": 2, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 3, "name": "_fivetran_synced", "comment": null}, "schedule_id": {"type": "INT64", "index": 4, "name": "schedule_id", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 320, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 10, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.ticket_schedule"}, "source.zendesk_source.zendesk.ticket_tag": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50", "name": "ticket_tag_data", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"tag": {"type": "STRING", "index": 1, "name": "tag", "comment": null}, "ticket_id": {"type": "INT64", "index": 2, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 261, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 10, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.ticket_tag"}, "source.zendesk_source.zendesk.time_zone": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50", "name": "time_zone_data", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"time_zone": {"type": "STRING", "index": 1, "name": "time_zone", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 2, "name": "_fivetran_synced", 
"comment": null}, "standard_offset": {"type": "STRING", "index": 3, "name": "standard_offset", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 48, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 2, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.time_zone"}, "source.zendesk_source.zendesk.user": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50", "name": "user_data", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"id": {"type": "INT64", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 2, "name": "_fivetran_synced", "comment": null}, "active": {"type": "BOOL", "index": 3, "name": "active", "comment": null}, "alias": {"type": "INT64", "index": 4, "name": "alias", "comment": null}, "authenticity_token": {"type": "INT64", "index": 5, "name": "authenticity_token", "comment": null}, "chat_only": {"type": "BOOL", "index": 6, "name": "chat_only", "comment": null}, "created_at": {"type": "TIMESTAMP", "index": 7, "name": "created_at", "comment": null}, "details": {"type": "INT64", "index": 8, "name": "details", "comment": null}, "email": {"type": "STRING", "index": 9, "name": "email", "comment": null}, "external_id": {"type": "INT64", "index": 10, "name": "external_id", "comment": null}, "last_login_at": {"type": "TIMESTAMP", "index": 11, "name": "last_login_at", "comment": null}, "locale": {"type": "STRING", "index": 12, "name": "locale", "comment": null}, "locale_id": {"type": "INT64", "index": 13, "name": "locale_id", "comment": null}, "moderator": {"type": "BOOL", "index": 14, "name": "moderator", "comment": null}, "name": {"type": "STRING", "index": 15, "name": "name", "comment": null}, "notes": {"type": "INT64", "index": 16, "name": "notes", "comment": null}, "only_private_comments": {"type": "BOOL", "index": 17, "name": "only_private_comments", "comment": null}, "organization_id": {"type": "INT64", "index": 18, "name": "organization_id", "comment": null}, "phone": {"type": "INT64", "index": 19, "name": "phone", "comment": null}, "remote_photo_url": {"type": "INT64", "index": 20, "name": "remote_photo_url", "comment": null}, "restricted_agent": {"type": "BOOL", "index": 21, "name": "restricted_agent", "comment": null}, "role": {"type": "STRING", "index": 22, "name": "role", "comment": null}, "shared": {"type": "BOOL", "index": 23, "name": "shared", "comment": null}, "shared_agent": {"type": "BOOL", "index": 24, "name": "shared_agent", "comment": null}, "signature": {"type": "INT64", "index": 25, "name": "signature", "comment": null}, "suspended": {"type": "BOOL", "index": 26, "name": "suspended", "comment": null}, "ticket_restriction": {"type": "STRING", "index": 27, "name": "ticket_restriction", "comment": null}, "time_zone": {"type": "STRING", "index": 28, "name": "time_zone", "comment": null}, "two_factor_auth_enabled": {"type": "BOOL", "index": 29, "name": "two_factor_auth_enabled", "comment": null}, "updated_at": {"type": "TIMESTAMP", "index": 30, "name": "updated_at", "comment": null}, "url": {"type": "STRING", "index": 31, "name": "url", "comment": null}, "verified": {"type": "BOOL", "index": 32, "name": 
"verified", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 2152, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 10, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.user"}, "source.zendesk_source.zendesk.user_tag": {"metadata": {"type": "table", "schema": "zendesk_integration_tests_50", "name": "user_tag_data", "database": "dbt-package-testing", "comment": null, "owner": null}, "columns": {"tag": {"type": "STRING", "index": 1, "name": "tag", "comment": null}, "user_id": {"type": "INT64", "index": 2, "name": "user_id", "comment": null}, "_fivetran_synced": {"type": "TIMESTAMP", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"num_bytes": {"id": "num_bytes", "label": "Approximate Size", "value": 500, "include": true, "description": "Approximate size of table as reported by BigQuery"}, "num_rows": {"id": "num_rows", "label": "# Rows", "value": 10, "include": true, "description": "Approximate count of rows in this table"}, "has_stats": {"id": "has_stats", "label": "Has Stats?", "value": true, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.user_tag"}}, "errors": null} \ No newline at end of file +{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/catalog/v1.json", "dbt_version": "1.7.11", "generated_at": "2024-08-26T20:48:52.520942Z", "invocation_id": "57bbaa30-28cc-4e7b-b76f-fe2920291c4e", "env": {}}, "nodes": {"seed.zendesk_integration_tests.brand_data_postgres": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55", "name": "brand_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 2, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 4, "name": "active", "comment": null}, "brand_url": {"type": "text", "index": 5, "name": "brand_url", "comment": null}, "default": {"type": "boolean", "index": 6, "name": "default", "comment": null}, "has_help_center": {"type": "boolean", "index": 7, "name": "has_help_center", "comment": null}, "help_center_state": {"type": "text", "index": 8, "name": "help_center_state", "comment": null}, "logo_content_type": {"type": "text", "index": 9, "name": "logo_content_type", "comment": null}, "logo_content_url": {"type": "text", "index": 10, "name": "logo_content_url", "comment": null}, "logo_deleted": {"type": "boolean", "index": 11, "name": "logo_deleted", "comment": null}, "logo_file_name": {"type": "text", "index": 12, "name": "logo_file_name", "comment": null}, "logo_height": {"type": "integer", "index": 13, "name": "logo_height", "comment": null}, "logo_id": {"type": "integer", "index": 14, "name": "logo_id", "comment": null}, "logo_inline": {"type": "boolean", "index": 15, "name": "logo_inline", "comment": null}, "logo_mapped_content_url": {"type": "text", "index": 16, "name": "logo_mapped_content_url", "comment": null}, "logo_size": {"type": 
"integer", "index": 17, "name": "logo_size", "comment": null}, "logo_url": {"type": "text", "index": 18, "name": "logo_url", "comment": null}, "logo_width": {"type": "integer", "index": 19, "name": "logo_width", "comment": null}, "name": {"type": "text", "index": 20, "name": "name", "comment": null}, "subdomain": {"type": "text", "index": 21, "name": "subdomain", "comment": null}, "url": {"type": "text", "index": 22, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.brand_data_postgres"}, "seed.zendesk_integration_tests.daylight_time_data": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55", "name": "daylight_time_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"time_zone": {"type": "text", "index": 1, "name": "time_zone", "comment": null}, "year": {"type": "integer", "index": 2, "name": "year", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "daylight_end_utc": {"type": "timestamp without time zone", "index": 4, "name": "daylight_end_utc", "comment": null}, "daylight_offset": {"type": "integer", "index": 5, "name": "daylight_offset", "comment": null}, "daylight_start_utc": {"type": "timestamp without time zone", "index": 6, "name": "daylight_start_utc", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.daylight_time_data"}, "seed.zendesk_integration_tests.domain_name_data": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55", "name": "domain_name_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"index": {"type": "integer", "index": 1, "name": "index", "comment": null}, "organization_id": {"type": "bigint", "index": 2, "name": "organization_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "domain_name": {"type": "text", "index": 4, "name": "domain_name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.domain_name_data"}, "seed.zendesk_integration_tests.group_data": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55", "name": "group_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 2, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 4, "name": "created_at", "comment": null}, "name": {"type": "text", "index": 5, "name": "name", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 6, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 7, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", 
"value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.group_data"}, "seed.zendesk_integration_tests.organization_data": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55", "name": "organization_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 3, "name": "created_at", "comment": null}, "details": {"type": "integer", "index": 4, "name": "details", "comment": null}, "external_id": {"type": "integer", "index": 5, "name": "external_id", "comment": null}, "group_id": {"type": "integer", "index": 6, "name": "group_id", "comment": null}, "name": {"type": "text", "index": 7, "name": "name", "comment": null}, "notes": {"type": "integer", "index": 8, "name": "notes", "comment": null}, "shared_comments": {"type": "boolean", "index": 9, "name": "shared_comments", "comment": null}, "shared_tickets": {"type": "boolean", "index": 10, "name": "shared_tickets", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 11, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 12, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.organization_data"}, "seed.zendesk_integration_tests.organization_tag_data": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55", "name": "organization_tag_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"organization_id": {"type": "bigint", "index": 1, "name": "organization_id", "comment": null}, "tag": {"type": "text", "index": 2, "name": "tag", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.organization_tag_data"}, "seed.zendesk_integration_tests.schedule_data": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55", "name": "schedule_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"end_time": {"type": "bigint", "index": 1, "name": "end_time", "comment": null}, "id": {"type": "bigint", "index": 2, "name": "id", "comment": null}, "start_time": {"type": "bigint", "index": 3, "name": "start_time", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 4, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 5, "name": "_fivetran_synced", "comment": null}, "end_time_utc": {"type": "bigint", "index": 6, "name": "end_time_utc", "comment": null}, "name": {"type": "text", "index": 7, "name": "name", "comment": null}, "start_time_utc": {"type": "bigint", "index": 8, "name": "start_time_utc", "comment": null}, "time_zone": {"type": "text", "index": 9, "name": "time_zone", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 10, "name": 
"created_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.schedule_data"}, "seed.zendesk_integration_tests.schedule_holiday_data": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55", "name": "schedule_holiday_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "schedule_id": {"type": "bigint", "index": 2, "name": "schedule_id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "end_date": {"type": "date", "index": 5, "name": "end_date", "comment": null}, "name": {"type": "text", "index": 6, "name": "name", "comment": null}, "start_date": {"type": "date", "index": 7, "name": "start_date", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.schedule_holiday_data"}, "seed.zendesk_integration_tests.ticket_comment_data": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55", "name": "ticket_comment_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "body": {"type": "text", "index": 3, "name": "body", "comment": null}, "created": {"type": "timestamp without time zone", "index": 4, "name": "created", "comment": null}, "facebook_comment": {"type": "boolean", "index": 5, "name": "facebook_comment", "comment": null}, "public": {"type": "boolean", "index": 6, "name": "public", "comment": null}, "ticket_id": {"type": "integer", "index": 7, "name": "ticket_id", "comment": null}, "tweet": {"type": "boolean", "index": 8, "name": "tweet", "comment": null}, "user_id": {"type": "bigint", "index": 9, "name": "user_id", "comment": null}, "voice_comment": {"type": "boolean", "index": 10, "name": "voice_comment", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.ticket_comment_data"}, "seed.zendesk_integration_tests.ticket_data": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55", "name": "ticket_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "allow_channelback": {"type": "boolean", "index": 3, "name": "allow_channelback", "comment": null}, "assignee_id": {"type": "bigint", "index": 4, "name": "assignee_id", "comment": null}, "brand_id": {"type": "bigint", "index": 5, "name": "brand_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "description": {"type": "text", "index": 7, "name": "description", 
"comment": null}, "due_at": {"type": "timestamp without time zone", "index": 8, "name": "due_at", "comment": null}, "external_id": {"type": "bigint", "index": 9, "name": "external_id", "comment": null}, "forum_topic_id": {"type": "bigint", "index": 10, "name": "forum_topic_id", "comment": null}, "group_id": {"type": "bigint", "index": 11, "name": "group_id", "comment": null}, "has_incidents": {"type": "boolean", "index": 12, "name": "has_incidents", "comment": null}, "is_public": {"type": "boolean", "index": 13, "name": "is_public", "comment": null}, "organization_id": {"type": "bigint", "index": 14, "name": "organization_id", "comment": null}, "priority": {"type": "text", "index": 15, "name": "priority", "comment": null}, "problem_id": {"type": "bigint", "index": 16, "name": "problem_id", "comment": null}, "recipient": {"type": "text", "index": 17, "name": "recipient", "comment": null}, "requester_id": {"type": "bigint", "index": 18, "name": "requester_id", "comment": null}, "status": {"type": "text", "index": 19, "name": "status", "comment": null}, "subject": {"type": "text", "index": 20, "name": "subject", "comment": null}, "submitter_id": {"type": "bigint", "index": 21, "name": "submitter_id", "comment": null}, "system_client": {"type": "integer", "index": 22, "name": "system_client", "comment": null}, "ticket_form_id": {"type": "bigint", "index": 23, "name": "ticket_form_id", "comment": null}, "type": {"type": "text", "index": 24, "name": "type", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 25, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 26, "name": "url", "comment": null}, "via_channel": {"type": "text", "index": 27, "name": "via_channel", "comment": null}, "via_source_from_id": {"type": "integer", "index": 28, "name": "via_source_from_id", "comment": null}, "via_source_from_title": {"type": "integer", "index": 29, "name": "via_source_from_title", "comment": null}, "via_source_rel": {"type": "integer", "index": 30, "name": "via_source_rel", "comment": null}, "via_source_to_address": {"type": "text", "index": 31, "name": "via_source_to_address", "comment": null}, "via_source_to_name": {"type": "text", "index": 32, "name": "via_source_to_name", "comment": null}, "merged_ticket_ids": {"type": "text", "index": 33, "name": "merged_ticket_ids", "comment": null}, "via_source_from_address": {"type": "integer", "index": 34, "name": "via_source_from_address", "comment": null}, "followup_ids": {"type": "text", "index": 35, "name": "followup_ids", "comment": null}, "via_followup_source_id": {"type": "integer", "index": 36, "name": "via_followup_source_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.ticket_data"}, "seed.zendesk_integration_tests.ticket_field_history_data": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55", "name": "ticket_field_history_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"field_name": {"type": "text", "index": 1, "name": "field_name", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "updated": {"type": "timestamp without time zone", "index": 3, "name": "updated", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, 
"user_id": {"type": "bigint", "index": 5, "name": "user_id", "comment": null}, "value": {"type": "text", "index": 6, "name": "value", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.ticket_field_history_data"}, "seed.zendesk_integration_tests.ticket_form_history_data": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55", "name": "ticket_form_history_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 2, "name": "updated_at", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 5, "name": "active", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "display_name": {"type": "text", "index": 7, "name": "display_name", "comment": null}, "end_user_visible": {"type": "boolean", "index": 8, "name": "end_user_visible", "comment": null}, "name": {"type": "text", "index": 9, "name": "name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.ticket_form_history_data"}, "seed.zendesk_integration_tests.ticket_schedule_data": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55", "name": "ticket_schedule_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"created_at": {"type": "timestamp without time zone", "index": 1, "name": "created_at", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "schedule_id": {"type": "bigint", "index": 4, "name": "schedule_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.ticket_schedule_data"}, "seed.zendesk_integration_tests.ticket_tag_data": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55", "name": "ticket_tag_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"tag": {"type": "text", "index": 1, "name": "tag", "comment": null}, "ticket_id": {"type": "integer", "index": 2, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.ticket_tag_data"}, "seed.zendesk_integration_tests.time_zone_data": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55", "name": "time_zone_data", "database": "postgres", "comment": null, "owner": 
"pguser"}, "columns": {"time_zone": {"type": "text", "index": 1, "name": "time_zone", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "standard_offset": {"type": "text", "index": 3, "name": "standard_offset", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.time_zone_data"}, "seed.zendesk_integration_tests.user_data": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55", "name": "user_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 3, "name": "active", "comment": null}, "alias": {"type": "integer", "index": 4, "name": "alias", "comment": null}, "authenticity_token": {"type": "integer", "index": 5, "name": "authenticity_token", "comment": null}, "chat_only": {"type": "boolean", "index": 6, "name": "chat_only", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 7, "name": "created_at", "comment": null}, "details": {"type": "integer", "index": 8, "name": "details", "comment": null}, "email": {"type": "text", "index": 9, "name": "email", "comment": null}, "external_id": {"type": "bigint", "index": 10, "name": "external_id", "comment": null}, "last_login_at": {"type": "timestamp without time zone", "index": 11, "name": "last_login_at", "comment": null}, "locale": {"type": "text", "index": 12, "name": "locale", "comment": null}, "locale_id": {"type": "bigint", "index": 13, "name": "locale_id", "comment": null}, "moderator": {"type": "boolean", "index": 14, "name": "moderator", "comment": null}, "name": {"type": "text", "index": 15, "name": "name", "comment": null}, "notes": {"type": "integer", "index": 16, "name": "notes", "comment": null}, "only_private_comments": {"type": "boolean", "index": 17, "name": "only_private_comments", "comment": null}, "organization_id": {"type": "bigint", "index": 18, "name": "organization_id", "comment": null}, "phone": {"type": "integer", "index": 19, "name": "phone", "comment": null}, "remote_photo_url": {"type": "integer", "index": 20, "name": "remote_photo_url", "comment": null}, "restricted_agent": {"type": "boolean", "index": 21, "name": "restricted_agent", "comment": null}, "role": {"type": "text", "index": 22, "name": "role", "comment": null}, "shared": {"type": "boolean", "index": 23, "name": "shared", "comment": null}, "shared_agent": {"type": "boolean", "index": 24, "name": "shared_agent", "comment": null}, "signature": {"type": "integer", "index": 25, "name": "signature", "comment": null}, "suspended": {"type": "boolean", "index": 26, "name": "suspended", "comment": null}, "ticket_restriction": {"type": "text", "index": 27, "name": "ticket_restriction", "comment": null}, "time_zone": {"type": "text", "index": 28, "name": "time_zone", "comment": null}, "two_factor_auth_enabled": {"type": "boolean", "index": 29, "name": "two_factor_auth_enabled", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 30, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 31, "name": "url", "comment": null}, "verified": {"type": "boolean", "index": 
32, "name": "verified", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.user_data"}, "seed.zendesk_integration_tests.user_tag_data": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55", "name": "user_tag_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"tag": {"type": "text", "index": 1, "name": "tag", "comment": null}, "user_id": {"type": "bigint", "index": 2, "name": "user_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "seed.zendesk_integration_tests.user_tag_data"}, "model.zendesk.int_zendesk__agent_work_time_business_hours": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__agent_work_time_business_hours", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 2, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 3, "name": "target", "comment": null}, "sla_policy_name": {"type": "text", "index": 4, "name": "sla_policy_name", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 5, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp without time zone", "index": 6, "name": "valid_ending_at", "comment": null}, "week_number": {"type": "integer", "index": 7, "name": "week_number", "comment": null}, "ticket_week_start_time_minute": {"type": "integer", "index": 8, "name": "ticket_week_start_time_minute", "comment": null}, "ticket_week_end_time_minute": {"type": "integer", "index": 9, "name": "ticket_week_end_time_minute", "comment": null}, "schedule_start_time": {"type": "bigint", "index": 10, "name": "schedule_start_time", "comment": null}, "schedule_end_time": {"type": "bigint", "index": 11, "name": "schedule_end_time", "comment": null}, "scheduled_minutes": {"type": "bigint", "index": 12, "name": "scheduled_minutes", "comment": null}, "running_total_scheduled_minutes": {"type": "numeric", "index": 13, "name": "running_total_scheduled_minutes", "comment": null}, "remaining_target_minutes": {"type": "numeric", "index": 14, "name": "remaining_target_minutes", "comment": null}, "lag_check": {"type": "numeric", "index": 15, "name": "lag_check", "comment": null}, "is_breached_during_schedule": {"type": "boolean", "index": 16, "name": "is_breached_during_schedule", "comment": null}, "breach_minutes": {"type": "numeric", "index": 17, "name": "breach_minutes", "comment": null}, "breach_minutes_from_week": {"type": "numeric", "index": 18, "name": "breach_minutes_from_week", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 19, "name": "sla_breach_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__agent_work_time_business_hours"}, 
"model.zendesk.int_zendesk__agent_work_time_calendar_hours": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__agent_work_time_calendar_hours", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 2, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp without time zone", "index": 3, "name": "valid_ending_at", "comment": null}, "ticket_status": {"type": "text", "index": 4, "name": "ticket_status", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 5, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 6, "name": "target", "comment": null}, "sla_policy_name": {"type": "text", "index": 7, "name": "sla_policy_name", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 8, "name": "ticket_created_at", "comment": null}, "in_business_hours": {"type": "boolean", "index": 9, "name": "in_business_hours", "comment": null}, "calendar_minutes": {"type": "double precision", "index": 10, "name": "calendar_minutes", "comment": null}, "running_total_calendar_minutes": {"type": "double precision", "index": 11, "name": "running_total_calendar_minutes", "comment": null}, "remaining_target_minutes": {"type": "double precision", "index": 12, "name": "remaining_target_minutes", "comment": null}, "is_breached_during_schedule": {"type": "boolean", "index": 13, "name": "is_breached_during_schedule", "comment": null}, "breach_minutes": {"type": "double precision", "index": 14, "name": "breach_minutes", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 15, "name": "sla_breach_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__agent_work_time_calendar_hours"}, "model.zendesk.int_zendesk__agent_work_time_filtered_statuses": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__agent_work_time_filtered_statuses", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 2, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp without time zone", "index": 3, "name": "valid_ending_at", "comment": null}, "ticket_status": {"type": "text", "index": 4, "name": "ticket_status", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 5, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 6, "name": "target", "comment": null}, "sla_policy_name": {"type": "text", "index": 7, "name": "sla_policy_name", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 8, "name": "ticket_created_at", "comment": null}, "in_business_hours": {"type": "boolean", "index": 9, "name": "in_business_hours", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": 
"model.zendesk.int_zendesk__agent_work_time_filtered_statuses"}, "model.zendesk.int_zendesk__assignee_updates": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__assignee_updates", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "assignee_id": {"type": "bigint", "index": 2, "name": "assignee_id", "comment": null}, "last_updated": {"type": "timestamp without time zone", "index": 3, "name": "last_updated", "comment": null}, "total_updates": {"type": "bigint", "index": 4, "name": "total_updates", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__assignee_updates"}, "model.zendesk.int_zendesk__comment_metrics": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__comment_metrics", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "last_comment_added_at": {"type": "timestamp without time zone", "index": 2, "name": "last_comment_added_at", "comment": null}, "count_public_agent_comments": {"type": "bigint", "index": 3, "name": "count_public_agent_comments", "comment": null}, "count_agent_comments": {"type": "bigint", "index": 4, "name": "count_agent_comments", "comment": null}, "count_end_user_comments": {"type": "bigint", "index": 5, "name": "count_end_user_comments", "comment": null}, "count_public_comments": {"type": "bigint", "index": 6, "name": "count_public_comments", "comment": null}, "count_internal_comments": {"type": "bigint", "index": 7, "name": "count_internal_comments", "comment": null}, "total_comments": {"type": "bigint", "index": 8, "name": "total_comments", "comment": null}, "count_ticket_handoffs": {"type": "bigint", "index": 9, "name": "count_ticket_handoffs", "comment": null}, "count_agent_replies": {"type": "bigint", "index": 10, "name": "count_agent_replies", "comment": null}, "is_one_touch_resolution": {"type": "boolean", "index": 11, "name": "is_one_touch_resolution", "comment": null}, "is_two_touch_resolution": {"type": "boolean", "index": 12, "name": "is_two_touch_resolution", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__comment_metrics"}, "model.zendesk.int_zendesk__field_calendar_spine": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__field_calendar_spine", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"date_day": {"type": "date", "index": 1, "name": "date_day", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "ticket_day_id": {"type": "text", "index": 3, "name": "ticket_day_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__field_calendar_spine"}, "model.zendesk.int_zendesk__field_history_pivot": {"metadata": {"type": "BASE TABLE", "schema": 
"zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__field_history_pivot", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "date_day": {"type": "date", "index": 2, "name": "date_day", "comment": null}, "status": {"type": "text", "index": 3, "name": "status", "comment": null}, "assignee_id": {"type": "text", "index": 4, "name": "assignee_id", "comment": null}, "priority": {"type": "text", "index": 5, "name": "priority", "comment": null}, "ticket_day_id": {"type": "text", "index": 6, "name": "ticket_day_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__field_history_pivot"}, "model.zendesk.int_zendesk__field_history_scd": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__field_history_scd", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"valid_from": {"type": "date", "index": 1, "name": "valid_from", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "ticket_day_id": {"type": "text", "index": 3, "name": "ticket_day_id", "comment": null}, "status": {"type": "text", "index": 4, "name": "status", "comment": null}, "assignee_id": {"type": "text", "index": 5, "name": "assignee_id", "comment": null}, "priority": {"type": "text", "index": 6, "name": "priority", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__field_history_scd"}, "model.zendesk.int_zendesk__latest_ticket_form": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__latest_ticket_form", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_form_id": {"type": "bigint", "index": 1, "name": "ticket_form_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 2, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 3, "name": "updated_at", "comment": null}, "display_name": {"type": "text", "index": 4, "name": "display_name", "comment": null}, "is_active": {"type": "boolean", "index": 5, "name": "is_active", "comment": null}, "name": {"type": "text", "index": 6, "name": "name", "comment": null}, "latest_form_index": {"type": "bigint", "index": 7, "name": "latest_form_index", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__latest_ticket_form"}, "model.zendesk.int_zendesk__organization_aggregates": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__organization_aggregates", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"organization_id": {"type": "bigint", "index": 1, "name": "organization_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 2, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 3, "name": 
"updated_at", "comment": null}, "details": {"type": "integer", "index": 4, "name": "details", "comment": null}, "name": {"type": "text", "index": 5, "name": "name", "comment": null}, "external_id": {"type": "integer", "index": 6, "name": "external_id", "comment": null}, "organization_tags": {"type": "text", "index": 7, "name": "organization_tags", "comment": null}, "domain_names": {"type": "text", "index": 8, "name": "domain_names", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__organization_aggregates"}, "model.zendesk.int_zendesk__reply_time_business_hours": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__reply_time_business_hours", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "sla_policy_name": {"type": "text", "index": 2, "name": "sla_policy_name", "comment": null}, "metric": {"type": "text", "index": 3, "name": "metric", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 4, "name": "ticket_created_at", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 5, "name": "sla_applied_at", "comment": null}, "sla_schedule_start_at": {"type": "timestamp without time zone", "index": 6, "name": "sla_schedule_start_at", "comment": null}, "sla_schedule_end_at": {"type": "timestamp without time zone", "index": 7, "name": "sla_schedule_end_at", "comment": null}, "target": {"type": "integer", "index": 8, "name": "target", "comment": null}, "sum_lapsed_business_minutes": {"type": "numeric", "index": 9, "name": "sum_lapsed_business_minutes", "comment": null}, "in_business_hours": {"type": "boolean", "index": 10, "name": "in_business_hours", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 11, "name": "sla_breach_at", "comment": null}, "is_breached_during_schedule": {"type": "boolean", "index": 12, "name": "is_breached_during_schedule", "comment": null}, "total_schedule_weekly_business_minutes": {"type": "numeric", "index": 13, "name": "total_schedule_weekly_business_minutes", "comment": null}, "sla_breach_exact_time": {"type": "timestamp without time zone", "index": 14, "name": "sla_breach_exact_time", "comment": null}, "week_number": {"type": "integer", "index": 15, "name": "week_number", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__reply_time_business_hours"}, "model.zendesk.int_zendesk__reply_time_calendar_hours": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__reply_time_calendar_hours", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 2, "name": "ticket_created_at", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 3, "name": "valid_starting_at", "comment": null}, "ticket_current_status": {"type": "text", "index": 4, "name": "ticket_current_status", "comment": null}, "metric": 
{"type": "text", "index": 5, "name": "metric", "comment": null}, "latest_sla": {"type": "bigint", "index": 6, "name": "latest_sla", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 7, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 8, "name": "target", "comment": null}, "in_business_hours": {"type": "boolean", "index": 9, "name": "in_business_hours", "comment": null}, "sla_policy_name": {"type": "text", "index": 10, "name": "sla_policy_name", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 11, "name": "sla_breach_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__reply_time_calendar_hours"}, "model.zendesk.int_zendesk__reply_time_combined": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__reply_time_combined", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "sla_policy_name": {"type": "text", "index": 2, "name": "sla_policy_name", "comment": null}, "metric": {"type": "text", "index": 3, "name": "metric", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 4, "name": "ticket_created_at", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 5, "name": "sla_applied_at", "comment": null}, "sum_lapsed_business_minutes": {"type": "numeric", "index": 6, "name": "sum_lapsed_business_minutes", "comment": null}, "target": {"type": "integer", "index": 7, "name": "target", "comment": null}, "in_business_hours": {"type": "boolean", "index": 8, "name": "in_business_hours", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 9, "name": "sla_breach_at", "comment": null}, "week_number": {"type": "numeric", "index": 10, "name": "week_number", "comment": null}, "sla_schedule_start_at": {"type": "timestamp without time zone", "index": 11, "name": "sla_schedule_start_at", "comment": null}, "sla_schedule_end_at": {"type": "timestamp without time zone", "index": 12, "name": "sla_schedule_end_at", "comment": null}, "agent_reply_at": {"type": "timestamp without time zone", "index": 13, "name": "agent_reply_at", "comment": null}, "next_solved_at": {"type": "timestamp without time zone", "index": 14, "name": "next_solved_at", "comment": null}, "day_index": {"type": "bigint", "index": 15, "name": "day_index", "comment": null}, "next_schedule_start": {"type": "timestamp without time zone", "index": 16, "name": "next_schedule_start", "comment": null}, "first_sla_breach_at": {"type": "timestamp without time zone", "index": 17, "name": "first_sla_breach_at", "comment": null}, "sum_lapsed_business_minutes_new": {"type": "numeric", "index": 18, "name": "sum_lapsed_business_minutes_new", "comment": null}, "total_runtime_minutes": {"type": "double precision", "index": 19, "name": "total_runtime_minutes", "comment": null}, "current_time_check": {"type": "timestamp with time zone", "index": 20, "name": "current_time_check", "comment": null}, "updated_sla_policy_starts_at": {"type": "timestamp without time zone", "index": 21, "name": "updated_sla_policy_starts_at", "comment": null}, "is_stale_sla_policy": {"type": "boolean", "index": 22, "name": "is_stale_sla_policy", 
"comment": null}, "is_sla_breached": {"type": "boolean", "index": 23, "name": "is_sla_breached", "comment": null}, "total_new_minutes": {"type": "double precision", "index": 24, "name": "total_new_minutes", "comment": null}, "sla_update_at": {"type": "timestamp without time zone", "index": 25, "name": "sla_update_at", "comment": null}, "sla_elapsed_time": {"type": "double precision", "index": 26, "name": "sla_elapsed_time", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__reply_time_combined"}, "model.zendesk.int_zendesk__requester_updates": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__requester_updates", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "requester_id": {"type": "bigint", "index": 2, "name": "requester_id", "comment": null}, "last_updated": {"type": "timestamp without time zone", "index": 3, "name": "last_updated", "comment": null}, "total_updates": {"type": "bigint", "index": 4, "name": "total_updates", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__requester_updates"}, "model.zendesk.int_zendesk__requester_wait_time_business_hours": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__requester_wait_time_business_hours", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 2, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 3, "name": "target", "comment": null}, "sla_policy_name": {"type": "text", "index": 4, "name": "sla_policy_name", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 5, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp without time zone", "index": 6, "name": "valid_ending_at", "comment": null}, "week_number": {"type": "integer", "index": 7, "name": "week_number", "comment": null}, "ticket_week_start_time_minute": {"type": "integer", "index": 8, "name": "ticket_week_start_time_minute", "comment": null}, "ticket_week_end_time_minute": {"type": "integer", "index": 9, "name": "ticket_week_end_time_minute", "comment": null}, "schedule_start_time": {"type": "bigint", "index": 10, "name": "schedule_start_time", "comment": null}, "schedule_end_time": {"type": "bigint", "index": 11, "name": "schedule_end_time", "comment": null}, "scheduled_minutes": {"type": "bigint", "index": 12, "name": "scheduled_minutes", "comment": null}, "running_total_scheduled_minutes": {"type": "numeric", "index": 13, "name": "running_total_scheduled_minutes", "comment": null}, "remaining_target_minutes": {"type": "numeric", "index": 14, "name": "remaining_target_minutes", "comment": null}, "lag_check": {"type": "numeric", "index": 15, "name": "lag_check", "comment": null}, "is_breached_during_schedule": {"type": "boolean", "index": 16, "name": "is_breached_during_schedule", "comment": null}, "breach_minutes": {"type": 
"numeric", "index": 17, "name": "breach_minutes", "comment": null}, "breach_minutes_from_week": {"type": "numeric", "index": 18, "name": "breach_minutes_from_week", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 19, "name": "sla_breach_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__requester_wait_time_business_hours"}, "model.zendesk.int_zendesk__requester_wait_time_calendar_hours": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__requester_wait_time_calendar_hours", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 2, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp without time zone", "index": 3, "name": "valid_ending_at", "comment": null}, "ticket_status": {"type": "text", "index": 4, "name": "ticket_status", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 5, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 6, "name": "target", "comment": null}, "sla_policy_name": {"type": "text", "index": 7, "name": "sla_policy_name", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 8, "name": "ticket_created_at", "comment": null}, "in_business_hours": {"type": "boolean", "index": 9, "name": "in_business_hours", "comment": null}, "calendar_minutes": {"type": "double precision", "index": 10, "name": "calendar_minutes", "comment": null}, "running_total_calendar_minutes": {"type": "double precision", "index": 11, "name": "running_total_calendar_minutes", "comment": null}, "remaining_target_minutes": {"type": "double precision", "index": 12, "name": "remaining_target_minutes", "comment": null}, "is_breached_during_schedule": {"type": "boolean", "index": 13, "name": "is_breached_during_schedule", "comment": null}, "breach_minutes": {"type": "double precision", "index": 14, "name": "breach_minutes", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 15, "name": "sla_breach_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__requester_wait_time_calendar_hours"}, "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__requester_wait_time_filtered_statuses", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 2, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp without time zone", "index": 3, "name": "valid_ending_at", "comment": null}, "ticket_status": {"type": "text", "index": 4, "name": "ticket_status", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 5, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", 
"index": 6, "name": "target", "comment": null}, "sla_policy_name": {"type": "text", "index": 7, "name": "sla_policy_name", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 8, "name": "ticket_created_at", "comment": null}, "in_business_hours": {"type": "boolean", "index": 9, "name": "in_business_hours", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses"}, "model.zendesk.int_zendesk__schedule_spine": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__schedule_spine", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"schedule_id": {"type": "text", "index": 1, "name": "schedule_id", "comment": null}, "valid_from": {"type": "timestamp without time zone", "index": 2, "name": "valid_from", "comment": null}, "valid_until": {"type": "timestamp without time zone", "index": 3, "name": "valid_until", "comment": null}, "start_time_utc": {"type": "bigint", "index": 4, "name": "start_time_utc", "comment": null}, "end_time_utc": {"type": "bigint", "index": 5, "name": "end_time_utc", "comment": null}, "is_holiday_week": {"type": "boolean", "index": 6, "name": "is_holiday_week", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__schedule_spine"}, "model.zendesk.int_zendesk__sla_policy_applied": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__sla_policy_applied", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "ticket_created_at": {"type": "timestamp without time zone", "index": 2, "name": "ticket_created_at", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 3, "name": "valid_starting_at", "comment": null}, "ticket_current_status": {"type": "text", "index": 4, "name": "ticket_current_status", "comment": null}, "metric": {"type": "text", "index": 5, "name": "metric", "comment": null}, "latest_sla": {"type": "bigint", "index": 6, "name": "latest_sla", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 7, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 8, "name": "target", "comment": null}, "in_business_hours": {"type": "boolean", "index": 9, "name": "in_business_hours", "comment": null}, "sla_policy_name": {"type": "text", "index": 10, "name": "sla_policy_name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__sla_policy_applied"}, "model.zendesk.int_zendesk__ticket_aggregates": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__ticket_aggregates", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, 
"name": "_fivetran_synced", "comment": null}, "assignee_id": {"type": "bigint", "index": 3, "name": "assignee_id", "comment": null}, "brand_id": {"type": "bigint", "index": 4, "name": "brand_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 5, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 6, "name": "updated_at", "comment": null}, "description": {"type": "text", "index": 7, "name": "description", "comment": null}, "due_at": {"type": "timestamp without time zone", "index": 8, "name": "due_at", "comment": null}, "group_id": {"type": "bigint", "index": 9, "name": "group_id", "comment": null}, "external_id": {"type": "bigint", "index": 10, "name": "external_id", "comment": null}, "is_public": {"type": "boolean", "index": 11, "name": "is_public", "comment": null}, "organization_id": {"type": "bigint", "index": 12, "name": "organization_id", "comment": null}, "priority": {"type": "text", "index": 13, "name": "priority", "comment": null}, "recipient": {"type": "text", "index": 14, "name": "recipient", "comment": null}, "requester_id": {"type": "bigint", "index": 15, "name": "requester_id", "comment": null}, "status": {"type": "text", "index": 16, "name": "status", "comment": null}, "subject": {"type": "text", "index": 17, "name": "subject", "comment": null}, "problem_id": {"type": "bigint", "index": 18, "name": "problem_id", "comment": null}, "submitter_id": {"type": "bigint", "index": 19, "name": "submitter_id", "comment": null}, "ticket_form_id": {"type": "bigint", "index": 20, "name": "ticket_form_id", "comment": null}, "type": {"type": "text", "index": 21, "name": "type", "comment": null}, "url": {"type": "text", "index": 22, "name": "url", "comment": null}, "created_channel": {"type": "text", "index": 23, "name": "created_channel", "comment": null}, "source_from_id": {"type": "integer", "index": 24, "name": "source_from_id", "comment": null}, "source_from_title": {"type": "integer", "index": 25, "name": "source_from_title", "comment": null}, "source_rel": {"type": "integer", "index": 26, "name": "source_rel", "comment": null}, "source_to_address": {"type": "text", "index": 27, "name": "source_to_address", "comment": null}, "source_to_name": {"type": "text", "index": 28, "name": "source_to_name", "comment": null}, "is_incident": {"type": "boolean", "index": 29, "name": "is_incident", "comment": null}, "ticket_brand_name": {"type": "text", "index": 30, "name": "ticket_brand_name", "comment": null}, "ticket_tags": {"type": "text", "index": 31, "name": "ticket_tags", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_aggregates"}, "model.zendesk.int_zendesk__ticket_historical_assignee": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__ticket_historical_assignee", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "first_agent_assignment_date": {"type": "timestamp without time zone", "index": 2, "name": "first_agent_assignment_date", "comment": null}, "first_assignee_id": {"type": "text", "index": 3, "name": "first_assignee_id", "comment": null}, "last_agent_assignment_date": {"type": "timestamp without time zone", "index": 4, "name": 
"last_agent_assignment_date", "comment": null}, "last_assignee_id": {"type": "text", "index": 5, "name": "last_assignee_id", "comment": null}, "assignee_stations_count": {"type": "bigint", "index": 6, "name": "assignee_stations_count", "comment": null}, "unique_assignee_count": {"type": "bigint", "index": 7, "name": "unique_assignee_count", "comment": null}, "ticket_unassigned_duration_calendar_minutes": {"type": "double precision", "index": 8, "name": "ticket_unassigned_duration_calendar_minutes", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_historical_assignee"}, "model.zendesk.int_zendesk__ticket_historical_group": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__ticket_historical_group", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "group_stations_count": {"type": "bigint", "index": 2, "name": "group_stations_count", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_historical_group"}, "model.zendesk.int_zendesk__ticket_historical_satisfaction": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__ticket_historical_satisfaction", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "latest_satisfaction_reason": {"type": "text", "index": 2, "name": "latest_satisfaction_reason", "comment": null}, "latest_satisfaction_comment": {"type": "text", "index": 3, "name": "latest_satisfaction_comment", "comment": null}, "first_satisfaction_score": {"type": "text", "index": 4, "name": "first_satisfaction_score", "comment": null}, "latest_satisfaction_score": {"type": "text", "index": 5, "name": "latest_satisfaction_score", "comment": null}, "count_satisfaction_scores": {"type": "bigint", "index": 6, "name": "count_satisfaction_scores", "comment": null}, "is_good_to_bad_satisfaction_score": {"type": "boolean", "index": 7, "name": "is_good_to_bad_satisfaction_score", "comment": null}, "is_bad_to_good_satisfaction_score": {"type": "boolean", "index": 8, "name": "is_bad_to_good_satisfaction_score", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_historical_satisfaction"}, "model.zendesk.int_zendesk__ticket_historical_status": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__ticket_historical_status", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 2, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp without time zone", "index": 3, "name": "valid_ending_at", "comment": null}, "status_duration_calendar_minutes": {"type": "double 
precision", "index": 4, "name": "status_duration_calendar_minutes", "comment": null}, "status": {"type": "text", "index": 5, "name": "status", "comment": null}, "ticket_status_counter": {"type": "bigint", "index": 6, "name": "ticket_status_counter", "comment": null}, "unique_status_counter": {"type": "bigint", "index": 7, "name": "unique_status_counter", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_historical_status"}, "model.zendesk.int_zendesk__ticket_schedules": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__ticket_schedules", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "schedule_id": {"type": "text", "index": 2, "name": "schedule_id", "comment": null}, "schedule_created_at": {"type": "timestamp without time zone", "index": 3, "name": "schedule_created_at", "comment": null}, "schedule_invalidated_at": {"type": "timestamp without time zone", "index": 4, "name": "schedule_invalidated_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__ticket_schedules"}, "model.zendesk.int_zendesk__updates": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__updates", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "field_name": {"type": "text", "index": 2, "name": "field_name", "comment": null}, "value": {"type": "text", "index": 3, "name": "value", "comment": null}, "is_public": {"type": "boolean", "index": 4, "name": "is_public", "comment": null}, "user_id": {"type": "bigint", "index": 5, "name": "user_id", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 6, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp without time zone", "index": 7, "name": "valid_ending_at", "comment": null}, "ticket_created_date": {"type": "timestamp without time zone", "index": 8, "name": "ticket_created_date", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__updates"}, "model.zendesk.int_zendesk__user_aggregates": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__user_aggregates", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"user_id": {"type": "bigint", "index": 1, "name": "user_id", "comment": null}, "external_id": {"type": "bigint", "index": 2, "name": "external_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "last_login_at": {"type": "timestamp without time zone", "index": 4, "name": "last_login_at", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 5, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time 
zone", "index": 6, "name": "updated_at", "comment": null}, "email": {"type": "text", "index": 7, "name": "email", "comment": null}, "name": {"type": "text", "index": 8, "name": "name", "comment": null}, "organization_id": {"type": "bigint", "index": 9, "name": "organization_id", "comment": null}, "phone": {"type": "integer", "index": 10, "name": "phone", "comment": null}, "role": {"type": "text", "index": 11, "name": "role", "comment": null}, "ticket_restriction": {"type": "text", "index": 12, "name": "ticket_restriction", "comment": null}, "time_zone": {"type": "text", "index": 13, "name": "time_zone", "comment": null}, "locale": {"type": "text", "index": 14, "name": "locale", "comment": null}, "is_active": {"type": "boolean", "index": 15, "name": "is_active", "comment": null}, "is_suspended": {"type": "boolean", "index": 16, "name": "is_suspended", "comment": null}, "user_tags": {"type": "text", "index": 17, "name": "user_tags", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.int_zendesk__user_aggregates"}, "model.zendesk_source.stg_zendesk__brand": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__brand", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"brand_id": {"type": "bigint", "index": 1, "name": "brand_id", "comment": null}, "brand_url": {"type": "text", "index": 2, "name": "brand_url", "comment": null}, "name": {"type": "text", "index": 3, "name": "name", "comment": null}, "subdomain": {"type": "text", "index": 4, "name": "subdomain", "comment": null}, "is_active": {"type": "boolean", "index": 5, "name": "is_active", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__brand"}, "model.zendesk_source.stg_zendesk__brand_tmp": {"metadata": {"type": "VIEW", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__brand_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 2, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 4, "name": "active", "comment": null}, "brand_url": {"type": "text", "index": 5, "name": "brand_url", "comment": null}, "default": {"type": "boolean", "index": 6, "name": "default", "comment": null}, "has_help_center": {"type": "boolean", "index": 7, "name": "has_help_center", "comment": null}, "help_center_state": {"type": "text", "index": 8, "name": "help_center_state", "comment": null}, "logo_content_type": {"type": "text", "index": 9, "name": "logo_content_type", "comment": null}, "logo_content_url": {"type": "text", "index": 10, "name": "logo_content_url", "comment": null}, "logo_deleted": {"type": "boolean", "index": 11, "name": "logo_deleted", "comment": null}, "logo_file_name": {"type": "text", "index": 12, "name": "logo_file_name", "comment": null}, "logo_height": {"type": "integer", "index": 13, "name": "logo_height", "comment": null}, "logo_id": {"type": "integer", "index": 14, "name": "logo_id", 
"comment": null}, "logo_inline": {"type": "boolean", "index": 15, "name": "logo_inline", "comment": null}, "logo_mapped_content_url": {"type": "text", "index": 16, "name": "logo_mapped_content_url", "comment": null}, "logo_size": {"type": "integer", "index": 17, "name": "logo_size", "comment": null}, "logo_url": {"type": "text", "index": 18, "name": "logo_url", "comment": null}, "logo_width": {"type": "integer", "index": 19, "name": "logo_width", "comment": null}, "name": {"type": "text", "index": 20, "name": "name", "comment": null}, "subdomain": {"type": "text", "index": 21, "name": "subdomain", "comment": null}, "url": {"type": "text", "index": 22, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__brand_tmp"}, "model.zendesk_source.stg_zendesk__daylight_time": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__daylight_time", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"daylight_end_utc": {"type": "timestamp without time zone", "index": 1, "name": "daylight_end_utc", "comment": null}, "daylight_offset": {"type": "integer", "index": 2, "name": "daylight_offset", "comment": null}, "daylight_start_utc": {"type": "timestamp without time zone", "index": 3, "name": "daylight_start_utc", "comment": null}, "time_zone": {"type": "text", "index": 4, "name": "time_zone", "comment": null}, "year": {"type": "integer", "index": 5, "name": "year", "comment": null}, "daylight_offset_minutes": {"type": "integer", "index": 6, "name": "daylight_offset_minutes", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__daylight_time"}, "model.zendesk_source.stg_zendesk__daylight_time_tmp": {"metadata": {"type": "VIEW", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__daylight_time_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"time_zone": {"type": "text", "index": 1, "name": "time_zone", "comment": null}, "year": {"type": "integer", "index": 2, "name": "year", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "daylight_end_utc": {"type": "timestamp without time zone", "index": 4, "name": "daylight_end_utc", "comment": null}, "daylight_offset": {"type": "integer", "index": 5, "name": "daylight_offset", "comment": null}, "daylight_start_utc": {"type": "timestamp without time zone", "index": 6, "name": "daylight_start_utc", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__daylight_time_tmp"}, "model.zendesk_source.stg_zendesk__domain_name": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__domain_name", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"organization_id": {"type": "bigint", "index": 1, "name": "organization_id", "comment": null}, "domain_name": {"type": "text", "index": 2, "name": "domain_name", "comment": 
null}, "index": {"type": "integer", "index": 3, "name": "index", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__domain_name"}, "model.zendesk_source.stg_zendesk__domain_name_tmp": {"metadata": {"type": "VIEW", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__domain_name_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"index": {"type": "integer", "index": 1, "name": "index", "comment": null}, "organization_id": {"type": "bigint", "index": 2, "name": "organization_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "domain_name": {"type": "text", "index": 4, "name": "domain_name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__domain_name_tmp"}, "model.zendesk_source.stg_zendesk__group": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__group", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"group_id": {"type": "bigint", "index": 1, "name": "group_id", "comment": null}, "name": {"type": "text", "index": 2, "name": "name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__group"}, "model.zendesk_source.stg_zendesk__group_tmp": {"metadata": {"type": "VIEW", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__group_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 2, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 4, "name": "created_at", "comment": null}, "name": {"type": "text", "index": 5, "name": "name", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 6, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 7, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__group_tmp"}, "model.zendesk_source.stg_zendesk__organization": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__organization", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"organization_id": {"type": "bigint", "index": 1, "name": "organization_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 2, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 3, "name": "updated_at", "comment": null}, "details": {"type": "integer", "index": 4, "name": "details", "comment": 
null}, "name": {"type": "text", "index": 5, "name": "name", "comment": null}, "external_id": {"type": "integer", "index": 6, "name": "external_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__organization"}, "model.zendesk_source.stg_zendesk__organization_tag": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__organization_tag", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"organization_id": {"type": "bigint", "index": 1, "name": "organization_id", "comment": null}, "tags": {"type": "text", "index": 2, "name": "tags", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__organization_tag"}, "model.zendesk_source.stg_zendesk__organization_tag_tmp": {"metadata": {"type": "VIEW", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__organization_tag_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"organization_id": {"type": "bigint", "index": 1, "name": "organization_id", "comment": null}, "tag": {"type": "text", "index": 2, "name": "tag", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__organization_tag_tmp"}, "model.zendesk_source.stg_zendesk__organization_tmp": {"metadata": {"type": "VIEW", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__organization_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 3, "name": "created_at", "comment": null}, "details": {"type": "integer", "index": 4, "name": "details", "comment": null}, "external_id": {"type": "integer", "index": 5, "name": "external_id", "comment": null}, "group_id": {"type": "integer", "index": 6, "name": "group_id", "comment": null}, "name": {"type": "text", "index": 7, "name": "name", "comment": null}, "notes": {"type": "integer", "index": 8, "name": "notes", "comment": null}, "shared_comments": {"type": "boolean", "index": 9, "name": "shared_comments", "comment": null}, "shared_tickets": {"type": "boolean", "index": 10, "name": "shared_tickets", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 11, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 12, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__organization_tmp"}, "model.zendesk_source.stg_zendesk__schedule": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", 
"name": "stg_zendesk__schedule", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"schedule_id": {"type": "text", "index": 1, "name": "schedule_id", "comment": null}, "end_time": {"type": "bigint", "index": 2, "name": "end_time", "comment": null}, "start_time": {"type": "bigint", "index": 3, "name": "start_time", "comment": null}, "schedule_name": {"type": "text", "index": 4, "name": "schedule_name", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 5, "name": "created_at", "comment": null}, "time_zone": {"type": "text", "index": 6, "name": "time_zone", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__schedule"}, "model.zendesk_source.stg_zendesk__schedule_holiday": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__schedule_holiday", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"_fivetran_deleted": {"type": "boolean", "index": 1, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "holiday_end_date_at": {"type": "timestamp without time zone", "index": 3, "name": "holiday_end_date_at", "comment": null}, "holiday_id": {"type": "text", "index": 4, "name": "holiday_id", "comment": null}, "holiday_name": {"type": "text", "index": 5, "name": "holiday_name", "comment": null}, "schedule_id": {"type": "text", "index": 6, "name": "schedule_id", "comment": null}, "holiday_start_date_at": {"type": "timestamp without time zone", "index": 7, "name": "holiday_start_date_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__schedule_holiday"}, "model.zendesk_source.stg_zendesk__schedule_holiday_tmp": {"metadata": {"type": "VIEW", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__schedule_holiday_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "schedule_id": {"type": "bigint", "index": 2, "name": "schedule_id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "end_date": {"type": "date", "index": 5, "name": "end_date", "comment": null}, "name": {"type": "text", "index": 6, "name": "name", "comment": null}, "start_date": {"type": "date", "index": 7, "name": "start_date", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__schedule_holiday_tmp"}, "model.zendesk_source.stg_zendesk__schedule_tmp": {"metadata": {"type": "VIEW", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__schedule_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"end_time": {"type": "bigint", "index": 1, "name": "end_time", "comment": null}, "id": {"type": 
"bigint", "index": 2, "name": "id", "comment": null}, "start_time": {"type": "bigint", "index": 3, "name": "start_time", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 4, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 5, "name": "_fivetran_synced", "comment": null}, "end_time_utc": {"type": "bigint", "index": 6, "name": "end_time_utc", "comment": null}, "name": {"type": "text", "index": 7, "name": "name", "comment": null}, "start_time_utc": {"type": "bigint", "index": 8, "name": "start_time_utc", "comment": null}, "time_zone": {"type": "text", "index": 9, "name": "time_zone", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 10, "name": "created_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__schedule_tmp"}, "model.zendesk_source.stg_zendesk__ticket": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__ticket", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "assignee_id": {"type": "bigint", "index": 3, "name": "assignee_id", "comment": null}, "brand_id": {"type": "bigint", "index": 4, "name": "brand_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 5, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 6, "name": "updated_at", "comment": null}, "description": {"type": "text", "index": 7, "name": "description", "comment": null}, "due_at": {"type": "timestamp without time zone", "index": 8, "name": "due_at", "comment": null}, "group_id": {"type": "bigint", "index": 9, "name": "group_id", "comment": null}, "external_id": {"type": "bigint", "index": 10, "name": "external_id", "comment": null}, "is_public": {"type": "boolean", "index": 11, "name": "is_public", "comment": null}, "organization_id": {"type": "bigint", "index": 12, "name": "organization_id", "comment": null}, "priority": {"type": "text", "index": 13, "name": "priority", "comment": null}, "recipient": {"type": "text", "index": 14, "name": "recipient", "comment": null}, "requester_id": {"type": "bigint", "index": 15, "name": "requester_id", "comment": null}, "status": {"type": "text", "index": 16, "name": "status", "comment": null}, "subject": {"type": "text", "index": 17, "name": "subject", "comment": null}, "problem_id": {"type": "bigint", "index": 18, "name": "problem_id", "comment": null}, "submitter_id": {"type": "bigint", "index": 19, "name": "submitter_id", "comment": null}, "ticket_form_id": {"type": "bigint", "index": 20, "name": "ticket_form_id", "comment": null}, "type": {"type": "text", "index": 21, "name": "type", "comment": null}, "url": {"type": "text", "index": 22, "name": "url", "comment": null}, "created_channel": {"type": "text", "index": 23, "name": "created_channel", "comment": null}, "source_from_id": {"type": "integer", "index": 24, "name": "source_from_id", "comment": null}, "source_from_title": {"type": "integer", "index": 25, "name": "source_from_title", "comment": null}, "source_rel": {"type": "integer", "index": 26, "name": 
"source_rel", "comment": null}, "source_to_address": {"type": "text", "index": 27, "name": "source_to_address", "comment": null}, "source_to_name": {"type": "text", "index": 28, "name": "source_to_name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket"}, "model.zendesk_source.stg_zendesk__ticket_comment": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__ticket_comment", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_comment_id": {"type": "bigint", "index": 1, "name": "ticket_comment_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "body": {"type": "text", "index": 3, "name": "body", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 4, "name": "created_at", "comment": null}, "is_public": {"type": "boolean", "index": 5, "name": "is_public", "comment": null}, "ticket_id": {"type": "integer", "index": 6, "name": "ticket_id", "comment": null}, "user_id": {"type": "bigint", "index": 7, "name": "user_id", "comment": null}, "is_facebook_comment": {"type": "boolean", "index": 8, "name": "is_facebook_comment", "comment": null}, "is_tweet": {"type": "boolean", "index": 9, "name": "is_tweet", "comment": null}, "is_voice_comment": {"type": "boolean", "index": 10, "name": "is_voice_comment", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_comment"}, "model.zendesk_source.stg_zendesk__ticket_comment_tmp": {"metadata": {"type": "VIEW", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__ticket_comment_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "body": {"type": "text", "index": 3, "name": "body", "comment": null}, "created": {"type": "timestamp without time zone", "index": 4, "name": "created", "comment": null}, "facebook_comment": {"type": "boolean", "index": 5, "name": "facebook_comment", "comment": null}, "public": {"type": "boolean", "index": 6, "name": "public", "comment": null}, "ticket_id": {"type": "integer", "index": 7, "name": "ticket_id", "comment": null}, "tweet": {"type": "boolean", "index": 8, "name": "tweet", "comment": null}, "user_id": {"type": "bigint", "index": 9, "name": "user_id", "comment": null}, "voice_comment": {"type": "boolean", "index": 10, "name": "voice_comment", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_comment_tmp"}, "model.zendesk_source.stg_zendesk__ticket_field_history": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__ticket_field_history", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, 
"name": "ticket_id", "comment": null}, "field_name": {"type": "text", "index": 2, "name": "field_name", "comment": null}, "valid_starting_at": {"type": "timestamp without time zone", "index": 3, "name": "valid_starting_at", "comment": null}, "valid_ending_at": {"type": "timestamp without time zone", "index": 4, "name": "valid_ending_at", "comment": null}, "value": {"type": "text", "index": 5, "name": "value", "comment": null}, "user_id": {"type": "bigint", "index": 6, "name": "user_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_field_history"}, "model.zendesk_source.stg_zendesk__ticket_field_history_tmp": {"metadata": {"type": "VIEW", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__ticket_field_history_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"field_name": {"type": "text", "index": 1, "name": "field_name", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "updated": {"type": "timestamp without time zone", "index": 3, "name": "updated", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "user_id": {"type": "bigint", "index": 5, "name": "user_id", "comment": null}, "value": {"type": "text", "index": 6, "name": "value", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_field_history_tmp"}, "model.zendesk_source.stg_zendesk__ticket_form_history": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__ticket_form_history", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_form_id": {"type": "bigint", "index": 1, "name": "ticket_form_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 2, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 3, "name": "updated_at", "comment": null}, "display_name": {"type": "text", "index": 4, "name": "display_name", "comment": null}, "is_active": {"type": "boolean", "index": 5, "name": "is_active", "comment": null}, "name": {"type": "text", "index": 6, "name": "name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_form_history"}, "model.zendesk_source.stg_zendesk__ticket_form_history_tmp": {"metadata": {"type": "VIEW", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__ticket_form_history_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 2, "name": "updated_at", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "active": {"type": 
"boolean", "index": 5, "name": "active", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "display_name": {"type": "text", "index": 7, "name": "display_name", "comment": null}, "end_user_visible": {"type": "boolean", "index": 8, "name": "end_user_visible", "comment": null}, "name": {"type": "text", "index": 9, "name": "name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_form_history_tmp"}, "model.zendesk_source.stg_zendesk__ticket_schedule": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__ticket_schedule", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 2, "name": "created_at", "comment": null}, "schedule_id": {"type": "text", "index": 3, "name": "schedule_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_schedule"}, "model.zendesk_source.stg_zendesk__ticket_schedule_tmp": {"metadata": {"type": "VIEW", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__ticket_schedule_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"created_at": {"type": "timestamp without time zone", "index": 1, "name": "created_at", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "schedule_id": {"type": "bigint", "index": 4, "name": "schedule_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_schedule_tmp"}, "model.zendesk_source.stg_zendesk__ticket_tag": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__ticket_tag", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "integer", "index": 1, "name": "ticket_id", "comment": null}, "tags": {"type": "text", "index": 2, "name": "tags", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_tag"}, "model.zendesk_source.stg_zendesk__ticket_tag_tmp": {"metadata": {"type": "VIEW", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__ticket_tag_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"tag": {"type": "text", "index": 1, "name": "tag", "comment": null}, "ticket_id": {"type": "integer", "index": 2, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": 
"Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_tag_tmp"}, "model.zendesk_source.stg_zendesk__ticket_tmp": {"metadata": {"type": "VIEW", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__ticket_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "allow_channelback": {"type": "boolean", "index": 3, "name": "allow_channelback", "comment": null}, "assignee_id": {"type": "bigint", "index": 4, "name": "assignee_id", "comment": null}, "brand_id": {"type": "bigint", "index": 5, "name": "brand_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "description": {"type": "text", "index": 7, "name": "description", "comment": null}, "due_at": {"type": "timestamp without time zone", "index": 8, "name": "due_at", "comment": null}, "external_id": {"type": "bigint", "index": 9, "name": "external_id", "comment": null}, "forum_topic_id": {"type": "bigint", "index": 10, "name": "forum_topic_id", "comment": null}, "group_id": {"type": "bigint", "index": 11, "name": "group_id", "comment": null}, "has_incidents": {"type": "boolean", "index": 12, "name": "has_incidents", "comment": null}, "is_public": {"type": "boolean", "index": 13, "name": "is_public", "comment": null}, "organization_id": {"type": "bigint", "index": 14, "name": "organization_id", "comment": null}, "priority": {"type": "text", "index": 15, "name": "priority", "comment": null}, "problem_id": {"type": "bigint", "index": 16, "name": "problem_id", "comment": null}, "recipient": {"type": "text", "index": 17, "name": "recipient", "comment": null}, "requester_id": {"type": "bigint", "index": 18, "name": "requester_id", "comment": null}, "status": {"type": "text", "index": 19, "name": "status", "comment": null}, "subject": {"type": "text", "index": 20, "name": "subject", "comment": null}, "submitter_id": {"type": "bigint", "index": 21, "name": "submitter_id", "comment": null}, "system_client": {"type": "integer", "index": 22, "name": "system_client", "comment": null}, "ticket_form_id": {"type": "bigint", "index": 23, "name": "ticket_form_id", "comment": null}, "type": {"type": "text", "index": 24, "name": "type", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 25, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 26, "name": "url", "comment": null}, "via_channel": {"type": "text", "index": 27, "name": "via_channel", "comment": null}, "via_source_from_id": {"type": "integer", "index": 28, "name": "via_source_from_id", "comment": null}, "via_source_from_title": {"type": "integer", "index": 29, "name": "via_source_from_title", "comment": null}, "via_source_rel": {"type": "integer", "index": 30, "name": "via_source_rel", "comment": null}, "via_source_to_address": {"type": "text", "index": 31, "name": "via_source_to_address", "comment": null}, "via_source_to_name": {"type": "text", "index": 32, "name": "via_source_to_name", "comment": null}, "merged_ticket_ids": {"type": "text", "index": 33, "name": "merged_ticket_ids", "comment": null}, "via_source_from_address": {"type": "integer", "index": 34, "name": "via_source_from_address", "comment": null}, "followup_ids": 
{"type": "text", "index": 35, "name": "followup_ids", "comment": null}, "via_followup_source_id": {"type": "integer", "index": 36, "name": "via_followup_source_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__ticket_tmp"}, "model.zendesk_source.stg_zendesk__time_zone": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__time_zone", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"standard_offset": {"type": "text", "index": 1, "name": "standard_offset", "comment": null}, "time_zone": {"type": "text", "index": 2, "name": "time_zone", "comment": null}, "standard_offset_minutes": {"type": "integer", "index": 3, "name": "standard_offset_minutes", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__time_zone"}, "model.zendesk_source.stg_zendesk__time_zone_tmp": {"metadata": {"type": "VIEW", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__time_zone_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"time_zone": {"type": "text", "index": 1, "name": "time_zone", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "standard_offset": {"type": "text", "index": 3, "name": "standard_offset", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__time_zone_tmp"}, "model.zendesk_source.stg_zendesk__user": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__user", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"user_id": {"type": "bigint", "index": 1, "name": "user_id", "comment": null}, "external_id": {"type": "bigint", "index": 2, "name": "external_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "last_login_at": {"type": "timestamp without time zone", "index": 4, "name": "last_login_at", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 5, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 6, "name": "updated_at", "comment": null}, "email": {"type": "text", "index": 7, "name": "email", "comment": null}, "name": {"type": "text", "index": 8, "name": "name", "comment": null}, "organization_id": {"type": "bigint", "index": 9, "name": "organization_id", "comment": null}, "phone": {"type": "integer", "index": 10, "name": "phone", "comment": null}, "role": {"type": "text", "index": 11, "name": "role", "comment": null}, "ticket_restriction": {"type": "text", "index": 12, "name": "ticket_restriction", "comment": null}, "time_zone": {"type": "text", "index": 13, "name": "time_zone", "comment": null}, "locale": {"type": "text", "index": 14, "name": "locale", "comment": null}, "is_active": {"type": "boolean", "index": 15, "name": "is_active", "comment": null}, "is_suspended": 
{"type": "boolean", "index": 16, "name": "is_suspended", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__user"}, "model.zendesk_source.stg_zendesk__user_tag": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__user_tag", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"user_id": {"type": "bigint", "index": 1, "name": "user_id", "comment": null}, "tags": {"type": "text", "index": 2, "name": "tags", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__user_tag"}, "model.zendesk_source.stg_zendesk__user_tag_tmp": {"metadata": {"type": "VIEW", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__user_tag_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"tag": {"type": "text", "index": 1, "name": "tag", "comment": null}, "user_id": {"type": "bigint", "index": 2, "name": "user_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__user_tag_tmp"}, "model.zendesk_source.stg_zendesk__user_tmp": {"metadata": {"type": "VIEW", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__user_tmp", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 3, "name": "active", "comment": null}, "alias": {"type": "integer", "index": 4, "name": "alias", "comment": null}, "authenticity_token": {"type": "integer", "index": 5, "name": "authenticity_token", "comment": null}, "chat_only": {"type": "boolean", "index": 6, "name": "chat_only", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 7, "name": "created_at", "comment": null}, "details": {"type": "integer", "index": 8, "name": "details", "comment": null}, "email": {"type": "text", "index": 9, "name": "email", "comment": null}, "external_id": {"type": "bigint", "index": 10, "name": "external_id", "comment": null}, "last_login_at": {"type": "timestamp without time zone", "index": 11, "name": "last_login_at", "comment": null}, "locale": {"type": "text", "index": 12, "name": "locale", "comment": null}, "locale_id": {"type": "bigint", "index": 13, "name": "locale_id", "comment": null}, "moderator": {"type": "boolean", "index": 14, "name": "moderator", "comment": null}, "name": {"type": "text", "index": 15, "name": "name", "comment": null}, "notes": {"type": "integer", "index": 16, "name": "notes", "comment": null}, "only_private_comments": {"type": "boolean", "index": 17, "name": "only_private_comments", "comment": null}, "organization_id": {"type": "bigint", "index": 18, "name": "organization_id", "comment": null}, "phone": {"type": "integer", "index": 19, "name": 
"phone", "comment": null}, "remote_photo_url": {"type": "integer", "index": 20, "name": "remote_photo_url", "comment": null}, "restricted_agent": {"type": "boolean", "index": 21, "name": "restricted_agent", "comment": null}, "role": {"type": "text", "index": 22, "name": "role", "comment": null}, "shared": {"type": "boolean", "index": 23, "name": "shared", "comment": null}, "shared_agent": {"type": "boolean", "index": 24, "name": "shared_agent", "comment": null}, "signature": {"type": "integer", "index": 25, "name": "signature", "comment": null}, "suspended": {"type": "boolean", "index": 26, "name": "suspended", "comment": null}, "ticket_restriction": {"type": "text", "index": 27, "name": "ticket_restriction", "comment": null}, "time_zone": {"type": "text", "index": 28, "name": "time_zone", "comment": null}, "two_factor_auth_enabled": {"type": "boolean", "index": 29, "name": "two_factor_auth_enabled", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 30, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 31, "name": "url", "comment": null}, "verified": {"type": "boolean", "index": 32, "name": "verified", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk_source.stg_zendesk__user_tmp"}, "model.zendesk.zendesk__sla_policies": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "zendesk__sla_policies", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"sla_event_id": {"type": "text", "index": 1, "name": "sla_event_id", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "sla_policy_name": {"type": "text", "index": 3, "name": "sla_policy_name", "comment": null}, "metric": {"type": "text", "index": 4, "name": "metric", "comment": null}, "sla_applied_at": {"type": "timestamp without time zone", "index": 5, "name": "sla_applied_at", "comment": null}, "target": {"type": "integer", "index": 6, "name": "target", "comment": null}, "in_business_hours": {"type": "boolean", "index": 7, "name": "in_business_hours", "comment": null}, "sla_breach_at": {"type": "timestamp without time zone", "index": 8, "name": "sla_breach_at", "comment": null}, "sla_elapsed_time": {"type": "double precision", "index": 9, "name": "sla_elapsed_time", "comment": null}, "is_active_sla": {"type": "boolean", "index": 10, "name": "is_active_sla", "comment": null}, "is_sla_breach": {"type": "boolean", "index": 11, "name": "is_sla_breach", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__sla_policies"}, "model.zendesk.zendesk__ticket_backlog": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "zendesk__ticket_backlog", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"date_day": {"type": "date", "index": 1, "name": "date_day", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "status": {"type": "text", "index": 3, "name": "status", "comment": null}, "created_channel": {"type": "text", "index": 4, "name": "created_channel", "comment": null}, "assignee_name": {"type": "text", "index": 5, "name": 
"assignee_name", "comment": null}, "priority": {"type": "text", "index": 6, "name": "priority", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__ticket_backlog"}, "model.zendesk.zendesk__ticket_enriched": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "zendesk__ticket_enriched", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "assignee_id": {"type": "bigint", "index": 3, "name": "assignee_id", "comment": null}, "brand_id": {"type": "bigint", "index": 4, "name": "brand_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 5, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 6, "name": "updated_at", "comment": null}, "description": {"type": "text", "index": 7, "name": "description", "comment": null}, "due_at": {"type": "timestamp without time zone", "index": 8, "name": "due_at", "comment": null}, "group_id": {"type": "bigint", "index": 9, "name": "group_id", "comment": null}, "external_id": {"type": "bigint", "index": 10, "name": "external_id", "comment": null}, "is_public": {"type": "boolean", "index": 11, "name": "is_public", "comment": null}, "organization_id": {"type": "bigint", "index": 12, "name": "organization_id", "comment": null}, "priority": {"type": "text", "index": 13, "name": "priority", "comment": null}, "recipient": {"type": "text", "index": 14, "name": "recipient", "comment": null}, "requester_id": {"type": "bigint", "index": 15, "name": "requester_id", "comment": null}, "status": {"type": "text", "index": 16, "name": "status", "comment": null}, "subject": {"type": "text", "index": 17, "name": "subject", "comment": null}, "problem_id": {"type": "bigint", "index": 18, "name": "problem_id", "comment": null}, "submitter_id": {"type": "bigint", "index": 19, "name": "submitter_id", "comment": null}, "ticket_form_id": {"type": "bigint", "index": 20, "name": "ticket_form_id", "comment": null}, "type": {"type": "text", "index": 21, "name": "type", "comment": null}, "url": {"type": "text", "index": 22, "name": "url", "comment": null}, "created_channel": {"type": "text", "index": 23, "name": "created_channel", "comment": null}, "source_from_id": {"type": "integer", "index": 24, "name": "source_from_id", "comment": null}, "source_from_title": {"type": "integer", "index": 25, "name": "source_from_title", "comment": null}, "source_rel": {"type": "integer", "index": 26, "name": "source_rel", "comment": null}, "source_to_address": {"type": "text", "index": 27, "name": "source_to_address", "comment": null}, "source_to_name": {"type": "text", "index": 28, "name": "source_to_name", "comment": null}, "is_incident": {"type": "boolean", "index": 29, "name": "is_incident", "comment": null}, "ticket_brand_name": {"type": "text", "index": 30, "name": "ticket_brand_name", "comment": null}, "ticket_tags": {"type": "text", "index": 31, "name": "ticket_tags", "comment": null}, "ticket_form_name": {"type": "text", "index": 32, "name": "ticket_form_name", "comment": null}, "ticket_total_satisfaction_scores": {"type": "bigint", "index": 33, "name": 
"ticket_total_satisfaction_scores", "comment": null}, "ticket_first_satisfaction_score": {"type": "text", "index": 34, "name": "ticket_first_satisfaction_score", "comment": null}, "ticket_satisfaction_score": {"type": "text", "index": 35, "name": "ticket_satisfaction_score", "comment": null}, "ticket_satisfaction_comment": {"type": "text", "index": 36, "name": "ticket_satisfaction_comment", "comment": null}, "ticket_satisfaction_reason": {"type": "text", "index": 37, "name": "ticket_satisfaction_reason", "comment": null}, "is_good_to_bad_satisfaction_score": {"type": "boolean", "index": 38, "name": "is_good_to_bad_satisfaction_score", "comment": null}, "is_bad_to_good_satisfaction_score": {"type": "boolean", "index": 39, "name": "is_bad_to_good_satisfaction_score", "comment": null}, "ticket_organization_domain_names": {"type": "text", "index": 40, "name": "ticket_organization_domain_names", "comment": null}, "requester_organization_domain_names": {"type": "text", "index": 41, "name": "requester_organization_domain_names", "comment": null}, "requester_external_id": {"type": "bigint", "index": 42, "name": "requester_external_id", "comment": null}, "requester_created_at": {"type": "timestamp without time zone", "index": 43, "name": "requester_created_at", "comment": null}, "requester_updated_at": {"type": "timestamp without time zone", "index": 44, "name": "requester_updated_at", "comment": null}, "requester_role": {"type": "text", "index": 45, "name": "requester_role", "comment": null}, "requester_email": {"type": "text", "index": 46, "name": "requester_email", "comment": null}, "requester_name": {"type": "text", "index": 47, "name": "requester_name", "comment": null}, "is_requester_active": {"type": "boolean", "index": 48, "name": "is_requester_active", "comment": null}, "requester_locale": {"type": "text", "index": 49, "name": "requester_locale", "comment": null}, "requester_time_zone": {"type": "text", "index": 50, "name": "requester_time_zone", "comment": null}, "requester_ticket_update_count": {"type": "bigint", "index": 51, "name": "requester_ticket_update_count", "comment": null}, "requester_ticket_last_update_at": {"type": "timestamp without time zone", "index": 52, "name": "requester_ticket_last_update_at", "comment": null}, "requester_last_login_at": {"type": "timestamp without time zone", "index": 53, "name": "requester_last_login_at", "comment": null}, "requester_organization_id": {"type": "bigint", "index": 54, "name": "requester_organization_id", "comment": null}, "requester_organization_name": {"type": "text", "index": 55, "name": "requester_organization_name", "comment": null}, "requester_organization_tags": {"type": "text", "index": 56, "name": "requester_organization_tags", "comment": null}, "requester_organization_external_id": {"type": "integer", "index": 57, "name": "requester_organization_external_id", "comment": null}, "requester_organization_created_at": {"type": "timestamp without time zone", "index": 58, "name": "requester_organization_created_at", "comment": null}, "requester_organization_updated_at": {"type": "timestamp without time zone", "index": 59, "name": "requester_organization_updated_at", "comment": null}, "submitter_external_id": {"type": "bigint", "index": 60, "name": "submitter_external_id", "comment": null}, "submitter_role": {"type": "text", "index": 61, "name": "submitter_role", "comment": null}, "is_agent_submitted": {"type": "boolean", "index": 62, "name": "is_agent_submitted", "comment": null}, "submitter_email": {"type": "text", "index": 63, 
"name": "submitter_email", "comment": null}, "submitter_name": {"type": "text", "index": 64, "name": "submitter_name", "comment": null}, "is_submitter_active": {"type": "boolean", "index": 65, "name": "is_submitter_active", "comment": null}, "submitter_locale": {"type": "text", "index": 66, "name": "submitter_locale", "comment": null}, "submitter_time_zone": {"type": "text", "index": 67, "name": "submitter_time_zone", "comment": null}, "assignee_external_id": {"type": "bigint", "index": 68, "name": "assignee_external_id", "comment": null}, "assignee_role": {"type": "text", "index": 69, "name": "assignee_role", "comment": null}, "assignee_email": {"type": "text", "index": 70, "name": "assignee_email", "comment": null}, "assignee_name": {"type": "text", "index": 71, "name": "assignee_name", "comment": null}, "is_assignee_active": {"type": "boolean", "index": 72, "name": "is_assignee_active", "comment": null}, "assignee_locale": {"type": "text", "index": 73, "name": "assignee_locale", "comment": null}, "assignee_time_zone": {"type": "text", "index": 74, "name": "assignee_time_zone", "comment": null}, "assignee_ticket_update_count": {"type": "bigint", "index": 75, "name": "assignee_ticket_update_count", "comment": null}, "assignee_ticket_last_update_at": {"type": "timestamp without time zone", "index": 76, "name": "assignee_ticket_last_update_at", "comment": null}, "assignee_last_login_at": {"type": "timestamp without time zone", "index": 77, "name": "assignee_last_login_at", "comment": null}, "group_name": {"type": "text", "index": 78, "name": "group_name", "comment": null}, "organization_name": {"type": "text", "index": 79, "name": "organization_name", "comment": null}, "requester_tag": {"type": "text", "index": 80, "name": "requester_tag", "comment": null}, "submitter_tag": {"type": "text", "index": 81, "name": "submitter_tag", "comment": null}, "assignee_tag": {"type": "text", "index": 82, "name": "assignee_tag", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__ticket_enriched"}, "model.zendesk.zendesk__ticket_field_history": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "zendesk__ticket_field_history", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_day_id": {"type": "text", "index": 1, "name": "ticket_day_id", "comment": null}, "date_day": {"type": "date", "index": 2, "name": "date_day", "comment": null}, "ticket_id": {"type": "bigint", "index": 3, "name": "ticket_id", "comment": null}, "status": {"type": "text", "index": 4, "name": "status", "comment": null}, "assignee_id": {"type": "text", "index": 5, "name": "assignee_id", "comment": null}, "priority": {"type": "text", "index": 6, "name": "priority", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__ticket_field_history"}, "model.zendesk.zendesk__ticket_metrics": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "zendesk__ticket_metrics", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"ticket_id": {"type": "bigint", "index": 1, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", 
"index": 2, "name": "_fivetran_synced", "comment": null}, "assignee_id": {"type": "bigint", "index": 3, "name": "assignee_id", "comment": null}, "brand_id": {"type": "bigint", "index": 4, "name": "brand_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 5, "name": "created_at", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 6, "name": "updated_at", "comment": null}, "description": {"type": "text", "index": 7, "name": "description", "comment": null}, "due_at": {"type": "timestamp without time zone", "index": 8, "name": "due_at", "comment": null}, "group_id": {"type": "bigint", "index": 9, "name": "group_id", "comment": null}, "external_id": {"type": "bigint", "index": 10, "name": "external_id", "comment": null}, "is_public": {"type": "boolean", "index": 11, "name": "is_public", "comment": null}, "organization_id": {"type": "bigint", "index": 12, "name": "organization_id", "comment": null}, "priority": {"type": "text", "index": 13, "name": "priority", "comment": null}, "recipient": {"type": "text", "index": 14, "name": "recipient", "comment": null}, "requester_id": {"type": "bigint", "index": 15, "name": "requester_id", "comment": null}, "status": {"type": "text", "index": 16, "name": "status", "comment": null}, "subject": {"type": "text", "index": 17, "name": "subject", "comment": null}, "problem_id": {"type": "bigint", "index": 18, "name": "problem_id", "comment": null}, "submitter_id": {"type": "bigint", "index": 19, "name": "submitter_id", "comment": null}, "ticket_form_id": {"type": "bigint", "index": 20, "name": "ticket_form_id", "comment": null}, "type": {"type": "text", "index": 21, "name": "type", "comment": null}, "url": {"type": "text", "index": 22, "name": "url", "comment": null}, "created_channel": {"type": "text", "index": 23, "name": "created_channel", "comment": null}, "source_from_id": {"type": "integer", "index": 24, "name": "source_from_id", "comment": null}, "source_from_title": {"type": "integer", "index": 25, "name": "source_from_title", "comment": null}, "source_rel": {"type": "integer", "index": 26, "name": "source_rel", "comment": null}, "source_to_address": {"type": "text", "index": 27, "name": "source_to_address", "comment": null}, "source_to_name": {"type": "text", "index": 28, "name": "source_to_name", "comment": null}, "is_incident": {"type": "boolean", "index": 29, "name": "is_incident", "comment": null}, "ticket_brand_name": {"type": "text", "index": 30, "name": "ticket_brand_name", "comment": null}, "ticket_tags": {"type": "text", "index": 31, "name": "ticket_tags", "comment": null}, "ticket_form_name": {"type": "text", "index": 32, "name": "ticket_form_name", "comment": null}, "ticket_total_satisfaction_scores": {"type": "bigint", "index": 33, "name": "ticket_total_satisfaction_scores", "comment": null}, "ticket_first_satisfaction_score": {"type": "text", "index": 34, "name": "ticket_first_satisfaction_score", "comment": null}, "ticket_satisfaction_score": {"type": "text", "index": 35, "name": "ticket_satisfaction_score", "comment": null}, "ticket_satisfaction_comment": {"type": "text", "index": 36, "name": "ticket_satisfaction_comment", "comment": null}, "ticket_satisfaction_reason": {"type": "text", "index": 37, "name": "ticket_satisfaction_reason", "comment": null}, "is_good_to_bad_satisfaction_score": {"type": "boolean", "index": 38, "name": "is_good_to_bad_satisfaction_score", "comment": null}, "is_bad_to_good_satisfaction_score": {"type": "boolean", "index": 39, "name": 
"is_bad_to_good_satisfaction_score", "comment": null}, "ticket_organization_domain_names": {"type": "text", "index": 40, "name": "ticket_organization_domain_names", "comment": null}, "requester_organization_domain_names": {"type": "text", "index": 41, "name": "requester_organization_domain_names", "comment": null}, "requester_external_id": {"type": "bigint", "index": 42, "name": "requester_external_id", "comment": null}, "requester_created_at": {"type": "timestamp without time zone", "index": 43, "name": "requester_created_at", "comment": null}, "requester_updated_at": {"type": "timestamp without time zone", "index": 44, "name": "requester_updated_at", "comment": null}, "requester_role": {"type": "text", "index": 45, "name": "requester_role", "comment": null}, "requester_email": {"type": "text", "index": 46, "name": "requester_email", "comment": null}, "requester_name": {"type": "text", "index": 47, "name": "requester_name", "comment": null}, "is_requester_active": {"type": "boolean", "index": 48, "name": "is_requester_active", "comment": null}, "requester_locale": {"type": "text", "index": 49, "name": "requester_locale", "comment": null}, "requester_time_zone": {"type": "text", "index": 50, "name": "requester_time_zone", "comment": null}, "requester_ticket_update_count": {"type": "bigint", "index": 51, "name": "requester_ticket_update_count", "comment": null}, "requester_ticket_last_update_at": {"type": "timestamp without time zone", "index": 52, "name": "requester_ticket_last_update_at", "comment": null}, "requester_last_login_at": {"type": "timestamp without time zone", "index": 53, "name": "requester_last_login_at", "comment": null}, "requester_organization_id": {"type": "bigint", "index": 54, "name": "requester_organization_id", "comment": null}, "requester_organization_name": {"type": "text", "index": 55, "name": "requester_organization_name", "comment": null}, "requester_organization_tags": {"type": "text", "index": 56, "name": "requester_organization_tags", "comment": null}, "requester_organization_external_id": {"type": "integer", "index": 57, "name": "requester_organization_external_id", "comment": null}, "requester_organization_created_at": {"type": "timestamp without time zone", "index": 58, "name": "requester_organization_created_at", "comment": null}, "requester_organization_updated_at": {"type": "timestamp without time zone", "index": 59, "name": "requester_organization_updated_at", "comment": null}, "submitter_external_id": {"type": "bigint", "index": 60, "name": "submitter_external_id", "comment": null}, "submitter_role": {"type": "text", "index": 61, "name": "submitter_role", "comment": null}, "is_agent_submitted": {"type": "boolean", "index": 62, "name": "is_agent_submitted", "comment": null}, "submitter_email": {"type": "text", "index": 63, "name": "submitter_email", "comment": null}, "submitter_name": {"type": "text", "index": 64, "name": "submitter_name", "comment": null}, "is_submitter_active": {"type": "boolean", "index": 65, "name": "is_submitter_active", "comment": null}, "submitter_locale": {"type": "text", "index": 66, "name": "submitter_locale", "comment": null}, "submitter_time_zone": {"type": "text", "index": 67, "name": "submitter_time_zone", "comment": null}, "assignee_external_id": {"type": "bigint", "index": 68, "name": "assignee_external_id", "comment": null}, "assignee_role": {"type": "text", "index": 69, "name": "assignee_role", "comment": null}, "assignee_email": {"type": "text", "index": 70, "name": "assignee_email", "comment": null}, 
"assignee_name": {"type": "text", "index": 71, "name": "assignee_name", "comment": null}, "is_assignee_active": {"type": "boolean", "index": 72, "name": "is_assignee_active", "comment": null}, "assignee_locale": {"type": "text", "index": 73, "name": "assignee_locale", "comment": null}, "assignee_time_zone": {"type": "text", "index": 74, "name": "assignee_time_zone", "comment": null}, "assignee_ticket_update_count": {"type": "bigint", "index": 75, "name": "assignee_ticket_update_count", "comment": null}, "assignee_ticket_last_update_at": {"type": "timestamp without time zone", "index": 76, "name": "assignee_ticket_last_update_at", "comment": null}, "assignee_last_login_at": {"type": "timestamp without time zone", "index": 77, "name": "assignee_last_login_at", "comment": null}, "group_name": {"type": "text", "index": 78, "name": "group_name", "comment": null}, "organization_name": {"type": "text", "index": 79, "name": "organization_name", "comment": null}, "requester_tag": {"type": "text", "index": 80, "name": "requester_tag", "comment": null}, "submitter_tag": {"type": "text", "index": 81, "name": "submitter_tag", "comment": null}, "assignee_tag": {"type": "text", "index": 82, "name": "assignee_tag", "comment": null}, "first_reply_time_calendar_minutes": {"type": "double precision", "index": 83, "name": "first_reply_time_calendar_minutes", "comment": null}, "total_reply_time_calendar_minutes": {"type": "double precision", "index": 84, "name": "total_reply_time_calendar_minutes", "comment": null}, "count_agent_comments": {"type": "bigint", "index": 85, "name": "count_agent_comments", "comment": null}, "count_public_agent_comments": {"type": "bigint", "index": 86, "name": "count_public_agent_comments", "comment": null}, "count_end_user_comments": {"type": "bigint", "index": 87, "name": "count_end_user_comments", "comment": null}, "count_public_comments": {"type": "bigint", "index": 88, "name": "count_public_comments", "comment": null}, "count_internal_comments": {"type": "bigint", "index": 89, "name": "count_internal_comments", "comment": null}, "total_comments": {"type": "bigint", "index": 90, "name": "total_comments", "comment": null}, "count_ticket_handoffs": {"type": "bigint", "index": 91, "name": "count_ticket_handoffs", "comment": null}, "ticket_last_comment_date": {"type": "timestamp without time zone", "index": 92, "name": "ticket_last_comment_date", "comment": null}, "unique_assignee_count": {"type": "bigint", "index": 93, "name": "unique_assignee_count", "comment": null}, "assignee_stations_count": {"type": "bigint", "index": 94, "name": "assignee_stations_count", "comment": null}, "group_stations_count": {"type": "bigint", "index": 95, "name": "group_stations_count", "comment": null}, "first_assignee_id": {"type": "text", "index": 96, "name": "first_assignee_id", "comment": null}, "last_assignee_id": {"type": "text", "index": 97, "name": "last_assignee_id", "comment": null}, "first_agent_assignment_date": {"type": "timestamp without time zone", "index": 98, "name": "first_agent_assignment_date", "comment": null}, "last_agent_assignment_date": {"type": "timestamp without time zone", "index": 99, "name": "last_agent_assignment_date", "comment": null}, "first_solved_at": {"type": "timestamp without time zone", "index": 100, "name": "first_solved_at", "comment": null}, "last_solved_at": {"type": "timestamp without time zone", "index": 101, "name": "last_solved_at", "comment": null}, "first_assignment_to_resolution_calendar_minutes": {"type": "double precision", "index": 102, "name": 
"first_assignment_to_resolution_calendar_minutes", "comment": null}, "last_assignment_to_resolution_calendar_minutes": {"type": "double precision", "index": 103, "name": "last_assignment_to_resolution_calendar_minutes", "comment": null}, "ticket_unassigned_duration_calendar_minutes": {"type": "double precision", "index": 104, "name": "ticket_unassigned_duration_calendar_minutes", "comment": null}, "first_resolution_calendar_minutes": {"type": "double precision", "index": 105, "name": "first_resolution_calendar_minutes", "comment": null}, "final_resolution_calendar_minutes": {"type": "double precision", "index": 106, "name": "final_resolution_calendar_minutes", "comment": null}, "count_resolutions": {"type": "bigint", "index": 107, "name": "count_resolutions", "comment": null}, "count_reopens": {"type": "bigint", "index": 108, "name": "count_reopens", "comment": null}, "ticket_deleted_count": {"type": "bigint", "index": 109, "name": "ticket_deleted_count", "comment": null}, "total_ticket_recoveries": {"type": "bigint", "index": 110, "name": "total_ticket_recoveries", "comment": null}, "last_status_assignment_date": {"type": "timestamp without time zone", "index": 111, "name": "last_status_assignment_date", "comment": null}, "new_status_duration_in_calendar_minutes": {"type": "double precision", "index": 112, "name": "new_status_duration_in_calendar_minutes", "comment": null}, "open_status_duration_in_calendar_minutes": {"type": "double precision", "index": 113, "name": "open_status_duration_in_calendar_minutes", "comment": null}, "agent_wait_time_in_calendar_minutes": {"type": "double precision", "index": 114, "name": "agent_wait_time_in_calendar_minutes", "comment": null}, "requester_wait_time_in_calendar_minutes": {"type": "double precision", "index": 115, "name": "requester_wait_time_in_calendar_minutes", "comment": null}, "solve_time_in_calendar_minutes": {"type": "double precision", "index": 116, "name": "solve_time_in_calendar_minutes", "comment": null}, "agent_work_time_in_calendar_minutes": {"type": "double precision", "index": 117, "name": "agent_work_time_in_calendar_minutes", "comment": null}, "on_hold_time_in_calendar_minutes": {"type": "double precision", "index": 118, "name": "on_hold_time_in_calendar_minutes", "comment": null}, "total_agent_replies": {"type": "bigint", "index": 119, "name": "total_agent_replies", "comment": null}, "requester_last_login_age_minutes": {"type": "double precision", "index": 120, "name": "requester_last_login_age_minutes", "comment": null}, "assignee_last_login_age_minutes": {"type": "double precision", "index": 121, "name": "assignee_last_login_age_minutes", "comment": null}, "unsolved_ticket_age_minutes": {"type": "double precision", "index": 122, "name": "unsolved_ticket_age_minutes", "comment": null}, "unsolved_ticket_age_since_update_minutes": {"type": "double precision", "index": 123, "name": "unsolved_ticket_age_since_update_minutes", "comment": null}, "is_one_touch_resolution": {"type": "boolean", "index": 124, "name": "is_one_touch_resolution", "comment": null}, "is_two_touch_resolution": {"type": "boolean", "index": 125, "name": "is_two_touch_resolution", "comment": null}, "is_multi_touch_resolution": {"type": "boolean", "index": 126, "name": "is_multi_touch_resolution", "comment": null}, "first_resolution_business_minutes": {"type": "numeric", "index": 127, "name": "first_resolution_business_minutes", "comment": null}, "full_resolution_business_minutes": {"type": "numeric", "index": 128, "name": "full_resolution_business_minutes", 
"comment": null}, "first_reply_time_business_minutes": {"type": "numeric", "index": 129, "name": "first_reply_time_business_minutes", "comment": null}, "agent_wait_time_in_business_minutes": {"type": "numeric", "index": 130, "name": "agent_wait_time_in_business_minutes", "comment": null}, "requester_wait_time_in_business_minutes": {"type": "numeric", "index": 131, "name": "requester_wait_time_in_business_minutes", "comment": null}, "solve_time_in_business_minutes": {"type": "numeric", "index": 132, "name": "solve_time_in_business_minutes", "comment": null}, "agent_work_time_in_business_minutes": {"type": "numeric", "index": 133, "name": "agent_work_time_in_business_minutes", "comment": null}, "on_hold_time_in_business_minutes": {"type": "numeric", "index": 134, "name": "on_hold_time_in_business_minutes", "comment": null}, "new_status_duration_in_business_minutes": {"type": "numeric", "index": 135, "name": "new_status_duration_in_business_minutes", "comment": null}, "open_status_duration_in_business_minutes": {"type": "numeric", "index": 136, "name": "open_status_duration_in_business_minutes", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__ticket_metrics"}, "model.zendesk.zendesk__ticket_summary": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "zendesk__ticket_summary", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"user_count": {"type": "bigint", "index": 1, "name": "user_count", "comment": null}, "active_agent_count": {"type": "bigint", "index": 2, "name": "active_agent_count", "comment": null}, "deleted_user_count": {"type": "bigint", "index": 3, "name": "deleted_user_count", "comment": null}, "end_user_count": {"type": "bigint", "index": 4, "name": "end_user_count", "comment": null}, "suspended_user_count": {"type": "bigint", "index": 5, "name": "suspended_user_count", "comment": null}, "new_ticket_count": {"type": "bigint", "index": 6, "name": "new_ticket_count", "comment": null}, "on_hold_ticket_count": {"type": "bigint", "index": 7, "name": "on_hold_ticket_count", "comment": null}, "open_ticket_count": {"type": "bigint", "index": 8, "name": "open_ticket_count", "comment": null}, "pending_ticket_count": {"type": "bigint", "index": 9, "name": "pending_ticket_count", "comment": null}, "solved_ticket_count": {"type": "bigint", "index": 10, "name": "solved_ticket_count", "comment": null}, "problem_ticket_count": {"type": "bigint", "index": 11, "name": "problem_ticket_count", "comment": null}, "assigned_ticket_count": {"type": "bigint", "index": 12, "name": "assigned_ticket_count", "comment": null}, "reassigned_ticket_count": {"type": "bigint", "index": 13, "name": "reassigned_ticket_count", "comment": null}, "reopened_ticket_count": {"type": "bigint", "index": 14, "name": "reopened_ticket_count", "comment": null}, "surveyed_satisfaction_ticket_count": {"type": "bigint", "index": 15, "name": "surveyed_satisfaction_ticket_count", "comment": null}, "unassigned_unsolved_ticket_count": {"type": "bigint", "index": 16, "name": "unassigned_unsolved_ticket_count", "comment": null}, "unreplied_ticket_count": {"type": "bigint", "index": 17, "name": "unreplied_ticket_count", "comment": null}, "unreplied_unsolved_ticket_count": {"type": "bigint", "index": 18, "name": "unreplied_unsolved_ticket_count", "comment": null}, "unsolved_ticket_count": 
{"type": "bigint", "index": 19, "name": "unsolved_ticket_count", "comment": null}, "recovered_ticket_count": {"type": "bigint", "index": 20, "name": "recovered_ticket_count", "comment": null}, "deleted_ticket_count": {"type": "bigint", "index": 21, "name": "deleted_ticket_count", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "model.zendesk.zendesk__ticket_summary"}}, "sources": {"source.zendesk_source.zendesk.brand": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55", "name": "brand_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 2, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 4, "name": "active", "comment": null}, "brand_url": {"type": "text", "index": 5, "name": "brand_url", "comment": null}, "default": {"type": "boolean", "index": 6, "name": "default", "comment": null}, "has_help_center": {"type": "boolean", "index": 7, "name": "has_help_center", "comment": null}, "help_center_state": {"type": "text", "index": 8, "name": "help_center_state", "comment": null}, "logo_content_type": {"type": "text", "index": 9, "name": "logo_content_type", "comment": null}, "logo_content_url": {"type": "text", "index": 10, "name": "logo_content_url", "comment": null}, "logo_deleted": {"type": "boolean", "index": 11, "name": "logo_deleted", "comment": null}, "logo_file_name": {"type": "text", "index": 12, "name": "logo_file_name", "comment": null}, "logo_height": {"type": "integer", "index": 13, "name": "logo_height", "comment": null}, "logo_id": {"type": "integer", "index": 14, "name": "logo_id", "comment": null}, "logo_inline": {"type": "boolean", "index": 15, "name": "logo_inline", "comment": null}, "logo_mapped_content_url": {"type": "text", "index": 16, "name": "logo_mapped_content_url", "comment": null}, "logo_size": {"type": "integer", "index": 17, "name": "logo_size", "comment": null}, "logo_url": {"type": "text", "index": 18, "name": "logo_url", "comment": null}, "logo_width": {"type": "integer", "index": 19, "name": "logo_width", "comment": null}, "name": {"type": "text", "index": 20, "name": "name", "comment": null}, "subdomain": {"type": "text", "index": 21, "name": "subdomain", "comment": null}, "url": {"type": "text", "index": 22, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.brand"}, "source.zendesk_source.zendesk.daylight_time": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55", "name": "daylight_time_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"time_zone": {"type": "text", "index": 1, "name": "time_zone", "comment": null}, "year": {"type": "integer", "index": 2, "name": "year", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "daylight_end_utc": {"type": "timestamp without time zone", "index": 4, "name": "daylight_end_utc", "comment": null}, 
"daylight_offset": {"type": "integer", "index": 5, "name": "daylight_offset", "comment": null}, "daylight_start_utc": {"type": "timestamp without time zone", "index": 6, "name": "daylight_start_utc", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.daylight_time"}, "source.zendesk_source.zendesk.domain_name": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55", "name": "domain_name_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"index": {"type": "integer", "index": 1, "name": "index", "comment": null}, "organization_id": {"type": "bigint", "index": 2, "name": "organization_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "domain_name": {"type": "text", "index": 4, "name": "domain_name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.domain_name"}, "source.zendesk_source.zendesk.group": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55", "name": "group_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 2, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 4, "name": "created_at", "comment": null}, "name": {"type": "text", "index": 5, "name": "name", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 6, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 7, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.group"}, "source.zendesk_source.zendesk.organization": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55", "name": "organization_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 3, "name": "created_at", "comment": null}, "details": {"type": "integer", "index": 4, "name": "details", "comment": null}, "external_id": {"type": "integer", "index": 5, "name": "external_id", "comment": null}, "group_id": {"type": "integer", "index": 6, "name": "group_id", "comment": null}, "name": {"type": "text", "index": 7, "name": "name", "comment": null}, "notes": {"type": "integer", "index": 8, "name": "notes", "comment": null}, "shared_comments": {"type": "boolean", "index": 9, "name": "shared_comments", "comment": null}, "shared_tickets": {"type": "boolean", "index": 10, "name": "shared_tickets", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 11, "name": 
"updated_at", "comment": null}, "url": {"type": "text", "index": 12, "name": "url", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.organization"}, "source.zendesk_source.zendesk.organization_tag": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55", "name": "organization_tag_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"organization_id": {"type": "bigint", "index": 1, "name": "organization_id", "comment": null}, "tag": {"type": "text", "index": 2, "name": "tag", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.organization_tag"}, "source.zendesk_source.zendesk.schedule": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55", "name": "schedule_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"end_time": {"type": "bigint", "index": 1, "name": "end_time", "comment": null}, "id": {"type": "bigint", "index": 2, "name": "id", "comment": null}, "start_time": {"type": "bigint", "index": 3, "name": "start_time", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 4, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 5, "name": "_fivetran_synced", "comment": null}, "end_time_utc": {"type": "bigint", "index": 6, "name": "end_time_utc", "comment": null}, "name": {"type": "text", "index": 7, "name": "name", "comment": null}, "start_time_utc": {"type": "bigint", "index": 8, "name": "start_time_utc", "comment": null}, "time_zone": {"type": "text", "index": 9, "name": "time_zone", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 10, "name": "created_at", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.schedule"}, "source.zendesk_source.zendesk.schedule_holiday": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55", "name": "schedule_holiday_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "schedule_id": {"type": "bigint", "index": 2, "name": "schedule_id", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "end_date": {"type": "date", "index": 5, "name": "end_date", "comment": null}, "name": {"type": "text", "index": 6, "name": "name", "comment": null}, "start_date": {"type": "date", "index": 7, "name": "start_date", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.schedule_holiday"}, 
"source.zendesk_source.zendesk.ticket_comment": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55", "name": "ticket_comment_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "body": {"type": "text", "index": 3, "name": "body", "comment": null}, "created": {"type": "timestamp without time zone", "index": 4, "name": "created", "comment": null}, "facebook_comment": {"type": "boolean", "index": 5, "name": "facebook_comment", "comment": null}, "public": {"type": "boolean", "index": 6, "name": "public", "comment": null}, "ticket_id": {"type": "integer", "index": 7, "name": "ticket_id", "comment": null}, "tweet": {"type": "boolean", "index": 8, "name": "tweet", "comment": null}, "user_id": {"type": "bigint", "index": 9, "name": "user_id", "comment": null}, "voice_comment": {"type": "boolean", "index": 10, "name": "voice_comment", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.ticket_comment"}, "source.zendesk_source.zendesk.ticket": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55", "name": "ticket_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "allow_channelback": {"type": "boolean", "index": 3, "name": "allow_channelback", "comment": null}, "assignee_id": {"type": "bigint", "index": 4, "name": "assignee_id", "comment": null}, "brand_id": {"type": "bigint", "index": 5, "name": "brand_id", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "description": {"type": "text", "index": 7, "name": "description", "comment": null}, "due_at": {"type": "timestamp without time zone", "index": 8, "name": "due_at", "comment": null}, "external_id": {"type": "bigint", "index": 9, "name": "external_id", "comment": null}, "forum_topic_id": {"type": "bigint", "index": 10, "name": "forum_topic_id", "comment": null}, "group_id": {"type": "bigint", "index": 11, "name": "group_id", "comment": null}, "has_incidents": {"type": "boolean", "index": 12, "name": "has_incidents", "comment": null}, "is_public": {"type": "boolean", "index": 13, "name": "is_public", "comment": null}, "organization_id": {"type": "bigint", "index": 14, "name": "organization_id", "comment": null}, "priority": {"type": "text", "index": 15, "name": "priority", "comment": null}, "problem_id": {"type": "bigint", "index": 16, "name": "problem_id", "comment": null}, "recipient": {"type": "text", "index": 17, "name": "recipient", "comment": null}, "requester_id": {"type": "bigint", "index": 18, "name": "requester_id", "comment": null}, "status": {"type": "text", "index": 19, "name": "status", "comment": null}, "subject": {"type": "text", "index": 20, "name": "subject", "comment": null}, "submitter_id": {"type": "bigint", "index": 21, "name": "submitter_id", "comment": null}, "system_client": {"type": "integer", "index": 22, "name": "system_client", "comment": null}, "ticket_form_id": {"type": "bigint", "index": 23, 
"name": "ticket_form_id", "comment": null}, "type": {"type": "text", "index": 24, "name": "type", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 25, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 26, "name": "url", "comment": null}, "via_channel": {"type": "text", "index": 27, "name": "via_channel", "comment": null}, "via_source_from_id": {"type": "integer", "index": 28, "name": "via_source_from_id", "comment": null}, "via_source_from_title": {"type": "integer", "index": 29, "name": "via_source_from_title", "comment": null}, "via_source_rel": {"type": "integer", "index": 30, "name": "via_source_rel", "comment": null}, "via_source_to_address": {"type": "text", "index": 31, "name": "via_source_to_address", "comment": null}, "via_source_to_name": {"type": "text", "index": 32, "name": "via_source_to_name", "comment": null}, "merged_ticket_ids": {"type": "text", "index": 33, "name": "merged_ticket_ids", "comment": null}, "via_source_from_address": {"type": "integer", "index": 34, "name": "via_source_from_address", "comment": null}, "followup_ids": {"type": "text", "index": 35, "name": "followup_ids", "comment": null}, "via_followup_source_id": {"type": "integer", "index": 36, "name": "via_followup_source_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.ticket"}, "source.zendesk_source.zendesk.ticket_field_history": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55", "name": "ticket_field_history_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"field_name": {"type": "text", "index": 1, "name": "field_name", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "updated": {"type": "timestamp without time zone", "index": 3, "name": "updated", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "user_id": {"type": "bigint", "index": 5, "name": "user_id", "comment": null}, "value": {"type": "text", "index": 6, "name": "value", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.ticket_field_history"}, "source.zendesk_source.zendesk.ticket_form_history": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55", "name": "ticket_form_history_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 2, "name": "updated_at", "comment": null}, "_fivetran_deleted": {"type": "boolean", "index": 3, "name": "_fivetran_deleted", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 4, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 5, "name": "active", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 6, "name": "created_at", "comment": null}, "display_name": {"type": "text", "index": 7, "name": "display_name", "comment": null}, "end_user_visible": {"type": "boolean", "index": 8, "name": "end_user_visible", "comment": null}, 
"name": {"type": "text", "index": 9, "name": "name", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.ticket_form_history"}, "source.zendesk_source.zendesk.ticket_schedule": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55", "name": "ticket_schedule_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"created_at": {"type": "timestamp without time zone", "index": 1, "name": "created_at", "comment": null}, "ticket_id": {"type": "bigint", "index": 2, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}, "schedule_id": {"type": "bigint", "index": 4, "name": "schedule_id", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.ticket_schedule"}, "source.zendesk_source.zendesk.ticket_tag": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55", "name": "ticket_tag_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"tag": {"type": "text", "index": 1, "name": "tag", "comment": null}, "ticket_id": {"type": "integer", "index": 2, "name": "ticket_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.ticket_tag"}, "source.zendesk_source.zendesk.time_zone": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55", "name": "time_zone_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"time_zone": {"type": "text", "index": 1, "name": "time_zone", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "standard_offset": {"type": "text", "index": 3, "name": "standard_offset", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.time_zone"}, "source.zendesk_source.zendesk.user": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55", "name": "user_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"id": {"type": "bigint", "index": 1, "name": "id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 2, "name": "_fivetran_synced", "comment": null}, "active": {"type": "boolean", "index": 3, "name": "active", "comment": null}, "alias": {"type": "integer", "index": 4, "name": "alias", "comment": null}, "authenticity_token": {"type": "integer", "index": 5, "name": "authenticity_token", "comment": null}, "chat_only": {"type": "boolean", "index": 6, "name": "chat_only", "comment": null}, "created_at": {"type": "timestamp without time zone", "index": 7, "name": "created_at", "comment": null}, "details": {"type": "integer", "index": 8, "name": "details", 
"comment": null}, "email": {"type": "text", "index": 9, "name": "email", "comment": null}, "external_id": {"type": "bigint", "index": 10, "name": "external_id", "comment": null}, "last_login_at": {"type": "timestamp without time zone", "index": 11, "name": "last_login_at", "comment": null}, "locale": {"type": "text", "index": 12, "name": "locale", "comment": null}, "locale_id": {"type": "bigint", "index": 13, "name": "locale_id", "comment": null}, "moderator": {"type": "boolean", "index": 14, "name": "moderator", "comment": null}, "name": {"type": "text", "index": 15, "name": "name", "comment": null}, "notes": {"type": "integer", "index": 16, "name": "notes", "comment": null}, "only_private_comments": {"type": "boolean", "index": 17, "name": "only_private_comments", "comment": null}, "organization_id": {"type": "bigint", "index": 18, "name": "organization_id", "comment": null}, "phone": {"type": "integer", "index": 19, "name": "phone", "comment": null}, "remote_photo_url": {"type": "integer", "index": 20, "name": "remote_photo_url", "comment": null}, "restricted_agent": {"type": "boolean", "index": 21, "name": "restricted_agent", "comment": null}, "role": {"type": "text", "index": 22, "name": "role", "comment": null}, "shared": {"type": "boolean", "index": 23, "name": "shared", "comment": null}, "shared_agent": {"type": "boolean", "index": 24, "name": "shared_agent", "comment": null}, "signature": {"type": "integer", "index": 25, "name": "signature", "comment": null}, "suspended": {"type": "boolean", "index": 26, "name": "suspended", "comment": null}, "ticket_restriction": {"type": "text", "index": 27, "name": "ticket_restriction", "comment": null}, "time_zone": {"type": "text", "index": 28, "name": "time_zone", "comment": null}, "two_factor_auth_enabled": {"type": "boolean", "index": 29, "name": "two_factor_auth_enabled", "comment": null}, "updated_at": {"type": "timestamp without time zone", "index": 30, "name": "updated_at", "comment": null}, "url": {"type": "text", "index": 31, "name": "url", "comment": null}, "verified": {"type": "boolean", "index": 32, "name": "verified", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.user"}, "source.zendesk_source.zendesk.user_tag": {"metadata": {"type": "BASE TABLE", "schema": "zendesk_integration_tests_55", "name": "user_tag_data", "database": "postgres", "comment": null, "owner": "pguser"}, "columns": {"tag": {"type": "text", "index": 1, "name": "tag", "comment": null}, "user_id": {"type": "bigint", "index": 2, "name": "user_id", "comment": null}, "_fivetran_synced": {"type": "timestamp without time zone", "index": 3, "name": "_fivetran_synced", "comment": null}}, "stats": {"has_stats": {"id": "has_stats", "label": "Has Stats?", "value": false, "include": false, "description": "Indicates whether there are statistics for this table"}}, "unique_id": "source.zendesk_source.zendesk.user_tag"}}, "errors": null} \ No newline at end of file diff --git a/docs/manifest.json b/docs/manifest.json index 8fdf5bf7..5c0d8096 100644 --- a/docs/manifest.json +++ b/docs/manifest.json @@ -1 +1 @@ -{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v11.json", "dbt_version": "1.7.9", "generated_at": "2024-05-14T15:31:13.041630Z", "invocation_id": "a6607f0c-5bee-4c0f-9bfc-3034194b1b1f", "env": {}, "project_name": "zendesk_integration_tests", "project_id": 
"b8a12ac1bacdf035438fc7646299ce11", "user_id": "9727b491-ecfe-4596-b1e2-53e646e8f80e", "send_anonymous_usage_stats": true, "adapter_type": "bigquery"}, "nodes": {"seed.zendesk_integration_tests.organization_tag_data": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "organization_tag_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "organization_tag_data.csv", "original_file_path": "seeds/organization_tag_data.csv", "unique_id": "seed.zendesk_integration_tests.organization_tag_data", "fqn": ["zendesk_integration_tests", "organization_tag_data"], "alias": "organization_tag_data", "checksum": {"name": "sha256", "checksum": "adebcb3827e908ab449435adc556aadf587cfad4103cab2c840d3d9fddc16e20"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "organization_id": "int64"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"organization_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "_fivetran_synced": "timestamp"}, "enabled": "{{ true if target.type != 'snowflake' else false }}"}, "created_at": 1715700423.923048, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`organization_tag_data`", "raw_code": "", "root_path": "/Users/renee/Documents/dbt/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.ticket_comment_data": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "ticket_comment_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "ticket_comment_data.csv", "original_file_path": "seeds/ticket_comment_data.csv", "unique_id": "seed.zendesk_integration_tests.ticket_comment_data", "fqn": ["zendesk_integration_tests", "ticket_comment_data"], "alias": "ticket_comment_data", "checksum": {"name": "sha256", "checksum": "033e18229b848b4809699f04f39605771faf437e583a1aefe1af5625f0ac7de5"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "int64", "user_id": "int64", "created": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 
'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "user_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created": "timestamp"}}, "created_at": 1715700423.923854, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`ticket_comment_data`", "raw_code": "", "root_path": "/Users/renee/Documents/dbt/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.brand_data": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "brand_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "brand_data.csv", "original_file_path": "seeds/brand_data.csv", "unique_id": "seed.zendesk_integration_tests.brand_data", "fqn": ["zendesk_integration_tests", "brand_data"], "alias": "brand_data", "checksum": {"name": "sha256", "checksum": "203980ef5845715ee0758982a85b96a30c8e4b06fbda7f104705bd4cdd586aa9"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "int64"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "_fivetran_synced": "timestamp"}, "enabled": "{{ true if target.type != 'postgres' else false }}"}, "created_at": 1715700423.924742, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`brand_data`", "raw_code": "", "root_path": "/Users/renee/Documents/dbt/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.schedule_holiday_data": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "schedule_holiday_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "schedule_holiday_data.csv", "original_file_path": "seeds/schedule_holiday_data.csv", "unique_id": "seed.zendesk_integration_tests.schedule_holiday_data", "fqn": ["zendesk_integration_tests", "schedule_holiday_data"], "alias": "schedule_holiday_data", "checksum": {"name": "sha256", "checksum": "f907dea5e2dc21649bf4eae0392add96a884f19f900dc0f2d568141038ba5d28"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "int64", "schedule_id": "int64"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, 
"group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "schedule_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}"}}, "created_at": 1715700423.9255629, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`schedule_holiday_data`", "raw_code": "", "root_path": "/Users/renee/Documents/dbt/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.domain_name_data": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "domain_name_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "domain_name_data.csv", "original_file_path": "seeds/domain_name_data.csv", "unique_id": "seed.zendesk_integration_tests.domain_name_data", "fqn": ["zendesk_integration_tests", "domain_name_data"], "alias": "domain_name_data", "checksum": {"name": "sha256", "checksum": "3bf711417f9269957353aa9e1ddd28ada8bd74e03128a4b8c94e694a560a09cf"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "organization_id": "int64"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"organization_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "_fivetran_synced": "timestamp"}}, "created_at": 1715700423.927094, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`domain_name_data`", "raw_code": "", "root_path": "/Users/renee/Documents/dbt/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.ticket_field_history_data": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "ticket_field_history_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "ticket_field_history_data.csv", "original_file_path": "seeds/ticket_field_history_data.csv", "unique_id": "seed.zendesk_integration_tests.ticket_field_history_data", "fqn": ["zendesk_integration_tests", "ticket_field_history_data"], "alias": "ticket_field_history_data", "checksum": {"name": "sha256", "checksum": "45c59dedcdf851242cac587e9fa78139af11d4fa855f366aa68175b8a65dc34e"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "ticket_id": "int64", "user_id": "int64", "updated": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", 
"grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"ticket_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "user_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "updated": "timestamp"}}, "created_at": 1715700423.928768, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`ticket_field_history_data`", "raw_code": "", "root_path": "/Users/renee/Documents/dbt/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.ticket_data": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "ticket_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "ticket_data.csv", "original_file_path": "seeds/ticket_data.csv", "unique_id": "seed.zendesk_integration_tests.ticket_data", "fqn": ["zendesk_integration_tests", "ticket_data"], "alias": "ticket_data", "checksum": {"name": "sha256", "checksum": "76fc2c4e8fc33c3f2956d5689a89419b89936f94edd76ef5ad07cb52820ad20d"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "int64", "created_at": "timestamp", "due_at": "timestamp", "updated_at": "timestamp", "assignee_id": "int64", "brand_id": "int64", "external_id": "int64", "forum_topic_id": "int64", "group_id": "int64", "organization_id": "int64", "problem_id": "int64", "requester_id": "int64", "submitter_id": "int64", "ticket_form_id": "int64"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp", "due_at": "timestamp", "updated_at": "timestamp", "assignee_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "brand_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "external_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "forum_topic_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "group_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "organization_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "problem_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "requester_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "submitter_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "ticket_form_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}"}}, "created_at": 1715700423.9295611, 
"relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`ticket_data`", "raw_code": "", "root_path": "/Users/renee/Documents/dbt/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.time_zone_data": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "time_zone_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "time_zone_data.csv", "original_file_path": "seeds/time_zone_data.csv", "unique_id": "seed.zendesk_integration_tests.time_zone_data", "fqn": ["zendesk_integration_tests", "time_zone_data"], "alias": "time_zone_data", "checksum": {"name": "sha256", "checksum": "b02df4f14e54c7deb0b15c40b35196968de4374ceb1cc5ad95986620a506adb2"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"_fivetran_synced": "timestamp"}}, "created_at": 1715700423.931068, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`time_zone_data`", "raw_code": "", "root_path": "/Users/renee/Documents/dbt/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.ticket_schedule_data": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "ticket_schedule_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "ticket_schedule_data.csv", "original_file_path": "seeds/ticket_schedule_data.csv", "unique_id": "seed.zendesk_integration_tests.ticket_schedule_data", "fqn": ["zendesk_integration_tests", "ticket_schedule_data"], "alias": "ticket_schedule_data", "checksum": {"name": "sha256", "checksum": "dc4892d18f3730242f5319bb24498d77a4c32a666b6b4d5c0eec0d4dafd7224b"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "ticket_id": "int64", "schedule_id": "int64", "created_at": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"ticket_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "schedule_id": "{{ 'int64' if 
target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp"}}, "created_at": 1715700423.93179, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`ticket_schedule_data`", "raw_code": "", "root_path": "/Users/renee/Documents/dbt/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.daylight_time_data": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "daylight_time_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "daylight_time_data.csv", "original_file_path": "seeds/daylight_time_data.csv", "unique_id": "seed.zendesk_integration_tests.daylight_time_data", "fqn": ["zendesk_integration_tests", "daylight_time_data"], "alias": "daylight_time_data", "checksum": {"name": "sha256", "checksum": "17642d90548c6367ab328762a47066a905e3ba2da8831cd86ef37ac659a38fc9"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"_fivetran_synced": "timestamp"}}, "created_at": 1715700423.9327428, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`daylight_time_data`", "raw_code": "", "root_path": "/Users/renee/Documents/dbt/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.user_data": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "user_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "user_data.csv", "original_file_path": "seeds/user_data.csv", "unique_id": "seed.zendesk_integration_tests.user_data", "fqn": ["zendesk_integration_tests", "user_data"], "alias": "user_data", "checksum": {"name": "sha256", "checksum": "8d3c4a4fd84769a9c06b4ffa4dd232a9f4b7744ebe6fce4d91cb7aac96401e49"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "int64", "external_id": "int64", "locale_id": "int64", "organization_id": "int64", "created_at": "timestamp", "last_login_at": "timestamp", "updated_at": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true 
if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "external_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "locale_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "organization_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp", "last_login_at": "timestamp", "updated_at": "timestamp"}, "enabled": "{{ true if target.type != 'snowflake' else false }}"}, "created_at": 1715700423.933486, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`user_data`", "raw_code": "", "root_path": "/Users/renee/Documents/dbt/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.schedule_data": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "schedule_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "schedule_data.csv", "original_file_path": "seeds/schedule_data.csv", "unique_id": "seed.zendesk_integration_tests.schedule_data", "fqn": ["zendesk_integration_tests", "schedule_data"], "alias": "schedule_data", "checksum": {"name": "sha256", "checksum": "e2596e44df02b53d13b850f9742084141b7b75755baae603c8d3db6b8354107a"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "int64", "end_time": "int64", "start_time": "int64", "end_time_utc": "int64", "start_time_utc": "int64", "created_at": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "end_time": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "start_time": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "end_time_utc": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "start_time_utc": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp"}}, "created_at": 1715700423.934252, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`schedule_data`", "raw_code": "", "root_path": "/Users/renee/Documents/dbt/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.ticket_tag_data": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "ticket_tag_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "ticket_tag_data.csv", "original_file_path": "seeds/ticket_tag_data.csv", "unique_id": "seed.zendesk_integration_tests.ticket_tag_data", "fqn": ["zendesk_integration_tests", "ticket_tag_data"], "alias": "ticket_tag_data", "checksum": {"name": "sha256", "checksum": "020b25c3247e21387702778ce0af4e5a5b8b3aee62daaa05f48c643489b57ea0"}, "config": {"enabled": true, "alias": null, 
"schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"_fivetran_synced": "timestamp"}}, "created_at": 1715700423.9349918, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`ticket_tag_data`", "raw_code": "", "root_path": "/Users/renee/Documents/dbt/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.organization_data": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "organization_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "organization_data.csv", "original_file_path": "seeds/organization_data.csv", "unique_id": "seed.zendesk_integration_tests.organization_data", "fqn": ["zendesk_integration_tests", "organization_data"], "alias": "organization_data", "checksum": {"name": "sha256", "checksum": "b3e00faed1ea214f73182b110c5f55653a5b43f2bc082dcb87f6c63dea5303c3"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "int64", "created_at": "timestamp", "updated_at": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp", "updated_at": "timestamp"}}, "created_at": 1715700423.935746, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`organization_data`", "raw_code": "", "root_path": "/Users/renee/Documents/dbt/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.ticket_form_history_data": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "ticket_form_history_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "ticket_form_history_data.csv", "original_file_path": "seeds/ticket_form_history_data.csv", "unique_id": "seed.zendesk_integration_tests.ticket_form_history_data", "fqn": ["zendesk_integration_tests", "ticket_form_history_data"], "alias": "ticket_form_history_data", "checksum": {"name": "sha256", "checksum": 
"a5b4edef05a0baa9acac87db3eea1ac0ba55865809db778ff458e20b7352c665"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "int64"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "_fivetran_synced": "timestamp"}}, "created_at": 1715700423.936515, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`ticket_form_history_data`", "raw_code": "", "root_path": "/Users/renee/Documents/dbt/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.group_data": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "group_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "group_data.csv", "original_file_path": "seeds/group_data.csv", "unique_id": "seed.zendesk_integration_tests.group_data", "fqn": ["zendesk_integration_tests", "group_data"], "alias": "group_data", "checksum": {"name": "sha256", "checksum": "ded51f1b267e9785ca862ca30656faa2485b5814d834ea35de6892702c3dbd1a"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "int64", "created_at": "timestamp", "updated_at": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp", "updated_at": "timestamp"}}, "created_at": 1715700423.937556, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`group_data`", "raw_code": "", "root_path": "/Users/renee/Documents/dbt/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.user_tag_data": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "user_tag_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "user_tag_data.csv", "original_file_path": "seeds/user_tag_data.csv", "unique_id": "seed.zendesk_integration_tests.user_tag_data", "fqn": ["zendesk_integration_tests", "user_tag_data"], "alias": 
"user_tag_data", "checksum": {"name": "sha256", "checksum": "fde0d85263495e783fd6fb342940a4dcd67c39581d55bfc9b28935d24367a096"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "user_id": "int64"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"user_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "_fivetran_synced": "timestamp"}, "enabled": "{{ true if target.type != 'snowflake' else false }}"}, "created_at": 1715700423.93838, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`user_tag_data`", "raw_code": "", "root_path": "/Users/renee/Documents/dbt/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "model.zendesk.zendesk__ticket_enriched": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "zendesk__ticket_enriched", "resource_type": "model", "package_name": "zendesk", "path": "zendesk__ticket_enriched.sql", "original_file_path": "models/zendesk__ticket_enriched.sql", "unique_id": "model.zendesk.zendesk__ticket_enriched", "fqn": ["zendesk", "zendesk__ticket_enriched"], "alias": "zendesk__ticket_enriched", "checksum": {"name": "sha256", "checksum": "8d5ccce79dd53bd307569a9a086b4205cfebbd616bb74b594766e524a281c244"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Each record represents a Zendesk ticket, enriched with data about it's tags, assignees, requester, submitter, organization and group.", "columns": {"ticket_id": {"name": "ticket_id", "description": "Automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "url": {"name": "url", "description": "The API url of this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_id": {"name": "assignee_id", "description": "The agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_role": {"name": "assignee_role", "description": "The role of the agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_email": {"name": "assignee_email", "description": "The email of the agent currently assigned to the ticket", "meta": {}, "data_type": null, 
"constraints": [], "quote": null, "tags": []}, "assignee_name": {"name": "assignee_name", "description": "The name of the agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "brand_id": {"name": "brand_id", "description": "Enterprise only. The id of the brand this ticket is associated with", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "When this record was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "type": {"name": "type", "description": "The type of this ticket, possible values are problem, incident, question or task", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "subject": {"name": "subject", "description": "The value of the subject field for this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "description": {"name": "description", "description": "Read-only first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "priority": {"name": "priority", "description": "The urgency with which the ticket should be addressed, possible values are urgent, high, normal and low", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "The state of the ticket, possible values are new, open, pending, hold, solved and closed", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "recipient": {"name": "recipient", "description": "The original recipient e-mail address of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_id": {"name": "requester_id", "description": "The user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_role": {"name": "requester_role", "description": "The role of the user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_email": {"name": "requester_email", "description": "The email of the user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_name": {"name": "requester_name", "description": "The name of the user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_id": {"name": "submitter_id", "description": "The user who submitted the ticket. 
The submitter always becomes the author of the first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_role": {"name": "submitter_role", "description": "The role of the user who submitted the ticket.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_email": {"name": "submitter_email", "description": "The email of the user who submitted the ticket.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_name": {"name": "submitter_name", "description": "The name of the user who submitted the ticket.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_id": {"name": "organization_id", "description": "The organization of the requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_name": {"name": "organization_name", "description": "The name of the organization of the requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "external_id": {"name": "external_id", "description": "The external id of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_id": {"name": "group_id", "description": "The group this ticket is assigned to", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_name": {"name": "group_name", "description": "The name of the group this ticket is assigned to", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "due_at": {"name": "due_at", "description": "If this is a ticket of type \"task\" it has a due date. Due dates use the ISO 8601 format.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_form_id": {"name": "ticket_form_id", "description": "Enterprise only. 
The id of the ticket form to render for the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_public": {"name": "is_public", "description": "Is true if any comments are public, false otherwise", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "When this record was last updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_channel": {"name": "created_channel", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_from_id": {"name": "source_from_id", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_from_title": {"name": "source_from_title", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_rel": {"name": "source_rel", "description": "The rel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_to_address": {"name": "source_to_address", "description": "The address of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_to_name": {"name": "source_to_name", "description": "The name of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_tags": {"name": "ticket_tags", "description": "A list of all tags assigned to this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "problem_id": {"name": "problem_id", "description": "The reference to the problem if the ticket is listed as a problem", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_incident": {"name": "is_incident", "description": "Boolean indicating whether the ticket is listed as an incident", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_brand_name": {"name": "ticket_brand_name", "description": "The brand name associated with the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_form_name": {"name": "ticket_form_name", "description": "The form name of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_rating": {"name": "ticket_satisfaction_rating", "description": "The ticket satisfaction rating", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_comment": {"name": "ticket_satisfaction_comment", "description": "The ticket satisfaction comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_reason": {"name": "ticket_satisfaction_reason", "description": "The ticket satisfaction reason", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_organization_domain_names": {"name": "ticket_organization_domain_names", "description": "The domain names of the organization associated with the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_domain_names": {"name": "requester_organization_domain_names", "description": "The ticket requester's organization domain names", "meta": {}, "data_type": null, "constraints": [], 
"quote": null, "tags": []}, "requester_external_id": {"name": "requester_external_id", "description": "The ticket requester external id", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_created_at": {"name": "requester_created_at", "description": "The date the ticket requester was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_updated_at": {"name": "requester_updated_at", "description": "The date the ticket requester was last updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_requester_active": {"name": "is_requester_active", "description": "Boolean indicating whether the requester is currently active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_locale": {"name": "requester_locale", "description": "The locale of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_time_zone": {"name": "requester_time_zone", "description": "The timezone of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_ticket_update_count": {"name": "requester_ticket_update_count", "description": "The number of times the requester has updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_ticket_last_update_at": {"name": "requester_ticket_last_update_at", "description": "The last date the requester updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_last_login_at": {"name": "requester_last_login_at", "description": "The last login of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_id": {"name": "requester_organization_id", "description": "The organization id of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_name": {"name": "requester_organization_name", "description": "The organization name of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_tags": {"name": "requester_organization_tags", "description": "The organization tags of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_external_id": {"name": "requester_organization_external_id", "description": "The organization external id of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_created_at": {"name": "requester_organization_created_at", "description": "The date the ticket requesters organization was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_updated_at": {"name": "requester_organization_updated_at", "description": "The date the ticket requesters organization was last updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_external_id": {"name": "submitter_external_id", "description": "The ticket submitter external id", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_agent_submitted": {"name": "is_agent_submitted", "description": "Boolean indicating if the submitter has an agent role", "meta": {}, "data_type": null, "constraints": [], "quote": 
null, "tags": []}, "is_submitter_active": {"name": "is_submitter_active", "description": "Boolean indicating if the ticket submitter is active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_locale": {"name": "submitter_locale", "description": "The locale of the ticket submitter", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_time_zone": {"name": "submitter_time_zone", "description": "The time zone of the ticket submitter", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_external_id": {"name": "assignee_external_id", "description": "The external id of the ticket assignee", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_assignee_active": {"name": "is_assignee_active", "description": "Boolean indicating if the ticket assignee is active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_locale": {"name": "assignee_locale", "description": "The locale of the ticket assignee", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_time_zone": {"name": "assignee_time_zone", "description": "The time zone of the ticket assignee", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_ticket_update_count": {"name": "assignee_ticket_update_count", "description": "The number of times the ticket assignee has updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_ticket_last_update_at": {"name": "assignee_ticket_last_update_at", "description": "The last date the ticket assignee updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_last_login_at": {"name": "assignee_last_login_at", "description": "The date the ticket assignee last logged in", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_tag": {"name": "requester_tag", "description": "The tags associated with the ticket requester.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_tag": {"name": "submitter_tag", "description": "The tags associated with the ticket submitter.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_tag": {"name": "assignee_tag", "description": "The tags associated with the ticket assignee.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_total_satisfaction_scores": {"name": "ticket_total_satisfaction_scores", "description": "The total number of satisfaction scores the ticket received.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_first_satisfaction_score": {"name": "ticket_first_satisfaction_score", "description": "The first satisfaction score the ticket received.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_score": {"name": "ticket_satisfaction_score", "description": "The latest satisfaction score the ticket received.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_good_to_bad_satisfaction_score": {"name": "is_good_to_bad_satisfaction_score", "description": "Boolean indicating if the ticket had a satisfaction score went from good to bad.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_bad_to_good_satisfaction_score": {"name": 
"is_bad_to_good_satisfaction_score", "description": "Boolean indicating if the ticket had a satisfaction score went from bad to good.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1715700424.563322, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`zendesk__ticket_enriched`", "raw_code": "-- this model enriches the ticket table with ticket-related dimensions. This table will not include any metrics.\n-- for metrics, see ticket_metrics!\n\nwith ticket as (\n\n select *\n from {{ ref('int_zendesk__ticket_aggregates') }}\n\n--If you use using_ticket_form_history this will be included, if not it will be ignored.\n{% if var('using_ticket_form_history', True) %}\n), latest_ticket_form as (\n\n select *\n from {{ ref('int_zendesk__latest_ticket_form') }}\n{% endif %}\n\n), latest_satisfaction_ratings as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_satisfaction') }}\n\n), users as (\n\n select *\n from {{ ref('int_zendesk__user_aggregates') }}\n\n), requester_updates as (\n\n select *\n from {{ ref('int_zendesk__requester_updates') }}\n\n), assignee_updates as (\n\n select *\n from {{ ref('int_zendesk__assignee_updates') }}\n\n), ticket_group as (\n \n select *\n from {{ ref('stg_zendesk__group') }}\n\n), organization as (\n\n select *\n from {{ ref('int_zendesk__organization_aggregates') }}\n\n), joined as (\n\n select \n\n ticket.*,\n\n --If you use using_ticket_form_history this will be included, if not it will be ignored.\n {% if var('using_ticket_form_history', True) %}\n latest_ticket_form.name as ticket_form_name,\n {% endif %}\n\n latest_satisfaction_ratings.count_satisfaction_scores as ticket_total_satisfaction_scores,\n latest_satisfaction_ratings.first_satisfaction_score as ticket_first_satisfaction_score,\n latest_satisfaction_ratings.latest_satisfaction_score as ticket_satisfaction_score,\n latest_satisfaction_ratings.latest_satisfaction_comment as ticket_satisfaction_comment,\n latest_satisfaction_ratings.latest_satisfaction_reason as ticket_satisfaction_reason,\n latest_satisfaction_ratings.is_good_to_bad_satisfaction_score,\n latest_satisfaction_ratings.is_bad_to_good_satisfaction_score,\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n {% if var('using_domain_names', True) %}\n organization.domain_names as ticket_organization_domain_names,\n requester_org.domain_names as requester_organization_domain_names,\n {% endif %}\n\n requester.external_id as requester_external_id,\n requester.created_at as requester_created_at,\n requester.updated_at as requester_updated_at,\n requester.role as requester_role,\n requester.email as requester_email,\n requester.name as requester_name,\n requester.is_active as is_requester_active,\n requester.locale as requester_locale,\n requester.time_zone as requester_time_zone,\n coalesce(requester_updates.total_updates, 0) as requester_ticket_update_count,\n requester_updates.last_updated as requester_ticket_last_update_at,\n requester.last_login_at as requester_last_login_at,\n requester.organization_id as requester_organization_id,\n requester_org.name as requester_organization_name,\n\n --If you use organization tags this will be included, if not it will be 
ignored.\n {% if var('using_organization_tags', True) %}\n requester_org.organization_tags as requester_organization_tags,\n {% endif %}\n requester_org.external_id as requester_organization_external_id,\n requester_org.created_at as requester_organization_created_at,\n requester_org.updated_at as requester_organization_updated_at,\n submitter.external_id as submitter_external_id,\n submitter.role as submitter_role,\n case when submitter.role in ('agent','admin') \n then true \n else false\n end as is_agent_submitted,\n submitter.email as submitter_email,\n submitter.name as submitter_name,\n submitter.is_active as is_submitter_active,\n submitter.locale as submitter_locale,\n submitter.time_zone as submitter_time_zone,\n assignee.external_id as assignee_external_id,\n assignee.role as assignee_role,\n assignee.email as assignee_email,\n assignee.name as assignee_name,\n assignee.is_active as is_assignee_active,\n assignee.locale as assignee_locale,\n assignee.time_zone as assignee_time_zone,\n coalesce(assignee_updates.total_updates, 0) as assignee_ticket_update_count,\n assignee_updates.last_updated as assignee_ticket_last_update_at,\n assignee.last_login_at as assignee_last_login_at,\n ticket_group.name as group_name,\n organization.name as organization_name\n\n --If you use using_user_tags this will be included, if not it will be ignored.\n {% if var('using_user_tags', True) %}\n ,requester.user_tags as requester_tag,\n submitter.user_tags as submitter_tag,\n assignee.user_tags as assignee_tag\n {% endif %}\n\n \n from ticket\n\n --Requester Joins\n join users as requester\n on requester.user_id = ticket.requester_id\n\n left join organization as requester_org\n on requester_org.organization_id = requester.organization_id\n\n left join requester_updates\n on requester_updates.ticket_id = ticket.ticket_id\n and requester_updates.requester_id = ticket.requester_id\n \n --Submitter Joins\n join users as submitter\n on submitter.user_id = ticket.submitter_id\n \n --Assignee Joins\n left join users as assignee\n on assignee.user_id = ticket.assignee_id\n\n left join assignee_updates\n on assignee_updates.ticket_id = ticket.ticket_id\n and assignee_updates.assignee_id = ticket.assignee_id\n\n --Ticket, Org, and Brand Joins\n left join ticket_group\n on ticket_group.group_id = ticket.group_id\n\n --If you use using_ticket_form_history this will be included, if not it will be ignored.\n {% if var('using_ticket_form_history', True) %}\n left join latest_ticket_form\n on latest_ticket_form.ticket_form_id = ticket.ticket_form_id\n {% endif %}\n\n left join organization\n on organization.organization_id = ticket.organization_id\n\n left join latest_satisfaction_ratings\n on latest_satisfaction_ratings.ticket_id = ticket.ticket_id\n)\n\nselect *\nfrom joined", "language": "sql", "refs": [{"name": "int_zendesk__ticket_aggregates", "package": null, "version": null}, {"name": "int_zendesk__latest_ticket_form", "package": null, "version": null}, {"name": "int_zendesk__ticket_historical_satisfaction", "package": null, "version": null}, {"name": "int_zendesk__user_aggregates", "package": null, "version": null}, {"name": "int_zendesk__requester_updates", "package": null, "version": null}, {"name": "int_zendesk__assignee_updates", "package": null, "version": null}, {"name": "stg_zendesk__group", "package": null, "version": null}, {"name": "int_zendesk__organization_aggregates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": 
["model.zendesk.int_zendesk__ticket_aggregates", "model.zendesk.int_zendesk__latest_ticket_form", "model.zendesk.int_zendesk__ticket_historical_satisfaction", "model.zendesk.int_zendesk__user_aggregates", "model.zendesk.int_zendesk__requester_updates", "model.zendesk.int_zendesk__assignee_updates", "model.zendesk_source.stg_zendesk__group", "model.zendesk.int_zendesk__organization_aggregates"]}, "compiled_path": "target/compiled/zendesk/models/zendesk__ticket_enriched.sql", "compiled": true, "compiled_code": "-- this model enriches the ticket table with ticket-related dimensions. This table will not include any metrics.\n-- for metrics, see ticket_metrics!\n\nwith ticket as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_aggregates`\n\n--If you use using_ticket_form_history this will be included, if not it will be ignored.\n\n), latest_ticket_form as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__latest_ticket_form`\n\n\n), latest_satisfaction_ratings as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_satisfaction`\n\n), users as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__user_aggregates`\n\n), requester_updates as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__requester_updates`\n\n), assignee_updates as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__assignee_updates`\n\n), ticket_group as (\n \n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__group`\n\n), organization as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__organization_aggregates`\n\n), joined as (\n\n select \n\n ticket.*,\n\n --If you use using_ticket_form_history this will be included, if not it will be ignored.\n \n latest_ticket_form.name as ticket_form_name,\n \n\n latest_satisfaction_ratings.count_satisfaction_scores as ticket_total_satisfaction_scores,\n latest_satisfaction_ratings.first_satisfaction_score as ticket_first_satisfaction_score,\n latest_satisfaction_ratings.latest_satisfaction_score as ticket_satisfaction_score,\n latest_satisfaction_ratings.latest_satisfaction_comment as ticket_satisfaction_comment,\n latest_satisfaction_ratings.latest_satisfaction_reason as ticket_satisfaction_reason,\n latest_satisfaction_ratings.is_good_to_bad_satisfaction_score,\n latest_satisfaction_ratings.is_bad_to_good_satisfaction_score,\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n organization.domain_names as ticket_organization_domain_names,\n requester_org.domain_names as requester_organization_domain_names,\n \n\n requester.external_id as requester_external_id,\n requester.created_at as requester_created_at,\n requester.updated_at as requester_updated_at,\n requester.role as requester_role,\n requester.email as requester_email,\n requester.name as requester_name,\n requester.is_active as is_requester_active,\n requester.locale as requester_locale,\n requester.time_zone as requester_time_zone,\n coalesce(requester_updates.total_updates, 0) as requester_ticket_update_count,\n requester_updates.last_updated as requester_ticket_last_update_at,\n requester.last_login_at as requester_last_login_at,\n requester.organization_id as 
requester_organization_id,\n requester_org.name as requester_organization_name,\n\n --If you use organization tags this will be included, if not it will be ignored.\n \n requester_org.organization_tags as requester_organization_tags,\n \n requester_org.external_id as requester_organization_external_id,\n requester_org.created_at as requester_organization_created_at,\n requester_org.updated_at as requester_organization_updated_at,\n submitter.external_id as submitter_external_id,\n submitter.role as submitter_role,\n case when submitter.role in ('agent','admin') \n then true \n else false\n end as is_agent_submitted,\n submitter.email as submitter_email,\n submitter.name as submitter_name,\n submitter.is_active as is_submitter_active,\n submitter.locale as submitter_locale,\n submitter.time_zone as submitter_time_zone,\n assignee.external_id as assignee_external_id,\n assignee.role as assignee_role,\n assignee.email as assignee_email,\n assignee.name as assignee_name,\n assignee.is_active as is_assignee_active,\n assignee.locale as assignee_locale,\n assignee.time_zone as assignee_time_zone,\n coalesce(assignee_updates.total_updates, 0) as assignee_ticket_update_count,\n assignee_updates.last_updated as assignee_ticket_last_update_at,\n assignee.last_login_at as assignee_last_login_at,\n ticket_group.name as group_name,\n organization.name as organization_name\n\n --If you use using_user_tags this will be included, if not it will be ignored.\n \n ,requester.user_tags as requester_tag,\n submitter.user_tags as submitter_tag,\n assignee.user_tags as assignee_tag\n \n\n \n from ticket\n\n --Requester Joins\n join users as requester\n on requester.user_id = ticket.requester_id\n\n left join organization as requester_org\n on requester_org.organization_id = requester.organization_id\n\n left join requester_updates\n on requester_updates.ticket_id = ticket.ticket_id\n and requester_updates.requester_id = ticket.requester_id\n \n --Submitter Joins\n join users as submitter\n on submitter.user_id = ticket.submitter_id\n \n --Assignee Joins\n left join users as assignee\n on assignee.user_id = ticket.assignee_id\n\n left join assignee_updates\n on assignee_updates.ticket_id = ticket.ticket_id\n and assignee_updates.assignee_id = ticket.assignee_id\n\n --Ticket, Org, and Brand Joins\n left join ticket_group\n on ticket_group.group_id = ticket.group_id\n\n --If you use using_ticket_form_history this will be included, if not it will be ignored.\n \n left join latest_ticket_form\n on latest_ticket_form.ticket_form_id = ticket.ticket_form_id\n \n\n left join organization\n on organization.organization_id = ticket.organization_id\n\n left join latest_satisfaction_ratings\n on latest_satisfaction_ratings.ticket_id = ticket.ticket_id\n)\n\nselect *\nfrom joined", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.zendesk__ticket_metrics": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "zendesk__ticket_metrics", "resource_type": "model", "package_name": "zendesk", "path": "zendesk__ticket_metrics.sql", "original_file_path": "models/zendesk__ticket_metrics.sql", "unique_id": "model.zendesk.zendesk__ticket_metrics", "fqn": ["zendesk", "zendesk__ticket_metrics"], "alias": "zendesk__ticket_metrics", "checksum": {"name": "sha256", "checksum": 
"0beb1421df42ed71b84e3cfec7f56029ec6bad71570ab9b4cb4ab712fc753ca6"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Each record represents a Zendesk Support ticket, enriched with metrics about reply times, resolution times and work times. Calendar and business hours are supported", "columns": {"first_reply_time_calendar_minutes": {"name": "first_reply_time_calendar_minutes", "description": "The number of calendar minutes between when the ticket was created and when the first public agent response occurred", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_reply_time_business_minutes": {"name": "first_reply_time_business_minutes", "description": "The number of business minutes between when the ticket was created and when the first public agent response occurred", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "total_reply_time_calendar_minutes": {"name": "total_reply_time_calendar_minutes", "description": "The combined calendar time between all end-user comments and the next public agent response", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_solved_at": {"name": "first_solved_at", "description": "The time the ticket was first in 'solved' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_solved_at": {"name": "last_solved_at", "description": "The time the ticket was last in 'solved' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_resolution_calendar_minutes": {"name": "first_resolution_calendar_minutes", "description": "The number of calendar minutes between the ticket created time and the time the ticket was first set to solved status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "final_resolution_calendar_minutes": {"name": "final_resolution_calendar_minutes", "description": "The number of calendar minutes between the ticket created time and the time the ticket was last set to solved status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_one_touch_resolution": {"name": "is_one_touch_resolution", "description": "A boolean field indicating that the ticket has one public agent response and is in solved status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_resolution_business_minutes": {"name": "first_resolution_business_minutes", "description": "The number of business minutes between the ticket created time and the time the ticket was first set to solved status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "full_resolution_business_minutes": {"name": "full_resolution_business_minutes", "description": "The number of business minutes between the ticket created time and the time the ticket was last set to solved status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "agent_wait_time_in_business_minutes": {"name": 
"agent_wait_time_in_business_minutes", "description": "The combined number of business minutes the ticket was in 'pending' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_wait_time_in_business_minutes": {"name": "requester_wait_time_in_business_minutes", "description": "The combined number of business minutes the ticket was in 'new', 'open', or 'hold' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "solve_time_in_business_minutes": {"name": "solve_time_in_business_minutes", "description": "The combined number of business minutes the ticket was in 'new', 'open', 'hold', or 'pending' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "agent_work_time_in_business_minutes": {"name": "agent_work_time_in_business_minutes", "description": "The combined number of business minutes the ticket was in 'new' or 'open' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "on_hold_time_in_business_minutes": {"name": "on_hold_time_in_business_minutes", "description": "The combined number of business minutes the ticket was on 'hold' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "new_status_duration_in_business_minutes": {"name": "new_status_duration_in_business_minutes", "description": "The combined number of business minutes the ticket was in 'new' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "open_status_duration_in_business_minutes": {"name": "open_status_duration_in_business_minutes", "description": "The combined number of business minutes the ticket was in 'open' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "agent_wait_time_in_calendar_minutes": {"name": "agent_wait_time_in_calendar_minutes", "description": "The combined number of calendar minutes the ticket was in 'pending' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_wait_time_in_calendar_minutes": {"name": "requester_wait_time_in_calendar_minutes", "description": "The combined number of calendar minutes the ticket was in 'new', 'open', or 'hold' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "solve_time_in_calendar_minutes": {"name": "solve_time_in_calendar_minutes", "description": "The combined number of calendar minutes the ticket was in 'new', 'open', 'hold', or 'pending' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "agent_work_time_in_calendar_minutes": {"name": "agent_work_time_in_calendar_minutes", "description": "The combined number of calendar minutes the ticket was in 'new' or 'open' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "on_hold_time_in_calendar_minutes": {"name": "on_hold_time_in_calendar_minutes", "description": "The combined number of calendar minutes the ticket was on 'hold' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_id": {"name": "ticket_id", "description": "Automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "url": {"name": "url", "description": "The API url of this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_id": {"name": "assignee_id", "description": "The agent currently assigned to the 
ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_role": {"name": "assignee_role", "description": "The role of the agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_email": {"name": "assignee_email", "description": "The email of the agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_name": {"name": "assignee_name", "description": "The name of the agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "brand_id": {"name": "brand_id", "description": "Enterprise only. The id of the brand this ticket is associated with", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "When this record was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "type": {"name": "type", "description": "The type of this ticket, possible values are problem, incident, question or task", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "subject": {"name": "subject", "description": "The value of the subject field for this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "description": {"name": "description", "description": "Read-only first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "priority": {"name": "priority", "description": "The urgency with which the ticket should be addressed, possible values are urgent, high, normal and low", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "The state of the ticket, possible values are new, open, pending, hold, solved and closed", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "recipient": {"name": "recipient", "description": "The original recipient e-mail address of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_id": {"name": "requester_id", "description": "The user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_role": {"name": "requester_role", "description": "The role of the user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_email": {"name": "requester_email", "description": "The email of the user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_name": {"name": "requester_name", "description": "The name of the user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_id": {"name": "submitter_id", "description": "The user who submitted the ticket. 
The submitter always becomes the author of the first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_role": {"name": "submitter_role", "description": "The role of the user who submitted the ticket.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_email": {"name": "submitter_email", "description": "The email of the user who submitted the ticket.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_name": {"name": "submitter_name", "description": "The name of the user who submitted the ticket.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_id": {"name": "organization_id", "description": "The organization of the requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_name": {"name": "organization_name", "description": "The name of the organization of the requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "external_id": {"name": "external_id", "description": "The external id of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_id": {"name": "group_id", "description": "The group this ticket is assigned to", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_name": {"name": "group_name", "description": "The name of the group this ticket is assigned to", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "due_at": {"name": "due_at", "description": "If this is a ticket of type \"task\" it has a due date. Due date format uses ISO 8601 format.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_form_id": {"name": "ticket_form_id", "description": "Enterprise only. 
The id of the ticket form to render for the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_public": {"name": "is_public", "description": "Is true if any comments are public, false otherwise", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "When this record last got updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_channel": {"name": "created_channel", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_from_id": {"name": "source_from_id", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_from_title": {"name": "source_from_title", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_rel": {"name": "source_rel", "description": "The rel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_to_address": {"name": "source_to_address", "description": "The address of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_to_name": {"name": "source_to_name", "description": "The name of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_tags": {"name": "ticket_tags", "description": "A list of all tags assigned to this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "problem_id": {"name": "problem_id", "description": "The reference to the problem if the ticket is listed as a problem", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_incident": {"name": "is_incident", "description": "Boolean indicating whether the ticket is listed as an incident", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_brand_name": {"name": "ticket_brand_name", "description": "The brand name associated with the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_form_name": {"name": "ticket_form_name", "description": "The form name of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_rating": {"name": "ticket_satisfaction_rating", "description": "The ticket satisfaction rating", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_comment": {"name": "ticket_satisfaction_comment", "description": "The ticket satisfaction comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_reason": {"name": "ticket_satisfaction_reason", "description": "The ticket satisfaction reason", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_organization_domain_names": {"name": "ticket_organization_domain_names", "description": "The domain names of the organization associated with the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_domain_names": {"name": "requester_organization_domain_names", "description": "The ticket requester's organization domain names", "meta": {}, "data_type": null, "constraints": [], 
"quote": null, "tags": []}, "requester_external_id": {"name": "requester_external_id", "description": "The ticket requester external id", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_created_at": {"name": "requester_created_at", "description": "The date the ticket requester was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_updated_at": {"name": "requester_updated_at", "description": "The date the ticket requester was last updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_requester_active": {"name": "is_requester_active", "description": "Boolean indicating whether the requester is currently active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_locale": {"name": "requester_locale", "description": "The locale of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_time_zone": {"name": "requester_time_zone", "description": "The timezone of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_ticket_update_count": {"name": "requester_ticket_update_count", "description": "The number of times the requester has updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_ticket_last_update_at": {"name": "requester_ticket_last_update_at", "description": "The last date the requester updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_last_login_at": {"name": "requester_last_login_at", "description": "The last login of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_id": {"name": "requester_organization_id", "description": "The organization id of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_name": {"name": "requester_organization_name", "description": "The organization name of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_tags": {"name": "requester_organization_tags", "description": "The organization tags of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_external_id": {"name": "requester_organization_external_id", "description": "The organization external id of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_created_at": {"name": "requester_organization_created_at", "description": "The date the ticket requesters organization was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_updated_at": {"name": "requester_organization_updated_at", "description": "The date the ticket requesters organization was last updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_external_id": {"name": "submitter_external_id", "description": "The ticket submitter external id", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_agent_submitted": {"name": "is_agent_submitted", "description": "Boolean indicating if the submitter has an agent role", "meta": {}, "data_type": null, "constraints": [], "quote": 
null, "tags": []}, "is_submitter_active": {"name": "is_submitter_active", "description": "Boolean indicating if the ticket submitter is active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_locale": {"name": "submitter_locale", "description": "The locale of the ticket submitter", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_time_zone": {"name": "submitter_time_zone", "description": "The time zone of the ticket submitter", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_external_id": {"name": "assignee_external_id", "description": "The external id of the ticket assignee", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_assignee_active": {"name": "is_assignee_active", "description": "Boolean indicating if the ticket assignee is active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_locale": {"name": "assignee_locale", "description": "The locale of the ticket assignee", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_time_zone": {"name": "assignee_time_zone", "description": "The time zone of the ticket assignee", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_ticket_update_count": {"name": "assignee_ticket_update_count", "description": "The number of times the ticket assignee has updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_ticket_last_update_at": {"name": "assignee_ticket_last_update_at", "description": "The last date the ticket assignee updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_last_login_at": {"name": "assignee_last_login_at", "description": "The date the ticket assignee last logged in", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_tag": {"name": "requester_tag", "description": "The tags associated with the ticket requester.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_tag": {"name": "submitter_tag", "description": "The tags associated with the ticket submitter.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_tag": {"name": "assignee_tag", "description": "The tags associated with the ticket assignee.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_total_satisfaction_scores": {"name": "ticket_total_satisfaction_scores", "description": "The total number of satisfaction scores the ticket received.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_first_satisfaction_score": {"name": "ticket_first_satisfaction_score", "description": "The first satisfaction score the ticket received.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_score": {"name": "ticket_satisfaction_score", "description": "The latest satisfaction score the ticket received.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_good_to_bad_satisfaction_score": {"name": "is_good_to_bad_satisfaction_score", "description": "Boolean indicating if the ticket had a satisfaction score went from good to bad.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_bad_to_good_satisfaction_score": {"name": 
"is_bad_to_good_satisfaction_score", "description": "Boolean indicating if the ticket had a satisfaction score went from bad to good.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_agent_comments": {"name": "count_agent_comments", "description": "Count of agent comments within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_public_agent_comments": {"name": "count_public_agent_comments", "description": "Count of public agent comments within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_end_user_comments": {"name": "count_end_user_comments", "description": "Count of end user comments within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_internal_comments": {"name": "count_internal_comments", "description": "Count of internal comments within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_public_comments": {"name": "count_public_comments", "description": "Count of public comments within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "total_comments": {"name": "total_comments", "description": "Total count of all comments within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_ticket_handoffs": {"name": "count_ticket_handoffs", "description": "", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": [], "dev_snowflake": "Count of distinct internal users who have touched/commented on the ticket."}, "unique_assignee_count": {"name": "unique_assignee_count", "description": "The count of unique assignees that were assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_stations_count": {"name": "assignee_stations_count", "description": "The total number of assignees that were assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_stations_count": {"name": "group_stations_count", "description": "The total count of group stations within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_assignee_id": {"name": "first_assignee_id", "description": "Assignee id of the first agent assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_assignee_id": {"name": "last_assignee_id", "description": "Assignee id of the last agent assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_agent_assignment_date": {"name": "first_agent_assignment_date", "description": "The date the first agent was assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_agent_assignment_date": {"name": "last_agent_assignment_date", "description": "The date the last agent was assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_assignment_to_resolution_calendar_minutes": {"name": "first_assignment_to_resolution_calendar_minutes", "description": "The time in calendar minutes between the first assignment and resolution of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_assignment_to_resolution_calendar_minutes": {"name": "last_assignment_to_resolution_calendar_minutes", 
"description": "The time in calendar minutes between the last assignment and resolution of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_resolutions": {"name": "count_resolutions", "description": "The count of ticket resolutions", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_reopens": {"name": "count_reopens", "description": "The count of ticket reopen events", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "new_status_duration_in_calendar_minutes": {"name": "new_status_duration_in_calendar_minutes", "description": "The duration in calendar minutes the ticket was in the \"new\" status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "open_status_duration_in_calendar_minutes": {"name": "open_status_duration_in_calendar_minutes", "description": "The duration in calendar minutes the ticket was in the \"open\" status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "total_agent_replies": {"name": "total_agent_replies", "description": "The total number of agent replies within the ticket, excluding comments where an agent created the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_last_login_age_minutes": {"name": "requester_last_login_age_minutes", "description": "The time in minutes since the ticket requester was last logged in", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_last_login_age_minutes": {"name": "assignee_last_login_age_minutes", "description": "The time in minutes since the ticket assignee was last logged in", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "unsolved_ticket_age_minutes": {"name": "unsolved_ticket_age_minutes", "description": "The time in minutes the ticket has been in an unsolved state", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "unsolved_ticket_age_since_update_minutes": {"name": "unsolved_ticket_age_since_update_minutes", "description": "The time in minutes the ticket has been unsolved since the last update", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_two_touch_resolution": {"name": "is_two_touch_resolution", "description": "Boolean indicating if the ticket was resolved in two public comments", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_multi_touch_resolution": {"name": "is_multi_touch_resolution", "description": "Boolean indicating if the ticket was resolved in two or more public comments", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_last_comment_date": {"name": "ticket_last_comment_date", "description": "The time the last comment was applied to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_unassigned_duration_calendar_minutes": {"name": "ticket_unassigned_duration_calendar_minutes", "description": "The time in minutes the ticket was in an unassigned state", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_status_assignment_date": {"name": "last_status_assignment_date", "description": "The time the status was last changed on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": 
"zendesk://models/zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1715700424.5714822, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`zendesk__ticket_metrics`", "raw_code": "with ticket_enriched as (\n\n select *\n from {{ ref('zendesk__ticket_enriched') }}\n\n), ticket_resolution_times_calendar as (\n\n select *\n from {{ ref('int_zendesk__ticket_resolution_times_calendar') }}\n\n), ticket_reply_times_calendar as (\n\n select *\n from {{ ref('int_zendesk__ticket_reply_times_calendar') }}\n\n), ticket_comments as (\n\n select *\n from {{ ref('int_zendesk__comment_metrics') }}\n\n), ticket_work_time_calendar as (\n\n select *\n from {{ ref('int_zendesk__ticket_work_time_calendar') }}\n\n-- business hour CTEs\n{% if var('using_schedules', True) %}\n\n), ticket_first_resolution_time_business as (\n\n select *\n from {{ ref('int_zendesk__ticket_first_resolution_time_business') }}\n\n), ticket_full_resolution_time_business as (\n\n select *\n from {{ ref('int_zendesk__ticket_full_resolution_time_business') }}\n\n), ticket_work_time_business as (\n\n select *\n from {{ ref('int_zendesk__ticket_work_time_business') }}\n\n), ticket_first_reply_time_business as (\n\n select *\n from {{ ref('int_zendesk__ticket_first_reply_time_business') }}\n\n{% endif %}\n-- end business hour CTEs\n\n), calendar_hour_metrics as (\n\nselect\n ticket_enriched.*,\n case when coalesce(ticket_comments.count_public_agent_comments, 0) = 0\n then null\n else ticket_reply_times_calendar.first_reply_time_calendar_minutes\n end as first_reply_time_calendar_minutes,\n case when coalesce(ticket_comments.count_public_agent_comments, 0) = 0\n then null\n else ticket_reply_times_calendar.total_reply_time_calendar_minutes\n end as total_reply_time_calendar_minutes,\n coalesce(ticket_comments.count_agent_comments, 0) as count_agent_comments,\n coalesce(ticket_comments.count_public_agent_comments, 0) as count_public_agent_comments,\n coalesce(ticket_comments.count_end_user_comments, 0) as count_end_user_comments,\n coalesce(ticket_comments.count_public_comments, 0) as count_public_comments,\n coalesce(ticket_comments.count_internal_comments, 0) as count_internal_comments,\n coalesce(ticket_comments.total_comments, 0) as total_comments,\n coalesce(ticket_comments.count_ticket_handoffs, 0) as count_ticket_handoffs, -- the number of distinct internal users who commented on the ticket\n ticket_comments.last_comment_added_at as ticket_last_comment_date,\n ticket_resolution_times_calendar.unique_assignee_count,\n ticket_resolution_times_calendar.assignee_stations_count,\n ticket_resolution_times_calendar.group_stations_count,\n ticket_resolution_times_calendar.first_assignee_id,\n ticket_resolution_times_calendar.last_assignee_id,\n ticket_resolution_times_calendar.first_agent_assignment_date,\n ticket_resolution_times_calendar.last_agent_assignment_date,\n ticket_resolution_times_calendar.first_solved_at,\n ticket_resolution_times_calendar.last_solved_at,\n case when ticket_enriched.status in ('solved', 'closed')\n then ticket_resolution_times_calendar.first_assignment_to_resolution_calendar_minutes\n else null\n end as first_assignment_to_resolution_calendar_minutes,\n case when ticket_enriched.status in ('solved', 'closed')\n then ticket_resolution_times_calendar.last_assignment_to_resolution_calendar_minutes\n else null\n end as last_assignment_to_resolution_calendar_minutes,\n 
ticket_resolution_times_calendar.ticket_unassigned_duration_calendar_minutes,\n ticket_resolution_times_calendar.first_resolution_calendar_minutes,\n ticket_resolution_times_calendar.final_resolution_calendar_minutes,\n ticket_resolution_times_calendar.total_resolutions as count_resolutions,\n ticket_resolution_times_calendar.count_reopens,\n ticket_work_time_calendar.ticket_deleted_count,\n ticket_work_time_calendar.total_ticket_recoveries,\n ticket_work_time_calendar.last_status_assignment_date,\n ticket_work_time_calendar.new_status_duration_in_calendar_minutes,\n ticket_work_time_calendar.open_status_duration_in_calendar_minutes,\n ticket_work_time_calendar.agent_wait_time_in_calendar_minutes,\n ticket_work_time_calendar.requester_wait_time_in_calendar_minutes,\n ticket_work_time_calendar.solve_time_in_calendar_minutes,\n ticket_work_time_calendar.agent_work_time_in_calendar_minutes,\n ticket_work_time_calendar.on_hold_time_in_calendar_minutes,\n coalesce(ticket_comments.count_agent_replies, 0) as total_agent_replies,\n \n case when ticket_enriched.is_requester_active = true and ticket_enriched.requester_last_login_at is not null\n then ({{ dbt.datediff(\"ticket_enriched.requester_last_login_at\", dbt.current_timestamp_backcompat(), 'second') }} /60)\n end as requester_last_login_age_minutes,\n case when ticket_enriched.is_assignee_active = true and ticket_enriched.assignee_last_login_at is not null\n then ({{ dbt.datediff(\"ticket_enriched.assignee_last_login_at\", dbt.current_timestamp_backcompat(), 'second') }} /60)\n end as assignee_last_login_age_minutes,\n case when lower(ticket_enriched.status) not in ('solved','closed')\n then ({{ dbt.datediff(\"ticket_enriched.created_at\", dbt.current_timestamp_backcompat(), 'second') }} /60)\n end as unsolved_ticket_age_minutes,\n case when lower(ticket_enriched.status) not in ('solved','closed')\n then ({{ dbt.datediff(\"ticket_enriched.updated_at\", dbt.current_timestamp_backcompat(), 'second') }} /60)\n end as unsolved_ticket_age_since_update_minutes,\n case when lower(ticket_enriched.status) in ('solved','closed') and ticket_comments.is_one_touch_resolution \n then true\n else false\n end as is_one_touch_resolution,\n case when lower(ticket_enriched.status) in ('solved','closed') and ticket_comments.is_two_touch_resolution \n then true\n else false \n end as is_two_touch_resolution,\n case when lower(ticket_enriched.status) in ('solved','closed') and not ticket_comments.is_one_touch_resolution\n and not ticket_comments.is_two_touch_resolution \n then true\n else false \n end as is_multi_touch_resolution\n\n\nfrom ticket_enriched\n\nleft join ticket_reply_times_calendar\n using (ticket_id)\n\nleft join ticket_resolution_times_calendar\n using (ticket_id)\n\nleft join ticket_work_time_calendar\n using (ticket_id)\n\nleft join ticket_comments\n using(ticket_id)\n\n{% if var('using_schedules', True) %}\n\n), business_hour_metrics as (\n\n select \n ticket_enriched.ticket_id,\n ticket_first_resolution_time_business.first_resolution_business_minutes,\n ticket_full_resolution_time_business.full_resolution_business_minutes,\n ticket_first_reply_time_business.first_reply_time_business_minutes,\n ticket_work_time_business.agent_wait_time_in_business_minutes,\n ticket_work_time_business.requester_wait_time_in_business_minutes,\n ticket_work_time_business.solve_time_in_business_minutes,\n ticket_work_time_business.agent_work_time_in_business_minutes,\n ticket_work_time_business.on_hold_time_in_business_minutes,\n 
ticket_work_time_business.new_status_duration_in_business_minutes,\n ticket_work_time_business.open_status_duration_in_business_minutes\n\n from ticket_enriched\n\n left join ticket_first_resolution_time_business\n using (ticket_id)\n\n left join ticket_full_resolution_time_business\n using (ticket_id)\n \n left join ticket_first_reply_time_business\n using (ticket_id) \n \n left join ticket_work_time_business\n using (ticket_id)\n\n)\n\nselect\n calendar_hour_metrics.*,\n case when calendar_hour_metrics.status in ('solved', 'closed')\n then coalesce(business_hour_metrics.first_resolution_business_minutes,0)\n else null\n end as first_resolution_business_minutes,\n case when calendar_hour_metrics.status in ('solved', 'closed')\n then coalesce(business_hour_metrics.full_resolution_business_minutes,0)\n else null\n end as full_resolution_business_minutes,\n case when coalesce(calendar_hour_metrics.count_public_agent_comments, 0) = 0\n then null\n else coalesce(business_hour_metrics.first_reply_time_business_minutes,0)\n end as first_reply_time_business_minutes,\n coalesce(business_hour_metrics.agent_wait_time_in_business_minutes,0) as agent_wait_time_in_business_minutes,\n coalesce(business_hour_metrics.requester_wait_time_in_business_minutes,0) as requester_wait_time_in_business_minutes,\n coalesce(business_hour_metrics.solve_time_in_business_minutes,0) as solve_time_in_business_minutes,\n coalesce(business_hour_metrics.agent_work_time_in_business_minutes,0) as agent_work_time_in_business_minutes,\n coalesce(business_hour_metrics.on_hold_time_in_business_minutes,0) as on_hold_time_in_business_minutes,\n coalesce(business_hour_metrics.new_status_duration_in_business_minutes,0) as new_status_duration_in_business_minutes,\n coalesce(business_hour_metrics.open_status_duration_in_business_minutes,0) as open_status_duration_in_business_minutes\n\nfrom calendar_hour_metrics\n\nleft join business_hour_metrics \n using (ticket_id)\n\n{% else %}\n\n) \n\nselect *\nfrom calendar_hour_metrics\n\n{% endif %}", "language": "sql", "refs": [{"name": "zendesk__ticket_enriched", "package": null, "version": null}, {"name": "int_zendesk__ticket_resolution_times_calendar", "package": null, "version": null}, {"name": "int_zendesk__ticket_reply_times_calendar", "package": null, "version": null}, {"name": "int_zendesk__comment_metrics", "package": null, "version": null}, {"name": "int_zendesk__ticket_work_time_calendar", "package": null, "version": null}, {"name": "int_zendesk__ticket_first_resolution_time_business", "package": null, "version": null}, {"name": "int_zendesk__ticket_full_resolution_time_business", "package": null, "version": null}, {"name": "int_zendesk__ticket_work_time_business", "package": null, "version": null}, {"name": "int_zendesk__ticket_first_reply_time_business", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.dbt.datediff"], "nodes": ["model.zendesk.zendesk__ticket_enriched", "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_reply_times_calendar", "model.zendesk.int_zendesk__comment_metrics", "model.zendesk.int_zendesk__ticket_work_time_calendar", "model.zendesk.int_zendesk__ticket_first_resolution_time_business", "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "model.zendesk.int_zendesk__ticket_work_time_business", "model.zendesk.int_zendesk__ticket_first_reply_time_business"]}, "compiled_path": 
"target/compiled/zendesk/models/zendesk__ticket_metrics.sql", "compiled": true, "compiled_code": "with __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_status`\n where status = 'solved'\n\n), ticket as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket`\n\n), ticket_historical_assignee as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_assignee`\n\n), ticket_historical_group as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_group`\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n\n datetime_diff(\n cast(solved_times.last_solved_at as datetime),\n cast(ticket_historical_assignee.first_agent_assignment_date as datetime),\n minute\n )\n\n as first_assignment_to_resolution_calendar_minutes,\n \n\n datetime_diff(\n cast(solved_times.last_solved_at as datetime),\n cast(ticket_historical_assignee.last_agent_assignment_date as datetime),\n minute\n )\n\n as last_assignment_to_resolution_calendar_minutes,\n \n\n datetime_diff(\n cast(solved_times.first_solved_at as datetime),\n cast(ticket.created_at as datetime),\n minute\n )\n\n as first_resolution_calendar_minutes,\n \n\n datetime_diff(\n cast(solved_times.last_solved_at as datetime),\n cast(ticket.created_at as datetime),\n minute\n )\n\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n), __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__updates`\n where field_name = 'comment'\n\n), users as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user`\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The 
below union explicitly identifies the previous commenter roles of public and non-public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n), __dbt__cte__int_zendesk__ticket_reply_times as (\nwith ticket_public_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commenter was internal, a first comment, and had previous non-public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want the end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n (\n\n datetime_diff(\n cast(agent_responded_at as datetime),\n cast(end_user_comment_created_at as datetime),\n second\n )\n\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2\n), __dbt__cte__int_zendesk__ticket_reply_times_calendar as (\nwith ticket as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket`\n\n), ticket_reply_times as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times\n\n)\n\nselect\n\n ticket.ticket_id,\n sum(case when is_first_comment then reply_time_calendar_minutes\n else null end) as first_reply_time_calendar_minutes,\n sum(reply_time_calendar_minutes) as total_reply_time_calendar_minutes --total combined time the customer waits for internal response\n \nfrom ticket\nleft join ticket_reply_times\n using (ticket_id)\n\ngroup by 1\n), 
__dbt__cte__int_zendesk__ticket_work_time_calendar as (\nwith ticket_historical_status as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_status`\n\n), calendar_minutes as (\n \n select \n ticket_id,\n status,\n case when status in ('pending') then status_duration_calendar_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold') then status_duration_calendar_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold', 'pending') then status_duration_calendar_minutes \n else 0 end as solve_time_in_minutes, \n case when status in ('new', 'open') then status_duration_calendar_minutes\n else 0 end as agent_work_time_in_minutes,\n case when status in ('hold') then status_duration_calendar_minutes\n else 0 end as on_hold_time_in_minutes,\n case when status = 'new' then status_duration_calendar_minutes\n else 0 end as new_status_duration_minutes,\n case when status = 'open' then status_duration_calendar_minutes\n else 0 end as open_status_duration_minutes,\n case when status = 'deleted' then 1\n else 0 end as ticket_deleted,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_status_assignment_date,\n case when lag(status) over (partition by ticket_id order by valid_starting_at) = 'deleted' and status != 'deleted'\n then 1\n else 0\n end as ticket_recoveries\n\n from ticket_historical_status\n\n)\n\nselect \n ticket_id,\n last_status_assignment_date,\n sum(ticket_deleted) as ticket_deleted_count,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_calendar_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_calendar_minutes,\n sum(solve_time_in_minutes) as solve_time_in_calendar_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_calendar_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_calendar_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_calendar_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_calendar_minutes,\n sum(ticket_recoveries) as total_ticket_recoveries\nfrom calendar_minutes\ngroup by 1, 2\n), __dbt__cte__int_zendesk__ticket_first_resolution_time_business as (\n\n\nwith ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_schedules`\n\n), schedule as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__schedule_spine`\n\n), ticket_first_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in to determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.first_solved_at) as first_solved_at,\n\n (\n\n datetime_diff(\n cast(cast(ticket_schedules.schedule_created_at as timestamp) as datetime),\n cast(cast(cast(timestamp_trunc(\n cast(ticket_schedules.schedule_created_at as timestamp),\n week\n ) as date)as timestamp) as datetime),\n second\n )\n\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n\n datetime_diff(\n cast(least(ticket_schedules.schedule_invalidated_at, 
min(ticket_resolution_times_calendar.first_solved_at)) as datetime),\n cast(ticket_schedules.schedule_created_at as datetime),\n second\n )\n\n /60\n )) as raw_delta_in_minutes,\n cast(timestamp_trunc(\n cast(ticket_schedules.schedule_created_at as timestamp),\n week\n ) as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_resolution_time.*,\n cast(generated_number - 1 as INT64) as week_number\n\n from ticket_first_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_first_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as INT64) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as INT64) as ticket_week_end_time\n \n from weeks_cross_ticket_first_resolution_time\n\n), intercepted_periods as (\n\n select ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_end_time minute\n )\n\n as timestamp) > cast(schedule.valid_from as timestamp)\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_start_time minute\n )\n\n as timestamp) < cast(schedule.valid_until as timestamp)\n\n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as first_resolution_business_minutes\n from intercepted_periods\n group by 1\n), 
__dbt__cte__int_zendesk__ticket_full_resolution_time_business as (\n\n\nwith ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_schedules`\n\n), schedule as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__schedule_spine`\n\n), ticket_full_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in to determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.last_solved_at) as last_solved_at,\n (\n\n datetime_diff(\n cast(cast(ticket_schedules.schedule_created_at as timestamp) as datetime),\n cast(cast(cast(timestamp_trunc(\n cast(ticket_schedules.schedule_created_at as timestamp),\n week\n ) as date)as timestamp) as datetime),\n second\n )\n\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n\n datetime_diff(\n cast(least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)) as datetime),\n cast(ticket_schedules.schedule_created_at as datetime),\n second\n )\n\n /60\n )) as raw_delta_in_minutes,\n cast(timestamp_trunc(\n cast(ticket_schedules.schedule_created_at as timestamp),\n week\n ) as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_full_resolution_time.*,\n cast(generated_number - 1 as INT64) as week_number\n\n from ticket_full_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_full_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as INT64) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as INT64) as ticket_week_end_time\n \n from weeks_cross_ticket_full_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n 
schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_end_time minute\n )\n\n as timestamp) > cast(schedule.valid_from as timestamp)\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_start_time minute\n )\n\n as timestamp) < cast(schedule.valid_until as timestamp)\n \n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as full_resolution_business_minutes\n from intercepted_periods\n group by 1\n), __dbt__cte__int_zendesk__ticket_work_time_business as (\n\n\nwith ticket_historical_status as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_status`\n\n), ticket_schedules as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_schedules`\n\n), schedule as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__schedule_spine`\n\n), ticket_status_crossed_with_schedule as (\n \n select\n ticket_historical_status.ticket_id,\n ticket_historical_status.status as ticket_status,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as status_schedule_start,\n least(valid_ending_at, schedule_invalidated_at) as status_schedule_end,\n\n -- bringing the following in to determine which schedule (Daylight Savings vs Standard time) to use\n ticket_historical_status.valid_starting_at as status_valid_starting_at,\n ticket_historical_status.valid_ending_at as status_valid_ending_at\n\n from ticket_historical_status\n left join ticket_schedules\n on ticket_historical_status.ticket_id = ticket_schedules.ticket_id\n -- making sure there is indeed real overlap\n where \n\n datetime_diff(\n cast(least(valid_ending_at, schedule_invalidated_at) as datetime),\n cast(greatest(valid_starting_at, schedule_created_at) as datetime),\n second\n )\n\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n ticket_status,\n schedule_id,\n status_schedule_start,\n status_schedule_end,\n status_valid_starting_at,\n status_valid_ending_at,\n (\n\n datetime_diff(\n cast(cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp) as datetime),\n cast(cast(cast(timestamp_trunc(\n cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp),\n week\n ) as date)as timestamp) as datetime),\n second\n )\n\n /60\n ) as start_time_in_minutes_from_week,\n (\n\n datetime_diff(\n cast(ticket_status_crossed_with_schedule.status_schedule_end as datetime),\n 
cast(ticket_status_crossed_with_schedule.status_schedule_start as datetime),\n second\n )\n\n /60\n ) as raw_delta_in_minutes,\n cast(timestamp_trunc(\n cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp),\n week\n ) as date) as start_week_date\n\n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as INT64) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_periods as (\n\n select\n\n weeks_cross_ticket_full_solved_time.*,\n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as INT64) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as INT64) as ticket_week_end_time\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods as (\n \n select \n weekly_periods.ticket_id,\n weekly_periods.week_number,\n weekly_periods.schedule_id,\n weekly_periods.ticket_status,\n weekly_periods.ticket_week_start_time,\n weekly_periods.ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(weekly_periods.ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on \n ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_end_time minute\n )\n\n as timestamp) > cast(schedule.valid_from as timestamp)\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_start_time minute\n )\n\n as timestamp) < cast(schedule.valid_until as timestamp)\n \n), business_minutes as (\n \n select \n ticket_id,\n ticket_status,\n case when 
ticket_status in ('pending') then scheduled_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold') then scheduled_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold', 'pending') then scheduled_minutes\n else 0 end as solve_time_in_minutes,\n case when ticket_status in ('new', 'open') then scheduled_minutes\n else 0 end as agent_work_time_in_minutes,\n case when ticket_status in ('hold') then scheduled_minutes\n else 0 end as on_hold_time_in_minutes,\n case when ticket_status = 'new' then scheduled_minutes\n else 0 end as new_status_duration_minutes,\n case when ticket_status = 'open' then scheduled_minutes\n else 0 end as open_status_duration_minutes\n from intercepted_periods\n\n)\n \n select \n ticket_id,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_business_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_business_minutes,\n sum(solve_time_in_minutes) as solve_time_in_business_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_business_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_business_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_business_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_business_minutes\n from business_minutes\n group by 1\n), __dbt__cte__int_zendesk__ticket_first_reply_time_business as (\n\n\nwith ticket_reply_times as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times\n\n), ticket_schedules as (\n\n select \n *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_schedules`\n\n), schedule as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__schedule_spine`\n\n), first_reply_time as (\n\n select\n ticket_id,\n end_user_comment_created_at,\n agent_responded_at\n\n from ticket_reply_times\n where is_first_comment\n\n), ticket_first_reply_time as (\n\n select \n first_reply_time.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in to determine which schedule (Daylight Savings vs Standard time) to use\n min(first_reply_time.agent_responded_at) as agent_responded_at,\n\n (\n\n datetime_diff(\n cast(cast(ticket_schedules.schedule_created_at as timestamp) as datetime),\n cast(cast(cast(timestamp_trunc(\n cast(ticket_schedules.schedule_created_at as timestamp),\n week\n ) as date)as timestamp) as datetime),\n second\n )\n\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n\n datetime_diff(\n cast(least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)) as datetime),\n cast(ticket_schedules.schedule_created_at as datetime),\n second\n )\n\n /60\n )) as raw_delta_in_minutes,\n cast(timestamp_trunc(\n cast(ticket_schedules.schedule_created_at as timestamp),\n week\n ) as date) as start_week_date\n \n from first_reply_time\n join ticket_schedules on first_reply_time.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n 
p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_reply as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_reply_time.*,\n cast(generated_number - 1 as INT64) as week_number\n\n from ticket_first_reply_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_first_reply.*, \n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as INT64) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as INT64) as ticket_week_end_time\n from weeks_cross_ticket_first_reply\n\n), intercepted_periods as (\n\n select ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_end_time minute\n )\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_start_time minute\n )\n\n as date) < cast(schedule.valid_until as date)\n \n)\n\n select ticket_id,\n sum(scheduled_minutes) as first_reply_time_business_minutes\n from intercepted_periods\n group by 1\n), ticket_enriched as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`zendesk__ticket_enriched`\n\n), ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_reply_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times_calendar\n\n), ticket_comments as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__comment_metrics`\n\n), ticket_work_time_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_work_time_calendar\n\n-- business hour CTEs\n\n\n), ticket_first_resolution_time_business as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_first_resolution_time_business\n\n), ticket_full_resolution_time_business as (\n\n 
select *\n from __dbt__cte__int_zendesk__ticket_full_resolution_time_business\n\n), ticket_work_time_business as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_work_time_business\n\n), ticket_first_reply_time_business as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_first_reply_time_business\n\n\n-- end business hour CTEs\n\n), calendar_hour_metrics as (\n\nselect\n ticket_enriched.*,\n case when coalesce(ticket_comments.count_public_agent_comments, 0) = 0\n then null\n else ticket_reply_times_calendar.first_reply_time_calendar_minutes\n end as first_reply_time_calendar_minutes,\n case when coalesce(ticket_comments.count_public_agent_comments, 0) = 0\n then null\n else ticket_reply_times_calendar.total_reply_time_calendar_minutes\n end as total_reply_time_calendar_minutes,\n coalesce(ticket_comments.count_agent_comments, 0) as count_agent_comments,\n coalesce(ticket_comments.count_public_agent_comments, 0) as count_public_agent_comments,\n coalesce(ticket_comments.count_end_user_comments, 0) as count_end_user_comments,\n coalesce(ticket_comments.count_public_comments, 0) as count_public_comments,\n coalesce(ticket_comments.count_internal_comments, 0) as count_internal_comments,\n coalesce(ticket_comments.total_comments, 0) as total_comments,\n coalesce(ticket_comments.count_ticket_handoffs, 0) as count_ticket_handoffs, -- the number of distinct internal users who commented on the ticket\n ticket_comments.last_comment_added_at as ticket_last_comment_date,\n ticket_resolution_times_calendar.unique_assignee_count,\n ticket_resolution_times_calendar.assignee_stations_count,\n ticket_resolution_times_calendar.group_stations_count,\n ticket_resolution_times_calendar.first_assignee_id,\n ticket_resolution_times_calendar.last_assignee_id,\n ticket_resolution_times_calendar.first_agent_assignment_date,\n ticket_resolution_times_calendar.last_agent_assignment_date,\n ticket_resolution_times_calendar.first_solved_at,\n ticket_resolution_times_calendar.last_solved_at,\n case when ticket_enriched.status in ('solved', 'closed')\n then ticket_resolution_times_calendar.first_assignment_to_resolution_calendar_minutes\n else null\n end as first_assignment_to_resolution_calendar_minutes,\n case when ticket_enriched.status in ('solved', 'closed')\n then ticket_resolution_times_calendar.last_assignment_to_resolution_calendar_minutes\n else null\n end as last_assignment_to_resolution_calendar_minutes,\n ticket_resolution_times_calendar.ticket_unassigned_duration_calendar_minutes,\n ticket_resolution_times_calendar.first_resolution_calendar_minutes,\n ticket_resolution_times_calendar.final_resolution_calendar_minutes,\n ticket_resolution_times_calendar.total_resolutions as count_resolutions,\n ticket_resolution_times_calendar.count_reopens,\n ticket_work_time_calendar.ticket_deleted_count,\n ticket_work_time_calendar.total_ticket_recoveries,\n ticket_work_time_calendar.last_status_assignment_date,\n ticket_work_time_calendar.new_status_duration_in_calendar_minutes,\n ticket_work_time_calendar.open_status_duration_in_calendar_minutes,\n ticket_work_time_calendar.agent_wait_time_in_calendar_minutes,\n ticket_work_time_calendar.requester_wait_time_in_calendar_minutes,\n ticket_work_time_calendar.solve_time_in_calendar_minutes,\n ticket_work_time_calendar.agent_work_time_in_calendar_minutes,\n ticket_work_time_calendar.on_hold_time_in_calendar_minutes,\n coalesce(ticket_comments.count_agent_replies, 0) as total_agent_replies,\n \n case when ticket_enriched.is_requester_active = true and 
ticket_enriched.requester_last_login_at is not null\n then (\n\n datetime_diff(\n cast(current_timestamp as datetime),\n cast(ticket_enriched.requester_last_login_at as datetime),\n second\n )\n\n /60)\n end as requester_last_login_age_minutes,\n case when ticket_enriched.is_assignee_active = true and ticket_enriched.assignee_last_login_at is not null\n then (\n\n datetime_diff(\n cast(current_timestamp as datetime),\n cast(ticket_enriched.assignee_last_login_at as datetime),\n second\n )\n\n /60)\n end as assignee_last_login_age_minutes,\n case when lower(ticket_enriched.status) not in ('solved','closed')\n then (\n\n datetime_diff(\n cast(current_timestamp as datetime),\n cast(ticket_enriched.created_at as datetime),\n second\n )\n\n /60)\n end as unsolved_ticket_age_minutes,\n case when lower(ticket_enriched.status) not in ('solved','closed')\n then (\n\n datetime_diff(\n cast(current_timestamp as datetime),\n cast(ticket_enriched.updated_at as datetime),\n second\n )\n\n /60)\n end as unsolved_ticket_age_since_update_minutes,\n case when lower(ticket_enriched.status) in ('solved','closed') and ticket_comments.is_one_touch_resolution \n then true\n else false\n end as is_one_touch_resolution,\n case when lower(ticket_enriched.status) in ('solved','closed') and ticket_comments.is_two_touch_resolution \n then true\n else false \n end as is_two_touch_resolution,\n case when lower(ticket_enriched.status) in ('solved','closed') and not ticket_comments.is_one_touch_resolution\n and not ticket_comments.is_two_touch_resolution \n then true\n else false \n end as is_multi_touch_resolution\n\n\nfrom ticket_enriched\n\nleft join ticket_reply_times_calendar\n using (ticket_id)\n\nleft join ticket_resolution_times_calendar\n using (ticket_id)\n\nleft join ticket_work_time_calendar\n using (ticket_id)\n\nleft join ticket_comments\n using(ticket_id)\n\n\n\n), business_hour_metrics as (\n\n select \n ticket_enriched.ticket_id,\n ticket_first_resolution_time_business.first_resolution_business_minutes,\n ticket_full_resolution_time_business.full_resolution_business_minutes,\n ticket_first_reply_time_business.first_reply_time_business_minutes,\n ticket_work_time_business.agent_wait_time_in_business_minutes,\n ticket_work_time_business.requester_wait_time_in_business_minutes,\n ticket_work_time_business.solve_time_in_business_minutes,\n ticket_work_time_business.agent_work_time_in_business_minutes,\n ticket_work_time_business.on_hold_time_in_business_minutes,\n ticket_work_time_business.new_status_duration_in_business_minutes,\n ticket_work_time_business.open_status_duration_in_business_minutes\n\n from ticket_enriched\n\n left join ticket_first_resolution_time_business\n using (ticket_id)\n\n left join ticket_full_resolution_time_business\n using (ticket_id)\n \n left join ticket_first_reply_time_business\n using (ticket_id) \n \n left join ticket_work_time_business\n using (ticket_id)\n\n)\n\nselect\n calendar_hour_metrics.*,\n case when calendar_hour_metrics.status in ('solved', 'closed')\n then coalesce(business_hour_metrics.first_resolution_business_minutes,0)\n else null\n end as first_resolution_business_minutes,\n case when calendar_hour_metrics.status in ('solved', 'closed')\n then coalesce(business_hour_metrics.full_resolution_business_minutes,0)\n else null\n end as full_resolution_business_minutes,\n case when coalesce(calendar_hour_metrics.count_public_agent_comments, 0) = 0\n then null\n else coalesce(business_hour_metrics.first_reply_time_business_minutes,0)\n end as 
first_reply_time_business_minutes,\n coalesce(business_hour_metrics.agent_wait_time_in_business_minutes,0) as agent_wait_time_in_business_minutes,\n coalesce(business_hour_metrics.requester_wait_time_in_business_minutes,0) as requester_wait_time_in_business_minutes,\n coalesce(business_hour_metrics.solve_time_in_business_minutes,0) as solve_time_in_business_minutes,\n coalesce(business_hour_metrics.agent_work_time_in_business_minutes,0) as agent_work_time_in_business_minutes,\n coalesce(business_hour_metrics.on_hold_time_in_business_minutes,0) as on_hold_time_in_business_minutes,\n coalesce(business_hour_metrics.new_status_duration_in_business_minutes,0) as new_status_duration_in_business_minutes,\n coalesce(business_hour_metrics.open_status_duration_in_business_minutes,0) as open_status_duration_in_business_minutes\n\nfrom calendar_hour_metrics\n\nleft join business_hour_metrics \n using (ticket_id)\n\n", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "sql": " __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_status`\n where status = 'solved'\n\n), ticket as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket`\n\n), ticket_historical_assignee as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_assignee`\n\n), ticket_historical_group as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_group`\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n\n datetime_diff(\n cast(solved_times.last_solved_at as datetime),\n cast(ticket_historical_assignee.first_agent_assignment_date as datetime),\n minute\n )\n\n as first_assignment_to_resolution_calendar_minutes,\n \n\n datetime_diff(\n cast(solved_times.last_solved_at as datetime),\n cast(ticket_historical_assignee.last_agent_assignment_date as datetime),\n minute\n )\n\n as last_assignment_to_resolution_calendar_minutes,\n \n\n datetime_diff(\n cast(solved_times.first_solved_at as datetime),\n cast(ticket.created_at as datetime),\n minute\n )\n\n as first_resolution_calendar_minutes,\n \n\n datetime_diff(\n cast(solved_times.last_solved_at as datetime),\n cast(ticket.created_at as datetime),\n minute\n )\n\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join 
ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n)"}, {"id": "model.zendesk.int_zendesk__comments_enriched", "sql": " __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__updates`\n where field_name = 'comment'\n\n), users as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user`\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public comments.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n)"}, {"id": "model.zendesk.int_zendesk__ticket_reply_times", "sql": " __dbt__cte__int_zendesk__ticket_reply_times as (\nwith ticket_public_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commenter was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want the end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = 
end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n (\n\n datetime_diff(\n cast(agent_responded_at as datetime),\n cast(end_user_comment_created_at as datetime),\n second\n )\n\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2\n)"}, {"id": "model.zendesk.int_zendesk__ticket_reply_times_calendar", "sql": " __dbt__cte__int_zendesk__ticket_reply_times_calendar as (\nwith ticket as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket`\n\n), ticket_reply_times as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times\n\n)\n\nselect\n\n ticket.ticket_id,\n sum(case when is_first_comment then reply_time_calendar_minutes\n else null end) as first_reply_time_calendar_minutes,\n sum(reply_time_calendar_minutes) as total_reply_time_calendar_minutes --total combined time the customer waits for internal response\n \nfrom ticket\nleft join ticket_reply_times\n using (ticket_id)\n\ngroup by 1\n)"}, {"id": "model.zendesk.int_zendesk__ticket_work_time_calendar", "sql": " __dbt__cte__int_zendesk__ticket_work_time_calendar as (\nwith ticket_historical_status as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_status`\n\n), calendar_minutes as (\n \n select \n ticket_id,\n status,\n case when status in ('pending') then status_duration_calendar_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold') then status_duration_calendar_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold', 'pending') then status_duration_calendar_minutes \n else 0 end as solve_time_in_minutes, \n case when status in ('new', 'open') then status_duration_calendar_minutes\n else 0 end as agent_work_time_in_minutes,\n case when status in ('hold') then status_duration_calendar_minutes\n else 0 end as on_hold_time_in_minutes,\n case when status = 'new' then status_duration_calendar_minutes\n else 0 end as new_status_duration_minutes,\n case when status = 'open' then status_duration_calendar_minutes\n else 0 end as open_status_duration_minutes,\n case when status = 'deleted' then 1\n else 0 end as ticket_deleted,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_status_assignment_date,\n case when lag(status) over (partition by ticket_id order by valid_starting_at) = 'deleted' and status != 'deleted'\n then 1\n else 0\n end as ticket_recoveries\n\n from ticket_historical_status\n\n)\n\nselect \n ticket_id,\n last_status_assignment_date,\n sum(ticket_deleted) as ticket_deleted_count,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_calendar_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_calendar_minutes,\n sum(solve_time_in_minutes) as solve_time_in_calendar_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_calendar_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_calendar_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_calendar_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_calendar_minutes,\n sum(ticket_recoveries) as total_ticket_recoveries\nfrom calendar_minutes\ngroup by 1, 2\n)"}, {"id": 
"model.zendesk.int_zendesk__ticket_first_resolution_time_business", "sql": " __dbt__cte__int_zendesk__ticket_first_resolution_time_business as (\n\n\nwith ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_schedules`\n\n), schedule as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__schedule_spine`\n\n), ticket_first_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.first_solved_at) as first_solved_at,\n\n (\n\n datetime_diff(\n cast(cast(ticket_schedules.schedule_created_at as timestamp) as datetime),\n cast(cast(cast(timestamp_trunc(\n cast(ticket_schedules.schedule_created_at as timestamp),\n week\n ) as date)as timestamp) as datetime),\n second\n )\n\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n\n datetime_diff(\n cast(least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)) as datetime),\n cast(ticket_schedules.schedule_created_at as datetime),\n second\n )\n\n /60\n )) as raw_delta_in_minutes,\n cast(timestamp_trunc(\n cast(ticket_schedules.schedule_created_at as timestamp),\n week\n ) as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_resolution_time.*,\n cast(generated_number - 1 as INT64) as week_number\n\n from ticket_first_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_first_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as INT64) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as INT64) as ticket_week_end_time\n \n from weeks_cross_ticket_first_resolution_time\n\n), intercepted_periods as (\n\n select ticket_id,\n week_number,\n 
weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_end_time minute\n )\n\n as timestamp) > cast(schedule.valid_from as timestamp)\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_start_time minute\n )\n\n as timestamp) < cast(schedule.valid_until as timestamp)\n\n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as first_resolution_business_minutes\n from intercepted_periods\n group by 1\n)"}, {"id": "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "sql": " __dbt__cte__int_zendesk__ticket_full_resolution_time_business as (\n\n\nwith ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_schedules`\n\n), schedule as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__schedule_spine`\n\n), ticket_full_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in to determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.last_solved_at) as last_solved_at,\n (\n\n datetime_diff(\n cast(cast(ticket_schedules.schedule_created_at as timestamp) as datetime),\n cast(cast(cast(timestamp_trunc(\n cast(ticket_schedules.schedule_created_at as timestamp),\n week\n ) as date)as timestamp) as datetime),\n second\n )\n\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n\n datetime_diff(\n cast(least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)) as datetime),\n cast(ticket_schedules.schedule_created_at as datetime),\n second\n )\n\n /60\n )) as raw_delta_in_minutes,\n cast(timestamp_trunc(\n cast(ticket_schedules.schedule_created_at as timestamp),\n week\n ) as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 
6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_full_resolution_time.*,\n cast(generated_number - 1 as INT64) as week_number\n\n from ticket_full_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_full_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as INT64) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as INT64) as ticket_week_end_time\n \n from weeks_cross_ticket_full_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_end_time minute\n )\n\n as timestamp) > cast(schedule.valid_from as timestamp)\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_start_time minute\n )\n\n as timestamp) < cast(schedule.valid_until as timestamp)\n \n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as full_resolution_business_minutes\n from intercepted_periods\n group by 1\n)"}, {"id": "model.zendesk.int_zendesk__ticket_work_time_business", "sql": " __dbt__cte__int_zendesk__ticket_work_time_business as (\n\n\nwith ticket_historical_status as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_status`\n\n), ticket_schedules as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_schedules`\n\n), schedule as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__schedule_spine`\n\n), ticket_status_crossed_with_schedule as (\n \n select\n ticket_historical_status.ticket_id,\n ticket_historical_status.status as ticket_status,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on 
the ticket\n greatest(valid_starting_at, schedule_created_at) as status_schedule_start,\n least(valid_ending_at, schedule_invalidated_at) as status_schedule_end,\n\n -- bringing the following in to determine which schedule (Daylight Savings vs Standard time) to use\n ticket_historical_status.valid_starting_at as status_valid_starting_at,\n ticket_historical_status.valid_ending_at as status_valid_ending_at\n\n from ticket_historical_status\n left join ticket_schedules\n on ticket_historical_status.ticket_id = ticket_schedules.ticket_id\n -- making sure there is indeed real overlap\n where \n\n datetime_diff(\n cast(least(valid_ending_at, schedule_invalidated_at) as datetime),\n cast(greatest(valid_starting_at, schedule_created_at) as datetime),\n second\n )\n\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n ticket_status,\n schedule_id,\n status_schedule_start,\n status_schedule_end,\n status_valid_starting_at,\n status_valid_ending_at,\n (\n\n datetime_diff(\n cast(cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp) as datetime),\n cast(cast(cast(timestamp_trunc(\n cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp),\n week\n ) as date)as timestamp) as datetime),\n second\n )\n\n /60\n ) as start_time_in_minutes_from_week,\n (\n\n datetime_diff(\n cast(ticket_status_crossed_with_schedule.status_schedule_end as datetime),\n cast(ticket_status_crossed_with_schedule.status_schedule_start as datetime),\n second\n )\n\n /60\n ) as raw_delta_in_minutes,\n cast(timestamp_trunc(\n cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp),\n week\n ) as date) as start_week_date\n\n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as INT64) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_periods as (\n\n select\n\n weeks_cross_ticket_full_solved_time.*,\n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as INT64) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as INT64) as ticket_week_end_time\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods as (\n \n select \n 
weekly_periods.ticket_id,\n weekly_periods.week_number,\n weekly_periods.schedule_id,\n weekly_periods.ticket_status,\n weekly_periods.ticket_week_start_time,\n weekly_periods.ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(weekly_periods.ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on \n ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_end_time minute\n )\n\n as timestamp) > cast(schedule.valid_from as timestamp)\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_start_time minute\n )\n\n as timestamp) < cast(schedule.valid_until as timestamp)\n \n), business_minutes as (\n \n select \n ticket_id,\n ticket_status,\n case when ticket_status in ('pending') then scheduled_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold') then scheduled_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold', 'pending') then scheduled_minutes\n else 0 end as solve_time_in_minutes,\n case when ticket_status in ('new', 'open') then scheduled_minutes\n else 0 end as agent_work_time_in_minutes,\n case when ticket_status in ('hold') then scheduled_minutes\n else 0 end as on_hold_time_in_minutes,\n case when ticket_status = 'new' then scheduled_minutes\n else 0 end as new_status_duration_minutes,\n case when ticket_status = 'open' then scheduled_minutes\n else 0 end as open_status_duration_minutes\n from intercepted_periods\n\n)\n \n select \n ticket_id,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_business_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_business_minutes,\n sum(solve_time_in_minutes) as solve_time_in_business_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_business_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_business_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_business_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_business_minutes\n from business_minutes\n group by 1\n)"}, {"id": "model.zendesk.int_zendesk__ticket_first_reply_time_business", "sql": " __dbt__cte__int_zendesk__ticket_first_reply_time_business as (\n\n\nwith ticket_reply_times as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times\n\n), ticket_schedules as (\n\n select \n *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_schedules`\n\n), schedule as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__schedule_spine`\n\n), first_reply_time as (\n\n select\n ticket_id,\n end_user_comment_created_at,\n agent_responded_at\n\n from ticket_reply_times\n where is_first_comment\n\n), ticket_first_reply_time as (\n\n select \n 
first_reply_time.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in to determine which schedule (Daylight Savings vs Standard time) to use\n min(first_reply_time.agent_responded_at) as agent_responded_at,\n\n (\n\n datetime_diff(\n cast(cast(ticket_schedules.schedule_created_at as timestamp) as datetime),\n cast(cast(cast(timestamp_trunc(\n cast(ticket_schedules.schedule_created_at as timestamp),\n week\n ) as date)as timestamp) as datetime),\n second\n )\n\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n\n datetime_diff(\n cast(least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)) as datetime),\n cast(ticket_schedules.schedule_created_at as datetime),\n second\n )\n\n /60\n )) as raw_delta_in_minutes,\n cast(timestamp_trunc(\n cast(ticket_schedules.schedule_created_at as timestamp),\n week\n ) as date) as start_week_date\n \n from first_reply_time\n join ticket_schedules on first_reply_time.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_reply as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_reply_time.*,\n cast(generated_number - 1 as INT64) as week_number\n\n from ticket_first_reply_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_first_reply.*, \n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as INT64) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as INT64) as ticket_week_end_time\n from weeks_cross_ticket_first_reply\n\n), intercepted_periods as (\n\n select ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything 
calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_end_time minute\n )\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_start_time minute\n )\n\n as date) < cast(schedule.valid_until as date)\n \n)\n\n select ticket_id,\n sum(scheduled_minutes) as first_reply_time_business_minutes\n from intercepted_periods\n group by 1\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.zendesk__ticket_summary": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "zendesk__ticket_summary", "resource_type": "model", "package_name": "zendesk", "path": "zendesk__ticket_summary.sql", "original_file_path": "models/zendesk__ticket_summary.sql", "unique_id": "model.zendesk.zendesk__ticket_summary", "fqn": ["zendesk", "zendesk__ticket_summary"], "alias": "zendesk__ticket_summary", "checksum": {"name": "sha256", "checksum": "085f6c784b70f6ca6f38a8f3d4defb1debb06049d0bb6fe1b778ad7638d08f2e"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "A single record table containing Zendesk ticket and user summary metrics. 
These metrics are updated for the current day the model is run.", "columns": {"user_count": {"name": "user_count", "description": "Total count of users created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "active_agent_count": {"name": "active_agent_count", "description": "Total count of agents", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "deleted_user_count": {"name": "deleted_user_count", "description": "Total deleted user count", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "end_user_count": {"name": "end_user_count", "description": "Total end user count", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "suspended_user_count": {"name": "suspended_user_count", "description": "Total count of users in a suspended state", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "new_ticket_count": {"name": "new_ticket_count", "description": "Total count of tickets in the \"new\" status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "on_hold_ticket_count": {"name": "on_hold_ticket_count", "description": "Total count of tickets in the \"hold\" status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "open_ticket_count": {"name": "open_ticket_count", "description": "Total count of tickets in the \"open\" status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "pending_ticket_count": {"name": "pending_ticket_count", "description": "Total count of tickets in the \"pending\" status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "solved_ticket_count": {"name": "solved_ticket_count", "description": "Total count of solved tickets", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "problem_ticket_count": {"name": "problem_ticket_count", "description": "Total count of tickets labeled as problems", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "reassigned_ticket_count": {"name": "reassigned_ticket_count", "description": "Total count of tickets that have been reassigned", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "reopened_ticket_count": {"name": "reopened_ticket_count", "description": "Total count of tickets that have been reopened", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "surveyed_satisfaction_ticket_count": {"name": "surveyed_satisfaction_ticket_count", "description": "Total count of tickets that have been surveyed for a satisfaction response", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "unassigned_unsolved_ticket_count": {"name": "unassigned_unsolved_ticket_count", "description": "Total count of tickets that are unassigned and unsolved", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "unreplied_ticket_count": {"name": "unreplied_ticket_count", "description": "Total count of tickets that have not had a reply", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "unreplied_unsolved_ticket_count": {"name": "unreplied_unsolved_ticket_count", "description": "Total count of tickets that have not had a reply and are unsolved", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "unsolved_ticket_count": {"name": "unsolved_ticket_count", "description": "Total count of unsolved 
tickets", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assigned_ticket_count": {"name": "assigned_ticket_count", "description": "Total count of assigned tickets", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "deleted_ticket_count": {"name": "deleted_ticket_count", "description": "Total count of deleted tickets", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "recovered_ticket_count": {"name": "recovered_ticket_count", "description": "Total count of tickets that were deleted then reopened", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1715700424.574073, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`zendesk__ticket_summary`", "raw_code": "with ticket_metrics as (\n select *\n from {{ ref('zendesk__ticket_metrics') }}\n\n), user_table as (\n select *\n from {{ ref('stg_zendesk__user') }}\n\n), user_sum as (\n select\n cast(1 as {{ dbt.type_int() }}) as summary_helper,\n sum(case when is_active = true\n then 1\n else 0\n end) as user_count,\n sum(case when lower(role) != 'end-user' and is_active = true\n then 1\n else 0\n end) as active_agent_count,\n sum(case when is_active = false\n then 1\n else 0\n end) as deleted_user_count,\n sum(case when lower(role) = 'end-user' and is_active = true\n then 1\n else 0\n end) as end_user_count,\n sum(case when is_suspended = true\n then 1\n else 0\n end) as suspended_user_count\n from user_table\n\n group by 1\n\n), ticket_metric_sum as (\n select \n cast(1 as {{ dbt.type_int() }}) as summary_helper,\n sum(case when lower(status) = 'new'\n then 1\n else 0\n end) as new_ticket_count,\n sum(case when lower(status) = 'hold'\n then 1\n else 0\n end) as on_hold_ticket_count,\n sum(case when lower(status) = 'open'\n then 1\n else 0\n end) as open_ticket_count,\n sum(case when lower(status) = 'pending'\n then 1\n else 0\n end) as pending_ticket_count,\n sum(case when lower(type) = 'problem'\n then 1\n else 0\n end) as problem_ticket_count,\n sum(case when first_assignee_id != last_assignee_id\n then 1\n else 0\n end) as reassigned_ticket_count,\n sum(case when count_reopens > 0\n then 1\n else 0\n end) as reopened_ticket_count,\n\n sum(case when lower(ticket_satisfaction_score) in ('offered', 'good', 'bad')\n then 1\n else 0\n end) as surveyed_satisfaction_ticket_count,\n\n sum(case when assignee_id is null and lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unassigned_unsolved_ticket_count,\n sum(case when total_agent_replies < 0\n then 1\n else 0\n end) as unreplied_ticket_count,\n sum(case when total_agent_replies < 0 and lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unreplied_unsolved_ticket_count,\n sum(case when lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unsolved_ticket_count,\n sum(case when lower(status) in ('solved', 'closed')\n then 1\n else 0\n end) as solved_ticket_count,\n sum(case when lower(status) in ('deleted')\n then 1\n else 0\n end) as deleted_ticket_count,\n sum(case when total_ticket_recoveries > 0\n then 1\n else 0\n end) as recovered_ticket_count,\n sum(case when assignee_stations_count > 0\n then 1\n else 0\n end) as 
assigned_ticket_count,\n count(count_internal_comments) as total_internal_comments,\n count(count_public_comments) as total_public_comments,\n count(total_comments)\n from ticket_metrics\n \n group by 1\n\n\n), final as (\n select\n user_sum.user_count,\n user_sum.active_agent_count,\n user_sum.deleted_user_count,\n user_sum.end_user_count,\n user_sum.suspended_user_count,\n ticket_metric_sum.new_ticket_count,\n ticket_metric_sum.on_hold_ticket_count,\n ticket_metric_sum.open_ticket_count,\n ticket_metric_sum.pending_ticket_count,\n ticket_metric_sum.solved_ticket_count,\n ticket_metric_sum.problem_ticket_count,\n ticket_metric_sum.assigned_ticket_count,\n ticket_metric_sum.reassigned_ticket_count,\n ticket_metric_sum.reopened_ticket_count,\n ticket_metric_sum.surveyed_satisfaction_ticket_count,\n ticket_metric_sum.unassigned_unsolved_ticket_count,\n ticket_metric_sum.unreplied_ticket_count,\n ticket_metric_sum.unreplied_unsolved_ticket_count,\n ticket_metric_sum.unsolved_ticket_count,\n ticket_metric_sum.recovered_ticket_count,\n ticket_metric_sum.deleted_ticket_count\n from user_sum\n\n left join ticket_metric_sum\n using(summary_helper)\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "zendesk__ticket_metrics", "package": null, "version": null}, {"name": "stg_zendesk__user", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_int"], "nodes": ["model.zendesk.zendesk__ticket_metrics", "model.zendesk_source.stg_zendesk__user"]}, "compiled_path": "target/compiled/zendesk/models/zendesk__ticket_summary.sql", "compiled": true, "compiled_code": "with ticket_metrics as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`zendesk__ticket_metrics`\n\n), user_table as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user`\n\n), user_sum as (\n select\n cast(1 as INT64) as summary_helper,\n sum(case when is_active = true\n then 1\n else 0\n end) as user_count,\n sum(case when lower(role) != 'end-user' and is_active = true\n then 1\n else 0\n end) as active_agent_count,\n sum(case when is_active = false\n then 1\n else 0\n end) as deleted_user_count,\n sum(case when lower(role) = 'end-user' and is_active = true\n then 1\n else 0\n end) as end_user_count,\n sum(case when is_suspended = true\n then 1\n else 0\n end) as suspended_user_count\n from user_table\n\n group by 1\n\n), ticket_metric_sum as (\n select \n cast(1 as INT64) as summary_helper,\n sum(case when lower(status) = 'new'\n then 1\n else 0\n end) as new_ticket_count,\n sum(case when lower(status) = 'hold'\n then 1\n else 0\n end) as on_hold_ticket_count,\n sum(case when lower(status) = 'open'\n then 1\n else 0\n end) as open_ticket_count,\n sum(case when lower(status) = 'pending'\n then 1\n else 0\n end) as pending_ticket_count,\n sum(case when lower(type) = 'problem'\n then 1\n else 0\n end) as problem_ticket_count,\n sum(case when first_assignee_id != last_assignee_id\n then 1\n else 0\n end) as reassigned_ticket_count,\n sum(case when count_reopens > 0\n then 1\n else 0\n end) as reopened_ticket_count,\n\n sum(case when lower(ticket_satisfaction_score) in ('offered', 'good', 'bad')\n then 1\n else 0\n end) as surveyed_satisfaction_ticket_count,\n\n sum(case when assignee_id is null and lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unassigned_unsolved_ticket_count,\n sum(case when total_agent_replies < 0\n then 1\n else 0\n end) as unreplied_ticket_count,\n 
sum(case when total_agent_replies < 0 and lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unreplied_unsolved_ticket_count,\n sum(case when lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unsolved_ticket_count,\n sum(case when lower(status) in ('solved', 'closed')\n then 1\n else 0\n end) as solved_ticket_count,\n sum(case when lower(status) in ('deleted')\n then 1\n else 0\n end) as deleted_ticket_count,\n sum(case when total_ticket_recoveries > 0\n then 1\n else 0\n end) as recovered_ticket_count,\n sum(case when assignee_stations_count > 0\n then 1\n else 0\n end) as assigned_ticket_count,\n count(count_internal_comments) as total_internal_comments,\n count(count_public_comments) as total_public_comments,\n count(total_comments)\n from ticket_metrics\n \n group by 1\n\n\n), final as (\n select\n user_sum.user_count,\n user_sum.active_agent_count,\n user_sum.deleted_user_count,\n user_sum.end_user_count,\n user_sum.suspended_user_count,\n ticket_metric_sum.new_ticket_count,\n ticket_metric_sum.on_hold_ticket_count,\n ticket_metric_sum.open_ticket_count,\n ticket_metric_sum.pending_ticket_count,\n ticket_metric_sum.solved_ticket_count,\n ticket_metric_sum.problem_ticket_count,\n ticket_metric_sum.assigned_ticket_count,\n ticket_metric_sum.reassigned_ticket_count,\n ticket_metric_sum.reopened_ticket_count,\n ticket_metric_sum.surveyed_satisfaction_ticket_count,\n ticket_metric_sum.unassigned_unsolved_ticket_count,\n ticket_metric_sum.unreplied_ticket_count,\n ticket_metric_sum.unreplied_unsolved_ticket_count,\n ticket_metric_sum.unsolved_ticket_count,\n ticket_metric_sum.recovered_ticket_count,\n ticket_metric_sum.deleted_ticket_count\n from user_sum\n\n left join ticket_metric_sum\n using(summary_helper)\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.zendesk__ticket_field_history": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "zendesk__ticket_field_history", "resource_type": "model", "package_name": "zendesk", "path": "zendesk__ticket_field_history.sql", "original_file_path": "models/zendesk__ticket_field_history.sql", "unique_id": "model.zendesk.zendesk__ticket_field_history", "fqn": ["zendesk", "zendesk__ticket_field_history"], "alias": "zendesk__ticket_field_history", "checksum": {"name": "sha256", "checksum": "e93a7aed2b5f41d1b7dd36c3cb924618bf985358d05296524e92896d6c333bbc"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "incremental", "incremental_strategy": "merge", "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "ticket_day_id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected", "partition_by": {"field": "date_day", "data_type": "date"}, "file_format": "delta"}, "tags": [], "description": "A daily historical view of the ticket field values defined in the `ticket_field_history_columns` variable and the corresponding updater fields defined in the `ticket_field_history_updater_columns` variable.\n", "columns": {"date_day": {"name": 
"date_day", "description": "The date of the day associated with the field values.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_id": {"name": "ticket_id", "description": "A ticket's unique identifier, it is automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_day_id": {"name": "ticket_day_id", "description": "The unique key of the table, a surrogate key of date_day and ticket_id.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_id": {"name": "assignee_id", "description": "The assignee id assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "The status of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "priority": {"name": "priority", "description": "The tickets priority ranking", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "incremental", "partition_by": {"field": "date_day", "data_type": "date"}, "unique_key": "ticket_day_id", "incremental_strategy": "merge", "file_format": "delta"}, "created_at": 1715700424.5649998, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`zendesk__ticket_field_history`", "raw_code": "{{ \n config(\n materialized='incremental',\n partition_by = {'field': 'date_day', 'data_type': 'date'} if target.type not in ['spark', 'databricks'] else ['date_day'],\n unique_key='ticket_day_id',\n incremental_strategy = 'merge' if target.type not in ('snowflake', 'postgres', 'redshift') else 'delete+insert',\n file_format='delta'\n ) \n}}\n\n{%- set change_data_columns = adapter.get_columns_in_relation(ref('int_zendesk__field_history_scd')) -%}\n\nwith change_data as (\n\n select *\n from {{ ref('int_zendesk__field_history_scd') }}\n \n {% if is_incremental() %}\n where valid_from >= (select max(date_day) from {{ this }})\n\n-- If no issue fields have been updated since the last incremental run, the pivoted_daily_history CTE will return no record/rows.\n-- When this is the case, we need to grab the most recent day's records from the previously built table so that we can persist \n-- those values into the future.\n\n), most_recent_data as ( \n\n select \n *\n from {{ this }}\n where date_day = (select max(date_day) from {{ this }} )\n\n{% endif %}\n\n), calendar as (\n\n select *\n from {{ ref('int_zendesk__field_calendar_spine') }}\n where date_day <= current_date\n {% if is_incremental() %}\n and date_day >= (select max(date_day) from {{ this }})\n {% endif %}\n\n), joined as (\n\n select \n calendar.date_day,\n calendar.ticket_id\n {% if is_incremental() %} \n {% for col in change_data_columns if col.name|lower not in ['ticket_id','valid_from','valid_to','ticket_day_id'] %} \n , coalesce(change_data.{{ col.name }}, most_recent_data.{{ col.name }}) as {{ col.name }}\n {% endfor %}\n \n {% else %}\n {% for col in change_data_columns if col.name|lower not in ['ticket_id','valid_from','valid_to','ticket_day_id'] %} \n , {{ col.name }}\n {% endfor %}\n {% endif %}\n\n from calendar\n left join change_data\n on calendar.ticket_id = change_data.ticket_id\n and 
calendar.date_day = change_data.valid_from\n \n {% if is_incremental() %}\n left join most_recent_data\n on calendar.ticket_id = most_recent_data.ticket_id\n and calendar.date_day = most_recent_data.date_day\n {% endif %}\n\n), set_values as (\n\n select\n date_day,\n ticket_id\n\n {% for col in change_data_columns if col.name|lower not in ['ticket_id','valid_from','valid_to','ticket_day_id'] %}\n , {{ col.name }}\n -- create a batch/partition once a new value is provided\n , sum( case when {{ col.name }} is null then 0 else 1 end) over ( partition by ticket_id\n order by date_day rows unbounded preceding) as {{ col.name }}_field_partition\n\n {% endfor %}\n\n from joined\n),\n\nfill_values as (\n\n select \n date_day,\n ticket_id\n\n {% for col in change_data_columns if col.name|lower not in ['ticket_id','valid_from','valid_to','ticket_day_id'] %}\n -- grab the value that started this batch/partition\n , first_value( {{ col.name }} ) over (\n partition by ticket_id, {{ col.name }}_field_partition \n order by date_day asc rows between unbounded preceding and current row) as {{ col.name }}\n {% endfor %}\n\n from set_values\n\n), fix_null_values as (\n\n select \n date_day,\n ticket_id\n {% for col in change_data_columns if col.name|lower not in ['ticket_id','valid_from','valid_to','ticket_day_id'] %} \n\n -- we de-nulled the true null values earlier in order to differentiate them from nulls that just needed to be backfilled\n , case when cast( {{ col.name }} as {{ dbt.type_string() }} ) = 'is_null' then null else {{ col.name }} end as {{ col.name }}\n {% endfor %}\n\n from fill_values\n\n), surrogate_key as (\n\n select\n {{ dbt_utils.generate_surrogate_key(['date_day','ticket_id']) }} as ticket_day_id,\n *\n\n from fix_null_values\n)\n\nselect *\nfrom surrogate_key", "language": "sql", "refs": [{"name": "int_zendesk__field_history_scd", "package": null, "version": null}, {"name": "int_zendesk__field_history_scd", "package": null, "version": null}, {"name": "int_zendesk__field_calendar_spine", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.is_incremental", "macro.dbt_utils.generate_surrogate_key", "macro.dbt.type_string"], "nodes": ["model.zendesk.int_zendesk__field_history_scd", "model.zendesk.int_zendesk__field_calendar_spine"]}, "compiled_path": "target/compiled/zendesk/models/zendesk__ticket_field_history.sql", "compiled": true, "compiled_code": "with change_data as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__field_history_scd`\n \n \n where valid_from >= (select max(date_day) from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`zendesk__ticket_field_history`)\n\n-- If no issue fields have been updated since the last incremental run, the pivoted_daily_history CTE will return no record/rows.\n-- When this is the case, we need to grab the most recent day's records from the previously built table so that we can persist \n-- those values into the future.\n\n), most_recent_data as ( \n\n select \n *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`zendesk__ticket_field_history`\n where date_day = (select max(date_day) from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`zendesk__ticket_field_history` )\n\n\n\n), calendar as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__field_calendar_spine`\n where date_day <= current_date\n \n and date_day >= (select max(date_day) 
from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`zendesk__ticket_field_history`)\n \n\n), joined as (\n\n select \n calendar.date_day,\n calendar.ticket_id\n \n \n , coalesce(change_data.status, most_recent_data.status) as status\n \n , coalesce(change_data.assignee_id, most_recent_data.assignee_id) as assignee_id\n \n , coalesce(change_data.priority, most_recent_data.priority) as priority\n \n \n \n\n from calendar\n left join change_data\n on calendar.ticket_id = change_data.ticket_id\n and calendar.date_day = change_data.valid_from\n \n \n left join most_recent_data\n on calendar.ticket_id = most_recent_data.ticket_id\n and calendar.date_day = most_recent_data.date_day\n \n\n), set_values as (\n\n select\n date_day,\n ticket_id\n\n \n , status\n -- create a batch/partition once a new value is provided\n , sum( case when status is null then 0 else 1 end) over ( partition by ticket_id\n order by date_day rows unbounded preceding) as status_field_partition\n\n \n , assignee_id\n -- create a batch/partition once a new value is provided\n , sum( case when assignee_id is null then 0 else 1 end) over ( partition by ticket_id\n order by date_day rows unbounded preceding) as assignee_id_field_partition\n\n \n , priority\n -- create a batch/partition once a new value is provided\n , sum( case when priority is null then 0 else 1 end) over ( partition by ticket_id\n order by date_day rows unbounded preceding) as priority_field_partition\n\n \n\n from joined\n),\n\nfill_values as (\n\n select \n date_day,\n ticket_id\n\n \n -- grab the value that started this batch/partition\n , first_value( status ) over (\n partition by ticket_id, status_field_partition \n order by date_day asc rows between unbounded preceding and current row) as status\n \n -- grab the value that started this batch/partition\n , first_value( assignee_id ) over (\n partition by ticket_id, assignee_id_field_partition \n order by date_day asc rows between unbounded preceding and current row) as assignee_id\n \n -- grab the value that started this batch/partition\n , first_value( priority ) over (\n partition by ticket_id, priority_field_partition \n order by date_day asc rows between unbounded preceding and current row) as priority\n \n\n from set_values\n\n), fix_null_values as (\n\n select \n date_day,\n ticket_id\n \n\n -- we de-nulled the true null values earlier in order to differentiate them from nulls that just needed to be backfilled\n , case when cast( status as string ) = 'is_null' then null else status end as status\n \n\n -- we de-nulled the true null values earlier in order to differentiate them from nulls that just needed to be backfilled\n , case when cast( assignee_id as string ) = 'is_null' then null else assignee_id end as assignee_id\n \n\n -- we de-nulled the true null values earlier in order to differentiate them from nulls that just needed to be backfilled\n , case when cast( priority as string ) = 'is_null' then null else priority end as priority\n \n\n from fill_values\n\n), surrogate_key as (\n\n select\n to_hex(md5(cast(coalesce(cast(date_day as string), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(ticket_id as string), '_dbt_utils_surrogate_key_null_') as string))) as ticket_day_id,\n *\n\n from fix_null_values\n)\n\nselect *\nfrom surrogate_key", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, 
"model.zendesk.zendesk__sla_policies": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "zendesk__sla_policies", "resource_type": "model", "package_name": "zendesk", "path": "zendesk__sla_policies.sql", "original_file_path": "models/zendesk__sla_policies.sql", "unique_id": "model.zendesk.zendesk__sla_policies", "fqn": ["zendesk", "zendesk__sla_policies"], "alias": "zendesk__sla_policies", "checksum": {"name": "sha256", "checksum": "450c1289895dff2dce94dbed7926eeaa895ffa8c6a25524f558d9dcd5e7075fa"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Each record represents an SLA policy event and additional sla breach and achievement metrics. Calendar and business hour SLA breaches for `first_reply_time`, `next_reply_time`, `requester_wait_time`, and `agent_work_time` are supported. If there is a SLA you would like supported that is not included, please create a feature request.", "columns": {"sla_event_id": {"name": "sla_event_id", "description": "A surrogate key generated from the combination of ticket_id, metric, and sla_applied_at fields", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_id": {"name": "ticket_id", "description": "A ticket's unique identifier, it is automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "sla_policy_name": {"name": "sla_policy_name", "description": "The name of the SLA policy associated with the SLA metric", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "metric": {"name": "metric", "description": "The SLA metric, either agent_work_time, requester_wait_time, first_reply_time or next_reply_time", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "sla_applied_at": {"name": "sla_applied_at", "description": "When the SLA target was triggered. 
This is the starting time", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "target": {"name": "target", "description": "The SLA target, in minutes", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "in_business_hours": {"name": "in_business_hours", "description": "Boolean field indicating if the SLA target is in business hours (true) or calendar hours (false)", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "sla_breach_at": {"name": "sla_breach_at", "description": "The time or expected time of the SLA breach or achieve event.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "sla_elapsed_time": {"name": "sla_elapsed_time", "description": "The total elapsed time to achieve the SLA metric whether breached or achieved", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_active_sla": {"name": "is_active_sla", "description": "Boolean field indicating that the SLA event is currently active and not breached (true) or past (false)", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_sla_breach": {"name": "is_sla_breach", "description": "Boolean field indicating if the SLA has been breached (true) or was achieved (false)", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1715700424.56461, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`zendesk__sla_policies`", "raw_code": "--final step where we union together all of the reply time, agent work time, and requester wait time sla's\n\nwith reply_time_sla as (\n\n select * \n from {{ ref('int_zendesk__reply_time_combined') }}\n\n), agent_work_calendar_sla as (\n\n select *\n from {{ ref('int_zendesk__agent_work_time_calendar_hours') }}\n\n), requester_wait_calendar_sla as (\n\n select *\n from {{ ref('int_zendesk__requester_wait_time_calendar_hours') }}\n\n{% if var('using_schedules', True) %}\n\n), agent_work_business_sla as (\n\n select *\n from {{ ref('int_zendesk__agent_work_time_business_hours') }}\n\n), requester_wait_business_sla as (\n select *\n from {{ ref('int_zendesk__requester_wait_time_business_hours') }}\n\n{% endif %}\n\n), all_slas_unioned as (\n select\n ticket_id,\n sla_policy_name,\n metric,\n sla_applied_at,\n target,\n in_business_hours,\n sla_update_at as sla_breach_at,\n sla_elapsed_time,\n is_sla_breached\n from reply_time_sla\n\nunion all\n\n select\n ticket_id,\n sla_policy_name,\n 'agent_work_time' as metric,\n sla_applied_at,\n target,\n false as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_calendar_minutes) as sla_elapsed_time,\n {{ fivetran_utils.max_bool(\"is_breached_during_schedule\") }}\n from agent_work_calendar_sla\n\n group by 1, 2, 3, 4, 5, 6\n\nunion all\n\n select\n ticket_id,\n sla_policy_name,\n 'requester_wait_time' as metric,\n sla_applied_at,\n target,\n false as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_calendar_minutes) as sla_elapsed_time,\n {{ fivetran_utils.max_bool(\"is_breached_during_schedule\") }}\n from requester_wait_calendar_sla\n\n group by 1, 2, 3, 4, 5, 6\n\n\n{% if var('using_schedules', True) %}\n\nunion all 
\n\n select \n ticket_id,\n sla_policy_name,\n 'agent_work_time' as metric,\n sla_applied_at,\n target,\n true as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_scheduled_minutes) as sla_elapsed_time,\n {{ fivetran_utils.max_bool(\"is_breached_during_schedule\") }}\n from agent_work_business_sla\n \n group by 1, 2, 3, 4, 5, 6\n\nunion all \n\n select \n ticket_id,\n sla_policy_name,\n 'requester_wait_time' as metric,\n sla_applied_at,\n target,\n true as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_scheduled_minutes) as sla_elapsed_time,\n {{ fivetran_utils.max_bool(\"is_breached_during_schedule\") }}\n \n from requester_wait_business_sla\n \n group by 1, 2, 3, 4, 5, 6\n\n{% endif %}\n\n)\n\nselect \n {{ dbt_utils.generate_surrogate_key(['ticket_id', 'metric', 'sla_applied_at']) }} as sla_event_id,\n ticket_id,\n sla_policy_name,\n metric,\n sla_applied_at,\n target,\n in_business_hours,\n sla_breach_at,\n case when sla_elapsed_time is null\n then ({{ dbt.datediff(\"sla_applied_at\", dbt.current_timestamp_backcompat(), 'second') }} / 60) --This will create an entry for active sla's\n else sla_elapsed_time\n end as sla_elapsed_time,\n sla_breach_at > current_timestamp as is_active_sla,\n case when (sla_breach_at > {{ dbt.current_timestamp_backcompat() }})\n then null\n else is_sla_breached\n end as is_sla_breach\nfrom all_slas_unioned", "language": "sql", "refs": [{"name": "int_zendesk__reply_time_combined", "package": null, "version": null}, {"name": "int_zendesk__agent_work_time_calendar_hours", "package": null, "version": null}, {"name": "int_zendesk__requester_wait_time_calendar_hours", "package": null, "version": null}, {"name": "int_zendesk__agent_work_time_business_hours", "package": null, "version": null}, {"name": "int_zendesk__requester_wait_time_business_hours", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.max_bool", "macro.dbt_utils.generate_surrogate_key", "macro.dbt.current_timestamp_backcompat", "macro.dbt.datediff"], "nodes": ["model.zendesk.int_zendesk__reply_time_combined", "model.zendesk.int_zendesk__agent_work_time_calendar_hours", "model.zendesk.int_zendesk__requester_wait_time_calendar_hours", "model.zendesk.int_zendesk__agent_work_time_business_hours", "model.zendesk.int_zendesk__requester_wait_time_business_hours"]}, "compiled_path": "target/compiled/zendesk/models/zendesk__sla_policies.sql", "compiled": true, "compiled_code": "--final step where we union together all of the reply time, agent work time, and requester wait time sla's\n\nwith reply_time_sla as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__reply_time_combined`\n\n), agent_work_calendar_sla as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__agent_work_time_calendar_hours`\n\n), requester_wait_calendar_sla as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__requester_wait_time_calendar_hours`\n\n\n\n), agent_work_business_sla as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__agent_work_time_business_hours`\n\n), requester_wait_business_sla as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__requester_wait_time_business_hours`\n\n\n\n), all_slas_unioned as (\n select\n ticket_id,\n sla_policy_name,\n metric,\n 
sla_applied_at,\n target,\n in_business_hours,\n sla_update_at as sla_breach_at,\n sla_elapsed_time,\n is_sla_breached\n from reply_time_sla\n\nunion all\n\n select\n ticket_id,\n sla_policy_name,\n 'agent_work_time' as metric,\n sla_applied_at,\n target,\n false as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_calendar_minutes) as sla_elapsed_time,\n \n\n max( is_breached_during_schedule )\n\n\n from agent_work_calendar_sla\n\n group by 1, 2, 3, 4, 5, 6\n\nunion all\n\n select\n ticket_id,\n sla_policy_name,\n 'requester_wait_time' as metric,\n sla_applied_at,\n target,\n false as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_calendar_minutes) as sla_elapsed_time,\n \n\n max( is_breached_during_schedule )\n\n\n from requester_wait_calendar_sla\n\n group by 1, 2, 3, 4, 5, 6\n\n\n\n\nunion all \n\n select \n ticket_id,\n sla_policy_name,\n 'agent_work_time' as metric,\n sla_applied_at,\n target,\n true as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_scheduled_minutes) as sla_elapsed_time,\n \n\n max( is_breached_during_schedule )\n\n\n from agent_work_business_sla\n \n group by 1, 2, 3, 4, 5, 6\n\nunion all \n\n select \n ticket_id,\n sla_policy_name,\n 'requester_wait_time' as metric,\n sla_applied_at,\n target,\n true as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_scheduled_minutes) as sla_elapsed_time,\n \n\n max( is_breached_during_schedule )\n\n\n \n from requester_wait_business_sla\n \n group by 1, 2, 3, 4, 5, 6\n\n\n\n)\n\nselect \n to_hex(md5(cast(coalesce(cast(ticket_id as string), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(metric as string), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(sla_applied_at as string), '_dbt_utils_surrogate_key_null_') as string))) as sla_event_id,\n ticket_id,\n sla_policy_name,\n metric,\n sla_applied_at,\n target,\n in_business_hours,\n sla_breach_at,\n case when sla_elapsed_time is null\n then (\n\n datetime_diff(\n cast(current_timestamp as datetime),\n cast(sla_applied_at as datetime),\n second\n )\n\n / 60) --This will create an entry for active sla's\n else sla_elapsed_time\n end as sla_elapsed_time,\n sla_breach_at > current_timestamp as is_active_sla,\n case when (sla_breach_at > current_timestamp)\n then null\n else is_sla_breached\n end as is_sla_breach\nfrom all_slas_unioned", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.zendesk__ticket_backlog": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "zendesk__ticket_backlog", "resource_type": "model", "package_name": "zendesk", "path": "zendesk__ticket_backlog.sql", "original_file_path": "models/zendesk__ticket_backlog.sql", "unique_id": "model.zendesk.zendesk__ticket_backlog", "fqn": ["zendesk", "zendesk__ticket_backlog"], "alias": "zendesk__ticket_backlog", "checksum": {"name": "sha256", "checksum": "546f8460ab16ce0f4671b1ae5742bfdb0f97bc4184c9da30cd21de81400922f7"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": 
"ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "A daily historical view of the ticket field values defined in the `ticket_field_history_columns` variable for all backlog tickets. Backlog tickets being defined as any ticket not a 'closed', 'deleted', or 'solved' status.\n", "columns": {"date_day": {"name": "date_day", "description": "The date of the day associated with the field values", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_id": {"name": "ticket_id", "description": "A ticket's unique identifier, it is automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "The status of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_channel": {"name": "created_channel", "description": "The channel where the ticket was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_name": {"name": "assignee_name", "description": "The assignee name assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "priority": {"name": "priority", "description": "The tickets priority ranking", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1715700424.574294, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`zendesk__ticket_backlog`", "raw_code": "--This model will only run if 'status' is included within the `ticket_field_history_columns` variable.\n{{ config(enabled = 'status' in var('ticket_field_history_columns')) }}\n\nwith ticket_field_history as (\n select *\n from {{ ref('zendesk__ticket_field_history') }}\n\n), tickets as (\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), group_names as (\n select *\n from {{ ref('stg_zendesk__group') }}\n\n), users as (\n select *\n from {{ ref('stg_zendesk__user') }}\n\n), brands as (\n select *\n from {{ ref('stg_zendesk__brand') }}\n\n--The below model is excluded if the user does not include ticket_form_id in the variable as a low percentage of accounts use ticket forms.\n{% if 'ticket_form_id' in var('ticket_field_history_columns') %}\n), ticket_forms as (\n select *\n from {{ ref('int_zendesk__latest_ticket_form') }}\n{% endif %}\n\n), organizations as (\n select *\n from {{ ref('stg_zendesk__organization') }}\n\n), backlog as (\n select\n ticket_field_history.date_day\n ,ticket_field_history.ticket_id\n ,ticket_field_history.status\n ,tickets.created_channel\n {% for col in var('ticket_field_history_columns') if col != 'status' %} --Looking at all history fields the users passed through in their dbt_project.yml file\n {% if col in ['assignee_id'] %} --Standard ID field where the name can easily be joined from stg model.\n ,assignee.name as assignee_name\n\n {% elif col in ['requester_id'] %} --Standard ID field where the name can easily be joined from stg model.\n ,requester.name as requester_name\n\n {% elif col in ['ticket_form_id'] %} 
--Standard ID field where the name can easily be joined from stg model.\n ,ticket_forms.name as ticket_form_name\n\n {% elif col in ['organization_id'] %} --Standard ID field where the name can easily be joined from stg model.\n ,organizations.name as organization_name\n\n {% elif col in ['brand_id'] %} --Standard ID field where the name can easily be joined from stg model.\n ,brands.name as brand_name\n\n {% elif col in ['group_id'] %} --Standard ID field where the name can easily be joined from stg model.\n ,group_names.name as group_name\n\n {% elif col in ['locale_id'] %} --Standard ID field where the name can easily be joined from stg model.\n ,assignee.locale as local_name\n\n {% else %} --All other fields are not ID's and can simply be included in the query.\n ,ticket_field_history.{{ col }}\n {% endif %}\n {% endfor %}\n\n from ticket_field_history\n\n left join tickets\n on tickets.ticket_id = ticket_field_history.ticket_id\n\n {% if 'ticket_form_id' in var('ticket_field_history_columns') %} --Join not needed if field is not located in variable, otherwise it is included.\n left join ticket_forms\n on ticket_forms.ticket_form_id = cast(ticket_field_history.ticket_form_id as {{ dbt.type_bigint() }})\n {% endif %}\n\n {% if 'group_id' in var('ticket_field_history_columns') %}--Join not needed if field is not located in variable, otherwise it is included.\n left join group_names\n on group_names.group_id = cast(ticket_field_history.group_id as {{ dbt.type_bigint() }})\n {% endif %}\n\n {% if 'assignee_id' in var('ticket_field_history_columns') or 'requester_id' in var('ticket_field_history_columns') or 'locale_id' in var('ticket_field_history_columns')%} --Join not needed if fields is not located in variable, otherwise it is included.\n left join users as assignee\n on assignee.user_id = cast(ticket_field_history.assignee_id as {{ dbt.type_bigint() }})\n {% endif %}\n\n {% if 'requester_id' in var('ticket_field_history_columns') %} --Join not needed if field is not located in variable, otherwise it is included.\n left join users as requester\n on requester.user_id = cast(ticket_field_history.requester_id as {{ dbt.type_bigint() }})\n {% endif %}\n\n {% if 'brand_id' in var('ticket_field_history_columns') %} --Join not needed if field is not located in variable, otherwise it is included.\n left join brands\n on brands.brand_id = cast(ticket_field_history.brand_id as {{ dbt.type_bigint() }})\n {% endif %}\n\n {% if 'organization_id' in var('ticket_field_history_columns') %} --Join not needed if field is not located in variable, otherwise it is included.\n left join organizations\n on organizations.organization_id = cast(ticket_field_history.organization_id as {{ dbt.type_bigint() }})\n {% endif %}\n\n where ticket_field_history.status not in ('closed', 'solved', 'deleted')\n)\n\nselect *\nfrom backlog", "language": "sql", "refs": [{"name": "zendesk__ticket_field_history", "package": null, "version": null}, {"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "stg_zendesk__group", "package": null, "version": null}, {"name": "stg_zendesk__user", "package": null, "version": null}, {"name": "stg_zendesk__brand", "package": null, "version": null}, {"name": "stg_zendesk__organization", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_bigint"], "nodes": ["model.zendesk.zendesk__ticket_field_history", "model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__group", 
"model.zendesk_source.stg_zendesk__user", "model.zendesk_source.stg_zendesk__brand", "model.zendesk_source.stg_zendesk__organization"]}, "compiled_path": "target/compiled/zendesk/models/zendesk__ticket_backlog.sql", "compiled": true, "compiled_code": "--This model will only run if 'status' is included within the `ticket_field_history_columns` variable.\n\n\nwith ticket_field_history as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`zendesk__ticket_field_history`\n\n), tickets as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket`\n\n), group_names as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__group`\n\n), users as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user`\n\n), brands as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__brand`\n\n--The below model is excluded if the user does not include ticket_form_id in the variable as a low percentage of accounts use ticket forms.\n\n\n), organizations as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__organization`\n\n), backlog as (\n select\n ticket_field_history.date_day\n ,ticket_field_history.ticket_id\n ,ticket_field_history.status\n ,tickets.created_channel\n --Looking at all history fields the users passed through in their dbt_project.yml file\n --Standard ID field where the name can easily be joined from stg model.\n ,assignee.name as assignee_name\n\n \n --Looking at all history fields the users passed through in their dbt_project.yml file\n --All other fields are not ID's and can simply be included in the query.\n ,ticket_field_history.priority\n \n \n\n from ticket_field_history\n\n left join tickets\n on tickets.ticket_id = ticket_field_history.ticket_id\n\n \n\n \n\n --Join not needed if fields is not located in variable, otherwise it is included.\n left join users as assignee\n on assignee.user_id = cast(ticket_field_history.assignee_id as bigint)\n \n\n \n\n \n\n \n\n where ticket_field_history.status not in ('closed', 'solved', 'deleted')\n)\n\nselect *\nfrom backlog", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__sla_policy_applied": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__sla_policy_applied", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/int_zendesk__sla_policy_applied.sql", "original_file_path": "models/sla_policy/int_zendesk__sla_policy_applied.sql", "unique_id": "model.zendesk.int_zendesk__sla_policy_applied", "fqn": ["zendesk", "sla_policy", "int_zendesk__sla_policy_applied"], "alias": "int_zendesk__sla_policy_applied", "checksum": {"name": "sha256", "checksum": "5879f6ab082c64d3650de0c8a5b3ec5ee85e25eb99646451eab7e9d6499c4d19"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, 
"packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1715700424.167136, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__sla_policy_applied`", "raw_code": "-- step 1, figure out when sla was applied to tickets\n\n-- more on SLA policies here: https://support.zendesk.com/hc/en-us/articles/204770038-Defining-and-using-SLA-policies-Professional-and-Enterprise-\n-- SLA policies are calculated for next_reply_time, first_reply_time, agent_work_time, requester_wait_time. If you're company uses other SLA metrics, and would like this\n-- package to support those, please reach out to the Fivetran team on Slack.\n\nwith ticket_field_history as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n\n), sla_policy_name as (\n\n select \n *\n from {{ ref('int_zendesk__updates') }}\n where field_name = ('sla_policy')\n\n), ticket as (\n\n select *\n from {{ ref('int_zendesk__ticket_aggregates') }}\n\n), sla_policy_applied as (\n\n select\n ticket_field_history.ticket_id,\n ticket.created_at as ticket_created_at,\n ticket_field_history.valid_starting_at,\n ticket.status as ticket_current_status,\n ticket_field_history.field_name as metric,\n case when ticket_field_history.field_name = 'first_reply_time' then row_number() over (partition by ticket_field_history.ticket_id, ticket_field_history.field_name order by ticket_field_history.valid_starting_at desc) else 1 end as latest_sla,\n case when ticket_field_history.field_name = 'first_reply_time' then ticket.created_at else ticket_field_history.valid_starting_at end as sla_applied_at,\n cast({{ fivetran_utils.json_parse('ticket_field_history.value', ['minutes']) }} as {{ dbt.type_int() }} ) as target,\n {{ fivetran_utils.json_parse('ticket_field_history.value', ['in_business_hours']) }} = 'true' as in_business_hours\n from ticket_field_history\n join ticket\n on ticket.ticket_id = ticket_field_history.ticket_id\n where ticket_field_history.value is not null\n and ticket_field_history.field_name in ('next_reply_time', 'first_reply_time', 'agent_work_time', 'requester_wait_time')\n\n), final as (\n select\n sla_policy_applied.*,\n sla_policy_name.value as sla_policy_name\n from sla_policy_applied\n left join sla_policy_name\n on sla_policy_name.ticket_id = sla_policy_applied.ticket_id\n and sla_policy_applied.valid_starting_at >= sla_policy_name.valid_starting_at\n and sla_policy_applied.valid_starting_at < coalesce(sla_policy_name.valid_ending_at, {{ dbt.current_timestamp_backcompat() }}) \n where sla_policy_applied.latest_sla = 1\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "int_zendesk__ticket_aggregates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.json_parse", "macro.dbt.type_int", "macro.dbt.current_timestamp_backcompat"], "nodes": ["model.zendesk.int_zendesk__updates", "model.zendesk.int_zendesk__ticket_aggregates"]}, "compiled_path": 
"target/compiled/zendesk/models/sla_policy/int_zendesk__sla_policy_applied.sql", "compiled": true, "compiled_code": "-- step 1, figure out when sla was applied to tickets\n\n-- more on SLA policies here: https://support.zendesk.com/hc/en-us/articles/204770038-Defining-and-using-SLA-policies-Professional-and-Enterprise-\n-- SLA policies are calculated for next_reply_time, first_reply_time, agent_work_time, requester_wait_time. If you're company uses other SLA metrics, and would like this\n-- package to support those, please reach out to the Fivetran team on Slack.\n\nwith ticket_field_history as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__updates`\n\n), sla_policy_name as (\n\n select \n *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__updates`\n where field_name = ('sla_policy')\n\n), ticket as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_aggregates`\n\n), sla_policy_applied as (\n\n select\n ticket_field_history.ticket_id,\n ticket.created_at as ticket_created_at,\n ticket_field_history.valid_starting_at,\n ticket.status as ticket_current_status,\n ticket_field_history.field_name as metric,\n case when ticket_field_history.field_name = 'first_reply_time' then row_number() over (partition by ticket_field_history.ticket_id, ticket_field_history.field_name order by ticket_field_history.valid_starting_at desc) else 1 end as latest_sla,\n case when ticket_field_history.field_name = 'first_reply_time' then ticket.created_at else ticket_field_history.valid_starting_at end as sla_applied_at,\n cast(\n\n \n json_extract_scalar(ticket_field_history.value, '$.minutes')\n\n as INT64 ) as target,\n \n\n \n json_extract_scalar(ticket_field_history.value, '$.in_business_hours')\n\n = 'true' as in_business_hours\n from ticket_field_history\n join ticket\n on ticket.ticket_id = ticket_field_history.ticket_id\n where ticket_field_history.value is not null\n and ticket_field_history.field_name in ('next_reply_time', 'first_reply_time', 'agent_work_time', 'requester_wait_time')\n\n), final as (\n select\n sla_policy_applied.*,\n sla_policy_name.value as sla_policy_name\n from sla_policy_applied\n left join sla_policy_name\n on sla_policy_name.ticket_id = sla_policy_applied.ticket_id\n and sla_policy_applied.valid_starting_at >= sla_policy_name.valid_starting_at\n and sla_policy_applied.valid_starting_at < coalesce(sla_policy_name.valid_ending_at, current_timestamp) \n where sla_policy_applied.latest_sla = 1\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__agent_work_time_business_hours": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__agent_work_time_business_hours", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/agent_work_time/int_zendesk__agent_work_time_business_hours.sql", "original_file_path": "models/sla_policy/agent_work_time/int_zendesk__agent_work_time_business_hours.sql", "unique_id": "model.zendesk.int_zendesk__agent_work_time_business_hours", "fqn": ["zendesk", "sla_policy", "agent_work_time", "int_zendesk__agent_work_time_business_hours"], "alias": "int_zendesk__agent_work_time_business_hours", "checksum": {"name": 
"sha256", "checksum": "bf3885a1aad6f4f87b63d6c0f1131a6b6149407f7a0f2f7447172884f788cc50"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1715700424.17083, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__agent_work_time_business_hours`", "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\n-- AGENT WORK TIME\n-- This is complicated, as SLAs minutes are only counted while the ticket is in 'new' or 'open' status.\n\n-- Additionally, for business hours, only 'new' or 'open' status hours are counted if they are also during business hours\nwith agent_work_time_filtered_statuses as (\n\n select *\n from {{ ref('int_zendesk__agent_work_time_filtered_statuses') }}\n where in_business_hours\n\n), schedule as (\n\n select * \n from {{ ref('int_zendesk__schedule_spine') }}\n\n), ticket_schedules as (\n\n select * \n from {{ ref('int_zendesk__ticket_schedules') }}\n \n-- cross schedules with work time\n), ticket_status_crossed_with_schedule as (\n \n select\n agent_work_time_filtered_statuses.ticket_id,\n agent_work_time_filtered_statuses.sla_applied_at,\n agent_work_time_filtered_statuses.target, \n agent_work_time_filtered_statuses.sla_policy_name, \n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as valid_starting_at,\n least(valid_ending_at, schedule_invalidated_at) as valid_ending_at,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n valid_starting_at as status_valid_starting_at,\n valid_ending_at as status_valid_ending_at\n\n from agent_work_time_filtered_statuses\n left join ticket_schedules\n on agent_work_time_filtered_statuses.ticket_id = ticket_schedules.ticket_id\n where {{ dbt.datediff(\n 'greatest(valid_starting_at, schedule_created_at)', \n 'least(valid_ending_at, schedule_invalidated_at)', \n 'second') }} > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n sla_applied_at,\n target, \n sla_policy_name, \n schedule_id,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('ticket_status_crossed_with_schedule.valid_starting_at','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n \"cast(ticket_status_crossed_with_schedule.valid_starting_at as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as valid_starting_at_in_minutes_from_week,\n ({{ dbt.datediff(\n 'ticket_status_crossed_with_schedule.valid_starting_at', \n 'ticket_status_crossed_with_schedule.valid_ending_at',\n 'second') }} 
/60\n ) as raw_delta_in_minutes,\n {{ dbt_date.week_start('ticket_status_crossed_with_schedule.valid_starting_at','UTC') }} as start_week_date\n \n from ticket_status_crossed_with_schedule\n {{ dbt_utils.group_by(n=10) }}\n\n), weeks as (\n\n {{ dbt_utils.generate_series(208) }}\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((valid_starting_at_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_period_agent_work_time as (\n\n select \n\n ticket_id,\n sla_applied_at,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n target,\n sla_policy_name,\n valid_starting_at_in_minutes_from_week,\n raw_delta_in_minutes,\n week_number,\n schedule_id,\n start_week_date,\n cast(greatest(0, valid_starting_at_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time_minute,\n cast(least(valid_starting_at_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time_minute\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods_agent as (\n \n select \n weekly_period_agent_work_time.ticket_id,\n weekly_period_agent_work_time.sla_applied_at,\n weekly_period_agent_work_time.target,\n weekly_period_agent_work_time.sla_policy_name,\n weekly_period_agent_work_time.valid_starting_at,\n weekly_period_agent_work_time.valid_ending_at,\n weekly_period_agent_work_time.week_number,\n weekly_period_agent_work_time.ticket_week_start_time_minute,\n weekly_period_agent_work_time.ticket_week_end_time_minute,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time_minute, schedule.end_time_utc) - greatest(weekly_period_agent_work_time.ticket_week_start_time_minute, schedule.start_time_utc) as scheduled_minutes\n from weekly_period_agent_work_time\n join schedule on ticket_week_start_time_minute <= schedule.end_time_utc \n and ticket_week_end_time_minute >= schedule.start_time_utc\n and weekly_period_agent_work_time.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time_minute', from_date_or_timestamp='start_week_date') }} as {{ dbt.type_timestamp() }}) > cast(schedule.valid_from as {{ dbt.type_timestamp() }})\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time_minute', from_date_or_timestamp='start_week_date') }} as {{ dbt.type_timestamp() }}) < cast(schedule.valid_until as {{ dbt.type_timestamp() }})\n\n), intercepted_periods_with_running_total as (\n \n select \n *,\n sum(scheduled_minutes) over \n (partition by ticket_id, sla_applied_at \n order by valid_starting_at, week_number, schedule_end_time\n rows between unbounded preceding and current row)\n as running_total_scheduled_minutes\n\n from intercepted_periods_agent\n\n\n), 
intercepted_periods_agent_with_breach_flag as (\n select \n intercepted_periods_with_running_total.*,\n target - running_total_scheduled_minutes as remaining_target_minutes,\n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) as lag_check,\n case when (target - running_total_scheduled_minutes) = 0 then true\n when (target - running_total_scheduled_minutes) < 0 \n and \n (lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) > 0 \n or \n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) is null) \n then true else false end as is_breached_during_schedule\n \n from intercepted_periods_with_running_total\n\n), intercepted_periods_agent_filtered as (\n\n select\n *,\n (remaining_target_minutes + scheduled_minutes) as breach_minutes,\n greatest(ticket_week_start_time_minute, schedule_start_time) + (remaining_target_minutes + scheduled_minutes) as breach_minutes_from_week\n from intercepted_periods_agent_with_breach_flag\n \n), agent_work_business_breach as (\n \n select \n *,\n {{ fivetran_utils.timestamp_add(\n \"minute\",\n \"cast(((7*24*60) * week_number) + breach_minutes_from_week as \" ~ dbt.type_int() ~ \" )\",\n \"\" ~ dbt.date_trunc('week', 'valid_starting_at') ~ \"\",\n ) }} as sla_breach_at\n from intercepted_periods_agent_filtered\n\n)\n\nselect * \nfrom agent_work_business_breach", "language": "sql", "refs": [{"name": "int_zendesk__agent_work_time_filtered_statuses", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}, {"name": "int_zendesk__ticket_schedules", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt_utils.group_by", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd", "macro.dbt.date_trunc", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__agent_work_time_filtered_statuses", "model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_schedules"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/agent_work_time/int_zendesk__agent_work_time_business_hours.sql", "compiled": true, "compiled_code": "\n\n-- AGENT WORK TIME\n-- This is complicated, as SLAs minutes are only counted while the ticket is in 'new' or 'open' status.\n\n-- Additionally, for business hours, only 'new' or 'open' status hours are counted if they are also during business hours\nwith agent_work_time_filtered_statuses as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__agent_work_time_filtered_statuses`\n where in_business_hours\n\n), schedule as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__schedule_spine`\n\n), ticket_schedules as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_schedules`\n \n-- cross schedules with work time\n), ticket_status_crossed_with_schedule as (\n \n select\n agent_work_time_filtered_statuses.ticket_id,\n agent_work_time_filtered_statuses.sla_applied_at,\n agent_work_time_filtered_statuses.target, \n agent_work_time_filtered_statuses.sla_policy_name, 
\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as valid_starting_at,\n least(valid_ending_at, schedule_invalidated_at) as valid_ending_at,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n valid_starting_at as status_valid_starting_at,\n valid_ending_at as status_valid_ending_at\n\n from agent_work_time_filtered_statuses\n left join ticket_schedules\n on agent_work_time_filtered_statuses.ticket_id = ticket_schedules.ticket_id\n where \n\n datetime_diff(\n cast(least(valid_ending_at, schedule_invalidated_at) as datetime),\n cast(greatest(valid_starting_at, schedule_created_at) as datetime),\n second\n )\n\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n sla_applied_at,\n target, \n sla_policy_name, \n schedule_id,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n (\n\n datetime_diff(\n cast(cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp) as datetime),\n cast(cast(cast(timestamp_trunc(\n cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp),\n week\n ) as date)as timestamp) as datetime),\n second\n )\n\n /60\n ) as valid_starting_at_in_minutes_from_week,\n (\n\n datetime_diff(\n cast(ticket_status_crossed_with_schedule.valid_ending_at as datetime),\n cast(ticket_status_crossed_with_schedule.valid_starting_at as datetime),\n second\n )\n\n /60\n ) as raw_delta_in_minutes,\n cast(timestamp_trunc(\n cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp),\n week\n ) as date) as start_week_date\n \n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7,8,9,10\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as INT64) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((valid_starting_at_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_period_agent_work_time as (\n\n select \n\n ticket_id,\n sla_applied_at,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n target,\n sla_policy_name,\n valid_starting_at_in_minutes_from_week,\n raw_delta_in_minutes,\n week_number,\n schedule_id,\n start_week_date,\n cast(greatest(0, valid_starting_at_in_minutes_from_week - week_number * (7*24*60)) as 
INT64) as ticket_week_start_time_minute,\n cast(least(valid_starting_at_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as INT64) as ticket_week_end_time_minute\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods_agent as (\n \n select \n weekly_period_agent_work_time.ticket_id,\n weekly_period_agent_work_time.sla_applied_at,\n weekly_period_agent_work_time.target,\n weekly_period_agent_work_time.sla_policy_name,\n weekly_period_agent_work_time.valid_starting_at,\n weekly_period_agent_work_time.valid_ending_at,\n weekly_period_agent_work_time.week_number,\n weekly_period_agent_work_time.ticket_week_start_time_minute,\n weekly_period_agent_work_time.ticket_week_end_time_minute,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time_minute, schedule.end_time_utc) - greatest(weekly_period_agent_work_time.ticket_week_start_time_minute, schedule.start_time_utc) as scheduled_minutes\n from weekly_period_agent_work_time\n join schedule on ticket_week_start_time_minute <= schedule.end_time_utc \n and ticket_week_end_time_minute >= schedule.start_time_utc\n and weekly_period_agent_work_time.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_end_time_minute minute\n )\n\n as timestamp) > cast(schedule.valid_from as timestamp)\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_start_time_minute minute\n )\n\n as timestamp) < cast(schedule.valid_until as timestamp)\n\n), intercepted_periods_with_running_total as (\n \n select \n *,\n sum(scheduled_minutes) over \n (partition by ticket_id, sla_applied_at \n order by valid_starting_at, week_number, schedule_end_time\n rows between unbounded preceding and current row)\n as running_total_scheduled_minutes\n\n from intercepted_periods_agent\n\n\n), intercepted_periods_agent_with_breach_flag as (\n select \n intercepted_periods_with_running_total.*,\n target - running_total_scheduled_minutes as remaining_target_minutes,\n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) as lag_check,\n case when (target - running_total_scheduled_minutes) = 0 then true\n when (target - running_total_scheduled_minutes) < 0 \n and \n (lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) > 0 \n or \n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) is null) \n then true else false end as is_breached_during_schedule\n \n from intercepted_periods_with_running_total\n\n), intercepted_periods_agent_filtered as (\n\n select\n *,\n (remaining_target_minutes + scheduled_minutes) as breach_minutes,\n greatest(ticket_week_start_time_minute, schedule_start_time) + (remaining_target_minutes + scheduled_minutes) as breach_minutes_from_week\n from intercepted_periods_agent_with_breach_flag\n \n), 
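-- editorial sketch (hypothetical numbers, not part of the compiled output): if valid_starting_at falls in the week\n-- starting Sunday 2020-01-05, week_number is 1, and breach_minutes_from_week is 500, the next CTE computes\n-- timestamp_trunc(valid_starting_at, week) + ((7*24*60) + 500) minutes, i.e. an sla_breach_at of 2020-01-12 08:20:00 UTC.\n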
agent_work_business_breach as (\n \n select \n *,\n \n\n timestamp_add(timestamp_trunc(\n cast(valid_starting_at as timestamp),\n week\n ), interval cast(((7*24*60) * week_number) + breach_minutes_from_week as INT64 ) minute)\n\n as sla_breach_at\n from intercepted_periods_agent_filtered\n\n)\n\nselect * \nfrom agent_work_business_breach", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__agent_work_time_calendar_hours": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__agent_work_time_calendar_hours", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/agent_work_time/int_zendesk__agent_work_time_calendar_hours.sql", "original_file_path": "models/sla_policy/agent_work_time/int_zendesk__agent_work_time_calendar_hours.sql", "unique_id": "model.zendesk.int_zendesk__agent_work_time_calendar_hours", "fqn": ["zendesk", "sla_policy", "agent_work_time", "int_zendesk__agent_work_time_calendar_hours"], "alias": "int_zendesk__agent_work_time_calendar_hours", "checksum": {"name": "sha256", "checksum": "f25752139fd2e10c5d666783a5abbf36e9d81b6a4e0012f6e42d816e8d20aa81"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1715700424.186018, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__agent_work_time_calendar_hours`", "raw_code": "-- Calculate breach time for agent work time, calendar hours\nwith agent_work_time_filtered_statuses as (\n\n select *\n from {{ ref('int_zendesk__agent_work_time_filtered_statuses') }}\n where not in_business_hours\n\n), agent_work_time_calendar_minutes as (\n\n select \n *,\n {{ dbt.datediff(\n 'valid_starting_at',\n 'valid_ending_at',\n 'minute' )}} as calendar_minutes,\n sum({{ dbt.datediff(\n 'valid_starting_at', \n 'valid_ending_at', \n 'minute') }} ) \n over (partition by ticket_id, sla_applied_at order by valid_starting_at rows between unbounded preceding and current row) as running_total_calendar_minutes\n from agent_work_time_filtered_statuses\n\n), agent_work_time_calendar_minutes_flagged as (\n\nselect \n agent_work_time_calendar_minutes.*,\n target - running_total_calendar_minutes as remaining_target_minutes,\n case when (target - running_total_calendar_minutes) < 0 \n and \n (lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) >= 0 \n or \n lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) is null) \n then true else false end as 
is_breached_during_schedule\n \nfrom agent_work_time_calendar_minutes\n\n), final as (\n select\n *,\n (remaining_target_minutes + calendar_minutes) as breach_minutes,\n {{ fivetran_utils.timestamp_add(\n 'minute',\n '(remaining_target_minutes + calendar_minutes)',\n 'valid_starting_at', \n ) }} as sla_breach_at\n from agent_work_time_calendar_minutes_flagged\n\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__agent_work_time_filtered_statuses", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__agent_work_time_filtered_statuses"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/agent_work_time/int_zendesk__agent_work_time_calendar_hours.sql", "compiled": true, "compiled_code": "-- Calculate breach time for agent work time, calendar hours\nwith agent_work_time_filtered_statuses as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__agent_work_time_filtered_statuses`\n where not in_business_hours\n\n), agent_work_time_calendar_minutes as (\n\n select \n *,\n \n\n datetime_diff(\n cast(valid_ending_at as datetime),\n cast(valid_starting_at as datetime),\n minute\n )\n\n as calendar_minutes,\n sum(\n\n datetime_diff(\n cast(valid_ending_at as datetime),\n cast(valid_starting_at as datetime),\n minute\n )\n\n ) \n over (partition by ticket_id, sla_applied_at order by valid_starting_at rows between unbounded preceding and current row) as running_total_calendar_minutes\n from agent_work_time_filtered_statuses\n\n), agent_work_time_calendar_minutes_flagged as (\n\nselect \n agent_work_time_calendar_minutes.*,\n target - running_total_calendar_minutes as remaining_target_minutes,\n case when (target - running_total_calendar_minutes) < 0 \n and \n (lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) >= 0 \n or \n lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) is null) \n then true else false end as is_breached_during_schedule\n \nfrom agent_work_time_calendar_minutes\n\n), final as (\n select\n *,\n (remaining_target_minutes + calendar_minutes) as breach_minutes,\n \n\n timestamp_add(valid_starting_at, interval (remaining_target_minutes + calendar_minutes) minute)\n\n as sla_breach_at\n from agent_work_time_calendar_minutes_flagged\n\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__agent_work_time_filtered_statuses": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__agent_work_time_filtered_statuses", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/agent_work_time/int_zendesk__agent_work_time_filtered_statuses.sql", "original_file_path": "models/sla_policy/agent_work_time/int_zendesk__agent_work_time_filtered_statuses.sql", "unique_id": "model.zendesk.int_zendesk__agent_work_time_filtered_statuses", "fqn": ["zendesk", "sla_policy", "agent_work_time", "int_zendesk__agent_work_time_filtered_statuses"], "alias": "int_zendesk__agent_work_time_filtered_statuses", "checksum": {"name": "sha256", "checksum": 
"fbb6aeccc9d5c6ec4e48160a9f5fdf94c7be4e3639d19a3e55e64ecbedccaa62"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1715700424.188682, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__agent_work_time_filtered_statuses`", "raw_code": "with agent_work_time_sla as (\n\n select *\n from {{ ref('int_zendesk__sla_policy_applied') }}\n where metric = 'agent_work_time'\n\n), ticket_historical_status as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_status') }}\n \n--This captures the statuses of the ticket while the agent work time sla was active for the ticket.\n), agent_work_time_filtered_statuses as (\n\n select \n ticket_historical_status.ticket_id,\n greatest(ticket_historical_status.valid_starting_at, agent_work_time_sla.sla_applied_at) as valid_starting_at,\n coalesce(\n ticket_historical_status.valid_ending_at, \n {{ fivetran_utils.timestamp_add('day', 30, \"\" ~ dbt.current_timestamp_backcompat() ~ \"\") }} ) as valid_ending_at, --assumes current status continues into the future. 
This is necessary to predict future SLA breaches (not just past).\n ticket_historical_status.status as ticket_status,\n agent_work_time_sla.sla_applied_at,\n agent_work_time_sla.target, \n agent_work_time_sla.sla_policy_name,\n agent_work_time_sla.ticket_created_at,\n agent_work_time_sla.in_business_hours\n from ticket_historical_status\n join agent_work_time_sla\n on ticket_historical_status.ticket_id = agent_work_time_sla.ticket_id\n where ticket_historical_status.status in ('new', 'open') -- these are the only statuses that count as \"agent work time\"\n and sla_applied_at < valid_ending_at\n\n)\nselect *\nfrom agent_work_time_filtered_statuses", "language": "sql", "refs": [{"name": "int_zendesk__sla_policy_applied", "package": null, "version": null}, {"name": "int_zendesk__ticket_historical_status", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__ticket_historical_status"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/agent_work_time/int_zendesk__agent_work_time_filtered_statuses.sql", "compiled": true, "compiled_code": "with agent_work_time_sla as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__sla_policy_applied`\n where metric = 'agent_work_time'\n\n), ticket_historical_status as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_status`\n \n--This captures the statuses of the ticket while the agent work time sla was active for the ticket.\n), agent_work_time_filtered_statuses as (\n\n select \n ticket_historical_status.ticket_id,\n greatest(ticket_historical_status.valid_starting_at, agent_work_time_sla.sla_applied_at) as valid_starting_at,\n coalesce(\n ticket_historical_status.valid_ending_at, \n \n\n timestamp_add(current_timestamp, interval 30 day)\n\n ) as valid_ending_at, --assumes current status continues into the future. 
This is necessary to predict future SLA breaches (not just past).\n ticket_historical_status.status as ticket_status,\n agent_work_time_sla.sla_applied_at,\n agent_work_time_sla.target, \n agent_work_time_sla.sla_policy_name,\n agent_work_time_sla.ticket_created_at,\n agent_work_time_sla.in_business_hours\n from ticket_historical_status\n join agent_work_time_sla\n on ticket_historical_status.ticket_id = agent_work_time_sla.ticket_id\n where ticket_historical_status.status in ('new', 'open') -- these are the only statuses that count as \"agent work time\"\n and sla_applied_at < valid_ending_at\n\n)\nselect *\nfrom agent_work_time_filtered_statuses", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__reply_time_business_hours": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__reply_time_business_hours", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/reply_time/int_zendesk__reply_time_business_hours.sql", "original_file_path": "models/sla_policy/reply_time/int_zendesk__reply_time_business_hours.sql", "unique_id": "model.zendesk.int_zendesk__reply_time_business_hours", "fqn": ["zendesk", "sla_policy", "reply_time", "int_zendesk__reply_time_business_hours"], "alias": "int_zendesk__reply_time_business_hours", "checksum": {"name": "sha256", "checksum": "9ff6bb4774c2854a7d21ac27ac2690db52bc80920ae8d4e88680631557a9b590"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1715700424.191639, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__reply_time_business_hours`", "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\n-- step 3, determine when an SLA will breach for SLAs that are in business hours\n\nwith ticket_schedules as (\n\n select *\n from {{ ref('int_zendesk__ticket_schedules') }}\n\n), schedule as (\n\n select *\n from {{ ref('int_zendesk__schedule_spine') }}\n\n), sla_policy_applied as (\n\n select *\n from {{ ref('int_zendesk__sla_policy_applied') }}\n\n), users as (\n\n select *\n from {{ ref('int_zendesk__user_aggregates') }}\n\n), ticket_updates as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n\n), ticket_solved_times as (\n select\n ticket_id,\n valid_starting_at as solved_at\n from ticket_updates\n where field_name = 'status'\n and value in ('solved','closed')\n\n), reply_time as (\n select \n ticket_comment.ticket_id,\n ticket_comment.valid_starting_at as reply_at,\n commenter.role\n from ticket_updates as ticket_comment\n join users as 
commenter\n on commenter.user_id = ticket_comment.user_id\n where field_name = 'comment' \n and ticket_comment.is_public\n and commenter.role in ('agent','admin')\n\n), schedule_business_hours as (\n\n select \n schedule_id,\n sum(end_time - start_time) as total_schedule_weekly_business_minutes\n -- referring to stg_zendesk__schedule instead of int_zendesk__schedule_spine just to calculate total minutes\n from {{ ref('stg_zendesk__schedule') }}\n group by 1\n\n), ticket_sla_applied_with_schedules as (\n\n select \n sla_policy_applied.*,\n ticket_schedules.schedule_id,\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('sla_policy_applied.sla_applied_at','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n \"cast(sla_policy_applied.sla_applied_at as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as start_time_in_minutes_from_week,\n schedule_business_hours.total_schedule_weekly_business_minutes,\n {{ dbt_date.week_start('sla_policy_applied.sla_applied_at','UTC') }} as start_week_date\n\n from sla_policy_applied\n left join ticket_schedules on sla_policy_applied.ticket_id = ticket_schedules.ticket_id\n and {{ fivetran_utils.timestamp_add('second', -1, 'ticket_schedules.schedule_created_at') }} <= sla_policy_applied.sla_applied_at\n and {{ fivetran_utils.timestamp_add('second', -1, 'ticket_schedules.schedule_invalidated_at') }} > sla_policy_applied.sla_applied_at\n left join schedule_business_hours \n on ticket_schedules.schedule_id = schedule_business_hours.schedule_id\n where sla_policy_applied.in_business_hours\n and metric in ('next_reply_time', 'first_reply_time')\n\n), first_reply_solve_times as (\n select\n ticket_sla_applied_with_schedules.ticket_id,\n ticket_sla_applied_with_schedules.ticket_created_at,\n ticket_sla_applied_with_schedules.valid_starting_at,\n ticket_sla_applied_with_schedules.ticket_current_status,\n ticket_sla_applied_with_schedules.metric,\n ticket_sla_applied_with_schedules.latest_sla,\n ticket_sla_applied_with_schedules.sla_applied_at,\n ticket_sla_applied_with_schedules.target,\n ticket_sla_applied_with_schedules.in_business_hours,\n ticket_sla_applied_with_schedules.sla_policy_name,\n ticket_sla_applied_with_schedules.schedule_id,\n ticket_sla_applied_with_schedules.start_time_in_minutes_from_week,\n ticket_sla_applied_with_schedules.total_schedule_weekly_business_minutes,\n ticket_sla_applied_with_schedules.start_week_date,\n min(reply_time.reply_at) as first_reply_time,\n min(ticket_solved_times.solved_at) as first_solved_time\n from ticket_sla_applied_with_schedules\n left join reply_time\n on reply_time.ticket_id = ticket_sla_applied_with_schedules.ticket_id\n and reply_time.reply_at > ticket_sla_applied_with_schedules.sla_applied_at\n left join ticket_solved_times\n on ticket_sla_applied_with_schedules.ticket_id = ticket_solved_times.ticket_id\n and ticket_solved_times.solved_at > ticket_sla_applied_with_schedules.sla_applied_at\n {{ dbt_utils.group_by(n=14) }}\n\n), week_index_calc as (\n select \n *,\n {{ dbt.datediff(\"sla_applied_at\", \"least(coalesce(first_reply_time, \" ~ dbt.current_timestamp() ~ \"), coalesce(first_solved_time, \" ~ dbt.current_timestamp() ~ \"))\", \"week\") }} + 1 as week_index\n from first_reply_solve_times\n\n), weeks as (\n\n {{ dbt_utils.generate_series(52) }}\n\n), weeks_cross_ticket_sla_applied as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select\n week_index_calc.*,\n cast(weeks.generated_number - 1 as {{ 
dbt.type_int() }}) as week_number\n\n from week_index_calc\n cross join weeks\n where week_index >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_sla_applied.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time,\n cast((7*24*60) as {{ dbt.type_int() }}) as ticket_week_end_time\n from weeks_cross_ticket_sla_applied\n\n), intercepted_periods as (\n\n select \n weekly_periods.*,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n (schedule.end_time_utc - greatest(ticket_week_start_time,schedule.start_time_utc)) as lapsed_business_minutes,\n sum(schedule.end_time_utc - greatest(ticket_week_start_time,schedule.start_time_utc)) over \n (partition by ticket_id, metric, sla_applied_at \n order by week_number, schedule.start_time_utc\n rows between unbounded preceding and current row) as sum_lapsed_business_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast ({{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time', from_date_or_timestamp='start_week_date') }} as date) > cast(schedule.valid_from as date)\n and cast ({{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time', from_date_or_timestamp='start_week_date') }} as date) < cast(schedule.valid_until as date)\n\n), intercepted_periods_with_breach_flag as (\n \n select \n *,\n target - sum_lapsed_business_minutes as remaining_minutes,\n case when (target - sum_lapsed_business_minutes) < 0 \n and \n (lag(target - sum_lapsed_business_minutes) over\n (partition by ticket_id, metric, sla_applied_at order by week_number, schedule_start_time) >= 0 \n or \n lag(target - sum_lapsed_business_minutes) over\n (partition by ticket_id, metric, sla_applied_at order by week_number, schedule_start_time) is null) \n then true else false end as is_breached_during_schedule -- this flags the scheduled period on which the breach took place\n from intercepted_periods\n\n), intercepted_periods_with_breach_flag_calculated as (\n\n select\n *,\n schedule_end_time + remaining_minutes as breached_at_minutes,\n {{ dbt.date_trunc('week', 'sla_applied_at') }} as starting_point,\n {{ fivetran_utils.timestamp_add(\n \"minute\",\n \"cast(((7*24*60) * week_number) + (schedule_end_time + remaining_minutes) as \" ~ dbt.type_int() ~ \" )\",\n \"cast(\" ~ dbt_date.week_start('sla_applied_at','UTC') ~ \" as \" ~ dbt.type_timestamp() ~ \")\" ) }} as sla_breach_at,\n {{ fivetran_utils.timestamp_add(\n \"minute\",\n \"cast(((7*24*60) * week_number) + (schedule_start_time) as \" ~ dbt.type_int() ~ \" )\",\n \"cast(\" ~ dbt_date.week_start('sla_applied_at','UTC') ~ \" as \" ~ dbt.type_timestamp() ~ \")\" ) }} as sla_schedule_start_at,\n {{ fivetran_utils.timestamp_add(\n \"minute\",\n \"cast(((7*24*60) * week_number) + (schedule_end_time) as \" ~ dbt.type_int() ~ \" )\",\n \"cast(\" ~ dbt_date.week_start('sla_applied_at','UTC') ~ \" as \" ~ dbt.type_timestamp() ~ \")\" ) }} as sla_schedule_end_at,\n {{ 
dbt_date.week_end(\"sla_applied_at\", tz=\"America/UTC\") }} as week_end_date\n from intercepted_periods_with_breach_flag\n\n), reply_time_business_hours_sla as (\n\n select\n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n greatest(sla_applied_at,sla_schedule_start_at) as sla_schedule_start_at,\n sla_schedule_end_at,\n target,\n sum_lapsed_business_minutes,\n in_business_hours,\n sla_breach_at,\n is_breached_during_schedule,\n total_schedule_weekly_business_minutes,\n max(case when is_breached_during_schedule then sla_breach_at else null end) over (partition by ticket_id, metric, sla_applied_at, target) as sla_breach_exact_time,\n week_number\n from intercepted_periods_with_breach_flag_calculated\n\n) \n\nselect * \nfrom reply_time_business_hours_sla", "language": "sql", "refs": [{"name": "int_zendesk__ticket_schedules", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}, {"name": "int_zendesk__sla_policy_applied", "package": null, "version": null}, {"name": "int_zendesk__user_aggregates", "package": null, "version": null}, {"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "stg_zendesk__schedule", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt.datediff", "macro.fivetran_utils.timestamp_add", "macro.dbt_utils.group_by", "macro.dbt.current_timestamp", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd", "macro.dbt.date_trunc", "macro.dbt_date.week_end"], "nodes": ["model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__user_aggregates", "model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__schedule"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/reply_time/int_zendesk__reply_time_business_hours.sql", "compiled": true, "compiled_code": "\n\n-- step 3, determine when an SLA will breach for SLAs that are in business hours\n\nwith ticket_schedules as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_schedules`\n\n), schedule as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__schedule_spine`\n\n), sla_policy_applied as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__sla_policy_applied`\n\n), users as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__user_aggregates`\n\n), ticket_updates as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__updates`\n\n), ticket_solved_times as (\n select\n ticket_id,\n valid_starting_at as solved_at\n from ticket_updates\n where field_name = 'status'\n and value in ('solved','closed')\n\n), reply_time as (\n select \n ticket_comment.ticket_id,\n ticket_comment.valid_starting_at as reply_at,\n commenter.role\n from ticket_updates as ticket_comment\n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n where field_name = 'comment' \n and ticket_comment.is_public\n and commenter.role in ('agent','admin')\n\n), schedule_business_hours as (\n\n select \n schedule_id,\n sum(end_time - start_time) as total_schedule_weekly_business_minutes\n -- referring to stg_zendesk__schedule instead of 
int_zendesk__schedule_spine just to calculate total minutes\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__schedule`\n group by 1\n\n), ticket_sla_applied_with_schedules as (\n\n select \n sla_policy_applied.*,\n ticket_schedules.schedule_id,\n (\n\n datetime_diff(\n cast(cast(sla_policy_applied.sla_applied_at as timestamp) as datetime),\n cast(cast(cast(timestamp_trunc(\n cast(sla_policy_applied.sla_applied_at as timestamp),\n week\n ) as date)as timestamp) as datetime),\n second\n )\n\n /60\n ) as start_time_in_minutes_from_week,\n schedule_business_hours.total_schedule_weekly_business_minutes,\n cast(timestamp_trunc(\n cast(sla_policy_applied.sla_applied_at as timestamp),\n week\n ) as date) as start_week_date\n\n from sla_policy_applied\n left join ticket_schedules on sla_policy_applied.ticket_id = ticket_schedules.ticket_id\n and \n\n timestamp_add(ticket_schedules.schedule_created_at, interval -1 second)\n\n <= sla_policy_applied.sla_applied_at\n and \n\n timestamp_add(ticket_schedules.schedule_invalidated_at, interval -1 second)\n\n > sla_policy_applied.sla_applied_at\n left join schedule_business_hours \n on ticket_schedules.schedule_id = schedule_business_hours.schedule_id\n where sla_policy_applied.in_business_hours\n and metric in ('next_reply_time', 'first_reply_time')\n\n), first_reply_solve_times as (\n select\n ticket_sla_applied_with_schedules.ticket_id,\n ticket_sla_applied_with_schedules.ticket_created_at,\n ticket_sla_applied_with_schedules.valid_starting_at,\n ticket_sla_applied_with_schedules.ticket_current_status,\n ticket_sla_applied_with_schedules.metric,\n ticket_sla_applied_with_schedules.latest_sla,\n ticket_sla_applied_with_schedules.sla_applied_at,\n ticket_sla_applied_with_schedules.target,\n ticket_sla_applied_with_schedules.in_business_hours,\n ticket_sla_applied_with_schedules.sla_policy_name,\n ticket_sla_applied_with_schedules.schedule_id,\n ticket_sla_applied_with_schedules.start_time_in_minutes_from_week,\n ticket_sla_applied_with_schedules.total_schedule_weekly_business_minutes,\n ticket_sla_applied_with_schedules.start_week_date,\n min(reply_time.reply_at) as first_reply_time,\n min(ticket_solved_times.solved_at) as first_solved_time\n from ticket_sla_applied_with_schedules\n left join reply_time\n on reply_time.ticket_id = ticket_sla_applied_with_schedules.ticket_id\n and reply_time.reply_at > ticket_sla_applied_with_schedules.sla_applied_at\n left join ticket_solved_times\n on ticket_sla_applied_with_schedules.ticket_id = ticket_solved_times.ticket_id\n and ticket_solved_times.solved_at > ticket_sla_applied_with_schedules.sla_applied_at\n group by 1,2,3,4,5,6,7,8,9,10,11,12,13,14\n\n), week_index_calc as (\n select \n *,\n \n\n datetime_diff(\n cast(least(coalesce(first_reply_time, current_timestamp()), coalesce(first_solved_time, current_timestamp())) as datetime),\n cast(sla_applied_at as datetime),\n week\n )\n\n + 1 as week_index\n from first_reply_solve_times\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross 
join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_sla_applied as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select\n week_index_calc.*,\n cast(weeks.generated_number - 1 as INT64) as week_number\n\n from week_index_calc\n cross join weeks\n where week_index >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_sla_applied.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as INT64) as ticket_week_start_time,\n cast((7*24*60) as INT64) as ticket_week_end_time\n from weeks_cross_ticket_sla_applied\n\n), intercepted_periods as (\n\n select \n weekly_periods.*,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n (schedule.end_time_utc - greatest(ticket_week_start_time,schedule.start_time_utc)) as lapsed_business_minutes,\n sum(schedule.end_time_utc - greatest(ticket_week_start_time,schedule.start_time_utc)) over \n (partition by ticket_id, metric, sla_applied_at \n order by week_number, schedule.start_time_utc\n rows between unbounded preceding and current row) as sum_lapsed_business_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast (\n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_end_time minute\n )\n\n as date) > cast(schedule.valid_from as date)\n and cast (\n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_start_time minute\n )\n\n as date) < cast(schedule.valid_until as date)\n\n), intercepted_periods_with_breach_flag as (\n \n select \n *,\n target - sum_lapsed_business_minutes as remaining_minutes,\n case when (target - sum_lapsed_business_minutes) < 0 \n and \n (lag(target - sum_lapsed_business_minutes) over\n (partition by ticket_id, metric, sla_applied_at order by week_number, schedule_start_time) >= 0 \n or \n lag(target - sum_lapsed_business_minutes) over\n (partition by ticket_id, metric, sla_applied_at order by week_number, schedule_start_time) is null) \n then true else false end as is_breached_during_schedule -- this flags the scheduled period on which the breach took place\n from intercepted_periods\n\n), intercepted_periods_with_breach_flag_calculated as (\n\n select\n *,\n schedule_end_time + remaining_minutes as breached_at_minutes,\n timestamp_trunc(\n cast(sla_applied_at as timestamp),\n week\n ) as starting_point,\n \n\n timestamp_add(cast(cast(timestamp_trunc(\n cast(sla_applied_at as timestamp),\n week\n ) as date) as timestamp), interval cast(((7*24*60) * week_number) + (schedule_end_time + remaining_minutes) as INT64 ) minute)\n\n as sla_breach_at,\n \n\n timestamp_add(cast(cast(timestamp_trunc(\n cast(sla_applied_at as timestamp),\n week\n ) as date) as timestamp), interval cast(((7*24*60) * week_number) + (schedule_start_time) as INT64 ) minute)\n\n as sla_schedule_start_at,\n \n\n 
timestamp_add(cast(cast(timestamp_trunc(\n cast(sla_applied_at as timestamp),\n week\n ) as date) as timestamp), interval cast(((7*24*60) * week_number) + (schedule_end_time) as INT64 ) minute)\n\n as sla_schedule_end_at,\n cast(\n \n\n datetime_add(\n cast( \n\n datetime_add(\n cast( timestamp_trunc(\n cast(sla_applied_at as timestamp),\n week\n ) as datetime),\n interval 1 week\n )\n\n as datetime),\n interval -1 day\n )\n\n\n as date) as week_end_date\n from intercepted_periods_with_breach_flag\n\n), reply_time_business_hours_sla as (\n\n select\n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n greatest(sla_applied_at,sla_schedule_start_at) as sla_schedule_start_at,\n sla_schedule_end_at,\n target,\n sum_lapsed_business_minutes,\n in_business_hours,\n sla_breach_at,\n is_breached_during_schedule,\n total_schedule_weekly_business_minutes,\n max(case when is_breached_during_schedule then sla_breach_at else null end) over (partition by ticket_id, metric, sla_applied_at, target) as sla_breach_exact_time,\n week_number\n from intercepted_periods_with_breach_flag_calculated\n\n) \n\nselect * \nfrom reply_time_business_hours_sla", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__reply_time_calendar_hours": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__reply_time_calendar_hours", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/reply_time/int_zendesk__reply_time_calendar_hours.sql", "original_file_path": "models/sla_policy/reply_time/int_zendesk__reply_time_calendar_hours.sql", "unique_id": "model.zendesk.int_zendesk__reply_time_calendar_hours", "fqn": ["zendesk", "sla_policy", "reply_time", "int_zendesk__reply_time_calendar_hours"], "alias": "int_zendesk__reply_time_calendar_hours", "checksum": {"name": "sha256", "checksum": "6ec2775efbac4d405efd0b30a1ec5c593e140c3f4a1be4ff8df7fd0cd4791a2e"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1715700424.202933, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__reply_time_calendar_hours`", "raw_code": "--REPLY TIME SLA\n-- step 2, figure out when the sla will breach for sla's in calendar hours. 
The calculation is relatively straightforward.\n\nwith sla_policy_applied as (\n\n select *\n from {{ ref('int_zendesk__sla_policy_applied') }}\n\n), final as (\n select\n *,\n {{ fivetran_utils.timestamp_add(\n \"minute\",\n \"cast(target as \" ~ dbt.type_int() ~ \" )\",\n \"sla_applied_at\" ) }} as sla_breach_at\n from sla_policy_applied\n where not in_business_hours\n and metric in ('next_reply_time', 'first_reply_time')\n\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__sla_policy_applied", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_int", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__sla_policy_applied"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/reply_time/int_zendesk__reply_time_calendar_hours.sql", "compiled": true, "compiled_code": "--REPLY TIME SLA\n-- step 2, figure out when the sla will breach for sla's in calendar hours. The calculation is relatively straightforward.\n\nwith sla_policy_applied as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__sla_policy_applied`\n\n), final as (\n select\n *,\n \n\n timestamp_add(sla_applied_at, interval cast(target as INT64 ) minute)\n\n as sla_breach_at\n from sla_policy_applied\n where not in_business_hours\n and metric in ('next_reply_time', 'first_reply_time')\n\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__reply_time_combined": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__reply_time_combined", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/reply_time/int_zendesk__reply_time_combined.sql", "original_file_path": "models/sla_policy/reply_time/int_zendesk__reply_time_combined.sql", "unique_id": "model.zendesk.int_zendesk__reply_time_combined", "fqn": ["zendesk", "sla_policy", "reply_time", "int_zendesk__reply_time_combined"], "alias": "int_zendesk__reply_time_combined", "checksum": {"name": "sha256", "checksum": "3a7a8ddea0400ea314ff4ae83b81654414788634e76af330bf27c384733ac43b"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1715700424.205228, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__reply_time_combined`", "raw_code": "with reply_time_calendar_hours_sla as (\n \n select *\n from {{ ref('int_zendesk__reply_time_calendar_hours') }}\n\n{% if var('using_schedules', True) %}\n\n), 
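-- editorial note (not part of the packaged model): this CTE and the matching branch of the union below sit inside\n-- Jinja if-blocks keyed on var('using_schedules', True), so setting the using_schedules var to false in\n-- dbt_project.yml compiles the business-hours branch away and only calendar-hour SLAs flow through.\n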
reply_time_business_hours_sla as (\n\n select *\n from {{ ref('int_zendesk__reply_time_business_hours') }}\n\n{% endif %}\n\n), ticket_updates as (\n select *\n from {{ ref('int_zendesk__updates') }}\n\n), users as (\n\n select *\n from {{ ref('int_zendesk__user_aggregates') }}\n\n), reply_time_breached_at as (\n\n select \n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n sla_applied_at as sla_schedule_start_at,\n cast(null as timestamp) as sla_schedule_end_at,\n cast(null as {{ dbt.type_numeric() }}) as sum_lapsed_business_minutes,\n target,\n in_business_hours,\n sla_breach_at,\n cast(null as {{ dbt.type_numeric() }}) as week_number,\n cast(null as {{ dbt.type_numeric() }}) as total_schedule_weekly_business_minutes\n from reply_time_calendar_hours_sla\n\n{% if var('using_schedules', True) %}\n\n union all\n\n select \n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n sla_schedule_start_at,\n sla_schedule_end_at,\n sum_lapsed_business_minutes,\n target,\n in_business_hours,\n sla_breach_exact_time as sla_breach_at,\n week_number,\n total_schedule_weekly_business_minutes\n from reply_time_business_hours_sla\n{% endif %}\n\n-- Now that we have the breach time, see when the first reply after the sla policy was applied took place.\n), ticket_solved_times as (\n select\n ticket_id,\n valid_starting_at as solved_at\n from ticket_updates\n where field_name = 'status'\n and value in ('solved','closed')\n\n), reply_time as (\n select \n ticket_comment.ticket_id,\n ticket_comment.valid_starting_at as reply_at,\n commenter.role\n from ticket_updates as ticket_comment\n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n where field_name = 'comment' \n and ticket_comment.is_public\n and commenter.role in ('agent','admin')\n\n), reply_time_breached_at_with_next_reply_timestamp as (\n\n select \n reply_time_breached_at.ticket_id,\n reply_time_breached_at.sla_policy_name,\n reply_time_breached_at.metric,\n reply_time_breached_at.ticket_created_at,\n reply_time_breached_at.sla_applied_at,\n reply_time_breached_at.sum_lapsed_business_minutes,\n reply_time_breached_at.target,\n reply_time_breached_at.in_business_hours,\n reply_time_breached_at.sla_breach_at,\n reply_time_breached_at.week_number,\n min(reply_time_breached_at.sla_schedule_start_at) as sla_schedule_start_at,\n min(reply_time_breached_at.sla_schedule_end_at) as sla_schedule_end_at,\n min(reply_at) as agent_reply_at,\n min(solved_at) as next_solved_at\n from reply_time_breached_at\n left join reply_time\n on reply_time.ticket_id = reply_time_breached_at.ticket_id\n and reply_time.reply_at > reply_time_breached_at.sla_applied_at\n left join ticket_solved_times\n on reply_time_breached_at.ticket_id = ticket_solved_times.ticket_id\n and ticket_solved_times.solved_at > reply_time_breached_at.sla_applied_at\n {{ dbt_utils.group_by(n=10) }}\n\n), lagging_time_block as (\n select\n *,\n row_number() over (partition by ticket_id, metric, sla_applied_at order by sla_schedule_start_at) as day_index,\n lead(sla_schedule_start_at) over (partition by ticket_id, sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at) as next_schedule_start,\n min(sla_breach_at) over (partition by sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at rows unbounded preceding) as first_sla_breach_at,\n\t\tcoalesce(lag(sum_lapsed_business_minutes) over (partition by sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at), 0) as 
sum_lapsed_business_minutes_new,\n {{ dbt.datediff(\"sla_schedule_start_at\", \"agent_reply_at\", 'second') }} / 60 as total_runtime_minutes -- total minutes from sla_schedule_start_at and agent reply time, before taking into account SLA end time\n from reply_time_breached_at_with_next_reply_timestamp\n\n), filtered_reply_times as (\n select\n *\n from lagging_time_block\n where (\n in_business_hours\n and ((\n agent_reply_at >= sla_schedule_start_at and agent_reply_at <= sla_schedule_end_at) -- ticket is replied to between a schedule window\n or (agent_reply_at < sla_schedule_start_at and sum_lapsed_business_minutes_new = 0 and sla_breach_at = first_sla_breach_at and day_index = 1) -- ticket is replied to before any schedule begins and no business minutes have been spent on it\n or (agent_reply_at is null and next_solved_at >= sla_schedule_start_at and next_solved_at < next_schedule_start) -- There are no reply times, but the ticket is closed and we should capture the closed date as the first and/or next reply time if there is not one preceding.\n or (next_solved_at is null and agent_reply_at is null and {{ dbt.current_timestamp() }} >= sla_schedule_start_at and ({{ dbt.current_timestamp() }} < next_schedule_start or next_schedule_start is null)) -- ticket is not replied to and therefore active. But only bring through the active SLA record that is most recent (after the last SLA schedule starts but before the next, or if there does not exist a next SLA schedule start time) \n or (agent_reply_at > sla_schedule_end_at and (agent_reply_at < next_schedule_start or next_schedule_start is null)) -- ticket is replied to outside sla schedule hours\n ) and sla_schedule_start_at <= {{ dbt.current_timestamp() }}) -- To help limit the data we do not want to bring through any schedule rows in the future.\n or not in_business_hours\n\n), reply_time_breached_at_remove_old_sla as (\n select\n *,\n {{ dbt.current_timestamp() }} as current_time_check,\n lead(sla_applied_at) over (partition by ticket_id, metric, in_business_hours order by sla_applied_at) as updated_sla_policy_starts_at,\n case when \n lead(sla_applied_at) over (partition by ticket_id, metric, in_business_hours order by sla_applied_at) --updated sla policy start at time\n < sla_breach_at then true else false end as is_stale_sla_policy,\n case when (sla_breach_at < agent_reply_at and sla_breach_at < next_solved_at)\n or (sla_breach_at < agent_reply_at and next_solved_at is null)\n or (agent_reply_at is null and sla_breach_at < next_solved_at)\n or (agent_reply_at is null and next_solved_at is null)\n then true\n else false\n end as is_sla_breached,\n sum_lapsed_business_minutes_new + total_runtime_minutes as total_new_minutes -- add total runtime to sum_lapsed_business_minutes_new (the sum_lapsed_business_minutes from prior row)\n from filtered_reply_times\n\n), reply_time_breach as ( \n select \n *,\n case when is_sla_breached\n then sla_breach_at -- If the SLA was breached then record that time as the breach \n else coalesce(agent_reply_at, next_solved_at) -- If the SLA was not breached then record either the agent_reply_at or next_solved_at as the breach event time as it was achieved.\n end as sla_update_at,\n case when total_runtime_minutes < 0 -- agent has already replied prior to this SLA schedule\n then 0 -- so don't add new minutes to the SLA\n when total_new_minutes > sum_lapsed_business_minutes -- if total runtime, regardless of when the SLA schedule ended, is more than the total lapsed business minutes, that means the agent 
replied after the SLA schedule\n then sum_lapsed_business_minutes -- the elapsed time after the SLA end time should not be calculated as part of the business minutes, therefore sla_elapsed_time should only be sum_lapsed_business_minutes\n else sum_lapsed_business_minutes_new + ({{ dbt.datediff(\"sla_schedule_start_at\", \"coalesce(agent_reply_at, next_solved_at, current_time_check)\", 'second') }} / 60) -- otherwise, the sla_elapsed_time will be sum_lapsed_business_minutes_new (the prior record's sum_lapsed_business_minutes) plus the minutes between SLA schedule start and agent_reply_time. If the agent hasn't replied yet, then the minute counter is still running, hence the coalesce of agent_reply_time and current_time_check.\n end as sla_elapsed_time\n from reply_time_breached_at_remove_old_sla \n)\n\nselect *\nfrom reply_time_breach", "language": "sql", "refs": [{"name": "int_zendesk__reply_time_calendar_hours", "package": null, "version": null}, {"name": "int_zendesk__reply_time_business_hours", "package": null, "version": null}, {"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "int_zendesk__user_aggregates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_numeric", "macro.dbt_utils.group_by", "macro.dbt.datediff", "macro.dbt.current_timestamp"], "nodes": ["model.zendesk.int_zendesk__reply_time_calendar_hours", "model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__updates", "model.zendesk.int_zendesk__user_aggregates"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/reply_time/int_zendesk__reply_time_combined.sql", "compiled": true, "compiled_code": "with reply_time_calendar_hours_sla as (\n \n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__reply_time_calendar_hours`\n\n\n\n), reply_time_business_hours_sla as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__reply_time_business_hours`\n\n\n\n), ticket_updates as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__updates`\n\n), users as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__user_aggregates`\n\n), reply_time_breached_at as (\n\n select \n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n sla_applied_at as sla_schedule_start_at,\n cast(null as timestamp) as sla_schedule_end_at,\n cast(null as numeric) as sum_lapsed_business_minutes,\n target,\n in_business_hours,\n sla_breach_at,\n cast(null as numeric) as week_number,\n cast(null as numeric) as total_schedule_weekly_business_minutes\n from reply_time_calendar_hours_sla\n\n\n\n union all\n\n select \n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n sla_schedule_start_at,\n sla_schedule_end_at,\n sum_lapsed_business_minutes,\n target,\n in_business_hours,\n sla_breach_exact_time as sla_breach_at,\n week_number,\n total_schedule_weekly_business_minutes\n from reply_time_business_hours_sla\n\n\n-- Now that we have the breach time, see when the first reply after the sla policy was applied took place.\n), ticket_solved_times as (\n select\n ticket_id,\n valid_starting_at as solved_at\n from ticket_updates\n where field_name = 'status'\n and value in ('solved','closed')\n\n), reply_time as (\n select \n ticket_comment.ticket_id,\n ticket_comment.valid_starting_at as reply_at,\n commenter.role\n 
from ticket_updates as ticket_comment\n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n where field_name = 'comment' \n and ticket_comment.is_public\n and commenter.role in ('agent','admin')\n\n), reply_time_breached_at_with_next_reply_timestamp as (\n\n select \n reply_time_breached_at.ticket_id,\n reply_time_breached_at.sla_policy_name,\n reply_time_breached_at.metric,\n reply_time_breached_at.ticket_created_at,\n reply_time_breached_at.sla_applied_at,\n reply_time_breached_at.sum_lapsed_business_minutes,\n reply_time_breached_at.target,\n reply_time_breached_at.in_business_hours,\n reply_time_breached_at.sla_breach_at,\n reply_time_breached_at.week_number,\n min(reply_time_breached_at.sla_schedule_start_at) as sla_schedule_start_at,\n min(reply_time_breached_at.sla_schedule_end_at) as sla_schedule_end_at,\n min(reply_at) as agent_reply_at,\n min(solved_at) as next_solved_at\n from reply_time_breached_at\n left join reply_time\n on reply_time.ticket_id = reply_time_breached_at.ticket_id\n and reply_time.reply_at > reply_time_breached_at.sla_applied_at\n left join ticket_solved_times\n on reply_time_breached_at.ticket_id = ticket_solved_times.ticket_id\n and ticket_solved_times.solved_at > reply_time_breached_at.sla_applied_at\n group by 1,2,3,4,5,6,7,8,9,10\n\n), lagging_time_block as (\n select\n *,\n row_number() over (partition by ticket_id, metric, sla_applied_at order by sla_schedule_start_at) as day_index,\n lead(sla_schedule_start_at) over (partition by ticket_id, sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at) as next_schedule_start,\n min(sla_breach_at) over (partition by sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at rows unbounded preceding) as first_sla_breach_at,\n\t\tcoalesce(lag(sum_lapsed_business_minutes) over (partition by sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at), 0) as sum_lapsed_business_minutes_new,\n \n\n datetime_diff(\n cast(agent_reply_at as datetime),\n cast(sla_schedule_start_at as datetime),\n second\n )\n\n / 60 as total_runtime_minutes -- total minutes from sla_schedule_start_at and agent reply time, before taking into account SLA end time\n from reply_time_breached_at_with_next_reply_timestamp\n\n), filtered_reply_times as (\n select\n *\n from lagging_time_block\n where (\n in_business_hours\n and ((\n agent_reply_at >= sla_schedule_start_at and agent_reply_at <= sla_schedule_end_at) -- ticket is replied to between a schedule window\n or (agent_reply_at < sla_schedule_start_at and sum_lapsed_business_minutes_new = 0 and sla_breach_at = first_sla_breach_at and day_index = 1) -- ticket is replied to before any schedule begins and no business minutes have been spent on it\n or (agent_reply_at is null and next_solved_at >= sla_schedule_start_at and next_solved_at < next_schedule_start) -- There are no reply times, but the ticket is closed and we should capture the closed date as the first and/or next reply time if there is not one preceding.\n or (next_solved_at is null and agent_reply_at is null and current_timestamp() >= sla_schedule_start_at and (current_timestamp() < next_schedule_start or next_schedule_start is null)) -- ticket is not replied to and therefore active. 
But only bring through the active SLA record that is most recent (after the last SLA schedule starts but before the next, or if there does not exist a next SLA schedule start time) \n or (agent_reply_at > sla_schedule_end_at and (agent_reply_at < next_schedule_start or next_schedule_start is null)) -- ticket is replied to outside sla schedule hours\n ) and sla_schedule_start_at <= current_timestamp()) -- To help limit the data we do not want to bring through any schedule rows in the future.\n or not in_business_hours\n\n), reply_time_breached_at_remove_old_sla as (\n select\n *,\n current_timestamp() as current_time_check,\n lead(sla_applied_at) over (partition by ticket_id, metric, in_business_hours order by sla_applied_at) as updated_sla_policy_starts_at,\n case when \n lead(sla_applied_at) over (partition by ticket_id, metric, in_business_hours order by sla_applied_at) --updated sla policy start at time\n < sla_breach_at then true else false end as is_stale_sla_policy,\n case when (sla_breach_at < agent_reply_at and sla_breach_at < next_solved_at)\n or (sla_breach_at < agent_reply_at and next_solved_at is null)\n or (agent_reply_at is null and sla_breach_at < next_solved_at)\n or (agent_reply_at is null and next_solved_at is null)\n then true\n else false\n end as is_sla_breached,\n sum_lapsed_business_minutes_new + total_runtime_minutes as total_new_minutes -- add total runtime to sum_lapsed_business_minutes_new (the sum_lapsed_business_minutes from prior row)\n from filtered_reply_times\n\n), reply_time_breach as ( \n select \n *,\n case when is_sla_breached\n then sla_breach_at -- If the SLA was breached then record that time as the breach \n else coalesce(agent_reply_at, next_solved_at) -- If the SLA was not breached then record either the agent_reply_at or next_solved_at as the breach event time as it was achieved.\n end as sla_update_at,\n case when total_runtime_minutes < 0 -- agent has already replied prior to this SLA schedule\n then 0 -- so don't add new minutes to the SLA\n when total_new_minutes > sum_lapsed_business_minutes -- if total runtime, regardless of when the SLA schedule ended, is more than the total lapsed business minutes, that means the agent replied after the SLA schedule\n then sum_lapsed_business_minutes -- the elapsed time after the SLA end time should not be calculated as part of the business minutes, therefore sla_elapsed_time should only be sum_lapsed_business_minutes\n else sum_lapsed_business_minutes_new + (\n\n datetime_diff(\n cast(coalesce(agent_reply_at, next_solved_at, current_time_check) as datetime),\n cast(sla_schedule_start_at as datetime),\n second\n )\n\n / 60) -- otherwise, the sla_elapsed_time will be sum_lapsed_business_minutes_new (the prior record's sum_lapsed_business_minutes) plus the minutes between SLA schedule start and agent_reply_time. 
If the agent hasn't replied yet, then the minute counter is still running, hence the coalesce of agent_reply_time and current_time_check.\n end as sla_elapsed_time\n from reply_time_breached_at_remove_old_sla \n)\n\nselect *\nfrom reply_time_breach", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__requester_wait_time_calendar_hours": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__requester_wait_time_calendar_hours", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/requester_wait_time/int_zendesk__requester_wait_time_calendar_hours.sql", "original_file_path": "models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_calendar_hours.sql", "unique_id": "model.zendesk.int_zendesk__requester_wait_time_calendar_hours", "fqn": ["zendesk", "sla_policy", "requester_wait_time", "int_zendesk__requester_wait_time_calendar_hours"], "alias": "int_zendesk__requester_wait_time_calendar_hours", "checksum": {"name": "sha256", "checksum": "adaa86b537177e2792f3b8e48def56a520c6a442b11f3859c649f549d4b60087"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1715700424.2102249, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__requester_wait_time_calendar_hours`", "raw_code": "-- Calculate breach time for requester wait time, calendar hours\nwith requester_wait_time_filtered_statuses as (\n\n select *\n from {{ ref('int_zendesk__requester_wait_time_filtered_statuses') }}\n where not in_business_hours\n\n), requester_wait_time_calendar_minutes as (\n\n select \n *,\n {{ dbt.datediff(\n 'valid_starting_at',\n 'valid_ending_at',\n 'minute' )}} as calendar_minutes,\n sum({{ dbt.datediff(\n 'valid_starting_at', \n 'valid_ending_at', \n 'minute') }} ) \n over (partition by ticket_id, sla_applied_at order by valid_starting_at rows between unbounded preceding and current row) as running_total_calendar_minutes\n from requester_wait_time_filtered_statuses\n\n), requester_wait_time_calendar_minutes_flagged as (\n\nselect \n requester_wait_time_calendar_minutes.*,\n target - running_total_calendar_minutes as remaining_target_minutes,\n case when (target - running_total_calendar_minutes) < 0 \n and \n (lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) >= 0 \n or \n lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) is null) \n then true else false end as 
is_breached_during_schedule\n \nfrom requester_wait_time_calendar_minutes\n\n), final as (\n select\n *,\n (remaining_target_minutes + calendar_minutes) as breach_minutes,\n {{ fivetran_utils.timestamp_add(\n 'minute',\n '(remaining_target_minutes + calendar_minutes)',\n 'valid_starting_at', \n ) }} as sla_breach_at\n from requester_wait_time_calendar_minutes_flagged\n\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__requester_wait_time_filtered_statuses", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__requester_wait_time_filtered_statuses"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_calendar_hours.sql", "compiled": true, "compiled_code": "-- Calculate breach time for requester wait time, calendar hours\nwith requester_wait_time_filtered_statuses as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__requester_wait_time_filtered_statuses`\n where not in_business_hours\n\n), requester_wait_time_calendar_minutes as (\n\n select \n *,\n \n\n datetime_diff(\n cast(valid_ending_at as datetime),\n cast(valid_starting_at as datetime),\n minute\n )\n\n as calendar_minutes,\n sum(\n\n datetime_diff(\n cast(valid_ending_at as datetime),\n cast(valid_starting_at as datetime),\n minute\n )\n\n ) \n over (partition by ticket_id, sla_applied_at order by valid_starting_at rows between unbounded preceding and current row) as running_total_calendar_minutes\n from requester_wait_time_filtered_statuses\n\n), requester_wait_time_calendar_minutes_flagged as (\n\nselect \n requester_wait_time_calendar_minutes.*,\n target - running_total_calendar_minutes as remaining_target_minutes,\n case when (target - running_total_calendar_minutes) < 0 \n and \n (lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) >= 0 \n or \n lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) is null) \n then true else false end as is_breached_during_schedule\n \nfrom requester_wait_time_calendar_minutes\n\n), final as (\n select\n *,\n (remaining_target_minutes + calendar_minutes) as breach_minutes,\n \n\n timestamp_add(valid_starting_at, interval (remaining_target_minutes + calendar_minutes) minute)\n\n as sla_breach_at\n from requester_wait_time_calendar_minutes_flagged\n\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__requester_wait_time_business_hours": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__requester_wait_time_business_hours", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/requester_wait_time/int_zendesk__requester_wait_time_business_hours.sql", "original_file_path": "models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_business_hours.sql", "unique_id": "model.zendesk.int_zendesk__requester_wait_time_business_hours", "fqn": ["zendesk", "sla_policy", "requester_wait_time", "int_zendesk__requester_wait_time_business_hours"], "alias": 
"int_zendesk__requester_wait_time_business_hours", "checksum": {"name": "sha256", "checksum": "101f406be8cee0e94ed9a45f338aa5618ac7a9bc030632f0cf8c33008de8394a"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1715700424.213115, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__requester_wait_time_business_hours`", "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\n-- REQUESTER WAIT TIME\n-- This is complicated, as SLAs minutes are only counted while the ticket is in 'new', 'open', and 'on-hold' status.\n\n-- Additionally, for business hours, only 'new', 'open', and 'on-hold' status hours are counted if they are also during business hours\nwith requester_wait_time_filtered_statuses as (\n\n select *\n from {{ ref('int_zendesk__requester_wait_time_filtered_statuses') }}\n where in_business_hours\n\n), schedule as (\n\n select * \n from {{ ref('int_zendesk__schedule_spine') }}\n\n), ticket_schedules as (\n\n select * \n from {{ ref('int_zendesk__ticket_schedules') }}\n \n-- cross schedules with work time\n), ticket_status_crossed_with_schedule as (\n \n select\n requester_wait_time_filtered_statuses.ticket_id,\n requester_wait_time_filtered_statuses.sla_applied_at,\n requester_wait_time_filtered_statuses.target,\n requester_wait_time_filtered_statuses.sla_policy_name,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as valid_starting_at,\n least(valid_ending_at, schedule_invalidated_at) as valid_ending_at,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n valid_starting_at as status_valid_starting_at,\n valid_ending_at as status_valid_ending_at\n\n from requester_wait_time_filtered_statuses\n left join ticket_schedules\n on requester_wait_time_filtered_statuses.ticket_id = ticket_schedules.ticket_id\n where {{ dbt.datediff(\n 'greatest(valid_starting_at, schedule_created_at)', \n 'least(valid_ending_at, schedule_invalidated_at)', \n 'second') }} > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n sla_applied_at,\n target,\n sla_policy_name,\n schedule_id,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('ticket_status_crossed_with_schedule.valid_starting_at','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n \"cast(ticket_status_crossed_with_schedule.valid_starting_at as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as valid_starting_at_in_minutes_from_week,\n ({{ 
dbt.datediff(\n 'ticket_status_crossed_with_schedule.valid_starting_at', \n 'ticket_status_crossed_with_schedule.valid_ending_at',\n 'second') }} /60\n ) as raw_delta_in_minutes,\n {{ dbt_date.week_start('ticket_status_crossed_with_schedule.valid_starting_at','UTC') }} as start_week_date\n\n from ticket_status_crossed_with_schedule\n {{ dbt_utils.group_by(n=10) }}\n\n), weeks as (\n\n {{ dbt_utils.generate_series(208) }}\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((valid_starting_at_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_period_requester_wait_time as (\n\n select \n\n ticket_id,\n sla_applied_at,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n target,\n sla_policy_name,\n valid_starting_at_in_minutes_from_week,\n raw_delta_in_minutes,\n week_number,\n schedule_id,\n start_week_date,\n cast(greatest(0, valid_starting_at_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time_minute,\n cast(least(valid_starting_at_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time_minute\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods_agent as (\n \n select \n weekly_period_requester_wait_time.ticket_id,\n weekly_period_requester_wait_time.sla_applied_at,\n weekly_period_requester_wait_time.target,\n weekly_period_requester_wait_time.sla_policy_name,\n weekly_period_requester_wait_time.valid_starting_at,\n weekly_period_requester_wait_time.valid_ending_at,\n weekly_period_requester_wait_time.week_number,\n weekly_period_requester_wait_time.ticket_week_start_time_minute,\n weekly_period_requester_wait_time.ticket_week_end_time_minute,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time_minute, schedule.end_time_utc) - greatest(weekly_period_requester_wait_time.ticket_week_start_time_minute, schedule.start_time_utc) as scheduled_minutes\n from weekly_period_requester_wait_time\n join schedule on ticket_week_start_time_minute <= schedule.end_time_utc \n and ticket_week_end_time_minute >= schedule.start_time_utc\n and weekly_period_requester_wait_time.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time_minute', from_date_or_timestamp='start_week_date') }} as {{ dbt.type_timestamp() }}) > cast(schedule.valid_from as {{ dbt.type_timestamp() }})\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time_minute', from_date_or_timestamp='start_week_date') }} as {{ dbt.type_timestamp() }}) < cast(schedule.valid_until as {{ dbt.type_timestamp() }})\n \n), intercepted_periods_with_running_total as (\n \n select \n *,\n sum(scheduled_minutes) over \n (partition by ticket_id, 
sla_applied_at \n order by valid_starting_at, week_number, schedule_end_time\n rows between unbounded preceding and current row)\n as running_total_scheduled_minutes\n\n from intercepted_periods_agent\n\n\n), intercepted_periods_agent_with_breach_flag as (\n select \n intercepted_periods_with_running_total.*,\n target - running_total_scheduled_minutes as remaining_target_minutes,\n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) as lag_check,\n case when (target - running_total_scheduled_minutes) = 0 then true\n when (target - running_total_scheduled_minutes) < 0 \n and \n (lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) > 0 \n or \n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) is null) \n then true else false end as is_breached_during_schedule\n \n from intercepted_periods_with_running_total\n\n), intercepted_periods_agent_filtered as (\n\n select\n *,\n (remaining_target_minutes + scheduled_minutes) as breach_minutes,\n greatest(ticket_week_start_time_minute, schedule_start_time) + (remaining_target_minutes + scheduled_minutes) as breach_minutes_from_week\n from intercepted_periods_agent_with_breach_flag\n\n), requester_wait_business_breach as (\n \n select \n *,\n {{ fivetran_utils.timestamp_add(\n \"minute\",\n \"cast(((7*24*60) * week_number) + breach_minutes_from_week as \" ~ dbt.type_int() ~ \" )\",\n \"\" ~ dbt.date_trunc('week', 'valid_starting_at') ~ \"\",\n ) }} as sla_breach_at\n from intercepted_periods_agent_filtered\n\n)\n\nselect * \nfrom requester_wait_business_breach", "language": "sql", "refs": [{"name": "int_zendesk__requester_wait_time_filtered_statuses", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}, {"name": "int_zendesk__ticket_schedules", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt_utils.group_by", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd", "macro.dbt.date_trunc", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__requester_wait_time_filtered_statuses", "model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_schedules"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_business_hours.sql", "compiled": true, "compiled_code": "\n\n-- REQUESTER WAIT TIME\n-- This is complicated, as SLA minutes are only counted while the ticket is in 'new', 'open', and 'on-hold' status.\n\n-- Additionally, for business hours, only 'new', 'open', and 'on-hold' status hours are counted if they are also during business hours\nwith requester_wait_time_filtered_statuses as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__requester_wait_time_filtered_statuses`\n where in_business_hours\n\n), schedule as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__schedule_spine`\n\n), ticket_schedules as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_schedules`\n \n-- cross 
schedules with work time\n), ticket_status_crossed_with_schedule as (\n \n select\n requester_wait_time_filtered_statuses.ticket_id,\n requester_wait_time_filtered_statuses.sla_applied_at,\n requester_wait_time_filtered_statuses.target,\n requester_wait_time_filtered_statuses.sla_policy_name,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as valid_starting_at,\n least(valid_ending_at, schedule_invalidated_at) as valid_ending_at,\n\n -- bringing the following in to determine which schedule (Daylight Savings vs Standard time) to use\n valid_starting_at as status_valid_starting_at,\n valid_ending_at as status_valid_ending_at\n\n from requester_wait_time_filtered_statuses\n left join ticket_schedules\n on requester_wait_time_filtered_statuses.ticket_id = ticket_schedules.ticket_id\n where \n\n datetime_diff(\n cast(least(valid_ending_at, schedule_invalidated_at) as datetime),\n cast(greatest(valid_starting_at, schedule_created_at) as datetime),\n second\n )\n\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n sla_applied_at,\n target,\n sla_policy_name,\n schedule_id,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n (\n\n datetime_diff(\n cast(cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp) as datetime),\n cast(cast(cast(timestamp_trunc(\n cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp),\n week\n ) as date)as timestamp) as datetime),\n second\n )\n\n /60\n ) as valid_starting_at_in_minutes_from_week,\n (\n\n datetime_diff(\n cast(ticket_status_crossed_with_schedule.valid_ending_at as datetime),\n cast(ticket_status_crossed_with_schedule.valid_starting_at as datetime),\n second\n )\n\n /60\n ) as raw_delta_in_minutes,\n cast(timestamp_trunc(\n cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp),\n week\n ) as date) as start_week_date\n\n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7,8,9,10\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as INT64) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((valid_starting_at_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_period_requester_wait_time as (\n\n select \n\n ticket_id,\n sla_applied_at,\n valid_starting_at,\n 
valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n target,\n sla_policy_name,\n valid_starting_at_in_minutes_from_week,\n raw_delta_in_minutes,\n week_number,\n schedule_id,\n start_week_date,\n cast(greatest(0, valid_starting_at_in_minutes_from_week - week_number * (7*24*60)) as INT64) as ticket_week_start_time_minute,\n cast(least(valid_starting_at_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as INT64) as ticket_week_end_time_minute\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods_agent as (\n \n select \n weekly_period_requester_wait_time.ticket_id,\n weekly_period_requester_wait_time.sla_applied_at,\n weekly_period_requester_wait_time.target,\n weekly_period_requester_wait_time.sla_policy_name,\n weekly_period_requester_wait_time.valid_starting_at,\n weekly_period_requester_wait_time.valid_ending_at,\n weekly_period_requester_wait_time.week_number,\n weekly_period_requester_wait_time.ticket_week_start_time_minute,\n weekly_period_requester_wait_time.ticket_week_end_time_minute,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time_minute, schedule.end_time_utc) - greatest(weekly_period_requester_wait_time.ticket_week_start_time_minute, schedule.start_time_utc) as scheduled_minutes\n from weekly_period_requester_wait_time\n join schedule on ticket_week_start_time_minute <= schedule.end_time_utc \n and ticket_week_end_time_minute >= schedule.start_time_utc\n and weekly_period_requester_wait_time.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_end_time_minute minute\n )\n\n as timestamp) > cast(schedule.valid_from as timestamp)\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_start_time_minute minute\n )\n\n as timestamp) < cast(schedule.valid_until as timestamp)\n \n), intercepted_periods_with_running_total as (\n \n select \n *,\n sum(scheduled_minutes) over \n (partition by ticket_id, sla_applied_at \n order by valid_starting_at, week_number, schedule_end_time\n rows between unbounded preceding and current row)\n as running_total_scheduled_minutes\n\n from intercepted_periods_agent\n\n\n), intercepted_periods_agent_with_breach_flag as (\n select \n intercepted_periods_with_running_total.*,\n target - running_total_scheduled_minutes as remaining_target_minutes,\n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) as lag_check,\n case when (target - running_total_scheduled_minutes) = 0 then true\n when (target - running_total_scheduled_minutes) < 0 \n and \n (lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) > 0 \n or \n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) is null) \n then true else false end as is_breached_during_schedule\n \n from 
intercepted_periods_with_running_total\n\n), intercepted_periods_agent_filtered as (\n\n select\n *,\n (remaining_target_minutes + scheduled_minutes) as breach_minutes,\n greatest(ticket_week_start_time_minute, schedule_start_time) + (remaining_target_minutes + scheduled_minutes) as breach_minutes_from_week\n from intercepted_periods_agent_with_breach_flag\n\n), requester_wait_business_breach as (\n \n select \n *,\n \n\n timestamp_add(timestamp_trunc(\n cast(valid_starting_at as timestamp),\n week\n ), interval cast(((7*24*60) * week_number) + breach_minutes_from_week as INT64 ) minute)\n\n as sla_breach_at\n from intercepted_periods_agent_filtered\n\n)\n\nselect * \nfrom requester_wait_business_breach", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__requester_wait_time_filtered_statuses", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/requester_wait_time/int_zendesk__requester_wait_time_filtered_statuses.sql", "original_file_path": "models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_filtered_statuses.sql", "unique_id": "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses", "fqn": ["zendesk", "sla_policy", "requester_wait_time", "int_zendesk__requester_wait_time_filtered_statuses"], "alias": "int_zendesk__requester_wait_time_filtered_statuses", "checksum": {"name": "sha256", "checksum": "1ddb077adfbf13244c13cb12643a6914f5eac17c714885eac834f9e1eee88475"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1715700424.219746, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__requester_wait_time_filtered_statuses`", "raw_code": "with requester_wait_time_sla as (\n\n select *\n from {{ ref('int_zendesk__sla_policy_applied') }}\n where metric = 'requester_wait_time'\n\n), ticket_historical_status as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_status') }}\n \n--This captures the statuses of the ticket while the requester wait time sla was active for the ticket.\n), requester_wait_time_filtered_statuses as (\n\n select \n ticket_historical_status.ticket_id,\n greatest(ticket_historical_status.valid_starting_at, requester_wait_time_sla.sla_applied_at) as valid_starting_at,\n coalesce(\n ticket_historical_status.valid_ending_at, \n {{ fivetran_utils.timestamp_add('day', 30, \"\" ~ dbt.current_timestamp_backcompat() ~ \"\") }} ) as valid_ending_at, --assumes current 
status continues into the future. This is necessary to predict future SLA breaches (not just past).\n ticket_historical_status.status as ticket_status,\n requester_wait_time_sla.sla_applied_at,\n requester_wait_time_sla.target,\n requester_wait_time_sla.sla_policy_name,\n requester_wait_time_sla.ticket_created_at,\n requester_wait_time_sla.in_business_hours\n from ticket_historical_status\n join requester_wait_time_sla\n on ticket_historical_status.ticket_id = requester_wait_time_sla.ticket_id\n where ticket_historical_status.status in ('new', 'open', 'on-hold', 'hold') -- these are the only statuses that count as \"requester wait time\"\n and sla_applied_at < valid_ending_at\n\n)\nselect *\nfrom requester_wait_time_filtered_statuses", "language": "sql", "refs": [{"name": "int_zendesk__sla_policy_applied", "package": null, "version": null}, {"name": "int_zendesk__ticket_historical_status", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__ticket_historical_status"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_filtered_statuses.sql", "compiled": true, "compiled_code": "with requester_wait_time_sla as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__sla_policy_applied`\n where metric = 'requester_wait_time'\n\n), ticket_historical_status as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_status`\n \n--This captures the statuses of the ticket while the requester wait time sla was active for the ticket.\n), requester_wait_time_filtered_statuses as (\n\n select \n ticket_historical_status.ticket_id,\n greatest(ticket_historical_status.valid_starting_at, requester_wait_time_sla.sla_applied_at) as valid_starting_at,\n coalesce(\n ticket_historical_status.valid_ending_at, \n \n\n timestamp_add(current_timestamp, interval 30 day)\n\n ) as valid_ending_at, --assumes current status continues into the future. 
This is necessary to predict future SLA breaches (not just past).\n ticket_historical_status.status as ticket_status,\n requester_wait_time_sla.sla_applied_at,\n requester_wait_time_sla.target,\n requester_wait_time_sla.sla_policy_name,\n requester_wait_time_sla.ticket_created_at,\n requester_wait_time_sla.in_business_hours\n from ticket_historical_status\n join requester_wait_time_sla\n on ticket_historical_status.ticket_id = requester_wait_time_sla.ticket_id\n where ticket_historical_status.status in ('new', 'open', 'on-hold', 'hold') -- these are the only statuses that count as \"requester wait time\"\n and sla_applied_at < valid_ending_at\n\n)\nselect *\nfrom requester_wait_time_filtered_statuses", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_reply_times": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__ticket_reply_times", "resource_type": "model", "package_name": "zendesk", "path": "reply_times/int_zendesk__ticket_reply_times.sql", "original_file_path": "models/reply_times/int_zendesk__ticket_reply_times.sql", "unique_id": "model.zendesk.int_zendesk__ticket_reply_times", "fqn": ["zendesk", "reply_times", "int_zendesk__ticket_reply_times"], "alias": "int_zendesk__ticket_reply_times", "checksum": {"name": "sha256", "checksum": "6de1b30f99a9bbd078c823538ca0e87c5b57d33160f65c290ecd67765e8d4472"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1715700424.2220101, "relation_name": null, "raw_code": "with ticket_public_comments as (\n\n select *\n from {{ ref('int_zendesk__comments_enriched') }}\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commenter was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want the end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date 
else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n ({{ dbt.datediff(\n 'end_user_comment_created_at',\n 'agent_responded_at',\n 'second') }} / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2", "language": "sql", "refs": [{"name": "int_zendesk__comments_enriched", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff"], "nodes": ["model.zendesk.int_zendesk__comments_enriched"]}, "compiled_path": "target/compiled/zendesk/models/reply_times/int_zendesk__ticket_reply_times.sql", "compiled": true, "compiled_code": "with __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__updates`\n where field_name = 'comment'\n\n), users as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user`\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n), ticket_public_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into 
consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commenter was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want the end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n (\n\n datetime_diff(\n cast(agent_responded_at as datetime),\n cast(end_user_comment_created_at as datetime),\n second\n )\n\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__comments_enriched", "sql": " __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__updates`\n where field_name = 'comment'\n\n), users as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user`\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_reply_times_calendar": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__ticket_reply_times_calendar", "resource_type": "model", "package_name": "zendesk", "path": 
"reply_times/int_zendesk__ticket_reply_times_calendar.sql", "original_file_path": "models/reply_times/int_zendesk__ticket_reply_times_calendar.sql", "unique_id": "model.zendesk.int_zendesk__ticket_reply_times_calendar", "fqn": ["zendesk", "reply_times", "int_zendesk__ticket_reply_times_calendar"], "alias": "int_zendesk__ticket_reply_times_calendar", "checksum": {"name": "sha256", "checksum": "6fb6a60134019d78fcfc8c135b4a7887b3ce52ec53d8db463194f7824d2c71c2"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1715700424.224132, "relation_name": null, "raw_code": "with ticket as (\n\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), ticket_reply_times as (\n\n select *\n from {{ ref('int_zendesk__ticket_reply_times') }}\n\n)\n\nselect\n\n ticket.ticket_id,\n sum(case when is_first_comment then reply_time_calendar_minutes\n else null end) as first_reply_time_calendar_minutes,\n sum(reply_time_calendar_minutes) as total_reply_time_calendar_minutes --total combined time the customer waits for internal response\n \nfrom ticket\nleft join ticket_reply_times\n using (ticket_id)\n\ngroup by 1", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "int_zendesk__ticket_reply_times", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk_source.stg_zendesk__ticket", "model.zendesk.int_zendesk__ticket_reply_times"]}, "compiled_path": "target/compiled/zendesk/models/reply_times/int_zendesk__ticket_reply_times_calendar.sql", "compiled": true, "compiled_code": "with __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__updates`\n where field_name = 'comment'\n\n), users as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user`\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as 
previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n), __dbt__cte__int_zendesk__ticket_reply_times as (\nwith ticket_public_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commenter was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want the end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n (\n\n datetime_diff(\n cast(agent_responded_at as datetime),\n cast(end_user_comment_created_at as datetime),\n second\n )\n\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2\n), ticket as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket`\n\n), ticket_reply_times as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times\n\n)\n\nselect\n\n ticket.ticket_id,\n sum(case when is_first_comment then reply_time_calendar_minutes\n else null end) as first_reply_time_calendar_minutes,\n sum(reply_time_calendar_minutes) as total_reply_time_calendar_minutes --total combined time the customer waits for internal response\n \nfrom ticket\nleft join ticket_reply_times\n using (ticket_id)\n\ngroup by 1", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__comments_enriched", "sql": " __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__updates`\n where field_name = 'comment'\n\n), users as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user`\n\n), joined 
as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n)"}, {"id": "model.zendesk.int_zendesk__ticket_reply_times", "sql": " __dbt__cte__int_zendesk__ticket_reply_times as (\nwith ticket_public_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commenter was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want the end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n (\n\n datetime_diff(\n cast(agent_responded_at as datetime),\n cast(end_user_comment_created_at as datetime),\n second\n )\n\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": 
null, "deprecation_date": null}, "model.zendesk.int_zendesk__comments_enriched": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__comments_enriched", "resource_type": "model", "package_name": "zendesk", "path": "reply_times/int_zendesk__comments_enriched.sql", "original_file_path": "models/reply_times/int_zendesk__comments_enriched.sql", "unique_id": "model.zendesk.int_zendesk__comments_enriched", "fqn": ["zendesk", "reply_times", "int_zendesk__comments_enriched"], "alias": "int_zendesk__comments_enriched", "checksum": {"name": "sha256", "checksum": "970004a2aa343ae78a3f810828600c7eca8585428b52b05e4353f9debc6f1af5"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1715700424.225208, "relation_name": null, "raw_code": "with ticket_comment as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n where field_name = 'comment'\n\n), users as (\n\n select *\n from {{ ref('stg_zendesk__user') }}\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "stg_zendesk__user", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__user"]}, "compiled_path": "target/compiled/zendesk/models/reply_times/int_zendesk__comments_enriched.sql", "compiled": true, "compiled_code": "with ticket_comment as (\n\n select *\n from 
`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__updates`\n  where field_name = 'comment'\n\n), users as (\n\n  select *\n  from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user`\n\n), joined as (\n\n  select \n\n    ticket_comment.*,\n    case when commenter.role = 'end-user' then 'external_comment'\n      when commenter.role in ('agent','admin') then 'internal_comment'\n      else 'unknown' end as commenter_role\n    \n  from ticket_comment\n  \n  join users as commenter\n    on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n  /*\n  In int_zendesk__ticket_reply_times we will only be focusing on reply times between public comments.\n  The below union explicitly identifies the previous commenter roles of public and not public comments.\n  */\n  select\n    *,\n    coalesce(\n      lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n      , 'first_comment') \n      as previous_commenter_role\n  from joined\n  where is_public\n\n  union all\n\n  select\n    *,\n    'non_public_comment' as previous_commenter_role\n  from joined\n  where not is_public\n)\n\nselect \n    *,\n    first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n    sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_first_reply_time_business": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__ticket_first_reply_time_business", "resource_type": "model", "package_name": "zendesk", "path": "reply_times/int_zendesk__ticket_first_reply_time_business.sql", "original_file_path": "models/reply_times/int_zendesk__ticket_first_reply_time_business.sql", "unique_id": "model.zendesk.int_zendesk__ticket_first_reply_time_business", "fqn": ["zendesk", "reply_times", "int_zendesk__ticket_first_reply_time_business"], "alias": "int_zendesk__ticket_first_reply_time_business", "checksum": {"name": "sha256", "checksum": "3b0a4efc758ab6f25063ec97d60455b76873e355e0e916fc4670d5d67066430a"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral", "enabled": true}, "created_at": 1715700424.2262988, "relation_name": null, "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\nwith ticket_reply_times as (\n\n  select *\n  from {{ 
ref('int_zendesk__ticket_reply_times') }}\n\n), ticket_schedules as (\n\n  select \n    *\n  from {{ ref('int_zendesk__ticket_schedules') }}\n\n), schedule as (\n\n  select *\n  from {{ ref('int_zendesk__schedule_spine') }}\n\n), first_reply_time as (\n\n  select\n    ticket_id,\n    end_user_comment_created_at,\n    agent_responded_at\n\n  from ticket_reply_times\n  where is_first_comment\n\n), ticket_first_reply_time as (\n\n  select \n    first_reply_time.ticket_id,\n    ticket_schedules.schedule_created_at,\n    ticket_schedules.schedule_invalidated_at,\n    ticket_schedules.schedule_id,\n\n    -- bringing this in to determine which schedule (Daylight Savings vs Standard time) to use\n    min(first_reply_time.agent_responded_at) as agent_responded_at,\n\n    ({{ dbt.datediff(\n        \"cast(\" ~ dbt_date.week_start('ticket_schedules.schedule_created_at','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n        \"cast(ticket_schedules.schedule_created_at as \" ~ dbt.type_timestamp() ~ \")\",\n        'second') }} /60\n      ) as start_time_in_minutes_from_week,\n      greatest(0,\n    (\n      {{ dbt.datediff(\n        'ticket_schedules.schedule_created_at',\n        'least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at))',\n        'second') }}/60\n      )) as raw_delta_in_minutes,\n      {{ dbt_date.week_start('ticket_schedules.schedule_created_at','UTC') }} as start_week_date\n      \n  from first_reply_time\n  join ticket_schedules on first_reply_time.ticket_id = ticket_schedules.ticket_id\n  group by 1, 2, 3, 4\n\n), weeks as (\n\n    {{ dbt_utils.generate_series(208) }}\n\n), weeks_cross_ticket_first_reply as (\n    -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n    select \n\n      ticket_first_reply_time.*,\n      cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number\n\n    from ticket_first_reply_time\n    cross join weeks\n    where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n  \n    select \n\n      weeks_cross_ticket_first_reply.*, \n      -- for each week, at what minute do we start counting?\n      cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time,\n      -- for each week, at what minute do we stop counting?\n      cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time\n    from weeks_cross_ticket_first_reply\n\n), intercepted_periods as (\n\n  select ticket_id,\n    week_number,\n    weekly_periods.schedule_id,\n    ticket_week_start_time,\n    ticket_week_end_time,\n    schedule.start_time_utc as schedule_start_time,\n    schedule.end_time_utc as schedule_end_time,\n    least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n  from weekly_periods\n  join schedule on ticket_week_start_time <= schedule.end_time_utc \n    and ticket_week_end_time >= schedule.start_time_utc\n    and weekly_periods.schedule_id = schedule.schedule_id\n    -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n    -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n    and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time', from_date_or_timestamp='start_week_date') }} as date) > cast(schedule.valid_from as date)\n    and 
cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time', from_date_or_timestamp='start_week_date') }} as date) < cast(schedule.valid_until as date)\n    \n)\n\n  select ticket_id,\n    sum(scheduled_minutes) as first_reply_time_business_minutes\n  from intercepted_periods\n  group by 1", "language": "sql", "refs": [{"name": "int_zendesk__ticket_reply_times", "package": null, "version": null}, {"name": "int_zendesk__ticket_schedules", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt.datediff", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd"], "nodes": ["model.zendesk.int_zendesk__ticket_reply_times", "model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__schedule_spine"]}, "compiled_path": "target/compiled/zendesk/models/reply_times/int_zendesk__ticket_first_reply_time_business.sql", "compiled": true, "compiled_code": "\n\nwith __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n  select *\n  from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__updates`\n  where field_name = 'comment'\n\n), users as (\n\n  select *\n  from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user`\n\n), joined as (\n\n  select \n\n    ticket_comment.*,\n    case when commenter.role = 'end-user' then 'external_comment'\n      when commenter.role in ('agent','admin') then 'internal_comment'\n      else 'unknown' end as commenter_role\n    \n  from ticket_comment\n  \n  join users as commenter\n    on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n  /*\n  In int_zendesk__ticket_reply_times we will only be focusing on reply times between public comments.\n  The below union explicitly identifies the previous commenter roles of public and not public comments.\n  */\n  select\n    *,\n    coalesce(\n      lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n      , 'first_comment') \n      as previous_commenter_role\n  from joined\n  where is_public\n\n  union all\n\n  select\n    *,\n    'non_public_comment' as previous_commenter_role\n  from joined\n  where not is_public\n)\n\nselect \n    *,\n    first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n    sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n),  __dbt__cte__int_zendesk__ticket_reply_times as (\nwith ticket_public_comments as (\n\n    select *\n    from __dbt__cte__int_zendesk__comments_enriched\n    where is_public\n\n), end_user_comments as (\n  \n  select \n    ticket_id,\n    valid_starting_at as end_user_comment_created_at,\n    ticket_created_date,\n    commenter_role,\n    previous_internal_comment_count,\n    previous_commenter_role = 'first_comment' as is_first_comment\n  from ticket_public_comments \n  where (commenter_role = 'external_comment'\n    and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n    or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n  select\n    end_user_comments.ticket_id,\n    -- 
If the commenter was internal, a first comment, and had previous non-public internal comments then we want the ticket created date to be the end user comment created date\n    -- Otherwise we will want the end user comment created date\n    case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n    end_user_comments.is_first_comment,\n    min(case when is_first_comment \n          and end_user_comments.commenter_role != 'external_comment' \n          and (end_user_comments.previous_internal_comment_count > 0)\n            then end_user_comments.end_user_comment_created_at \n          else agent_comments.valid_starting_at end) as agent_responded_at\n  from end_user_comments\n  left join ticket_public_comments as agent_comments\n    on agent_comments.ticket_id = end_user_comments.ticket_id\n    and agent_comments.commenter_role = 'internal_comment'\n    and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n  group by 1,2,3\n\n)\n\n  select\n    *,\n    (\n\n    datetime_diff(\n        cast(agent_responded_at as datetime),\n        cast(end_user_comment_created_at as datetime),\n        second\n        )\n\n / 60) as reply_time_calendar_minutes\n  from reply_timestamps\n  order by 1,2\n), ticket_reply_times as (\n\n  select *\n  from __dbt__cte__int_zendesk__ticket_reply_times\n\n), ticket_schedules as (\n\n  select \n    *\n  from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_schedules`\n\n), schedule as (\n\n  select *\n  from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__schedule_spine`\n\n), first_reply_time as (\n\n  select\n    ticket_id,\n    end_user_comment_created_at,\n    agent_responded_at\n\n  from ticket_reply_times\n  where is_first_comment\n\n), ticket_first_reply_time as (\n\n  select \n    first_reply_time.ticket_id,\n    ticket_schedules.schedule_created_at,\n    ticket_schedules.schedule_invalidated_at,\n    ticket_schedules.schedule_id,\n\n    -- bringing this in to determine which schedule (Daylight Savings vs Standard time) to use\n    min(first_reply_time.agent_responded_at) as agent_responded_at,\n\n    (\n\n    datetime_diff(\n        cast(cast(ticket_schedules.schedule_created_at as timestamp) as datetime),\n        cast(cast(cast(timestamp_trunc(\n        cast(ticket_schedules.schedule_created_at as timestamp),\n        week\n        ) as date)as timestamp) as datetime),\n        second\n        )\n\n /60\n      ) as start_time_in_minutes_from_week,\n      greatest(0,\n    (\n      \n\n    datetime_diff(\n        cast(least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)) as datetime),\n        cast(ticket_schedules.schedule_created_at as datetime),\n        second\n        )\n\n/60\n      )) as raw_delta_in_minutes,\n      cast(timestamp_trunc(\n        cast(ticket_schedules.schedule_created_at as timestamp),\n        week\n        ) as date) as start_week_date\n      \n  from first_reply_time\n  join ticket_schedules on first_reply_time.ticket_id = ticket_schedules.ticket_id\n  group by 1, 2, 3, 4\n\n), weeks as (\n\n    \n\n    \n\n    with p as (\n        select 0 as generated_number union all select 1\n    ), unioned as (\n\n    select\n\n    \n    p0.generated_number * power(2, 0)\n     + \n    \n    p1.generated_number * power(2, 1)\n     + \n    \n    p2.generated_number * power(2, 2)\n     + \n    \n    p3.generated_number * power(2, 3)\n     + \n    \n    p4.generated_number * power(2, 4)\n     + \n    \n    p5.generated_number * power(2, 5)\n     + \n    \n    p6.generated_number * power(2, 6)\n     + \n    \n    p7.generated_number * power(2, 7)\n    \n    \n    + 1\n    as generated_number\n\n    from\n\n    \n    p as p0\n    cross join \n    \n    p as p1\n    cross join \n    \n    p as p2\n    cross join \n    \n    p as p3\n    cross 
join \n    \n    p as p4\n    cross join \n    \n    p as p5\n    cross join \n    \n    p as p6\n    cross join \n    \n    p as p7\n    \n    \n\n    )\n\n    select *\n    from unioned\n    where generated_number <= 208\n    order by generated_number\n\n\n\n), weeks_cross_ticket_first_reply as (\n    -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n    select \n\n      ticket_first_reply_time.*,\n      cast(generated_number - 1 as INT64) as week_number\n\n    from ticket_first_reply_time\n    cross join weeks\n    where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n  \n    select \n\n      weeks_cross_ticket_first_reply.*, \n      -- for each week, at what minute do we start counting?\n      cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as INT64) as ticket_week_start_time,\n      -- for each week, at what minute do we stop counting?\n      cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as INT64) as ticket_week_end_time\n    from weeks_cross_ticket_first_reply\n\n), intercepted_periods as (\n\n  select ticket_id,\n    week_number,\n    weekly_periods.schedule_id,\n    ticket_week_start_time,\n    ticket_week_end_time,\n    schedule.start_time_utc as schedule_start_time,\n    schedule.end_time_utc as schedule_end_time,\n    least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n  from weekly_periods\n  join schedule on ticket_week_start_time <= schedule.end_time_utc \n    and ticket_week_end_time >= schedule.start_time_utc\n    and weekly_periods.schedule_id = schedule.schedule_id\n    -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n    -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n    and cast( \n\n    datetime_add(\n        cast( start_week_date as datetime),\n        interval week_number * (7*24*60) + ticket_week_end_time minute\n        )\n\n as date) > cast(schedule.valid_from as date)\n    and cast( \n\n    datetime_add(\n        cast( start_week_date as datetime),\n        interval week_number * (7*24*60) + ticket_week_start_time minute\n        )\n\n as date) < cast(schedule.valid_until as date)\n    \n)\n\n  select ticket_id,\n    sum(scheduled_minutes) as first_reply_time_business_minutes\n  from intercepted_periods\n  group by 1", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__comments_enriched", "sql": " __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n  select *\n  from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__updates`\n  where field_name = 'comment'\n\n), users as (\n\n  select *\n  from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user`\n\n), joined as (\n\n  select \n\n    ticket_comment.*,\n    case when commenter.role = 'end-user' then 'external_comment'\n      when commenter.role in ('agent','admin') then 'internal_comment'\n      else 'unknown' end as commenter_role\n    \n  from ticket_comment\n  \n  join users as commenter\n    on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n  /*\n  In int_zendesk__ticket_reply_times we will only be focusing on reply times between public comments.\n  The below union explicitly identifies the previous commenter roles of public and not public comments.\n  */\n  select\n    *,\n    coalesce(\n      
lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n      , 'first_comment') \n      as previous_commenter_role\n  from joined\n  where is_public\n\n  union all\n\n  select\n    *,\n    'non_public_comment' as previous_commenter_role\n  from joined\n  where not is_public\n)\n\nselect \n    *,\n    first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n    sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n)"}, {"id": "model.zendesk.int_zendesk__ticket_reply_times", "sql": " __dbt__cte__int_zendesk__ticket_reply_times as (\nwith ticket_public_comments as (\n\n    select *\n    from __dbt__cte__int_zendesk__comments_enriched\n    where is_public\n\n), end_user_comments as (\n  \n  select \n    ticket_id,\n    valid_starting_at as end_user_comment_created_at,\n    ticket_created_date,\n    commenter_role,\n    previous_internal_comment_count,\n    previous_commenter_role = 'first_comment' as is_first_comment\n  from ticket_public_comments \n  where (commenter_role = 'external_comment'\n    and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n    or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n  select\n    end_user_comments.ticket_id,\n    -- If the commenter was internal, a first comment, and had previous non-public internal comments then we want the ticket created date to be the end user comment created date\n    -- Otherwise we will want the end user comment created date\n    case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n    end_user_comments.is_first_comment,\n    min(case when is_first_comment \n          and end_user_comments.commenter_role != 'external_comment' \n          and (end_user_comments.previous_internal_comment_count > 0)\n            then end_user_comments.end_user_comment_created_at \n          else agent_comments.valid_starting_at end) as agent_responded_at\n  from end_user_comments\n  left join ticket_public_comments as agent_comments\n    on agent_comments.ticket_id = end_user_comments.ticket_id\n    and agent_comments.commenter_role = 'internal_comment'\n    and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n  group by 1,2,3\n\n)\n\n  select\n    *,\n    (\n\n    datetime_diff(\n        cast(agent_responded_at as datetime),\n        cast(end_user_comment_created_at as datetime),\n        second\n        )\n\n / 60) as reply_time_calendar_minutes\n  from reply_timestamps\n  order by 1,2\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__field_history_enriched": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__field_history_enriched", "resource_type": "model", "package_name": "zendesk", "path": "ticket_history/int_zendesk__field_history_enriched.sql", "original_file_path": "models/ticket_history/int_zendesk__field_history_enriched.sql", "unique_id": "model.zendesk.int_zendesk__field_history_enriched", "fqn": ["zendesk", "ticket_history", "int_zendesk__field_history_enriched"], "alias": 
"int_zendesk__field_history_enriched", "checksum": {"name": "sha256", "checksum": "cdf920b1df5fee8c6a08b0e26996028d327964903e8acc4dd15498d23c00005c"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1715700424.230677, "relation_name": null, "raw_code": "with ticket_field_history as (\n\n select *\n from {{ ref('stg_zendesk__ticket_field_history') }}\n\n), updater_info as (\n select *\n from {{ ref('int_zendesk__updater_information') }}\n\n), final as (\n select\n ticket_field_history.*\n\n {% if var('ticket_field_history_updater_columns')%} --The below will be run if any fields are included in the variable within the dbt_project.yml.\n {% for col in var('ticket_field_history_updater_columns') %} --Iterating through the updater fields included in the variable.\n\n --The below statements are needed to populate Zendesk automated fields for when the zendesk triggers automatically change fields based on user defined triggers.\n {% if col in ['updater_is_active'] %}\n ,coalesce(updater_info.{{ col|lower }}, true) as {{ col }}\n\n {% elif col in ['updater_user_id','updater_organization_id'] %}\n ,coalesce(updater_info.{{ col|lower }}, -1) as {{ col }}\n \n {% elif col in ['updater_last_login_at'] %}\n ,coalesce(updater_info.{{ col|lower }}, current_timestamp) as {{ col }}\n \n {% else %}\n ,coalesce(updater_info.{{ col|lower }}, concat('zendesk_trigger_change_', '{{ col }}' )) as {{ col }}\n \n {% endif %}\n {% endfor %}\n {% endif %} \n\n from ticket_field_history\n\n left join updater_info\n on ticket_field_history.user_id = updater_info.updater_user_id\n)\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_field_history", "package": null, "version": null}, {"name": "int_zendesk__updater_information", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk_source.stg_zendesk__ticket_field_history", "model.zendesk.int_zendesk__updater_information"]}, "compiled_path": "target/compiled/zendesk/models/ticket_history/int_zendesk__field_history_enriched.sql", "compiled": true, "compiled_code": "with __dbt__cte__int_zendesk__updater_information as (\nwith users as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__user_aggregates`\n\n), organizations as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__organization_aggregates`\n\n), final as (\n select\n users.user_id as updater_user_id\n ,users.name as updater_name\n ,users.role as updater_role\n ,users.email as updater_email\n ,users.external_id as updater_external_id\n ,users.locale as updater_locale\n ,users.is_active as updater_is_active\n\n --If you use user tags this will be included, if 
not it will be ignored.\n \n ,users.user_tags as updater_user_tags\n \n\n ,users.last_login_at as updater_last_login_at\n ,users.time_zone as updater_time_zone\n ,organizations.organization_id as updater_organization_id\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n ,organizations.domain_names as updater_organization_domain_names\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,organizations.organization_tags as updater_organization_organization_tags\n \n from users\n\n left join organizations\n using(organization_id)\n)\n\nselect * \nfrom final\n), ticket_field_history as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_field_history`\n\n), updater_info as (\n select *\n from __dbt__cte__int_zendesk__updater_information\n\n), final as (\n select\n ticket_field_history.*\n\n \n\n from ticket_field_history\n\n left join updater_info\n on ticket_field_history.user_id = updater_info.updater_user_id\n)\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__updater_information", "sql": " __dbt__cte__int_zendesk__updater_information as (\nwith users as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__user_aggregates`\n\n), organizations as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__organization_aggregates`\n\n), final as (\n select\n users.user_id as updater_user_id\n ,users.name as updater_name\n ,users.role as updater_role\n ,users.email as updater_email\n ,users.external_id as updater_external_id\n ,users.locale as updater_locale\n ,users.is_active as updater_is_active\n\n --If you use user tags this will be included, if not it will be ignored.\n \n ,users.user_tags as updater_user_tags\n \n\n ,users.last_login_at as updater_last_login_at\n ,users.time_zone as updater_time_zone\n ,organizations.organization_id as updater_organization_id\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n ,organizations.domain_names as updater_organization_domain_names\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,organizations.organization_tags as updater_organization_organization_tags\n \n from users\n\n left join organizations\n using(organization_id)\n)\n\nselect * \nfrom final\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__field_history_pivot": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__field_history_pivot", "resource_type": "model", "package_name": "zendesk", "path": "ticket_history/int_zendesk__field_history_pivot.sql", "original_file_path": "models/ticket_history/int_zendesk__field_history_pivot.sql", "unique_id": "model.zendesk.int_zendesk__field_history_pivot", "fqn": ["zendesk", "ticket_history", "int_zendesk__field_history_pivot"], "alias": "int_zendesk__field_history_pivot", "checksum": {"name": "sha256", "checksum": "afcfd385ea28733bd02b37c9b5c8531309982466046b9d8323f0c6c0d7091c8c"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "incremental", "incremental_strategy": "merge", 
"persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "ticket_day_id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected", "partition_by": {"field": "date_day", "data_type": "date"}, "file_format": "delta"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "incremental", "partition_by": {"field": "date_day", "data_type": "date"}, "unique_key": "ticket_day_id", "incremental_strategy": "merge", "file_format": "delta"}, "created_at": 1715700424.2335842, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__field_history_pivot`", "raw_code": "-- depends_on: {{ source('zendesk', 'ticket_field_history') }}\n\n{{ \n config(\n materialized='incremental',\n partition_by = {'field': 'date_day', 'data_type': 'date'} if target.type not in ['spark', 'databricks'] else ['date_day'],\n unique_key='ticket_day_id',\n incremental_strategy = 'merge' if target.type not in ('snowflake', 'postgres', 'redshift') else 'delete+insert',\n file_format='delta'\n ) \n}}\n\n{% if execute -%}\n {% set results = run_query('select distinct field_name from ' ~ source('zendesk', 'ticket_field_history') ) %}\n {% set results_list = results.columns[0].values() %}\n{% endif -%}\n\nwith field_history as (\n\n select\n ticket_id,\n field_name,\n valid_ending_at,\n valid_starting_at\n\n --Only runs if the user passes updater fields through the final ticket field history model\n {% if var('ticket_field_history_updater_columns') %}\n ,\n {{ var('ticket_field_history_updater_columns') | join (\", \")}}\n\n {% endif %}\n\n -- doing this to figure out what values are actually null and what needs to be backfilled in zendesk__ticket_field_history\n ,case when value is null then 'is_null' else value end as value\n\n from {{ ref('int_zendesk__field_history_enriched') }}\n {% if is_incremental() %}\n where cast( {{ dbt.date_trunc('day', 'valid_starting_at') }} as date) >= (select max(date_day) from {{ this }})\n {% endif %}\n\n), event_order as (\n\n select \n *,\n row_number() over (\n partition by cast(valid_starting_at as date), ticket_id, field_name\n order by valid_starting_at desc\n ) as row_num\n from field_history\n\n), filtered as (\n\n -- Find the last event that occurs on each day for each ticket\n\n select *\n from event_order\n where row_num = 1\n\n), pivots as (\n\n -- For each column that is in both the ticket_field_history_columns variable and the field_history table,\n -- pivot out the value into it's own column. 
This will feed the daily slowly changing dimension model.\n\n select \n ticket_id,\n cast({{ dbt.date_trunc('day', 'valid_starting_at') }} as date) as date_day\n\n {% for col in results_list if col in var('ticket_field_history_columns') %}\n {% set col_xf = col|lower %}\n ,min(case when lower(field_name) = '{{ col|lower }}' then filtered.value end) as {{ col_xf }}\n\n --Only runs if the user passes updater fields through the final ticket field history model\n {% if var('ticket_field_history_updater_columns') %}\n\n {% for upd in var('ticket_field_history_updater_columns') %}\n\n {% set upd_xf = (col|lower + '_' + upd ) %} --Creating the appropriate column name based on the history field + update field names.\n\n {% if upd == 'updater_is_active' and target.type in ('postgres', 'redshift') %}\n\n ,bool_or(case when lower(field_name) = '{{ col|lower }}' then filtered.{{ upd }} end) as {{ upd_xf }}\n\n {% else %}\n\n ,min(case when lower(field_name) = '{{ col|lower }}' then filtered.{{ upd }} end) as {{ upd_xf }}\n\n {% endif %}\n {% endfor %}\n {% endif %}\n {% endfor %}\n \n from filtered\n group by 1,2\n\n), surrogate_key as (\n\n select \n *,\n {{ dbt_utils.generate_surrogate_key(['ticket_id','date_day'])}} as ticket_day_id\n from pivots\n\n)\n\nselect *\nfrom surrogate_key", "language": "sql", "refs": [{"name": "int_zendesk__field_history_enriched", "package": null, "version": null}], "sources": [["zendesk", "ticket_field_history"]], "metrics": [], "depends_on": {"macros": ["macro.dbt.is_incremental", "macro.dbt.date_trunc", "macro.dbt_utils.generate_surrogate_key", "macro.dbt.run_query"], "nodes": ["source.zendesk_source.zendesk.ticket_field_history", "model.zendesk.int_zendesk__field_history_enriched"]}, "compiled_path": "target/compiled/zendesk/models/ticket_history/int_zendesk__field_history_pivot.sql", "compiled": true, "compiled_code": "-- depends_on: `dbt-package-testing`.`zendesk_integration_tests_50`.`ticket_field_history_data`\n\n\n\n\n \nwith __dbt__cte__int_zendesk__updater_information as (\nwith users as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__user_aggregates`\n\n), organizations as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__organization_aggregates`\n\n), final as (\n select\n users.user_id as updater_user_id\n ,users.name as updater_name\n ,users.role as updater_role\n ,users.email as updater_email\n ,users.external_id as updater_external_id\n ,users.locale as updater_locale\n ,users.is_active as updater_is_active\n\n --If you use user tags this will be included, if not it will be ignored.\n \n ,users.user_tags as updater_user_tags\n \n\n ,users.last_login_at as updater_last_login_at\n ,users.time_zone as updater_time_zone\n ,organizations.organization_id as updater_organization_id\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n ,organizations.domain_names as updater_organization_domain_names\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,organizations.organization_tags as updater_organization_organization_tags\n \n from users\n\n left join organizations\n using(organization_id)\n)\n\nselect * \nfrom final\n), __dbt__cte__int_zendesk__field_history_enriched as (\nwith ticket_field_history as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_field_history`\n\n), updater_info as (\n select *\n from 
__dbt__cte__int_zendesk__updater_information\n\n), final as (\n    select\n        ticket_field_history.*\n\n        \n\n    from ticket_field_history\n\n    left join updater_info\n        on ticket_field_history.user_id = updater_info.updater_user_id\n)\nselect *\nfrom final\n), field_history as (\n\n    select\n        ticket_id,\n        field_name,\n        valid_ending_at,\n        valid_starting_at\n\n        --Only runs if the user passes updater fields through the final ticket field history model\n        \n\n        -- doing this to figure out what values are actually null and what needs to be backfilled in zendesk__ticket_field_history\n        ,case when value is null then 'is_null' else value end as value\n\n    from __dbt__cte__int_zendesk__field_history_enriched\n    \n    where cast( timestamp_trunc(\n        cast(valid_starting_at as timestamp),\n        day\n        ) as date) >= (select max(date_day) from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__field_history_pivot`)\n    \n\n), event_order as (\n\n    select \n        *,\n        row_number() over (\n            partition by cast(valid_starting_at as date), ticket_id, field_name\n            order by valid_starting_at desc\n            ) as row_num\n    from field_history\n\n), filtered as (\n\n    -- Find the last event that occurs on each day for each ticket\n\n    select *\n    from event_order\n    where row_num = 1\n\n), pivots as (\n\n    -- For each column that is in both the ticket_field_history_columns variable and the field_history table,\n    -- pivot out the value into its own column. This will feed the daily slowly changing dimension model.\n\n    select \n        ticket_id,\n        cast(timestamp_trunc(\n        cast(valid_starting_at as timestamp),\n        day\n        ) as date) as date_day\n\n        \n        \n        ,min(case when lower(field_name) = 'status' then filtered.value end) as status\n\n        --Only runs if the user passes updater fields through the final ticket field history model\n        \n        \n        \n        ,min(case when lower(field_name) = 'assignee_id' then filtered.value end) as assignee_id\n\n        --Only runs if the user passes updater fields through the final ticket field history model\n        \n        \n        \n        ,min(case when lower(field_name) = 'priority' then filtered.value end) as priority\n\n        --Only runs if the user passes updater fields through the final ticket field history model\n        \n        \n        \n    from filtered\n    group by 1,2\n\n), surrogate_key as (\n\n    select \n        *,\n        to_hex(md5(cast(coalesce(cast(ticket_id as string), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(date_day as string), '_dbt_utils_surrogate_key_null_') as string))) as ticket_day_id\n    from pivots\n\n)\n\nselect *\nfrom surrogate_key", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__updater_information", "sql": " __dbt__cte__int_zendesk__updater_information as (\nwith users as (\n    select *\n    from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__user_aggregates`\n\n), organizations as (\n    select *\n    from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__organization_aggregates`\n\n), final as (\n    select\n        users.user_id as updater_user_id\n        ,users.name as updater_name\n        ,users.role as updater_role\n        ,users.email as updater_email\n        ,users.external_id as updater_external_id\n        ,users.locale as updater_locale\n        ,users.is_active as updater_is_active\n\n        --If you use user tags this will be included, if not it will be ignored.\n        \n        ,users.user_tags as updater_user_tags\n        \n\n        ,users.last_login_at as updater_last_login_at\n        ,users.time_zone as updater_time_zone\n        ,organizations.organization_id as updater_organization_id\n\n        --If you use using_domain_names tags this will be 
included, if not it will be ignored.\n \n ,organizations.domain_names as updater_organization_domain_names\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,organizations.organization_tags as updater_organization_organization_tags\n \n from users\n\n left join organizations\n using(organization_id)\n)\n\nselect * \nfrom final\n)"}, {"id": "model.zendesk.int_zendesk__field_history_enriched", "sql": " __dbt__cte__int_zendesk__field_history_enriched as (\nwith ticket_field_history as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_field_history`\n\n), updater_info as (\n select *\n from __dbt__cte__int_zendesk__updater_information\n\n), final as (\n select\n ticket_field_history.*\n\n \n\n from ticket_field_history\n\n left join updater_info\n on ticket_field_history.user_id = updater_info.updater_user_id\n)\nselect *\nfrom final\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__updater_information": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__updater_information", "resource_type": "model", "package_name": "zendesk", "path": "ticket_history/int_zendesk__updater_information.sql", "original_file_path": "models/ticket_history/int_zendesk__updater_information.sql", "unique_id": "model.zendesk.int_zendesk__updater_information", "fqn": ["zendesk", "ticket_history", "int_zendesk__updater_information"], "alias": "int_zendesk__updater_information", "checksum": {"name": "sha256", "checksum": "62a690646cff991c0e0b6e205440a070bb44aab8d4d9286714710c52a4c6677a"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1715700424.240094, "relation_name": null, "raw_code": "with users as (\n select *\n from {{ ref('int_zendesk__user_aggregates') }}\n\n), organizations as (\n select *\n from {{ ref('int_zendesk__organization_aggregates') }}\n\n), final as (\n select\n users.user_id as updater_user_id\n ,users.name as updater_name\n ,users.role as updater_role\n ,users.email as updater_email\n ,users.external_id as updater_external_id\n ,users.locale as updater_locale\n ,users.is_active as updater_is_active\n\n --If you use user tags this will be included, if not it will be ignored.\n {% if var('using_user_tags', True) %}\n ,users.user_tags as updater_user_tags\n {% endif %}\n\n ,users.last_login_at as updater_last_login_at\n ,users.time_zone as updater_time_zone\n ,organizations.organization_id as updater_organization_id\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n {% if 
var('using_domain_names', True) %}\n ,organizations.domain_names as updater_organization_domain_names\n {% endif %}\n\n --If you use organization tags this will be included, if not it will be ignored.\n {% if var('using_organization_tags', True) %}\n ,organizations.organization_tags as updater_organization_organization_tags\n {% endif %}\n from users\n\n left join organizations\n using(organization_id)\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__user_aggregates", "package": null, "version": null}, {"name": "int_zendesk__organization_aggregates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__user_aggregates", "model.zendesk.int_zendesk__organization_aggregates"]}, "compiled_path": "target/compiled/zendesk/models/ticket_history/int_zendesk__updater_information.sql", "compiled": true, "compiled_code": "with users as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__user_aggregates`\n\n), organizations as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__organization_aggregates`\n\n), final as (\n select\n users.user_id as updater_user_id\n ,users.name as updater_name\n ,users.role as updater_role\n ,users.email as updater_email\n ,users.external_id as updater_external_id\n ,users.locale as updater_locale\n ,users.is_active as updater_is_active\n\n --If you use user tags this will be included, if not it will be ignored.\n \n ,users.user_tags as updater_user_tags\n \n\n ,users.last_login_at as updater_last_login_at\n ,users.time_zone as updater_time_zone\n ,organizations.organization_id as updater_organization_id\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n ,organizations.domain_names as updater_organization_domain_names\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,organizations.organization_tags as updater_organization_organization_tags\n \n from users\n\n left join organizations\n using(organization_id)\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__field_history_scd": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__field_history_scd", "resource_type": "model", "package_name": "zendesk", "path": "ticket_history/int_zendesk__field_history_scd.sql", "original_file_path": "models/ticket_history/int_zendesk__field_history_scd.sql", "unique_id": "model.zendesk.int_zendesk__field_history_scd", "fqn": ["zendesk", "ticket_history", "int_zendesk__field_history_scd"], "alias": "int_zendesk__field_history_scd", "checksum": {"name": "sha256", "checksum": "a748f9163dc6edaca993c8a3f5e3cecc9d057d3b47817d403e0b0778deda2466"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": 
false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1715700424.243413, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__field_history_scd`", "raw_code": "-- model needs to materialize as a table to avoid erroneous null values\n{{ config( materialized='table') }} \n\n{% set ticket_columns = adapter.get_columns_in_relation(ref('int_zendesk__field_history_pivot')) %}\n\nwith change_data as (\n\n select *\n from {{ ref('int_zendesk__field_history_pivot') }}\n\n), set_values as (\n\n-- each row of the pivoted table includes field values if that field was updated on that day\n-- we need to backfill to persist values that have been previously updated and are still valid \n select \n date_day as valid_from,\n ticket_id,\n ticket_day_id\n\n {% for col in ticket_columns if col.name|lower not in ['date_day','ending_day','ticket_id','ticket_day_id'] %} \n\n ,{{ col.name }}\n ,sum(case when {{ col.name }} is null \n then 0 \n else 1 \n end) over (order by ticket_id, date_day rows unbounded preceding) as {{ col.name }}_field_partition\n {% endfor %}\n\n from change_data\n\n), fill_values as (\n select\n valid_from, \n ticket_id,\n ticket_day_id\n\n {% for col in ticket_columns if col.name|lower not in ['date_day','ending_day','ticket_id','ticket_day_id'] %} \n\n ,first_value( {{ col.name }} ) over (partition by {{ col.name }}_field_partition, ticket_id order by valid_from asc rows between unbounded preceding and current row) as {{ col.name }}\n \n {% endfor %}\n from set_values\n) \n\nselect *\nfrom fill_values", "language": "sql", "refs": [{"name": "int_zendesk__field_history_pivot", "package": null, "version": null}, {"name": "int_zendesk__field_history_pivot", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__field_history_pivot"]}, "compiled_path": "target/compiled/zendesk/models/ticket_history/int_zendesk__field_history_scd.sql", "compiled": true, "compiled_code": "-- model needs to materialize as a table to avoid erroneous null values\n \n\n\n\nwith change_data as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__field_history_pivot`\n\n), set_values as (\n\n-- each row of the pivoted table includes field values if that field was updated on that day\n-- we need to backfill to persist values that have been previously updated and are still valid \n select \n date_day as valid_from,\n ticket_id,\n ticket_day_id\n\n \n\n ,status\n ,sum(case when status is null \n then 0 \n else 1 \n end) over (order by ticket_id, date_day rows unbounded preceding) as status_field_partition\n \n\n ,assignee_id\n ,sum(case when assignee_id is null \n then 0 \n else 1 \n end) over (order by ticket_id, date_day rows unbounded preceding) as assignee_id_field_partition\n \n\n ,priority\n ,sum(case when priority is null \n then 0 \n else 1 \n end) over (order by ticket_id, date_day rows unbounded preceding) as priority_field_partition\n \n\n from change_data\n\n), fill_values as (\n select\n valid_from, \n ticket_id,\n ticket_day_id\n\n \n\n ,first_value( status ) over (partition by status_field_partition, ticket_id order by valid_from asc rows between 
unbounded preceding and current row) as status\n \n \n\n ,first_value( assignee_id ) over (partition by assignee_id_field_partition, ticket_id order by valid_from asc rows between unbounded preceding and current row) as assignee_id\n \n \n\n ,first_value( priority ) over (partition by priority_field_partition, ticket_id order by valid_from asc rows between unbounded preceding and current row) as priority\n \n \n from set_values\n) \n\nselect *\nfrom fill_values", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__field_calendar_spine": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__field_calendar_spine", "resource_type": "model", "package_name": "zendesk", "path": "ticket_history/int_zendesk__field_calendar_spine.sql", "original_file_path": "models/ticket_history/int_zendesk__field_calendar_spine.sql", "unique_id": "model.zendesk.int_zendesk__field_calendar_spine", "fqn": ["zendesk", "ticket_history", "int_zendesk__field_calendar_spine"], "alias": "int_zendesk__field_calendar_spine", "checksum": {"name": "sha256", "checksum": "6026098c7eaa73db8ed28b5320fa2d651aaf02dd6fd36700df55f01624449741"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "incremental", "incremental_strategy": "merge", "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "ticket_day_id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected", "partition_by": {"field": "date_day", "data_type": "date"}, "file_format": "delta"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "incremental", "partition_by": {"field": "date_day", "data_type": "date"}, "unique_key": "ticket_day_id", "incremental_strategy": "merge", "file_format": "delta"}, "created_at": 1715700424.246346, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__field_calendar_spine`", "raw_code": "{{\n config(\n materialized='incremental',\n partition_by = {'field': 'date_day', 'data_type': 'date'} if target.type not in ['spark', 'databricks'] else ['date_day'],\n unique_key='ticket_day_id',\n incremental_strategy = 'merge' if target.type not in ('snowflake', 'postgres', 'redshift') else 'delete+insert',\n file_format='delta'\n )\n}}\n\nwith calendar as (\n\n select *\n from {{ ref('int_zendesk__calendar_spine') }}\n {% if is_incremental() %}\n where date_day >= (select max(date_day) from {{ this }})\n {% endif %}\n\n), ticket as (\n\n select \n *,\n -- closed tickets cannot be re-opened or updated, and solved tickets are automatically closed after a pre-defined number of days configured in your Zendesk settings\n cast( {{ dbt.date_trunc('day', \"case when status != 'closed' then \" ~ dbt.current_timestamp_backcompat() ~ \" else updated_at end\") }} as date) as open_until\n from {{ var('ticket') }}\n \n), joined as 
(\n\n select \n calendar.date_day,\n ticket.ticket_id\n from calendar\n inner join ticket\n on calendar.date_day >= cast(ticket.created_at as date)\n -- use this variable to extend the ticket's history past its close date (for reporting/data viz purposes :-)\n and {{ dbt.dateadd('month', var('ticket_field_history_extension_months', 0), 'ticket.open_until') }} >= calendar.date_day\n\n), surrogate_key as (\n\n select\n *,\n {{ dbt_utils.generate_surrogate_key(['date_day','ticket_id']) }} as ticket_day_id\n from joined\n\n)\n\nselect *\nfrom surrogate_key", "language": "sql", "refs": [{"name": "int_zendesk__calendar_spine", "package": null, "version": null}, {"name": "stg_zendesk__ticket", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.is_incremental", "macro.dbt.current_timestamp_backcompat", "macro.dbt.date_trunc", "macro.dbt.dateadd", "macro.dbt_utils.generate_surrogate_key"], "nodes": ["model.zendesk.int_zendesk__calendar_spine", "model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": "target/compiled/zendesk/models/ticket_history/int_zendesk__field_calendar_spine.sql", "compiled": true, "compiled_code": "\n\nwith __dbt__cte__int_zendesk__calendar_spine as (\n-- depends_on: `dbt-package-testing`.`zendesk_integration_tests_50`.`ticket_data`\n\nwith spine as (\n\n \n \n \n\n \n \n \n \n\n \n\n \n\n\n\n\n\n\n\n\nwith rawdata as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n + \n \n p8.generated_number * power(2, 8)\n + \n \n p9.generated_number * power(2, 9)\n + \n \n p10.generated_number * power(2, 10)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n cross join \n \n p as p8\n cross join \n \n p as p9\n cross join \n \n p as p10\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 1559\n order by generated_number\n\n\n\n),\n\nall_periods as (\n\n select (\n \n\n datetime_add(\n cast( '2020-02-13' as datetime),\n interval row_number() over (order by 1) - 1 day\n )\n\n\n ) as date_day\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_day <= \n\n datetime_add(\n cast( current_date as datetime),\n interval 1 week\n )\n\n\n\n)\n\nselect * from filtered\n\n\n\n), recast as (\n\n select cast(date_day as date) as date_day\n from spine\n\n)\n\nselect *\nfrom recast\n), calendar as (\n\n select *\n from __dbt__cte__int_zendesk__calendar_spine\n \n where date_day >= (select max(date_day) from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__field_calendar_spine`)\n \n\n), ticket as (\n\n select \n *,\n -- closed tickets cannot be re-opened or updated, and solved tickets are automatically closed after a pre-defined number of days configured in your Zendesk settings\n cast( timestamp_trunc(\n cast(case when status != 'closed' then current_timestamp else updated_at end as timestamp),\n day\n ) as date) as open_until\n from 
`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket`\n \n), joined as (\n\n select \n calendar.date_day,\n ticket.ticket_id\n from calendar\n inner join ticket\n on calendar.date_day >= cast(ticket.created_at as date)\n -- use this variable to extend the ticket's history past its close date (for reporting/data viz purposes :-)\n and \n\n datetime_add(\n cast( ticket.open_until as datetime),\n interval 0 month\n )\n\n >= calendar.date_day\n\n), surrogate_key as (\n\n select\n *,\n to_hex(md5(cast(coalesce(cast(date_day as string), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(ticket_id as string), '_dbt_utils_surrogate_key_null_') as string))) as ticket_day_id\n from joined\n\n)\n\nselect *\nfrom surrogate_key", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__calendar_spine", "sql": " __dbt__cte__int_zendesk__calendar_spine as (\n-- depends_on: `dbt-package-testing`.`zendesk_integration_tests_50`.`ticket_data`\n\nwith spine as (\n\n \n \n \n\n \n \n \n \n\n \n\n \n\n\n\n\n\n\n\n\nwith rawdata as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n + \n \n p8.generated_number * power(2, 8)\n + \n \n p9.generated_number * power(2, 9)\n + \n \n p10.generated_number * power(2, 10)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n cross join \n \n p as p8\n cross join \n \n p as p9\n cross join \n \n p as p10\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 1559\n order by generated_number\n\n\n\n),\n\nall_periods as (\n\n select (\n \n\n datetime_add(\n cast( '2020-02-13' as datetime),\n interval row_number() over (order by 1) - 1 day\n )\n\n\n ) as date_day\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_day <= \n\n datetime_add(\n cast( current_date as datetime),\n interval 1 week\n )\n\n\n\n)\n\nselect * from filtered\n\n\n\n), recast as (\n\n select cast(date_day as date) as date_day\n from spine\n\n)\n\nselect *\nfrom recast\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_work_time_calendar": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__ticket_work_time_calendar", "resource_type": "model", "package_name": "zendesk", "path": "agent_work_time/int_zendesk__ticket_work_time_calendar.sql", "original_file_path": "models/agent_work_time/int_zendesk__ticket_work_time_calendar.sql", "unique_id": "model.zendesk.int_zendesk__ticket_work_time_calendar", "fqn": ["zendesk", "agent_work_time", "int_zendesk__ticket_work_time_calendar"], "alias": "int_zendesk__ticket_work_time_calendar", "checksum": {"name": "sha256", "checksum": "e3cda559c663cc0e6ef1defcf5d8c418bbb9c20bb60aa118fc698579b3c37814"}, "config": {"enabled": true, "alias": null, 
"schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1715700424.250644, "relation_name": null, "raw_code": "with ticket_historical_status as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_status') }}\n\n), calendar_minutes as (\n \n select \n ticket_id,\n status,\n case when status in ('pending') then status_duration_calendar_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold') then status_duration_calendar_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold', 'pending') then status_duration_calendar_minutes \n else 0 end as solve_time_in_minutes, \n case when status in ('new', 'open') then status_duration_calendar_minutes\n else 0 end as agent_work_time_in_minutes,\n case when status in ('hold') then status_duration_calendar_minutes\n else 0 end as on_hold_time_in_minutes,\n case when status = 'new' then status_duration_calendar_minutes\n else 0 end as new_status_duration_minutes,\n case when status = 'open' then status_duration_calendar_minutes\n else 0 end as open_status_duration_minutes,\n case when status = 'deleted' then 1\n else 0 end as ticket_deleted,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_status_assignment_date,\n case when lag(status) over (partition by ticket_id order by valid_starting_at) = 'deleted' and status != 'deleted'\n then 1\n else 0\n end as ticket_recoveries\n\n from ticket_historical_status\n\n)\n\nselect \n ticket_id,\n last_status_assignment_date,\n sum(ticket_deleted) as ticket_deleted_count,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_calendar_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_calendar_minutes,\n sum(solve_time_in_minutes) as solve_time_in_calendar_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_calendar_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_calendar_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_calendar_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_calendar_minutes,\n sum(ticket_recoveries) as total_ticket_recoveries\nfrom calendar_minutes\ngroup by 1, 2", "language": "sql", "refs": [{"name": "int_zendesk__ticket_historical_status", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__ticket_historical_status"]}, "compiled_path": "target/compiled/zendesk/models/agent_work_time/int_zendesk__ticket_work_time_calendar.sql", "compiled": true, "compiled_code": "with ticket_historical_status as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_status`\n\n), 
calendar_minutes as (\n \n select \n ticket_id,\n status,\n case when status in ('pending') then status_duration_calendar_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold') then status_duration_calendar_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold', 'pending') then status_duration_calendar_minutes \n else 0 end as solve_time_in_minutes, \n case when status in ('new', 'open') then status_duration_calendar_minutes\n else 0 end as agent_work_time_in_minutes,\n case when status in ('hold') then status_duration_calendar_minutes\n else 0 end as on_hold_time_in_minutes,\n case when status = 'new' then status_duration_calendar_minutes\n else 0 end as new_status_duration_minutes,\n case when status = 'open' then status_duration_calendar_minutes\n else 0 end as open_status_duration_minutes,\n case when status = 'deleted' then 1\n else 0 end as ticket_deleted,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_status_assignment_date,\n case when lag(status) over (partition by ticket_id order by valid_starting_at) = 'deleted' and status != 'deleted'\n then 1\n else 0\n end as ticket_recoveries\n\n from ticket_historical_status\n\n)\n\nselect \n ticket_id,\n last_status_assignment_date,\n sum(ticket_deleted) as ticket_deleted_count,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_calendar_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_calendar_minutes,\n sum(solve_time_in_minutes) as solve_time_in_calendar_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_calendar_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_calendar_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_calendar_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_calendar_minutes,\n sum(ticket_recoveries) as total_ticket_recoveries\nfrom calendar_minutes\ngroup by 1, 2", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_work_time_business": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__ticket_work_time_business", "resource_type": "model", "package_name": "zendesk", "path": "agent_work_time/int_zendesk__ticket_work_time_business.sql", "original_file_path": "models/agent_work_time/int_zendesk__ticket_work_time_business.sql", "unique_id": "model.zendesk.int_zendesk__ticket_work_time_business", "fqn": ["zendesk", "agent_work_time", "int_zendesk__ticket_work_time_business"], "alias": "int_zendesk__ticket_work_time_business", "checksum": {"name": "sha256", "checksum": "abc04dab22fb0e16e5cdb757bab3b4edb0573dcc230d769126e24a88a3f53430"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": 
"", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral", "enabled": true}, "created_at": 1715700424.251619, "relation_name": null, "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\nwith ticket_historical_status as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_status') }}\n\n), ticket_schedules as (\n\n select *\n from {{ ref('int_zendesk__ticket_schedules') }}\n\n), schedule as (\n\n select *\n from {{ ref('int_zendesk__schedule_spine') }}\n\n), ticket_status_crossed_with_schedule as (\n \n select\n ticket_historical_status.ticket_id,\n ticket_historical_status.status as ticket_status,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as status_schedule_start,\n least(valid_ending_at, schedule_invalidated_at) as status_schedule_end,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n ticket_historical_status.valid_starting_at as status_valid_starting_at,\n ticket_historical_status.valid_ending_at as status_valid_ending_at\n\n from ticket_historical_status\n left join ticket_schedules\n on ticket_historical_status.ticket_id = ticket_schedules.ticket_id\n -- making sure there is indeed real overlap\n where {{ dbt.datediff('greatest(valid_starting_at, schedule_created_at)', 'least(valid_ending_at, schedule_invalidated_at)', 'second') }} > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n ticket_status,\n schedule_id,\n status_schedule_start,\n status_schedule_end,\n status_valid_starting_at,\n status_valid_ending_at,\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('ticket_status_crossed_with_schedule.status_schedule_start','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n \"cast(ticket_status_crossed_with_schedule.status_schedule_start as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as start_time_in_minutes_from_week,\n ({{ dbt.datediff(\n 'ticket_status_crossed_with_schedule.status_schedule_start',\n 'ticket_status_crossed_with_schedule.status_schedule_end',\n 'second') }} /60\n ) as raw_delta_in_minutes,\n {{ dbt_date.week_start('ticket_status_crossed_with_schedule.status_schedule_start','UTC') }} as start_week_date\n\n from ticket_status_crossed_with_schedule\n {{ dbt_utils.group_by(n=7) }}\n\n), weeks as (\n\n {{ dbt_utils.generate_series(208) }}\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_periods as (\n\n select\n\n weeks_cross_ticket_full_solved_time.*,\n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - 
week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods as (\n \n select \n weekly_periods.ticket_id,\n weekly_periods.week_number,\n weekly_periods.schedule_id,\n weekly_periods.ticket_status,\n weekly_periods.ticket_week_start_time,\n weekly_periods.ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(weekly_periods.ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on \n ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time', from_date_or_timestamp='start_week_date') }} as {{ dbt.type_timestamp() }}) > cast(schedule.valid_from as {{ dbt.type_timestamp() }})\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time', from_date_or_timestamp='start_week_date') }} as {{ dbt.type_timestamp() }}) < cast(schedule.valid_until as {{ dbt.type_timestamp() }})\n \n), business_minutes as (\n \n select \n ticket_id,\n ticket_status,\n case when ticket_status in ('pending') then scheduled_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold') then scheduled_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold', 'pending') then scheduled_minutes\n else 0 end as solve_time_in_minutes,\n case when ticket_status in ('new', 'open') then scheduled_minutes\n else 0 end as agent_work_time_in_minutes,\n case when ticket_status in ('hold') then scheduled_minutes\n else 0 end as on_hold_time_in_minutes,\n case when ticket_status = 'new' then scheduled_minutes\n else 0 end as new_status_duration_minutes,\n case when ticket_status = 'open' then scheduled_minutes\n else 0 end as open_status_duration_minutes\n from intercepted_periods\n\n)\n \n select \n ticket_id,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_business_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_business_minutes,\n sum(solve_time_in_minutes) as solve_time_in_business_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_business_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_business_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_business_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_business_minutes\n from business_minutes\n group by 1", "language": "sql", "refs": [{"name": "int_zendesk__ticket_historical_status", "package": null, "version": null}, {"name": "int_zendesk__ticket_schedules", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt_utils.group_by", "macro.dbt_utils.generate_series", "macro.dbt.type_int", 
"macro.dbt.dateadd"], "nodes": ["model.zendesk.int_zendesk__ticket_historical_status", "model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__schedule_spine"]}, "compiled_path": "target/compiled/zendesk/models/agent_work_time/int_zendesk__ticket_work_time_business.sql", "compiled": true, "compiled_code": "\n\nwith ticket_historical_status as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_status`\n\n), ticket_schedules as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_schedules`\n\n), schedule as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__schedule_spine`\n\n), ticket_status_crossed_with_schedule as (\n \n select\n ticket_historical_status.ticket_id,\n ticket_historical_status.status as ticket_status,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as status_schedule_start,\n least(valid_ending_at, schedule_invalidated_at) as status_schedule_end,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n ticket_historical_status.valid_starting_at as status_valid_starting_at,\n ticket_historical_status.valid_ending_at as status_valid_ending_at\n\n from ticket_historical_status\n left join ticket_schedules\n on ticket_historical_status.ticket_id = ticket_schedules.ticket_id\n -- making sure there is indeed real overlap\n where \n\n datetime_diff(\n cast(least(valid_ending_at, schedule_invalidated_at) as datetime),\n cast(greatest(valid_starting_at, schedule_created_at) as datetime),\n second\n )\n\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n ticket_status,\n schedule_id,\n status_schedule_start,\n status_schedule_end,\n status_valid_starting_at,\n status_valid_ending_at,\n (\n\n datetime_diff(\n cast(cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp) as datetime),\n cast(cast(cast(timestamp_trunc(\n cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp),\n week\n ) as date)as timestamp) as datetime),\n second\n )\n\n /60\n ) as start_time_in_minutes_from_week,\n (\n\n datetime_diff(\n cast(ticket_status_crossed_with_schedule.status_schedule_end as datetime),\n cast(ticket_status_crossed_with_schedule.status_schedule_start as datetime),\n second\n )\n\n /60\n ) as raw_delta_in_minutes,\n cast(timestamp_trunc(\n cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp),\n week\n ) as date) as start_week_date\n\n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n 
cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as INT64) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_periods as (\n\n select\n\n weeks_cross_ticket_full_solved_time.*,\n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as INT64) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as INT64) as ticket_week_end_time\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods as (\n \n select \n weekly_periods.ticket_id,\n weekly_periods.week_number,\n weekly_periods.schedule_id,\n weekly_periods.ticket_status,\n weekly_periods.ticket_week_start_time,\n weekly_periods.ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(weekly_periods.ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on \n ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_end_time minute\n )\n\n as timestamp) > cast(schedule.valid_from as timestamp)\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_start_time minute\n )\n\n as timestamp) < cast(schedule.valid_until as timestamp)\n \n), business_minutes as (\n \n select \n ticket_id,\n ticket_status,\n case when ticket_status in ('pending') then scheduled_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold') then scheduled_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold', 'pending') then scheduled_minutes\n else 0 end as solve_time_in_minutes,\n case when ticket_status in ('new', 'open') then scheduled_minutes\n else 0 end as agent_work_time_in_minutes,\n case when ticket_status in ('hold') then scheduled_minutes\n else 0 end as on_hold_time_in_minutes,\n case when ticket_status = 'new' then scheduled_minutes\n else 0 end as new_status_duration_minutes,\n case when ticket_status = 'open' then scheduled_minutes\n else 0 end as open_status_duration_minutes\n from intercepted_periods\n\n)\n \n select \n ticket_id,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_business_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_business_minutes,\n sum(solve_time_in_minutes) as 
solve_time_in_business_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_business_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_business_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_business_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_business_minutes\n from business_minutes\n group by 1", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__calendar_spine": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__calendar_spine", "resource_type": "model", "package_name": "zendesk", "path": "utils/int_zendesk__calendar_spine.sql", "original_file_path": "models/utils/int_zendesk__calendar_spine.sql", "unique_id": "model.zendesk.int_zendesk__calendar_spine", "fqn": ["zendesk", "utils", "int_zendesk__calendar_spine"], "alias": "int_zendesk__calendar_spine", "checksum": {"name": "sha256", "checksum": "2131dbec96be6f5fee780a243b7f48940504a36a33c6fe1b66b24be1a8396928"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1715700424.256958, "relation_name": null, "raw_code": "-- depends_on: {{ source('zendesk', 'ticket') }}\n\nwith spine as (\n\n {% if execute %}\n {% set current_ts = dbt.current_timestamp_backcompat() %}\n {% set first_date_query %}\n select min( created_at ) as min_date from {{ source('zendesk', 'ticket') }}\n -- by default take all the data \n where cast(created_at as date) >= {{ dbt.dateadd('year', - var('ticket_field_history_timeframe_years', 50), current_ts ) }}\n {% endset %}\n\n {% set first_date = run_query(first_date_query).columns[0][0]|string %}\n \n {% if target.type == 'postgres' %}\n {% set first_date_adjust = \"cast('\" ~ first_date[0:10] ~ \"' as date)\" %}\n\n {% else %}\n {% set first_date_adjust = \"'\" ~ first_date[0:10] ~ \"'\" %}\n\n {% endif %}\n\n {% else %} {% set first_date_adjust = \"2016-01-01\" %}\n {% endif %}\n\n\n{{\n dbt_utils.date_spine(\n datepart = \"day\", \n start_date = first_date_adjust,\n end_date = dbt.dateadd(\"week\", 1, \"current_date\")\n ) \n}}\n\n), recast as (\n\n select cast(date_day as date) as date_day\n from spine\n\n)\n\nselect *\nfrom recast", "language": "sql", "refs": [], "sources": [["zendesk", "ticket"]], "metrics": [], "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt_utils.date_spine", "macro.dbt.current_timestamp_backcompat", "macro.dbt.run_query"], "nodes": ["source.zendesk_source.zendesk.ticket"]}, "compiled_path": "target/compiled/zendesk/models/utils/int_zendesk__calendar_spine.sql", "compiled": true, 
"compiled_code": "-- depends_on: `dbt-package-testing`.`zendesk_integration_tests_50`.`ticket_data`\n\nwith spine as (\n\n \n \n \n\n \n \n \n \n\n \n\n \n\n\n\n\n\n\n\n\nwith rawdata as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n + \n \n p8.generated_number * power(2, 8)\n + \n \n p9.generated_number * power(2, 9)\n + \n \n p10.generated_number * power(2, 10)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n cross join \n \n p as p8\n cross join \n \n p as p9\n cross join \n \n p as p10\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 1559\n order by generated_number\n\n\n\n),\n\nall_periods as (\n\n select (\n \n\n datetime_add(\n cast( '2020-02-13' as datetime),\n interval row_number() over (order by 1) - 1 day\n )\n\n\n ) as date_day\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_day <= \n\n datetime_add(\n cast( current_date as datetime),\n interval 1 week\n )\n\n\n\n)\n\nselect * from filtered\n\n\n\n), recast as (\n\n select cast(date_day as date) as date_day\n from spine\n\n)\n\nselect *\nfrom recast", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_resolution_times_calendar": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__ticket_resolution_times_calendar", "resource_type": "model", "package_name": "zendesk", "path": "resolution_times/int_zendesk__ticket_resolution_times_calendar.sql", "original_file_path": "models/resolution_times/int_zendesk__ticket_resolution_times_calendar.sql", "unique_id": "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "fqn": ["zendesk", "resolution_times", "int_zendesk__ticket_resolution_times_calendar"], "alias": "int_zendesk__ticket_resolution_times_calendar", "checksum": {"name": "sha256", "checksum": "0c3e1e19084b3e1829c18b80315e8f64aaf63e94522fc56d64652e89b02afadc"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1715700424.265677, 
"relation_name": null, "raw_code": "with historical_solved_status as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_status') }}\n where status = 'solved'\n\n), ticket as (\n\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), ticket_historical_assignee as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_assignee') }}\n\n), ticket_historical_group as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_group') }}\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n {{ dbt.datediff(\n 'ticket_historical_assignee.first_agent_assignment_date', \n 'solved_times.last_solved_at',\n 'minute' ) }} as first_assignment_to_resolution_calendar_minutes,\n {{ dbt.datediff(\n 'ticket_historical_assignee.last_agent_assignment_date', \n 'solved_times.last_solved_at',\n 'minute' ) }} as last_assignment_to_resolution_calendar_minutes,\n {{ dbt.datediff(\n 'ticket.created_at', \n 'solved_times.first_solved_at',\n 'minute' ) }} as first_resolution_calendar_minutes,\n {{ dbt.datediff(\n 'ticket.created_at', \n 'solved_times.last_solved_at',\n 'minute') }} as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)", "language": "sql", "refs": [{"name": "int_zendesk__ticket_historical_status", "package": null, "version": null}, {"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "int_zendesk__ticket_historical_assignee", "package": null, "version": null}, {"name": "int_zendesk__ticket_historical_group", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff"], "nodes": ["model.zendesk.int_zendesk__ticket_historical_status", "model.zendesk_source.stg_zendesk__ticket", "model.zendesk.int_zendesk__ticket_historical_assignee", "model.zendesk.int_zendesk__ticket_historical_group"]}, "compiled_path": "target/compiled/zendesk/models/resolution_times/int_zendesk__ticket_resolution_times_calendar.sql", "compiled": true, "compiled_code": "with historical_solved_status as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_status`\n where status = 'solved'\n\n), ticket as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket`\n\n), ticket_historical_assignee as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_assignee`\n\n), 
ticket_historical_group as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_group`\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n\n datetime_diff(\n cast(solved_times.last_solved_at as datetime),\n cast(ticket_historical_assignee.first_agent_assignment_date as datetime),\n minute\n )\n\n as first_assignment_to_resolution_calendar_minutes,\n \n\n datetime_diff(\n cast(solved_times.last_solved_at as datetime),\n cast(ticket_historical_assignee.last_agent_assignment_date as datetime),\n minute\n )\n\n as last_assignment_to_resolution_calendar_minutes,\n \n\n datetime_diff(\n cast(solved_times.first_solved_at as datetime),\n cast(ticket.created_at as datetime),\n minute\n )\n\n as first_resolution_calendar_minutes,\n \n\n datetime_diff(\n cast(solved_times.last_solved_at as datetime),\n cast(ticket.created_at as datetime),\n minute\n )\n\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_first_resolution_time_business": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__ticket_first_resolution_time_business", "resource_type": "model", "package_name": "zendesk", "path": "resolution_times/int_zendesk__ticket_first_resolution_time_business.sql", "original_file_path": "models/resolution_times/int_zendesk__ticket_first_resolution_time_business.sql", "unique_id": "model.zendesk.int_zendesk__ticket_first_resolution_time_business", "fqn": ["zendesk", "resolution_times", "int_zendesk__ticket_first_resolution_time_business"], "alias": "int_zendesk__ticket_first_resolution_time_business", "checksum": {"name": "sha256", "checksum": "b26eaf93a2a443204c26eba5cf4dd0fcec83a4e9ec3ab6a6abdd1b58273bddbd"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": 
{"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral", "enabled": true}, "created_at": 1715700424.269092, "relation_name": null, "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\nwith ticket_resolution_times_calendar as (\n\n select *\n from {{ ref('int_zendesk__ticket_resolution_times_calendar') }}\n\n), ticket_schedules as (\n\n select *\n from {{ ref('int_zendesk__ticket_schedules') }}\n\n), schedule as (\n\n select *\n from {{ ref('int_zendesk__schedule_spine') }}\n\n), ticket_first_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.first_solved_at) as first_solved_at,\n\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('ticket_schedules.schedule_created_at','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n \"cast(ticket_schedules.schedule_created_at as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n {{ dbt.datediff(\n 'ticket_schedules.schedule_created_at',\n 'least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at))',\n 'second') }}/60\n )) as raw_delta_in_minutes,\n {{ dbt_date.week_start('ticket_schedules.schedule_created_at','UTC') }} as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n {{ dbt_utils.generate_series(208) }}\n\n), weeks_cross_ticket_first_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_resolution_time.*,\n cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number\n\n from ticket_first_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_first_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time\n \n from weeks_cross_ticket_first_resolution_time\n\n), intercepted_periods as (\n\n select ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the 
Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time', from_date_or_timestamp='start_week_date') }} as {{ dbt.type_timestamp() }}) > cast(schedule.valid_from as {{ dbt.type_timestamp() }})\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time', from_date_or_timestamp='start_week_date') }} as {{ dbt.type_timestamp() }}) < cast(schedule.valid_until as {{ dbt.type_timestamp() }})\n\n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as first_resolution_business_minutes\n from intercepted_periods\n group by 1", "language": "sql", "refs": [{"name": "int_zendesk__ticket_resolution_times_calendar", "package": null, "version": null}, {"name": "int_zendesk__ticket_schedules", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt.datediff", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd"], "nodes": ["model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__schedule_spine"]}, "compiled_path": "target/compiled/zendesk/models/resolution_times/int_zendesk__ticket_first_resolution_time_business.sql", "compiled": true, "compiled_code": "\n\nwith __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_status`\n where status = 'solved'\n\n), ticket as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket`\n\n), ticket_historical_assignee as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_assignee`\n\n), ticket_historical_group as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_group`\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n\n datetime_diff(\n cast(solved_times.last_solved_at as datetime),\n cast(ticket_historical_assignee.first_agent_assignment_date as datetime),\n 
minute\n )\n\n as first_assignment_to_resolution_calendar_minutes,\n \n\n datetime_diff(\n cast(solved_times.last_solved_at as datetime),\n cast(ticket_historical_assignee.last_agent_assignment_date as datetime),\n minute\n )\n\n as last_assignment_to_resolution_calendar_minutes,\n \n\n datetime_diff(\n cast(solved_times.first_solved_at as datetime),\n cast(ticket.created_at as datetime),\n minute\n )\n\n as first_resolution_calendar_minutes,\n \n\n datetime_diff(\n cast(solved_times.last_solved_at as datetime),\n cast(ticket.created_at as datetime),\n minute\n )\n\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n), ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_schedules`\n\n), schedule as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__schedule_spine`\n\n), ticket_first_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.first_solved_at) as first_solved_at,\n\n (\n\n datetime_diff(\n cast(cast(ticket_schedules.schedule_created_at as timestamp) as datetime),\n cast(cast(cast(timestamp_trunc(\n cast(ticket_schedules.schedule_created_at as timestamp),\n week\n ) as date)as timestamp) as datetime),\n second\n )\n\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n\n datetime_diff(\n cast(least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)) as datetime),\n cast(ticket_schedules.schedule_created_at as datetime),\n second\n )\n\n /60\n )) as raw_delta_in_minutes,\n cast(timestamp_trunc(\n cast(ticket_schedules.schedule_created_at as timestamp),\n week\n ) as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_resolution_time.*,\n cast(generated_number - 1 as INT64) as week_number\n\n 
from ticket_first_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_first_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as INT64) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as INT64) as ticket_week_end_time\n \n from weeks_cross_ticket_first_resolution_time\n\n), intercepted_periods as (\n\n select ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_end_time minute\n )\n\n as timestamp) > cast(schedule.valid_from as timestamp)\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_start_time minute\n )\n\n as timestamp) < cast(schedule.valid_until as timestamp)\n\n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as first_resolution_business_minutes\n from intercepted_periods\n group by 1", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "sql": " __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_status`\n where status = 'solved'\n\n), ticket as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket`\n\n), ticket_historical_assignee as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_assignee`\n\n), ticket_historical_group as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_group`\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n 
ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n\n datetime_diff(\n cast(solved_times.last_solved_at as datetime),\n cast(ticket_historical_assignee.first_agent_assignment_date as datetime),\n minute\n )\n\n as first_assignment_to_resolution_calendar_minutes,\n \n\n datetime_diff(\n cast(solved_times.last_solved_at as datetime),\n cast(ticket_historical_assignee.last_agent_assignment_date as datetime),\n minute\n )\n\n as last_assignment_to_resolution_calendar_minutes,\n \n\n datetime_diff(\n cast(solved_times.first_solved_at as datetime),\n cast(ticket.created_at as datetime),\n minute\n )\n\n as first_resolution_calendar_minutes,\n \n\n datetime_diff(\n cast(solved_times.last_solved_at as datetime),\n cast(ticket.created_at as datetime),\n minute\n )\n\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_full_resolution_time_business": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__ticket_full_resolution_time_business", "resource_type": "model", "package_name": "zendesk", "path": "resolution_times/int_zendesk__ticket_full_resolution_time_business.sql", "original_file_path": "models/resolution_times/int_zendesk__ticket_full_resolution_time_business.sql", "unique_id": "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "fqn": ["zendesk", "resolution_times", "int_zendesk__ticket_full_resolution_time_business"], "alias": "int_zendesk__ticket_full_resolution_time_business", "checksum": {"name": "sha256", "checksum": "f9e2ed998cdaa9765b1aa3dccd7b7e8b2dc28779d555ca845d1b0115a26d8577"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral", "enabled": true}, "created_at": 1715700424.273908, "relation_name": null, "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\nwith ticket_resolution_times_calendar as (\n\n select *\n from {{ ref('int_zendesk__ticket_resolution_times_calendar') }}\n\n), ticket_schedules as (\n\n select *\n from {{ ref('int_zendesk__ticket_schedules') }}\n\n), schedule as (\n\n select *\n from {{ ref('int_zendesk__schedule_spine') }}\n\n), ticket_full_resolution_time as (\n\n select \n 
ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.last_solved_at) as last_solved_at,\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('ticket_schedules.schedule_created_at','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n \"cast(ticket_schedules.schedule_created_at as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n {{ dbt.datediff(\n 'ticket_schedules.schedule_created_at',\n 'least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at))',\n 'second') }}/60\n )) as raw_delta_in_minutes,\n {{ dbt_date.week_start('ticket_schedules.schedule_created_at','UTC') }} as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n {{ dbt_utils.generate_series(208) }}\n\n), weeks_cross_ticket_full_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_full_resolution_time.*,\n cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number\n\n from ticket_full_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_full_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time\n \n from weeks_cross_ticket_full_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time', from_date_or_timestamp='start_week_date') }} as {{ dbt.type_timestamp() }}) > cast(schedule.valid_from as {{ dbt.type_timestamp() }})\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time', from_date_or_timestamp='start_week_date') }} as {{ dbt.type_timestamp() }}) < cast(schedule.valid_until as {{ dbt.type_timestamp() }})\n \n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as full_resolution_business_minutes\n from intercepted_periods\n group by 1", "language": "sql", 
"refs": [{"name": "int_zendesk__ticket_resolution_times_calendar", "package": null, "version": null}, {"name": "int_zendesk__ticket_schedules", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt.datediff", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd"], "nodes": ["model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__schedule_spine"]}, "compiled_path": "target/compiled/zendesk/models/resolution_times/int_zendesk__ticket_full_resolution_time_business.sql", "compiled": true, "compiled_code": "\n\nwith __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_status`\n where status = 'solved'\n\n), ticket as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket`\n\n), ticket_historical_assignee as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_assignee`\n\n), ticket_historical_group as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_group`\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n\n datetime_diff(\n cast(solved_times.last_solved_at as datetime),\n cast(ticket_historical_assignee.first_agent_assignment_date as datetime),\n minute\n )\n\n as first_assignment_to_resolution_calendar_minutes,\n \n\n datetime_diff(\n cast(solved_times.last_solved_at as datetime),\n cast(ticket_historical_assignee.last_agent_assignment_date as datetime),\n minute\n )\n\n as last_assignment_to_resolution_calendar_minutes,\n \n\n datetime_diff(\n cast(solved_times.first_solved_at as datetime),\n cast(ticket.created_at as datetime),\n minute\n )\n\n as first_resolution_calendar_minutes,\n \n\n datetime_diff(\n cast(solved_times.last_solved_at as datetime),\n cast(ticket.created_at as datetime),\n minute\n )\n\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n), ticket_resolution_times_calendar as (\n\n select *\n from 
__dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_schedules`\n\n), schedule as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__schedule_spine`\n\n), ticket_full_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.last_solved_at) as last_solved_at,\n (\n\n datetime_diff(\n cast(cast(ticket_schedules.schedule_created_at as timestamp) as datetime),\n cast(cast(cast(timestamp_trunc(\n cast(ticket_schedules.schedule_created_at as timestamp),\n week\n ) as date)as timestamp) as datetime),\n second\n )\n\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n\n datetime_diff(\n cast(least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)) as datetime),\n cast(ticket_schedules.schedule_created_at as datetime),\n second\n )\n\n /60\n )) as raw_delta_in_minutes,\n cast(timestamp_trunc(\n cast(ticket_schedules.schedule_created_at as timestamp),\n week\n ) as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_full_resolution_time.*,\n cast(generated_number - 1 as INT64) as week_number\n\n from ticket_full_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_full_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as INT64) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as INT64) as ticket_week_end_time\n \n from weeks_cross_ticket_full_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - 
greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_end_time minute\n )\n\n as timestamp) > cast(schedule.valid_from as timestamp)\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_start_time minute\n )\n\n as timestamp) < cast(schedule.valid_until as timestamp)\n \n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as full_resolution_business_minutes\n from intercepted_periods\n group by 1", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "sql": " __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_status`\n where status = 'solved'\n\n), ticket as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket`\n\n), ticket_historical_assignee as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_assignee`\n\n), ticket_historical_group as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_group`\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n\n datetime_diff(\n cast(solved_times.last_solved_at as datetime),\n cast(ticket_historical_assignee.first_agent_assignment_date as datetime),\n minute\n )\n\n as first_assignment_to_resolution_calendar_minutes,\n \n\n datetime_diff(\n cast(solved_times.last_solved_at as datetime),\n cast(ticket_historical_assignee.last_agent_assignment_date as datetime),\n minute\n )\n\n as last_assignment_to_resolution_calendar_minutes,\n \n\n datetime_diff(\n cast(solved_times.first_solved_at as datetime),\n cast(ticket.created_at as datetime),\n minute\n )\n\n as first_resolution_calendar_minutes,\n \n\n datetime_diff(\n 
cast(solved_times.last_solved_at as datetime),\n cast(ticket.created_at as datetime),\n minute\n )\n\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__updates": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__updates", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__updates.sql", "original_file_path": "models/intermediate/int_zendesk__updates.sql", "unique_id": "model.zendesk.int_zendesk__updates", "fqn": ["zendesk", "intermediate", "int_zendesk__updates"], "alias": "int_zendesk__updates", "checksum": {"name": "sha256", "checksum": "3ecf6bfe15bd7a820b369379fff7dadf236c00ce2fe6c7e335c73c07ba67de0e"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1715700424.278903, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__updates`", "raw_code": "with ticket_history as (\n select *\n from {{ ref('stg_zendesk__ticket_field_history') }}\n\n), ticket_comment as (\n select *\n from {{ ref('stg_zendesk__ticket_comment') }}\n\n), tickets as (\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), updates_union as (\n select \n ticket_id,\n field_name,\n value,\n null as is_public,\n user_id,\n valid_starting_at,\n valid_ending_at\n from ticket_history\n\n union all\n\n select\n ticket_id,\n cast('comment' as {{ dbt.type_string() }}) as field_name,\n body as value,\n is_public,\n user_id,\n created_at as valid_starting_at,\n lead(created_at) over (partition by ticket_id order by created_at) as valid_ending_at\n from ticket_comment\n\n), final as (\n select\n updates_union.*,\n tickets.created_at as ticket_created_date\n from updates_union\n\n left join tickets\n on tickets.ticket_id = updates_union.ticket_id\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_field_history", "package": null, "version": null}, {"name": "stg_zendesk__ticket_comment", "package": null, "version": null}, {"name": "stg_zendesk__ticket", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_string"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_field_history", "model.zendesk_source.stg_zendesk__ticket_comment", "model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__updates.sql", 
"compiled": true, "compiled_code": "with ticket_history as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_field_history`\n\n), ticket_comment as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_comment`\n\n), tickets as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket`\n\n), updates_union as (\n select \n ticket_id,\n field_name,\n value,\n null as is_public,\n user_id,\n valid_starting_at,\n valid_ending_at\n from ticket_history\n\n union all\n\n select\n ticket_id,\n cast('comment' as string) as field_name,\n body as value,\n is_public,\n user_id,\n created_at as valid_starting_at,\n lead(created_at) over (partition by ticket_id order by created_at) as valid_ending_at\n from ticket_comment\n\n), final as (\n select\n updates_union.*,\n tickets.created_at as ticket_created_date\n from updates_union\n\n left join tickets\n on tickets.ticket_id = updates_union.ticket_id\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_historical_assignee": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__ticket_historical_assignee", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__ticket_historical_assignee.sql", "original_file_path": "models/intermediate/int_zendesk__ticket_historical_assignee.sql", "unique_id": "model.zendesk.int_zendesk__ticket_historical_assignee", "fqn": ["zendesk", "intermediate", "int_zendesk__ticket_historical_assignee"], "alias": "int_zendesk__ticket_historical_assignee", "checksum": {"name": "sha256", "checksum": "7ae5d5632274b7ccf900910f272cf791e7e976e48fbd170adca647955ab5e2ae"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1715700424.280921, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_assignee`", "raw_code": "with assignee_updates as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n where field_name = 'assignee_id'\n\n), calculate_metrics as (\n select\n ticket_id,\n field_name as assignee_id,\n value,\n ticket_created_date,\n valid_starting_at,\n lag(valid_starting_at) over (partition by ticket_id order by valid_starting_at) as previous_update,\n lag(value) over (partition by ticket_id order by valid_starting_at) as previous_assignee,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at, 
ticket_id rows unbounded preceding) as first_agent_assignment_date,\n first_value(value) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_assignee_id,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_agent_assignment_date,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_assignee_id,\n count(value) over (partition by ticket_id) as assignee_stations_count\n from assignee_updates\n\n), unassigned_time as (\n select\n ticket_id,\n sum(case when assignee_id is not null and previous_assignee is null \n then {{ dbt.datediff(\"coalesce(previous_update, ticket_created_date)\", \"valid_starting_at\", 'second') }} / 60\n else 0\n end) as ticket_unassigned_duration_calendar_minutes,\n count(distinct value) as unique_assignee_count\n from calculate_metrics\n\n group by 1\n\n), window_group as (\n select\n calculate_metrics.ticket_id,\n calculate_metrics.first_agent_assignment_date,\n calculate_metrics.first_assignee_id,\n calculate_metrics.last_agent_assignment_date,\n calculate_metrics.last_assignee_id,\n calculate_metrics.assignee_stations_count\n from calculate_metrics\n\n {{ dbt_utils.group_by(n=6) }}\n\n), final as (\n select\n window_group.*,\n unassigned_time.unique_assignee_count,\n unassigned_time.ticket_unassigned_duration_calendar_minutes\n from window_group\n\n left join unassigned_time\n using(ticket_id)\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt_utils.group_by"], "nodes": ["model.zendesk.int_zendesk__updates"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__ticket_historical_assignee.sql", "compiled": true, "compiled_code": "with assignee_updates as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__updates`\n where field_name = 'assignee_id'\n\n), calculate_metrics as (\n select\n ticket_id,\n field_name as assignee_id,\n value,\n ticket_created_date,\n valid_starting_at,\n lag(valid_starting_at) over (partition by ticket_id order by valid_starting_at) as previous_update,\n lag(value) over (partition by ticket_id order by valid_starting_at) as previous_assignee,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_agent_assignment_date,\n first_value(value) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_assignee_id,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_agent_assignment_date,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_assignee_id,\n count(value) over (partition by ticket_id) as assignee_stations_count\n from assignee_updates\n\n), unassigned_time as (\n select\n ticket_id,\n sum(case when assignee_id is not null and previous_assignee is null \n then \n\n datetime_diff(\n cast(valid_starting_at as datetime),\n cast(coalesce(previous_update, ticket_created_date) as datetime),\n second\n )\n\n / 60\n else 0\n end) as ticket_unassigned_duration_calendar_minutes,\n count(distinct value) as unique_assignee_count\n 
from calculate_metrics\n\n group by 1\n\n), window_group as (\n select\n calculate_metrics.ticket_id,\n calculate_metrics.first_agent_assignment_date,\n calculate_metrics.first_assignee_id,\n calculate_metrics.last_agent_assignment_date,\n calculate_metrics.last_assignee_id,\n calculate_metrics.assignee_stations_count\n from calculate_metrics\n\n group by 1,2,3,4,5,6\n\n), final as (\n select\n window_group.*,\n unassigned_time.unique_assignee_count,\n unassigned_time.ticket_unassigned_duration_calendar_minutes\n from window_group\n\n left join unassigned_time\n using(ticket_id)\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_historical_status": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__ticket_historical_status", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__ticket_historical_status.sql", "original_file_path": "models/intermediate/int_zendesk__ticket_historical_status.sql", "unique_id": "model.zendesk.int_zendesk__ticket_historical_status", "fqn": ["zendesk", "intermediate", "int_zendesk__ticket_historical_status"], "alias": "int_zendesk__ticket_historical_status", "checksum": {"name": "sha256", "checksum": "1c8a86acea05e857271d7f0a2b8f3ec6c09481ecdb9278f922b2df35d5410a48"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1715700424.283113, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_status`", "raw_code": "-- To do -- can we delete ticket_status_counter and unique_status_counter?\n\nwith ticket_status_history as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n where field_name = 'status'\n\n)\n\n select\n \n ticket_id,\n valid_starting_at,\n valid_ending_at,\n {{ dbt.datediff(\n 'valid_starting_at',\n \"coalesce(valid_ending_at, \" ~ dbt.current_timestamp_backcompat() ~ \")\",\n 'minute') }} as status_duration_calendar_minutes,\n value as status,\n -- MIGHT BE ABLE TO DELETE ROWS BELOW\n row_number() over (partition by ticket_id order by valid_starting_at) as ticket_status_counter,\n row_number() over (partition by ticket_id, value order by valid_starting_at) as unique_status_counter\n\n from ticket_status_history", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.dbt.datediff"], "nodes": ["model.zendesk.int_zendesk__updates"]}, "compiled_path": 
"target/compiled/zendesk/models/intermediate/int_zendesk__ticket_historical_status.sql", "compiled": true, "compiled_code": "-- To do -- can we delete ticket_status_counter and unique_status_counter?\n\nwith ticket_status_history as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__updates`\n where field_name = 'status'\n\n)\n\n select\n \n ticket_id,\n valid_starting_at,\n valid_ending_at,\n \n\n datetime_diff(\n cast(coalesce(valid_ending_at, current_timestamp) as datetime),\n cast(valid_starting_at as datetime),\n minute\n )\n\n as status_duration_calendar_minutes,\n value as status,\n -- MIGHT BE ABLE TO DELETE ROWS BELOW\n row_number() over (partition by ticket_id order by valid_starting_at) as ticket_status_counter,\n row_number() over (partition by ticket_id, value order by valid_starting_at) as unique_status_counter\n\n from ticket_status_history", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__user_aggregates": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__user_aggregates", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__user_aggregates.sql", "original_file_path": "models/intermediate/int_zendesk__user_aggregates.sql", "unique_id": "model.zendesk.int_zendesk__user_aggregates", "fqn": ["zendesk", "intermediate", "int_zendesk__user_aggregates"], "alias": "int_zendesk__user_aggregates", "checksum": {"name": "sha256", "checksum": "ae23565fdc62d13c33ddb03f3b25a5e288ec6e6ffe6b57cb01496be6ecd2b73f"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1715700424.2852612, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__user_aggregates`", "raw_code": "with users as (\n select *\n from {{ ref('stg_zendesk__user') }}\n\n--If you use user tags this will be included, if not it will be ignored.\n{% if var('using_user_tags', True) %}\n), user_tags as (\n\n select *\n from {{ ref('stg_zendesk__user_tag') }}\n \n), user_tag_aggregate as (\n select\n user_tags.user_id,\n {{ fivetran_utils.string_agg( 'user_tags.tags', \"', '\" )}} as user_tags\n from user_tags\n group by 1\n\n{% endif %}\n\n), final as (\n select \n users.*\n\n --If you use user tags this will be included, if not it will be ignored.\n {% if var('using_user_tags', True) %}\n ,user_tag_aggregate.user_tags\n {% endif %}\n from users\n\n --If you use user tags this will be included, if not it will be ignored.\n {% if var('using_user_tags', True) %}\n left join 
user_tag_aggregate\n using(user_id)\n {% endif %}\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__user", "package": null, "version": null}, {"name": "stg_zendesk__user_tag", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.string_agg"], "nodes": ["model.zendesk_source.stg_zendesk__user", "model.zendesk_source.stg_zendesk__user_tag"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__user_aggregates.sql", "compiled": true, "compiled_code": "with users as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user`\n\n--If you use user tags this will be included, if not it will be ignored.\n\n), user_tags as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user_tag`\n \n), user_tag_aggregate as (\n select\n user_tags.user_id,\n \n string_agg(user_tags.tags, ', ')\n\n as user_tags\n from user_tags\n group by 1\n\n\n\n), final as (\n select \n users.*\n\n --If you use user tags this will be included, if not it will be ignored.\n \n ,user_tag_aggregate.user_tags\n \n from users\n\n --If you use user tags this will be included, if not it will be ignored.\n \n left join user_tag_aggregate\n using(user_id)\n \n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__schedule_spine": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__schedule_spine", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__schedule_spine.sql", "original_file_path": "models/intermediate/int_zendesk__schedule_spine.sql", "unique_id": "model.zendesk.int_zendesk__schedule_spine", "fqn": ["zendesk", "intermediate", "int_zendesk__schedule_spine"], "alias": "int_zendesk__schedule_spine", "checksum": {"name": "sha256", "checksum": "7f1a9c1fa0958132d288a2a8f980d661f6f04662b0573382c9a434574a8daf6b"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1715700424.288827, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__schedule_spine`", "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\n/*\n The purpose of this model is to create a spine of appropriate timezone offsets to use for schedules, as offsets may change due to Daylight Savings.\n End result will include `valid_from` and `valid_until` columns which we will use downstream to determine which schedule-offset 
to associate with each ticket (ie standard time vs daylight time)\n*/\n\nwith timezone as (\n\n select *\n from {{ var('time_zone') }}\n\n), daylight_time as (\n\n select *\n from {{ var('daylight_time') }}\n\n), schedule as (\n\n select *\n from {{ var('schedule') }} \n\n-- in the below CTE we want to explode out each holiday period into individual days, to prevent potential fanouts downstream in joins to schedules.\n), schedule_holiday as ( \n\n select\n _fivetran_synced,\n cast(date_day as {{ dbt.type_timestamp() }} ) as holiday_start_date_at, -- For each day within a holiday we want to give it its own record. In the later CTE holiday_start_end_times, we transform these timestamps into minutes-from-beginning-of-the-week.\n cast(date_day as {{ dbt.type_timestamp() }} ) as holiday_end_date_at, -- Since each day within a holiday now gets its own record, the end_date will then be the same day as the start_date. In the later CTE holiday_start_end_times, we transform these timestamps into minutes-from-beginning-of-the-week.\n holiday_id,\n holiday_name,\n schedule_id\n\n from {{ var('schedule_holiday') }} \n inner join {{ ref('int_zendesk__calendar_spine') }} \n on holiday_start_date_at <= cast(date_day as {{ dbt.type_timestamp() }} )\n and holiday_end_date_at >= cast(date_day as {{ dbt.type_timestamp() }} )\n\n), timezone_with_dt as (\n\n select \n timezone.*,\n daylight_time.daylight_start_utc,\n daylight_time.daylight_end_utc,\n daylight_time.daylight_offset_minutes\n\n from timezone \n left join daylight_time \n on timezone.time_zone = daylight_time.time_zone\n\n), order_timezone_dt as (\n\n select \n *,\n -- will be null for timezones without any daylight savings records (and the first entry)\n -- we will coalesce the first entry date with .... the X years ago\n lag(daylight_end_utc, 1) over (partition by time_zone order by daylight_end_utc asc) as last_daylight_end_utc,\n -- will be null for timezones without any daylight savings records (and the last entry)\n -- we will coalesce the last entry date with the current date \n lead(daylight_start_utc, 1) over (partition by time_zone order by daylight_start_utc asc) as next_daylight_start_utc\n\n from timezone_with_dt\n\n), split_timezones as (\n\n -- standard schedule (includes timezones without DT)\n -- starts: when the last Daylight Savings ended\n -- ends: when the next Daylight Savings starts\n select \n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- last_daylight_end_utc is null for the first record of the time_zone's daylight time, or if the TZ doesn't use DT\n coalesce(last_daylight_end_utc, cast('1970-01-01' as date)) as valid_from,\n\n -- daylight_start_utc is null for timezones that don't use DT\n coalesce(daylight_start_utc, cast( {{ dbt.dateadd('year', 1, dbt.current_timestamp_backcompat()) }} as date)) as valid_until\n\n from order_timezone_dt\n\n union all \n\n -- DT schedule (excludes timezones without it)\n -- starts: when this Daylight Savings started\n -- ends: when this Daylight Savings ends\n select \n time_zone,\n -- Pacific Time is -8h during standard time and -7h during DT\n standard_offset_minutes + daylight_offset_minutes as offset_minutes,\n daylight_start_utc as valid_from,\n daylight_end_utc as valid_until\n\n from order_timezone_dt\n where daylight_offset_minutes is not null\n\n union all\n\n select\n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- Get the latest daylight_end_utc time and set that as the valid_from\n max(daylight_end_utc) as valid_from,\n\n -- If the 
latest_daylight_end_time_utc is less than todays timestamp, that means DST has ended. Therefore, we will make the valid_until in the future.\n cast( {{ dbt.dateadd('year', 1, dbt.current_timestamp_backcompat()) }} as date) as valid_until\n\n from order_timezone_dt\n group by 1, 2\n -- We only want to apply this logic to time_zone's that had daylight saving time and it ended at a point. For example, Hong Kong ended DST in 1979.\n having cast(max(daylight_end_utc) as date) < cast({{ dbt.current_timestamp_backcompat() }} as date)\n\n), calculate_schedules as (\n\n select \n schedule.schedule_id,\n schedule.time_zone,\n schedule.start_time,\n schedule.end_time,\n schedule.created_at,\n schedule.schedule_name,\n schedule.start_time - coalesce(split_timezones.offset_minutes, 0) as start_time_utc,\n schedule.end_time - coalesce(split_timezones.offset_minutes, 0) as end_time_utc,\n coalesce(split_timezones.offset_minutes, 0) as offset_minutes_to_add,\n -- we'll use these to determine which schedule version to associate tickets with\n cast(split_timezones.valid_from as {{ dbt.type_timestamp() }}) as valid_from,\n cast(split_timezones.valid_until as {{ dbt.type_timestamp() }}) as valid_until\n\n from schedule\n left join split_timezones\n on split_timezones.time_zone = schedule.time_zone\n\n-- Now we need take holiday's into consideration and perform the following transformations to account for Holidays in existing schedules\n), holiday_start_end_times as (\n\n select\n calculate_schedules.*,\n schedule_holiday.holiday_name,\n schedule_holiday.holiday_start_date_at,\n cast({{ dbt.dateadd(\"second\", \"86400\", \"schedule_holiday.holiday_end_date_at\") }} as {{ dbt.type_timestamp() }}) as holiday_end_date_at, -- add 24*60*60 seconds\n cast({{ dbt_date.week_start(\"schedule_holiday.holiday_start_date_at\") }} as {{ dbt.type_timestamp() }}) as holiday_week_start,\n cast({{ dbt_date.week_end(\"schedule_holiday.holiday_end_date_at\") }} as {{ dbt.type_timestamp() }}) as holiday_week_end\n from schedule_holiday\n inner join calculate_schedules\n on calculate_schedules.schedule_id = schedule_holiday.schedule_id\n and schedule_holiday.holiday_start_date_at >= calculate_schedules.valid_from \n and schedule_holiday.holiday_start_date_at < calculate_schedules.valid_until\n\n-- Let's calculate the start and end date of the Holiday in terms of minutes from Sunday (like other Zendesk schedules)\n), holiday_minutes as(\n\n select\n holiday_start_end_times.*,\n {{ dbt.datediff(\"holiday_week_start\", \"holiday_start_date_at\", \"minute\") }} - coalesce(timezone.standard_offset_minutes, 0) as minutes_from_sunday_start,\n {{ dbt.datediff(\"holiday_week_start\", \"holiday_end_date_at\", \"minute\") }} - coalesce(timezone.standard_offset_minutes, 0) as minutes_from_sunday_end\n from holiday_start_end_times\n left join timezone\n on timezone.time_zone = holiday_start_end_times.time_zone\n\n-- Determine which schedule days include a holiday\n), holiday_check as (\n\n select\n *,\n case when minutes_from_sunday_start < start_time_utc and minutes_from_sunday_end > end_time_utc \n then holiday_name \n end as holiday_name_check\n from holiday_minutes\n\n-- Consolidate the holiday records that were just created\n), holiday_consolidated as (\n\n select \n schedule_id, \n time_zone, \n schedule_name, \n valid_from, \n valid_until, \n start_time_utc, \n end_time_utc, \n holiday_week_start,\n cast({{ dbt.dateadd(\"second\", \"86400\", \"holiday_week_end\") }} as {{ dbt.type_timestamp() }}) as holiday_week_end,\n 
max(holiday_name_check) as holiday_name_check\n from holiday_check\n {{ dbt_utils.group_by(n=9) }}\n\n-- Since we have holiday schedules and normal schedules, we need to union them into a holistic schedule spine\n), spine_union as (\n\n select\n schedule_id, \n time_zone, \n schedule_name, \n valid_from, \n valid_until, \n start_time_utc, \n end_time_utc, \n holiday_week_start,\n holiday_week_end,\n holiday_name_check\n from holiday_consolidated\n\n union all\n\n select\n schedule_id, \n time_zone, \n schedule_name, \n valid_from, \n valid_until, \n start_time_utc, \n end_time_utc, \n null as holiday_week_start,\n null as holiday_week_end,\n null as holiday_name_check\n from calculate_schedules\n\n-- Now that we have an understanding of which weeks are holiday's let's consolidate them with non holiday weeks\n), all_periods as (\n\n select distinct\n schedule_id,\n holiday_week_start as period_start,\n holiday_week_end as period_end,\n start_time_utc,\n end_time_utc,\n holiday_name_check,\n true as is_holiday_week\n from spine_union\n where holiday_week_start is not null\n and holiday_week_end is not null\n\n union all\n\n select distinct\n schedule_id,\n valid_from as period_start,\n valid_until as period_end,\n start_time_utc,\n end_time_utc,\n cast(null as {{ dbt.type_string() }}) as holiday_name_check,\n false as is_holiday_week\n from spine_union\n\n-- We have holiday and non holiday schedules together, now let's sort them to understand the previous end and next start of neighboring schedules\n), sorted_periods as (\n\n select distinct\n *,\n lag(period_end) over (partition by schedule_id order by period_start, start_time_utc) as prev_end,\n lead(period_start) over (partition by schedule_id order by period_start, start_time_utc) as next_start\n from all_periods\n\n-- We need to adjust some non holiday schedules in order to properly fill holiday gaps in the schedules later down the transformation\n), non_holiday_period_adjustments as (\n\n select\n schedule_id, \n period_start, \n period_end,\n prev_end,\n next_start,\n -- taking first_value/last_value because prev_end and next_start are inconsistent within the schedule partitions -- they all include a record that is outside the partition. so we need to ignore those erroneous records that slip in\n coalesce(greatest(case \n when not is_holiday_week and prev_end is not null then first_value(prev_end) over (partition by schedule_id, period_start order by start_time_utc rows between unbounded preceding and unbounded following)\n else period_start\n end, period_start), period_start) as valid_from,\n coalesce(case \n when not is_holiday_week and next_start is not null then last_value(next_start) over (partition by schedule_id, period_start order by start_time_utc rows between unbounded preceding and unbounded following)\n else period_end\n end, period_end) as valid_until,\n start_time_utc,\n end_time_utc,\n holiday_name_check,\n is_holiday_week\n from sorted_periods\n\n-- A few window function results will be leveraged downstream. 
Let's generate them now.\n), gap_starter as (\n select \n *,\n max(period_end) over (partition by schedule_id) as max_valid_until,\n last_value(next_start) over (partition by schedule_id, period_start order by valid_until rows between unbounded preceding and unbounded following) as lead_next_start,\n first_value(prev_end) over (partition by schedule_id, valid_from order by start_time_utc rows between unbounded preceding and unbounded following) as first_prev_end\n from non_holiday_period_adjustments\n\n-- There may be gaps in holiday and non holiday schedules, so we need to identify where these gaps are\n), gap_adjustments as(\n\n select \n *,\n -- In order to identify the gaps we check to see if the valid_from and previous valid_until are right next to one. If we add two hours to the previous valid_until it should always be greater than the current valid_from.\n -- However, if the valid_from is greater instead then we can identify that this period has a gap that needs to be filled.\n case \n when cast({{ dbt.dateadd(\"hour\", \"2\", \"valid_until\") }} as {{ dbt.type_timestamp() }}) < cast(lead_next_start as {{ dbt.type_timestamp() }})\n then 'gap'\n when (lead_next_start is null and valid_from < max_valid_until and period_end != max_valid_until)\n then 'gap'\n else null\n end as is_schedule_gap\n\n from gap_starter\n\n-- We know where the gaps are, so now lets prime the data to fill those gaps\n), schedule_spine_primer as (\n\n select \n schedule_id, \n valid_from,\n valid_until,\n start_time_utc,\n end_time_utc,\n lead_next_start,\n max_valid_until,\n holiday_name_check,\n is_holiday_week,\n max(is_schedule_gap) over (partition by schedule_id, valid_until) as is_gap_period,\n lead(valid_from) over (partition by schedule_id order by valid_from, start_time_utc) as fill_primer\n from gap_adjustments\n\n-- We know the gaps and where they are, so let's fill them with the following union\n), final_union as (\n\n -- For all gap periods, let's properly create a schedule filled before the holiday.\n select \n schedule_id,\n valid_until as valid_from,\n coalesce(last_value(fill_primer) over (partition by schedule_id, valid_until order by start_time_utc rows between unbounded preceding and unbounded following), max_valid_until) as valid_until,\n start_time_utc, \n end_time_utc, \n cast(null as {{ dbt.type_string() }}) as holiday_name_check,\n false as is_holiday_week\n from schedule_spine_primer\n where is_gap_period is not null\n\n union all\n\n -- Fill all other normal schedules.\n select\n schedule_id, \n valid_from,\n valid_until,\n start_time_utc,\n end_time_utc,\n holiday_name_check,\n is_holiday_week\n from schedule_spine_primer\n\n-- We can finally filter out the holiday_name_check results as the gap filling properly filled in the gaps for holidays\n), final as(\n\n select\n schedule_id, \n valid_from,\n valid_until,\n start_time_utc,\n end_time_utc,\n is_holiday_week\n from final_union\n where holiday_name_check is null\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__time_zone", "package": null, "version": null}, {"name": "stg_zendesk__daylight_time", "package": null, "version": null}, {"name": "stg_zendesk__schedule", "package": null, "version": null}, {"name": "stg_zendesk__schedule_holiday", "package": null, "version": null}, {"name": "int_zendesk__calendar_spine", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.current_timestamp_backcompat", "macro.dbt.dateadd", 
"macro.dbt_date.week_start", "macro.dbt_date.week_end", "macro.dbt.datediff", "macro.dbt_utils.group_by", "macro.dbt.type_string"], "nodes": ["model.zendesk_source.stg_zendesk__time_zone", "model.zendesk_source.stg_zendesk__daylight_time", "model.zendesk_source.stg_zendesk__schedule", "model.zendesk_source.stg_zendesk__schedule_holiday", "model.zendesk.int_zendesk__calendar_spine"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__schedule_spine.sql", "compiled": true, "compiled_code": "\n\n/*\n The purpose of this model is to create a spine of appropriate timezone offsets to use for schedules, as offsets may change due to Daylight Savings.\n End result will include `valid_from` and `valid_until` columns which we will use downstream to determine which schedule-offset to associate with each ticket (ie standard time vs daylight time)\n*/\n\nwith __dbt__cte__int_zendesk__calendar_spine as (\n-- depends_on: `dbt-package-testing`.`zendesk_integration_tests_50`.`ticket_data`\n\nwith spine as (\n\n \n \n \n\n \n \n \n \n\n \n\n \n\n\n\n\n\n\n\n\nwith rawdata as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n + \n \n p8.generated_number * power(2, 8)\n + \n \n p9.generated_number * power(2, 9)\n + \n \n p10.generated_number * power(2, 10)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n cross join \n \n p as p8\n cross join \n \n p as p9\n cross join \n \n p as p10\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 1559\n order by generated_number\n\n\n\n),\n\nall_periods as (\n\n select (\n \n\n datetime_add(\n cast( '2020-02-13' as datetime),\n interval row_number() over (order by 1) - 1 day\n )\n\n\n ) as date_day\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_day <= \n\n datetime_add(\n cast( current_date as datetime),\n interval 1 week\n )\n\n\n\n)\n\nselect * from filtered\n\n\n\n), recast as (\n\n select cast(date_day as date) as date_day\n from spine\n\n)\n\nselect *\nfrom recast\n), timezone as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__time_zone`\n\n), daylight_time as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__daylight_time`\n\n), schedule as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__schedule` \n\n-- in the below CTE we want to explode out each holiday period into individual days, to prevent potential fanouts downstream in joins to schedules.\n), schedule_holiday as ( \n\n select\n _fivetran_synced,\n cast(date_day as timestamp ) as holiday_start_date_at, -- For each day within a holiday we want to give it its own record. 
In the later CTE holiday_start_end_times, we transform these timestamps into minutes-from-beginning-of-the-week.\n cast(date_day as timestamp ) as holiday_end_date_at, -- Since each day within a holiday now gets its own record, the end_date will then be the same day as the start_date. In the later CTE holiday_start_end_times, we transform these timestamps into minutes-from-beginning-of-the-week.\n holiday_id,\n holiday_name,\n schedule_id\n\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__schedule_holiday` \n inner join __dbt__cte__int_zendesk__calendar_spine \n on holiday_start_date_at <= cast(date_day as timestamp )\n and holiday_end_date_at >= cast(date_day as timestamp )\n\n), timezone_with_dt as (\n\n select \n timezone.*,\n daylight_time.daylight_start_utc,\n daylight_time.daylight_end_utc,\n daylight_time.daylight_offset_minutes\n\n from timezone \n left join daylight_time \n on timezone.time_zone = daylight_time.time_zone\n\n), order_timezone_dt as (\n\n select \n *,\n -- will be null for timezones without any daylight savings records (and the first entry)\n -- we will coalesce the first entry date with .... the X years ago\n lag(daylight_end_utc, 1) over (partition by time_zone order by daylight_end_utc asc) as last_daylight_end_utc,\n -- will be null for timezones without any daylight savings records (and the last entry)\n -- we will coalesce the last entry date with the current date \n lead(daylight_start_utc, 1) over (partition by time_zone order by daylight_start_utc asc) as next_daylight_start_utc\n\n from timezone_with_dt\n\n), split_timezones as (\n\n -- standard schedule (includes timezones without DT)\n -- starts: when the last Daylight Savings ended\n -- ends: when the next Daylight Savings starts\n select \n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- last_daylight_end_utc is null for the first record of the time_zone's daylight time, or if the TZ doesn't use DT\n coalesce(last_daylight_end_utc, cast('1970-01-01' as date)) as valid_from,\n\n -- daylight_start_utc is null for timezones that don't use DT\n coalesce(daylight_start_utc, cast( \n\n datetime_add(\n cast( current_timestamp as datetime),\n interval 1 year\n )\n\n as date)) as valid_until\n\n from order_timezone_dt\n\n union all \n\n -- DT schedule (excludes timezones without it)\n -- starts: when this Daylight Savings started\n -- ends: when this Daylight Savings ends\n select \n time_zone,\n -- Pacific Time is -8h during standard time and -7h during DT\n standard_offset_minutes + daylight_offset_minutes as offset_minutes,\n daylight_start_utc as valid_from,\n daylight_end_utc as valid_until\n\n from order_timezone_dt\n where daylight_offset_minutes is not null\n\n union all\n\n select\n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- Get the latest daylight_end_utc time and set that as the valid_from\n max(daylight_end_utc) as valid_from,\n\n -- If the latest_daylight_end_time_utc is less than todays timestamp, that means DST has ended. Therefore, we will make the valid_until in the future.\n cast( \n\n datetime_add(\n cast( current_timestamp as datetime),\n interval 1 year\n )\n\n as date) as valid_until\n\n from order_timezone_dt\n group by 1, 2\n -- We only want to apply this logic to time_zone's that had daylight saving time and it ended at a point. 
For example, Hong Kong ended DST in 1979.\n having cast(max(daylight_end_utc) as date) < cast(current_timestamp as date)\n\n), calculate_schedules as (\n\n select \n schedule.schedule_id,\n schedule.time_zone,\n schedule.start_time,\n schedule.end_time,\n schedule.created_at,\n schedule.schedule_name,\n schedule.start_time - coalesce(split_timezones.offset_minutes, 0) as start_time_utc,\n schedule.end_time - coalesce(split_timezones.offset_minutes, 0) as end_time_utc,\n coalesce(split_timezones.offset_minutes, 0) as offset_minutes_to_add,\n -- we'll use these to determine which schedule version to associate tickets with\n cast(split_timezones.valid_from as timestamp) as valid_from,\n cast(split_timezones.valid_until as timestamp) as valid_until\n\n from schedule\n left join split_timezones\n on split_timezones.time_zone = schedule.time_zone\n\n-- Now we need take holiday's into consideration and perform the following transformations to account for Holidays in existing schedules\n), holiday_start_end_times as (\n\n select\n calculate_schedules.*,\n schedule_holiday.holiday_name,\n schedule_holiday.holiday_start_date_at,\n cast(\n\n datetime_add(\n cast( schedule_holiday.holiday_end_date_at as datetime),\n interval 86400 second\n )\n\n as timestamp) as holiday_end_date_at, -- add 24*60*60 seconds\n cast(cast(timestamp_trunc(\n cast(schedule_holiday.holiday_start_date_at as timestamp),\n week\n ) as date) as timestamp) as holiday_week_start,\n cast(cast(\n \n\n datetime_add(\n cast( \n\n datetime_add(\n cast( timestamp_trunc(\n cast(schedule_holiday.holiday_end_date_at as timestamp),\n week\n ) as datetime),\n interval 1 week\n )\n\n as datetime),\n interval -1 day\n )\n\n\n as date) as timestamp) as holiday_week_end\n from schedule_holiday\n inner join calculate_schedules\n on calculate_schedules.schedule_id = schedule_holiday.schedule_id\n and schedule_holiday.holiday_start_date_at >= calculate_schedules.valid_from \n and schedule_holiday.holiday_start_date_at < calculate_schedules.valid_until\n\n-- Let's calculate the start and end date of the Holiday in terms of minutes from Sunday (like other Zendesk schedules)\n), holiday_minutes as(\n\n select\n holiday_start_end_times.*,\n \n\n datetime_diff(\n cast(holiday_start_date_at as datetime),\n cast(holiday_week_start as datetime),\n minute\n )\n\n - coalesce(timezone.standard_offset_minutes, 0) as minutes_from_sunday_start,\n \n\n datetime_diff(\n cast(holiday_end_date_at as datetime),\n cast(holiday_week_start as datetime),\n minute\n )\n\n - coalesce(timezone.standard_offset_minutes, 0) as minutes_from_sunday_end\n from holiday_start_end_times\n left join timezone\n on timezone.time_zone = holiday_start_end_times.time_zone\n\n-- Determine which schedule days include a holiday\n), holiday_check as (\n\n select\n *,\n case when minutes_from_sunday_start < start_time_utc and minutes_from_sunday_end > end_time_utc \n then holiday_name \n end as holiday_name_check\n from holiday_minutes\n\n-- Consolidate the holiday records that were just created\n), holiday_consolidated as (\n\n select \n schedule_id, \n time_zone, \n schedule_name, \n valid_from, \n valid_until, \n start_time_utc, \n end_time_utc, \n holiday_week_start,\n cast(\n\n datetime_add(\n cast( holiday_week_end as datetime),\n interval 86400 second\n )\n\n as timestamp) as holiday_week_end,\n max(holiday_name_check) as holiday_name_check\n from holiday_check\n group by 1,2,3,4,5,6,7,8,9\n\n-- Since we have holiday schedules and normal schedules, we need to union them into a holistic 
schedule spine\n), spine_union as (\n\n select\n schedule_id, \n time_zone, \n schedule_name, \n valid_from, \n valid_until, \n start_time_utc, \n end_time_utc, \n holiday_week_start,\n holiday_week_end,\n holiday_name_check\n from holiday_consolidated\n\n union all\n\n select\n schedule_id, \n time_zone, \n schedule_name, \n valid_from, \n valid_until, \n start_time_utc, \n end_time_utc, \n null as holiday_week_start,\n null as holiday_week_end,\n null as holiday_name_check\n from calculate_schedules\n\n-- Now that we have an understanding of which weeks are holiday's let's consolidate them with non holiday weeks\n), all_periods as (\n\n select distinct\n schedule_id,\n holiday_week_start as period_start,\n holiday_week_end as period_end,\n start_time_utc,\n end_time_utc,\n holiday_name_check,\n true as is_holiday_week\n from spine_union\n where holiday_week_start is not null\n and holiday_week_end is not null\n\n union all\n\n select distinct\n schedule_id,\n valid_from as period_start,\n valid_until as period_end,\n start_time_utc,\n end_time_utc,\n cast(null as string) as holiday_name_check,\n false as is_holiday_week\n from spine_union\n\n-- We have holiday and non holiday schedules together, now let's sort them to understand the previous end and next start of neighboring schedules\n), sorted_periods as (\n\n select distinct\n *,\n lag(period_end) over (partition by schedule_id order by period_start, start_time_utc) as prev_end,\n lead(period_start) over (partition by schedule_id order by period_start, start_time_utc) as next_start\n from all_periods\n\n-- We need to adjust some non holiday schedules in order to properly fill holiday gaps in the schedules later down the transformation\n), non_holiday_period_adjustments as (\n\n select\n schedule_id, \n period_start, \n period_end,\n prev_end,\n next_start,\n -- taking first_value/last_value because prev_end and next_start are inconsistent within the schedule partitions -- they all include a record that is outside the partition. so we need to ignore those erroneous records that slip in\n coalesce(greatest(case \n when not is_holiday_week and prev_end is not null then first_value(prev_end) over (partition by schedule_id, period_start order by start_time_utc rows between unbounded preceding and unbounded following)\n else period_start\n end, period_start), period_start) as valid_from,\n coalesce(case \n when not is_holiday_week and next_start is not null then last_value(next_start) over (partition by schedule_id, period_start order by start_time_utc rows between unbounded preceding and unbounded following)\n else period_end\n end, period_end) as valid_until,\n start_time_utc,\n end_time_utc,\n holiday_name_check,\n is_holiday_week\n from sorted_periods\n\n-- A few window function results will be leveraged downstream. 
Let's generate them now.\n), gap_starter as (\n select \n *,\n max(period_end) over (partition by schedule_id) as max_valid_until,\n last_value(next_start) over (partition by schedule_id, period_start order by valid_until rows between unbounded preceding and unbounded following) as lead_next_start,\n first_value(prev_end) over (partition by schedule_id, valid_from order by start_time_utc rows between unbounded preceding and unbounded following) as first_prev_end\n from non_holiday_period_adjustments\n\n-- There may be gaps in holiday and non-holiday schedules, so we need to identify where these gaps are\n), gap_adjustments as(\n\n select \n *,\n -- In order to identify the gaps, we check to see if the valid_from and the previous valid_until are right next to one another. If we add two hours to the previous valid_until, it should always be greater than the current valid_from.\n -- However, if the valid_from is greater instead, then we can identify that this period has a gap that needs to be filled.\n case \n when cast(\n\n datetime_add(\n cast( valid_until as datetime),\n interval 2 hour\n )\n\n as timestamp) < cast(lead_next_start as timestamp)\n then 'gap'\n when (lead_next_start is null and valid_from < max_valid_until and period_end != max_valid_until)\n then 'gap'\n else null\n end as is_schedule_gap\n\n from gap_starter\n\n-- We know where the gaps are, so now let's prime the data to fill those gaps\n), schedule_spine_primer as (\n\n select \n schedule_id, \n valid_from,\n valid_until,\n start_time_utc,\n end_time_utc,\n lead_next_start,\n max_valid_until,\n holiday_name_check,\n is_holiday_week,\n max(is_schedule_gap) over (partition by schedule_id, valid_until) as is_gap_period,\n lead(valid_from) over (partition by schedule_id order by valid_from, start_time_utc) as fill_primer\n from gap_adjustments\n\n-- We know the gaps and where they are, so let's fill them with the following union\n), final_union as (\n\n -- For all gap periods, let's properly create a schedule filled before the holiday.\n select \n schedule_id,\n valid_until as valid_from,\n coalesce(last_value(fill_primer) over (partition by schedule_id, valid_until order by start_time_utc rows between unbounded preceding and unbounded following), max_valid_until) as valid_until,\n start_time_utc, \n end_time_utc, \n cast(null as string) as holiday_name_check,\n false as is_holiday_week\n from schedule_spine_primer\n where is_gap_period is not null\n\n union all\n\n -- Fill all other normal schedules.\n select\n schedule_id, \n valid_from,\n valid_until,\n start_time_utc,\n end_time_utc,\n holiday_name_check,\n is_holiday_week\n from schedule_spine_primer\n\n-- We can finally filter out the holiday_name_check results as the gap filling properly filled in the gaps for holidays\n), final as(\n\n select\n schedule_id, \n valid_from,\n valid_until,\n start_time_utc,\n end_time_utc,\n is_holiday_week\n from final_union\n where holiday_name_check is null\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__calendar_spine", "sql": " __dbt__cte__int_zendesk__calendar_spine as (\n-- depends_on: `dbt-package-testing`.`zendesk_integration_tests_50`.`ticket_data`\n\nwith spine as (\n\n \n \n \n\n \n \n \n \n\n \n\n \n\n\n\n\n\n\n\n\nwith rawdata as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n 
p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n + \n \n p8.generated_number * power(2, 8)\n + \n \n p9.generated_number * power(2, 9)\n + \n \n p10.generated_number * power(2, 10)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n cross join \n \n p as p8\n cross join \n \n p as p9\n cross join \n \n p as p10\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 1559\n order by generated_number\n\n\n\n),\n\nall_periods as (\n\n select (\n \n\n datetime_add(\n cast( '2020-02-13' as datetime),\n interval row_number() over (order by 1) - 1 day\n )\n\n\n ) as date_day\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_day <= \n\n datetime_add(\n cast( current_date as datetime),\n interval 1 week\n )\n\n\n\n)\n\nselect * from filtered\n\n\n\n), recast as (\n\n select cast(date_day as date) as date_day\n from spine\n\n)\n\nselect *\nfrom recast\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_schedules": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__ticket_schedules", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__ticket_schedules.sql", "original_file_path": "models/intermediate/int_zendesk__ticket_schedules.sql", "unique_id": "model.zendesk.int_zendesk__ticket_schedules", "fqn": ["zendesk", "intermediate", "int_zendesk__ticket_schedules"], "alias": "int_zendesk__ticket_schedules", "checksum": {"name": "sha256", "checksum": "cb5b35bbbe0e39c8ef56fcffc75db481246fe4863cd5c80d4a6dd43d956f93af"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1715700424.296889, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_schedules`", "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\nwith ticket as (\n \n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), ticket_schedule as (\n \n select *\n from {{ ref('stg_zendesk__ticket_schedule') }}\n\n), schedule as (\n \n select *\n from {{ ref('stg_zendesk__schedule') }}\n\n\n), default_schedule_events as (\n-- Goal: understand the working schedules applied to tickets, so that we can then determine the applicable business hours/schedule.\n-- 
Your default schedule is used for all tickets, unless you set up a trigger to apply a specific schedule to specific tickets.\n\n-- This portion of the query creates ticket_schedules for these \"default\" schedules, as the ticket_schedule table only includes\n-- trigger schedules\n\n{% if execute %}\n\n {% set default_schedule_id_query %}\n with set_default_schedule_flag as (\n select \n row_number() over (order by created_at) = 1 as is_default_schedule,\n id\n from {{ source('zendesk','schedule') }}\n where not coalesce(_fivetran_deleted, false)\n )\n select \n id\n from set_default_schedule_flag\n where is_default_schedule\n\n {% endset %}\n\n {% set default_schedule_id = run_query(default_schedule_id_query).columns[0][0]|string %}\n\n {% endif %}\n\n select\n ticket.ticket_id,\n ticket.created_at as schedule_created_at,\n '{{default_schedule_id}}' as schedule_id\n from ticket\n left join ticket_schedule as first_schedule\n on first_schedule.ticket_id = ticket.ticket_id\n and {{ fivetran_utils.timestamp_add('second', -5, 'first_schedule.created_at') }} <= ticket.created_at\n and first_schedule.created_at >= ticket.created_at \n where first_schedule.ticket_id is null\n\n), schedule_events as (\n \n select\n *\n from default_schedule_events\n \n union all\n \n select \n ticket_id,\n created_at as schedule_created_at,\n schedule_id\n from ticket_schedule\n\n), ticket_schedules as (\n \n select \n ticket_id,\n schedule_id,\n schedule_created_at,\n coalesce(lead(schedule_created_at) over (partition by ticket_id order by schedule_created_at)\n , {{ fivetran_utils.timestamp_add(\"hour\", 1000, \"\" ~ dbt.current_timestamp_backcompat() ~ \"\") }} ) as schedule_invalidated_at\n from schedule_events\n\n)\nselect\n *\nfrom ticket_schedules", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "stg_zendesk__ticket_schedule", "package": null, "version": null}, {"name": "stg_zendesk__schedule", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.timestamp_add", "macro.dbt.current_timestamp_backcompat", "macro.dbt.run_query"], "nodes": ["model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__ticket_schedule", "model.zendesk_source.stg_zendesk__schedule"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__ticket_schedules.sql", "compiled": true, "compiled_code": "\n\nwith ticket as (\n \n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket`\n\n), ticket_schedule as (\n \n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_schedule`\n\n), schedule as (\n \n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__schedule`\n\n\n), default_schedule_events as (\n-- Goal: understand the working schedules applied to tickets, so that we can then determine the applicable business hours/schedule.\n-- Your default schedule is used for all tickets, unless you set up a trigger to apply a specific schedule to specific tickets.\n\n-- This portion of the query creates ticket_schedules for these \"default\" schedules, as the ticket_schedule table only includes\n-- trigger schedules\n\n\n\n \n\n \n\n \n\n select\n ticket.ticket_id,\n ticket.created_at as schedule_created_at,\n '360000310393' as schedule_id\n from ticket\n left join ticket_schedule as first_schedule\n on first_schedule.ticket_id = ticket.ticket_id\n and \n\n 
timestamp_add(first_schedule.created_at, interval -5 second)\n\n <= ticket.created_at\n and first_schedule.created_at >= ticket.created_at \n where first_schedule.ticket_id is null\n\n), schedule_events as (\n \n select\n *\n from default_schedule_events\n \n union all\n \n select \n ticket_id,\n created_at as schedule_created_at,\n schedule_id\n from ticket_schedule\n\n), ticket_schedules as (\n \n select \n ticket_id,\n schedule_id,\n schedule_created_at,\n coalesce(lead(schedule_created_at) over (partition by ticket_id order by schedule_created_at)\n , \n\n timestamp_add(current_timestamp, interval 1000 hour)\n\n ) as schedule_invalidated_at\n from schedule_events\n\n)\nselect\n *\nfrom ticket_schedules", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__assignee_updates": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__assignee_updates", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__assignee_updates.sql", "original_file_path": "models/intermediate/int_zendesk__assignee_updates.sql", "unique_id": "model.zendesk.int_zendesk__assignee_updates", "fqn": ["zendesk", "intermediate", "int_zendesk__assignee_updates"], "alias": "int_zendesk__assignee_updates", "checksum": {"name": "sha256", "checksum": "951ec2d4f8c9a7470a50cfc6e01838a090472a9f18fccd2dd65097d309d43aed"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1715700424.3002071, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__assignee_updates`", "raw_code": "with ticket_updates as (\n select *\n from {{ ref('int_zendesk__updates') }}\n\n), ticket as (\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), ticket_requester as (\n select\n ticket.ticket_id,\n ticket.assignee_id,\n ticket_updates.valid_starting_at\n\n from ticket\n\n left join ticket_updates\n on ticket_updates.ticket_id = ticket.ticket_id\n and ticket_updates.user_id = ticket.assignee_id\n\n), final as (\n select \n ticket_id,\n assignee_id,\n max(valid_starting_at) as last_updated,\n count(*) as total_updates\n from ticket_requester\n\n group by 1, 2\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "stg_zendesk__ticket", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": 
"target/compiled/zendesk/models/intermediate/int_zendesk__assignee_updates.sql", "compiled": true, "compiled_code": "with ticket_updates as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__updates`\n\n), ticket as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket`\n\n), ticket_requester as (\n select\n ticket.ticket_id,\n ticket.assignee_id,\n ticket_updates.valid_starting_at\n\n from ticket\n\n left join ticket_updates\n on ticket_updates.ticket_id = ticket.ticket_id\n and ticket_updates.user_id = ticket.assignee_id\n\n), final as (\n select \n ticket_id,\n assignee_id,\n max(valid_starting_at) as last_updated,\n count(*) as total_updates\n from ticket_requester\n\n group by 1, 2\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__comment_metrics": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__comment_metrics", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__comment_metrics.sql", "original_file_path": "models/intermediate/int_zendesk__comment_metrics.sql", "unique_id": "model.zendesk.int_zendesk__comment_metrics", "fqn": ["zendesk", "intermediate", "int_zendesk__comment_metrics"], "alias": "int_zendesk__comment_metrics", "checksum": {"name": "sha256", "checksum": "b82ef2f9d10d6344cd46dcce904fe263a3b5b2cc12fd9b5c662e8b477a4b5f95"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1715700424.301132, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__comment_metrics`", "raw_code": "with ticket_comments as (\n\n select *\n from {{ ref('int_zendesk__comments_enriched') }}\n),\n\ncomment_counts as (\n select\n ticket_id,\n last_comment_added_at,\n sum(case when commenter_role = 'internal_comment' and is_public = true\n then 1\n else 0\n end) as count_public_agent_comments,\n sum(case when commenter_role = 'internal_comment'\n then 1\n else 0\n end) as count_agent_comments,\n sum(case when commenter_role = 'external_comment'\n then 1\n else 0\n end) as count_end_user_comments,\n sum(case when is_public = true\n then 1\n else 0\n end) as count_public_comments,\n sum(case when is_public = false\n then 1\n else 0\n end) as count_internal_comments,\n count(*) as total_comments,\n count(distinct case when commenter_role = 'internal_comment'\n then user_id\n end) as count_ticket_handoffs,\n sum(case when commenter_role = 'internal_comment' and is_public = true 
and previous_commenter_role != 'first_comment'\n then 1\n else 0\n end) as count_agent_replies\n from ticket_comments\n\n group by 1, 2\n),\n\nfinal as (\n select\n *,\n count_public_agent_comments = 1 as is_one_touch_resolution,\n count_public_agent_comments = 2 as is_two_touch_resolution\n from comment_counts\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__comments_enriched", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__comments_enriched"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__comment_metrics.sql", "compiled": true, "compiled_code": "with __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__updates`\n where field_name = 'comment'\n\n), users as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user`\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n), ticket_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n),\n\ncomment_counts as (\n select\n ticket_id,\n last_comment_added_at,\n sum(case when commenter_role = 'internal_comment' and is_public = true\n then 1\n else 0\n end) as count_public_agent_comments,\n sum(case when commenter_role = 'internal_comment'\n then 1\n else 0\n end) as count_agent_comments,\n sum(case when commenter_role = 'external_comment'\n then 1\n else 0\n end) as count_end_user_comments,\n sum(case when is_public = true\n then 1\n else 0\n end) as count_public_comments,\n sum(case when is_public = false\n then 1\n else 0\n end) as count_internal_comments,\n count(*) as total_comments,\n count(distinct case when commenter_role = 'internal_comment'\n then user_id\n end) as count_ticket_handoffs,\n sum(case when commenter_role = 'internal_comment' and is_public = true and previous_commenter_role != 'first_comment'\n then 1\n else 0\n end) as count_agent_replies\n from ticket_comments\n\n group by 1, 2\n),\n\nfinal as (\n select\n *,\n count_public_agent_comments = 1 as is_one_touch_resolution,\n count_public_agent_comments = 2 as is_two_touch_resolution\n from comment_counts\n)\n\nselect * 
\nfrom final", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__comments_enriched", "sql": " __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__updates`\n where field_name = 'comment'\n\n), users as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user`\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_historical_group": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__ticket_historical_group", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__ticket_historical_group.sql", "original_file_path": "models/intermediate/int_zendesk__ticket_historical_group.sql", "unique_id": "model.zendesk.int_zendesk__ticket_historical_group", "fqn": ["zendesk", "intermediate", "int_zendesk__ticket_historical_group"], "alias": "int_zendesk__ticket_historical_group", "checksum": {"name": "sha256", "checksum": "7d4d72f5d6a7ef73a23ad4be966b00683532fe2a11c9729a8d640752ebee1adc"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1715700424.301923, "relation_name": 
"`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_group`", "raw_code": "with ticket_group_history as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n where field_name = 'group_id'\n\n), group_breakdown as (\n select\n \n ticket_id,\n valid_starting_at,\n valid_ending_at,\n value as group_id\n from ticket_group_history\n\n), final as (\n select\n ticket_id,\n count(group_id) as group_stations_count\n from group_breakdown\n\n group by 1\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__updates"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__ticket_historical_group.sql", "compiled": true, "compiled_code": "with ticket_group_history as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__updates`\n where field_name = 'group_id'\n\n), group_breakdown as (\n select\n \n ticket_id,\n valid_starting_at,\n valid_ending_at,\n value as group_id\n from ticket_group_history\n\n), final as (\n select\n ticket_id,\n count(group_id) as group_stations_count\n from group_breakdown\n\n group by 1\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__requester_updates": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__requester_updates", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__requester_updates.sql", "original_file_path": "models/intermediate/int_zendesk__requester_updates.sql", "unique_id": "model.zendesk.int_zendesk__requester_updates", "fqn": ["zendesk", "intermediate", "int_zendesk__requester_updates"], "alias": "int_zendesk__requester_updates", "checksum": {"name": "sha256", "checksum": "b2d14b09db3cadfb56e4b3dcb55c4f9000e670e3c7c29ef89b249e626e8ba103"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1715700424.302838, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__requester_updates`", "raw_code": "with ticket_updates as (\n select *\n from {{ ref('int_zendesk__updates') }}\n\n), ticket as (\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), ticket_requester as (\n select\n ticket.ticket_id,\n ticket.requester_id,\n ticket_updates.valid_starting_at\n\n from ticket\n\n left join ticket_updates\n on ticket_updates.ticket_id = ticket.ticket_id\n and 
ticket_updates.user_id = ticket.requester_id\n\n), final as (\n select \n ticket_id,\n requester_id,\n max(valid_starting_at) as last_updated,\n count(*) as total_updates\n from ticket_requester\n\n group by 1, 2\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "stg_zendesk__ticket", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__requester_updates.sql", "compiled": true, "compiled_code": "with ticket_updates as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__updates`\n\n), ticket as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket`\n\n), ticket_requester as (\n select\n ticket.ticket_id,\n ticket.requester_id,\n ticket_updates.valid_starting_at\n\n from ticket\n\n left join ticket_updates\n on ticket_updates.ticket_id = ticket.ticket_id\n and ticket_updates.user_id = ticket.requester_id\n\n), final as (\n select \n ticket_id,\n requester_id,\n max(valid_starting_at) as last_updated,\n count(*) as total_updates\n from ticket_requester\n\n group by 1, 2\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_historical_satisfaction": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__ticket_historical_satisfaction", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__ticket_historical_satisfaction.sql", "original_file_path": "models/intermediate/int_zendesk__ticket_historical_satisfaction.sql", "unique_id": "model.zendesk.int_zendesk__ticket_historical_satisfaction", "fqn": ["zendesk", "intermediate", "int_zendesk__ticket_historical_satisfaction"], "alias": "int_zendesk__ticket_historical_satisfaction", "checksum": {"name": "sha256", "checksum": "dce9b5b8705d72688802f99250a8f8a34b8791c3cb440f85efa11f09ebfe3e1d"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1715700424.303742, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_satisfaction`", "raw_code": "with satisfaction_updates as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n where field_name in ('satisfaction_score', 'satisfaction_comment', 'satisfaction_reason_code') 
\n\n), latest_reason as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_reason\n from satisfaction_updates\n\n where field_name = 'satisfaction_reason_code'\n\n), latest_comment as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_comment\n from satisfaction_updates\n\n where field_name = 'satisfaction_comment'\n\n), first_and_latest_score as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_satisfaction_score,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_score\n from satisfaction_updates\n\n where field_name = 'satisfaction_score' and value != 'offered'\n\n), satisfaction_scores as (\n select\n ticket_id,\n count(value) over (partition by ticket_id) as count_satisfaction_scores,\n case when lag(value) over (partition by ticket_id order by valid_starting_at desc) = 'good' and value = 'bad'\n then 1\n else 0\n end as good_to_bad_score,\n case when lag(value) over (partition by ticket_id order by valid_starting_at desc) = 'bad' and value = 'good'\n then 1\n else 0\n end as bad_to_good_score\n from satisfaction_updates\n where field_name = 'satisfaction_score'\n\n), score_group as (\n select\n ticket_id,\n count_satisfaction_scores,\n sum(good_to_bad_score) as total_good_to_bad_score,\n sum(bad_to_good_score) as total_bad_to_good_score\n from satisfaction_scores\n\n group by 1, 2\n\n), window_group as (\n select\n satisfaction_updates.ticket_id,\n latest_reason.latest_satisfaction_reason,\n latest_comment.latest_satisfaction_comment,\n first_and_latest_score.first_satisfaction_score,\n first_and_latest_score.latest_satisfaction_score,\n score_group.count_satisfaction_scores,\n score_group.total_good_to_bad_score,\n score_group.total_bad_to_good_score\n\n from satisfaction_updates\n\n left join latest_reason\n on satisfaction_updates.ticket_id = latest_reason.ticket_id\n\n left join latest_comment\n on satisfaction_updates.ticket_id = latest_comment.ticket_id\n\n left join first_and_latest_score\n on satisfaction_updates.ticket_id = first_and_latest_score.ticket_id\n\n left join score_group\n on satisfaction_updates.ticket_id = score_group.ticket_id\n\n group by 1, 2, 3, 4, 5, 6, 7, 8\n\n), final as (\n select\n ticket_id,\n latest_satisfaction_reason,\n latest_satisfaction_comment,\n first_satisfaction_score,\n latest_satisfaction_score,\n case when count_satisfaction_scores > 0\n then (count_satisfaction_scores - 1) --Subtracting one as the first score is always \"offered\".\n else count_satisfaction_scores\n end as count_satisfaction_scores,\n case when total_good_to_bad_score > 0\n then true\n else false\n end as is_good_to_bad_satisfaction_score,\n case when total_bad_to_good_score > 0\n then true\n else false\n end as is_bad_to_good_satisfaction_score\n from window_group\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__updates"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__ticket_historical_satisfaction.sql", "compiled": true, "compiled_code": "with 
satisfaction_updates as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__updates`\n where field_name in ('satisfaction_score', 'satisfaction_comment', 'satisfaction_reason_code') \n\n), latest_reason as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_reason\n from satisfaction_updates\n\n where field_name = 'satisfaction_reason_code'\n\n), latest_comment as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_comment\n from satisfaction_updates\n\n where field_name = 'satisfaction_comment'\n\n), first_and_latest_score as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_satisfaction_score,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_score\n from satisfaction_updates\n\n where field_name = 'satisfaction_score' and value != 'offered'\n\n), satisfaction_scores as (\n select\n ticket_id,\n count(value) over (partition by ticket_id) as count_satisfaction_scores,\n case when lag(value) over (partition by ticket_id order by valid_starting_at desc) = 'good' and value = 'bad'\n then 1\n else 0\n end as good_to_bad_score,\n case when lag(value) over (partition by ticket_id order by valid_starting_at desc) = 'bad' and value = 'good'\n then 1\n else 0\n end as bad_to_good_score\n from satisfaction_updates\n where field_name = 'satisfaction_score'\n\n), score_group as (\n select\n ticket_id,\n count_satisfaction_scores,\n sum(good_to_bad_score) as total_good_to_bad_score,\n sum(bad_to_good_score) as total_bad_to_good_score\n from satisfaction_scores\n\n group by 1, 2\n\n), window_group as (\n select\n satisfaction_updates.ticket_id,\n latest_reason.latest_satisfaction_reason,\n latest_comment.latest_satisfaction_comment,\n first_and_latest_score.first_satisfaction_score,\n first_and_latest_score.latest_satisfaction_score,\n score_group.count_satisfaction_scores,\n score_group.total_good_to_bad_score,\n score_group.total_bad_to_good_score\n\n from satisfaction_updates\n\n left join latest_reason\n on satisfaction_updates.ticket_id = latest_reason.ticket_id\n\n left join latest_comment\n on satisfaction_updates.ticket_id = latest_comment.ticket_id\n\n left join first_and_latest_score\n on satisfaction_updates.ticket_id = first_and_latest_score.ticket_id\n\n left join score_group\n on satisfaction_updates.ticket_id = score_group.ticket_id\n\n group by 1, 2, 3, 4, 5, 6, 7, 8\n\n), final as (\n select\n ticket_id,\n latest_satisfaction_reason,\n latest_satisfaction_comment,\n first_satisfaction_score,\n latest_satisfaction_score,\n case when count_satisfaction_scores > 0\n then (count_satisfaction_scores - 1) --Subtracting one as the first score is always \"offered\".\n else count_satisfaction_scores\n end as count_satisfaction_scores,\n case when total_good_to_bad_score > 0\n then true\n else false\n end as is_good_to_bad_satisfaction_score,\n case when total_bad_to_good_score > 0\n then true\n else false\n end as is_bad_to_good_satisfaction_score\n from window_group\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", 
"constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__latest_ticket_form": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__latest_ticket_form", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__latest_ticket_form.sql", "original_file_path": "models/intermediate/int_zendesk__latest_ticket_form.sql", "unique_id": "model.zendesk.int_zendesk__latest_ticket_form", "fqn": ["zendesk", "intermediate", "int_zendesk__latest_ticket_form"], "alias": "int_zendesk__latest_ticket_form", "checksum": {"name": "sha256", "checksum": "906a97576bff9f4fead3b0ed4632aa8a04b94f523e62b0e05425770213f78ea5"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1715700424.304498, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__latest_ticket_form`", "raw_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_ticket_form_history', True)) }}\n\nwith ticket_form_history as (\n select *\n from {{ ref('stg_zendesk__ticket_form_history') }}\n),\n\nlatest_ticket_form as (\n select\n *,\n row_number() over(partition by ticket_form_id order by updated_at desc) as latest_form_index\n from ticket_form_history\n),\n\nfinal as (\n select \n ticket_form_id,\n created_at,\n updated_at,\n display_name,\n is_active,\n name,\n latest_form_index\n from latest_ticket_form\n\n where latest_form_index = 1\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_form_history", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk_source.stg_zendesk__ticket_form_history"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__latest_ticket_form.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n\n\nwith ticket_form_history as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_form_history`\n),\n\nlatest_ticket_form as (\n select\n *,\n row_number() over(partition by ticket_form_id order by updated_at desc) as latest_form_index\n from ticket_form_history\n),\n\nfinal as (\n select \n ticket_form_id,\n created_at,\n updated_at,\n display_name,\n is_active,\n name,\n latest_form_index\n from latest_ticket_form\n\n where latest_form_index = 1\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": 
"protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_aggregates": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__ticket_aggregates", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__ticket_aggregates.sql", "original_file_path": "models/intermediate/int_zendesk__ticket_aggregates.sql", "unique_id": "model.zendesk.int_zendesk__ticket_aggregates", "fqn": ["zendesk", "intermediate", "int_zendesk__ticket_aggregates"], "alias": "int_zendesk__ticket_aggregates", "checksum": {"name": "sha256", "checksum": "cef0c080fae7a2b361b077473aa1ccfd4bfa472469b9006038aa3866a5bf8b50"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1715700424.306518, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_aggregates`", "raw_code": "with tickets as (\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), ticket_tags as (\n\n select *\n from {{ ref('stg_zendesk__ticket_tag') }}\n\n), brands as (\n\n select *\n from {{ ref('stg_zendesk__brand') }}\n \n), ticket_tag_aggregate as (\n select\n ticket_tags.ticket_id,\n {{ fivetran_utils.string_agg( 'ticket_tags.tags', \"', '\" )}} as ticket_tags\n from ticket_tags\n group by 1\n\n), final as (\n select \n tickets.*,\n case when lower(tickets.type) = 'incident'\n then true\n else false\n end as is_incident,\n brands.name as ticket_brand_name,\n ticket_tag_aggregate.ticket_tags\n from tickets\n\n left join ticket_tag_aggregate\n using(ticket_id)\n\n left join brands\n on brands.brand_id = tickets.brand_id\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "stg_zendesk__ticket_tag", "package": null, "version": null}, {"name": "stg_zendesk__brand", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.string_agg"], "nodes": ["model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__ticket_tag", "model.zendesk_source.stg_zendesk__brand"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__ticket_aggregates.sql", "compiled": true, "compiled_code": "with tickets as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket`\n\n), ticket_tags as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_tag`\n\n), brands as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__brand`\n \n), ticket_tag_aggregate as (\n select\n ticket_tags.ticket_id,\n \n 
string_agg(ticket_tags.tags, ', ')\n\n as ticket_tags\n from ticket_tags\n group by 1\n\n), final as (\n select \n tickets.*,\n case when lower(tickets.type) = 'incident'\n then true\n else false\n end as is_incident,\n brands.name as ticket_brand_name,\n ticket_tag_aggregate.ticket_tags\n from tickets\n\n left join ticket_tag_aggregate\n using(ticket_id)\n\n left join brands\n on brands.brand_id = tickets.brand_id\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__organization_aggregates": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "int_zendesk__organization_aggregates", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__organization_aggregates.sql", "original_file_path": "models/intermediate/int_zendesk__organization_aggregates.sql", "unique_id": "model.zendesk.int_zendesk__organization_aggregates", "fqn": ["zendesk", "intermediate", "int_zendesk__organization_aggregates"], "alias": "int_zendesk__organization_aggregates", "checksum": {"name": "sha256", "checksum": "a16300f45d2cb0bd1c26dfec62e967a047095b92f340974bfef56178bfff6cf9"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1715700424.308498, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__organization_aggregates`", "raw_code": "with organizations as (\n select * \n from {{ ref('stg_zendesk__organization') }}\n\n--If you use organization tags this will be included, if not it will be ignored.\n{% if var('using_organization_tags', True) %}\n), organization_tags as (\n select * \n from {{ ref('stg_zendesk__organization_tag') }}\n\n), tag_aggregates as (\n select\n organizations.organization_id,\n {{ fivetran_utils.string_agg('organization_tags.tags', \"', '\" ) }} as organization_tags\n from organizations\n\n left join organization_tags\n using (organization_id)\n\n group by 1\n{% endif %}\n\n--If you use using_domain_names tags this will be included, if not it will be ignored.\n{% if var('using_domain_names', True) %}\n), domain_names as (\n\n select *\n from {{ ref('stg_zendesk__domain_name') }}\n\n), domain_aggregates as (\n select\n organizations.organization_id,\n {{ fivetran_utils.string_agg('domain_names.domain_name', \"', '\" ) }} as domain_names\n from organizations\n\n left join domain_names\n using(organization_id)\n \n group by 1\n{% endif %}\n\n\n), final as (\n select\n organizations.*\n\n --If you use organization tags this will be included, if not it will be ignored.\n {% if 
var('using_organization_tags', True) %}\n ,tag_aggregates.organization_tags\n {% endif %}\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n {% if var('using_domain_names', True) %}\n ,domain_aggregates.domain_names\n {% endif %}\n\n from organizations\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n {% if var('using_domain_names', True) %}\n left join domain_aggregates\n using(organization_id)\n {% endif %}\n\n --If you use organization tags this will be included, if not it will be ignored.\n {% if var('using_organization_tags', True) %}\n left join tag_aggregates\n using(organization_id)\n {% endif %}\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__organization", "package": null, "version": null}, {"name": "stg_zendesk__organization_tag", "package": null, "version": null}, {"name": "stg_zendesk__domain_name", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.string_agg"], "nodes": ["model.zendesk_source.stg_zendesk__organization", "model.zendesk_source.stg_zendesk__organization_tag", "model.zendesk_source.stg_zendesk__domain_name"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__organization_aggregates.sql", "compiled": true, "compiled_code": "with organizations as (\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__organization`\n\n--If you use organization tags this will be included, if not it will be ignored.\n\n), organization_tags as (\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__organization_tag`\n\n), tag_aggregates as (\n select\n organizations.organization_id,\n \n string_agg(organization_tags.tags, ', ')\n\n as organization_tags\n from organizations\n\n left join organization_tags\n using (organization_id)\n\n group by 1\n\n\n--If you use using_domain_names tags this will be included, if not it will be ignored.\n\n), domain_names as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__domain_name`\n\n), domain_aggregates as (\n select\n organizations.organization_id,\n \n string_agg(domain_names.domain_name, ', ')\n\n as domain_names\n from organizations\n\n left join domain_names\n using(organization_id)\n \n group by 1\n\n\n\n), final as (\n select\n organizations.*\n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,tag_aggregates.organization_tags\n \n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n ,domain_aggregates.domain_names\n \n\n from organizations\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n left join domain_aggregates\n using(organization_id)\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n left join tag_aggregates\n using(organization_id)\n \n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "operation.zendesk.zendesk-on-run-start-0": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "zendesk-on-run-start-0", "resource_type": "operation", "package_name": "zendesk", "path": 
"hooks/zendesk-on-run-start-0.sql", "original_file_path": "./dbt_project.yml", "unique_id": "operation.zendesk.zendesk-on-run-start-0", "fqn": ["zendesk", "hooks", "zendesk-on-run-start-0"], "alias": "zendesk-on-run-start-0", "checksum": {"name": "sha256", "checksum": "5492578b3bcde7c7c4926c6fa245dc883af7750974a1aa97bf464c83d2100d26"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}}, "tags": ["on-run-start"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1715700424.334223, "relation_name": null, "raw_code": "{{ fivetran_utils.empty_variable_warning(\"ticket_field_history_columns\", \"zendesk_ticket_field_history\") }}", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.empty_variable_warning"], "nodes": []}, "compiled_path": "target/compiled/zendesk/./dbt_project.yml/hooks/zendesk-on-run-start-0.sql", "compiled": true, "compiled_code": "\n\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "index": 0}, "model.zendesk_source.stg_zendesk__user_tag": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__user_tag", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__user_tag.sql", "original_file_path": "models/stg_zendesk__user_tag.sql", "unique_id": "model.zendesk_source.stg_zendesk__user_tag", "fqn": ["zendesk_source", "stg_zendesk__user_tag"], "alias": "stg_zendesk__user_tag", "checksum": {"name": "sha256", "checksum": "0aabe5c461e492bc7afb162a0dcb6e3334cca4c60093eb5be52b74e5dbfa429b"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Table containing all tags associated with a user. 
Only present if your account has user tagging enabled.", "columns": {"user_id": {"name": "user_id", "description": "Reference to the user", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tag": {"name": "tag", "description": "Tag associated with the user", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1715700424.605461, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user_tag`", "raw_code": "--To disable this model, set the using_user_tags variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_user_tags', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__user_tag_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__user_tag_tmp')),\n staging_columns=get_user_tag_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n user_id,\n {% if target.type == 'redshift' %}\n 'tag'\n {% else %}\n tag\n {% endif %}\n as tags\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__user_tag_tmp", "package": null, "version": null}, {"name": "stg_zendesk__user_tag_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_user_tag_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__user_tag_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__user_tag.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_user_tags variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user_tag_tmp`\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n user_id\n \n as \n \n user_id\n \n, \n \n \n tag\n \n as \n \n tag\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n user_id,\n \n tag\n \n as tags\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_tag": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__ticket_tag", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__ticket_tag.sql", "original_file_path": "models/stg_zendesk__ticket_tag.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_tag", "fqn": ["zendesk_source", "stg_zendesk__ticket_tag"], "alias": "stg_zendesk__ticket_tag", "checksum": {"name": "sha256", "checksum": "41ea7cea80e135bf87adfff97bfadecd5c8ee0622d74f9904759305fd6cb7541"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Tags are words, or combinations of words, you can use to add more context to tickets. The table lists all tags currently associated with a ticket.\n", "columns": {"ticket_id": {"name": "ticket_id", "description": "The ID of the ticket associated with the tag", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tags": {"name": "tags", "description": "The tag, or word(s), associated with the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1715700424.608362, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_tag`", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__ticket_tag_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__ticket_tag_tmp')),\n staging_columns=get_ticket_tag_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n {% if target.type == 'redshift' %}\n \"tag\" as tags\n {% else %}\n tag as tags\n {% endif %}\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_tag_tmp", "package": null, "version": null}, {"name": "stg_zendesk__ticket_tag_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_ticket_tag_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_tag_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__ticket_tag.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_tag_tmp`\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n ticket_id\n \n as \n \n ticket_id\n \n, \n \n \n tag\n \n as \n \n tag\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n \n tag as tags\n \n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_field_history": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__ticket_field_history", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__ticket_field_history.sql", "original_file_path": "models/stg_zendesk__ticket_field_history.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_field_history", "fqn": ["zendesk_source", "stg_zendesk__ticket_field_history"], "alias": "stg_zendesk__ticket_field_history", "checksum": {"name": "sha256", "checksum": "5c165700bdcc50383952e4c645b4d6c42d5410205205c5de889b009dad3b0a10"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "All fields and 
field values associated with tickets.", "columns": {"ticket_id": {"name": "ticket_id", "description": "The ID of the ticket associated with the field", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "field_name": {"name": "field_name", "description": "The name of the ticket field", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "valid_starting_at": {"name": "valid_starting_at", "description": "The time the ticket field value became valid", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "valid_ending_at": {"name": "valid_ending_at", "description": "The time the ticket field value became invalidated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "value": {"name": "value", "description": "The value of the field", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "user_id": {"name": "user_id", "description": "The id of the user who made the update", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1715700424.608896, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_field_history`", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__ticket_field_history_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__ticket_field_history_tmp')),\n staging_columns=get_ticket_field_history_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n field_name,\n cast(updated as {{ dbt.type_timestamp() }}) as valid_starting_at,\n cast(lead(updated) over (partition by ticket_id, field_name order by updated) as {{ dbt.type_timestamp() }}) as valid_ending_at,\n value,\n user_id\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_field_history_tmp", "package": null, "version": null}, {"name": "stg_zendesk__ticket_field_history_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_ticket_field_history_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_field_history_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__ticket_field_history.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_field_history_tmp`\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n field_name\n \n as \n \n field_name\n \n, \n \n \n ticket_id\n \n as \n \n ticket_id\n \n, \n \n \n updated\n \n as \n \n updated\n \n, \n \n \n user_id\n \n as \n \n user_id\n \n, \n \n \n value\n \n as \n \n value\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n field_name,\n cast(updated as timestamp) as valid_starting_at,\n cast(lead(updated) over (partition by ticket_id, field_name order by updated) as timestamp) as valid_ending_at,\n value,\n user_id\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__schedule_holiday": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__schedule_holiday", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__schedule_holiday.sql", "original_file_path": "models/stg_zendesk__schedule_holiday.sql", "unique_id": "model.zendesk_source.stg_zendesk__schedule_holiday", "fqn": ["zendesk_source", "stg_zendesk__schedule_holiday"], "alias": "stg_zendesk__schedule_holiday", "checksum": {"name": "sha256", "checksum": "154109fa9fd9dc5e3b0b034929ac3e3ddb591755d52a78f64ab2bb7d6cfe2476"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Information about holidays for each specified schedule.", "columns": {"end_date_at": {"name": "end_date_at", "description": "ISO 8601 representation of the holiday end date.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "holiday_id": {"name": "holiday_id", "description": "The ID of the scheduled holiday.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "holiday_name": {"name": "holiday_name", "description": "Name of the holiday.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "schedule_id": {"name": "schedule_id", "description": "The ID of the schedule.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "start_date_at": {"name": "start_date_at", "description": "ISO 8601 representation of the holiday start date.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", 
"enabled": true}, "created_at": 1715700424.610501, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__schedule_holiday`", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__schedule_holiday_tmp') }}\n),\n\nfields as (\n\n select\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__schedule_holiday_tmp')),\n staging_columns=get_schedule_holiday_columns()\n )\n }}\n from base\n),\n\nfinal as (\n \n select\n _fivetran_deleted,\n cast(_fivetran_synced as {{ dbt.type_timestamp() }} ) as _fivetran_synced,\n cast(end_date as {{ dbt.type_timestamp() }} ) as holiday_end_date_at,\n cast(id as {{ dbt.type_string() }} ) as holiday_id,\n name as holiday_name,\n cast(schedule_id as {{ dbt.type_string() }} ) as schedule_id,\n cast(start_date as {{ dbt.type_timestamp() }} ) as holiday_start_date_at\n from fields\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__schedule_holiday_tmp", "package": null, "version": null}, {"name": "stg_zendesk__schedule_holiday_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_schedule_holiday_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp", "macro.dbt.type_string"], "nodes": ["model.zendesk_source.stg_zendesk__schedule_holiday_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__schedule_holiday.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__schedule_holiday_tmp`\n),\n\nfields as (\n\n select\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n end_date\n \n as \n \n end_date\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n schedule_id\n \n as \n \n schedule_id\n \n, \n \n \n start_date\n \n as \n \n start_date\n \n\n\n\n from base\n),\n\nfinal as (\n \n select\n _fivetran_deleted,\n cast(_fivetran_synced as timestamp ) as _fivetran_synced,\n cast(end_date as timestamp ) as holiday_end_date_at,\n cast(id as string ) as holiday_id,\n name as holiday_name,\n cast(schedule_id as string ) as schedule_id,\n cast(start_date as timestamp ) as holiday_start_date_at\n from fields\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__daylight_time": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__daylight_time", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__daylight_time.sql", "original_file_path": "models/stg_zendesk__daylight_time.sql", "unique_id": "model.zendesk_source.stg_zendesk__daylight_time", "fqn": ["zendesk_source", "stg_zendesk__daylight_time"], "alias": "stg_zendesk__daylight_time", "checksum": {"name": "sha256", "checksum": 
"8bc98221c9781fc37b2424b62b5d72cd62b62c53aa887be08e98114f98530df9"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Appropriate offsets (from UTC) for timezones that engage or have engaged with Daylight Savings at some point since 1970.\n", "columns": {"daylight_end_utc": {"name": "daylight_end_utc", "description": "UTC timestamp of when Daylight Time ended in this year.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "daylight_offset": {"name": "daylight_offset", "description": "Number of **hours** added during Daylight Savings Time.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "daylight_start_utc": {"name": "daylight_start_utc", "description": "UTC timestamp of when Daylight Time began in this year.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "time_zone": {"name": "time_zone", "description": "Name of the timezone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "year": {"name": "year", "description": "Year in which daylight savings occurred.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "daylight_offset_minutes": {"name": "daylight_offset_minutes", "description": "Number of **minutes** added during Daylight Savings Time.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1715700424.6095302, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__daylight_time`", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__daylight_time_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__daylight_time_tmp')),\n staging_columns=get_daylight_time_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n daylight_end_utc,\n daylight_offset,\n daylight_start_utc,\n time_zone,\n year,\n daylight_offset * 60 as daylight_offset_minutes\n \n from fields\n)\n\nselect * from final", "language": "sql", "refs": [{"name": "stg_zendesk__daylight_time_tmp", "package": null, "version": null}, {"name": "stg_zendesk__daylight_time_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_daylight_time_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__daylight_time_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__daylight_time.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__daylight_time_tmp`\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n daylight_end_utc\n \n as \n \n daylight_end_utc\n \n, \n \n \n daylight_offset\n \n as \n \n daylight_offset\n \n, \n \n \n daylight_start_utc\n \n as \n \n daylight_start_utc\n \n, \n \n \n time_zone\n \n as \n \n time_zone\n \n, \n \n \n year\n \n as \n \n year\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n daylight_end_utc,\n daylight_offset,\n daylight_start_utc,\n time_zone,\n year,\n daylight_offset * 60 as daylight_offset_minutes\n \n from fields\n)\n\nselect * from final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__organization": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__organization", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__organization.sql", "original_file_path": "models/stg_zendesk__organization.sql", "unique_id": "model.zendesk_source.stg_zendesk__organization", "fqn": ["zendesk_source", "stg_zendesk__organization"], "alias": "stg_zendesk__organization", "checksum": {"name": "sha256", "checksum": "5fb51f160efdf3ffa60e0a7be33e40e4b59f814d345558631e06fcce160f6329"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, 
"persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Just as agents can be segmented into groups in Zendesk Support, your customers (end-users) can be segmented into organizations. You can manually assign customers to an organization or automatically assign them to an organization by their email address domain. Organizations can be used in business rules to route tickets to groups of agents or to send email notifications.\n", "columns": {"organization_id": {"name": "organization_id", "description": "Automatically assigned when the organization is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "A unique name for the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "details": {"name": "details", "description": "Any details obout the organization, such as the address", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1715700424.604531, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__organization`", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__organization_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__organization_tmp')),\n staging_columns=get_organization_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n id as organization_id,\n created_at,\n updated_at,\n details,\n name,\n external_id\n\n {{ fivetran_utils.fill_pass_through_columns('zendesk__organization_passthrough_columns') }}\n\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__organization_tmp", "package": null, "version": null}, {"name": "stg_zendesk__organization_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_organization_columns", "macro.fivetran_utils.fill_staging_columns", "macro.fivetran_utils.fill_pass_through_columns"], "nodes": ["model.zendesk_source.stg_zendesk__organization_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__organization.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__organization_tmp`\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n details\n \n as \n \n details\n \n, \n \n \n external_id\n \n as \n \n external_id\n \n, \n \n \n group_id\n \n as \n \n group_id\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n notes\n \n as \n \n notes\n \n, \n \n \n shared_comments\n \n as \n \n shared_comments\n \n, \n \n \n shared_tickets\n \n as \n \n shared_tickets\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n, \n \n \n url\n \n as \n \n url\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as organization_id,\n created_at,\n updated_at,\n details,\n name,\n external_id\n\n \n\n\n\n\n\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__time_zone": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__time_zone", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__time_zone.sql", "original_file_path": "models/stg_zendesk__time_zone.sql", "unique_id": "model.zendesk_source.stg_zendesk__time_zone", "fqn": ["zendesk_source", "stg_zendesk__time_zone"], "alias": "stg_zendesk__time_zone", "checksum": {"name": "sha256", "checksum": "289f08e30f9298f5b4beed89d28c1ff6a82386ee7c9f5084499eedb8998aa137"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Offsets (from UTC) for each timezone.", "columns": {"time_zone": {"name": "time_zone", "description": "Name of the time zone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "standard_offset": {"name": "standard_offset", "description": "Standard offset of the timezone (non-daylight savings hours). 
In `+/-hh:mm` format.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "standard_offset_minutes": {"name": "standard_offset_minutes", "description": "Standard offset of the timezone (non-daylight savings hours) in minutes.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1715700424.6098921, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__time_zone`", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__time_zone_tmp') }}\n\n),\n\nfields as (\n\n select\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__time_zone_tmp')),\n staging_columns=get_time_zone_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n standard_offset,\n time_zone,\n -- the standard_offset is a string written as [+/-]HH:MM\n -- let's convert it to an integer value of minutes\n cast( {{ dbt.split_part(string_text='standard_offset', delimiter_text=\"':'\", part_number=1) }} as {{ dbt.type_int() }} ) * 60 +\n (cast( {{ dbt.split_part(string_text='standard_offset', delimiter_text=\"':'\", part_number=2) }} as {{ dbt.type_int() }} ) *\n (case when standard_offset like '-%' then -1 else 1 end) ) as standard_offset_minutes\n \n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__time_zone_tmp", "package": null, "version": null}, {"name": "stg_zendesk__time_zone_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_time_zone_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.split_part", "macro.dbt.type_int"], "nodes": ["model.zendesk_source.stg_zendesk__time_zone_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__time_zone.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__time_zone_tmp`\n\n),\n\nfields as (\n\n select\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n standard_offset\n \n as \n \n standard_offset\n \n, \n \n \n time_zone\n \n as \n \n time_zone\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n standard_offset,\n time_zone,\n -- the standard_offset is a string written as [+/-]HH:MM\n -- let's convert it to an integer value of minutes\n cast( \n\n \n split(\n standard_offset,\n ':'\n )[safe_offset(0)]\n \n\n as INT64 ) * 60 +\n (cast( \n\n \n split(\n standard_offset,\n ':'\n )[safe_offset(1)]\n \n\n as INT64 ) *\n (case when standard_offset like '-%' then -1 else 1 end) ) as standard_offset_minutes\n \n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, 
"model.zendesk_source.stg_zendesk__group": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__group", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__group.sql", "original_file_path": "models/stg_zendesk__group.sql", "unique_id": "model.zendesk_source.stg_zendesk__group", "fqn": ["zendesk_source", "stg_zendesk__group"], "alias": "stg_zendesk__group", "checksum": {"name": "sha256", "checksum": "21a956af3b03e9e49e9e94ade093fa716db9f061e7eb9e209c3ff7f9986b15b9"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "When support requests arrive in Zendesk Support, they can be assigned to a Group. Groups serve as the core element of ticket workflow; support agents are organized into Groups and tickets can be assigned to a Group only, or to an assigned agent within a Group. A ticket can never be assigned to an agent without also being assigned to a Group.\n", "columns": {"group_id": {"name": "group_id", "description": "Automatically assigned when creating groups", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The name of the group", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1715700424.603999, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__group`", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__group_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__group_tmp')),\n staging_columns=get_group_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n id as group_id,\n name\n from fields\n \n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__group_tmp", "package": null, "version": null}, {"name": "stg_zendesk__group_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_group_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__group_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__group.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__group_tmp`\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n, \n \n \n url\n \n as \n \n url\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as group_id,\n name\n from fields\n \n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_comment": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__ticket_comment", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__ticket_comment.sql", "original_file_path": "models/stg_zendesk__ticket_comment.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_comment", "fqn": ["zendesk_source", "stg_zendesk__ticket_comment"], "alias": "stg_zendesk__ticket_comment", "checksum": {"name": "sha256", "checksum": "d81e21e84092f9b0ddb806817680c774a31e35cefafd5ad15895436887156439"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, 
"contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Ticket comments represent the conversation between requesters, collaborators, and agents. Comments can be public or private.", "columns": {"ticket_comment_id": {"name": "ticket_comment_id", "description": "Automatically assigned when the comment is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "body": {"name": "body", "description": "The comment string", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the comment was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_public": {"name": "is_public", "description": "Boolean field indicating if the comment is public (true), or if it is an internal note (false)", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_id": {"name": "ticket_id", "description": "The ticket id associated with this comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "user_id": {"name": "user_id", "description": "The id of the comment author", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_facebook_comment": {"name": "is_facebook_comment", "description": "Boolean field indicating if the comment is a facebook comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_tweet": {"name": "is_tweet", "description": "Boolean field indicating if the comment is a twitter tweet", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_voice_comment": {"name": "is_voice_comment", "description": "Boolean field indicating if the comment is a voice comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1715700424.605292, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_comment`", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__ticket_comment_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__ticket_comment_tmp')),\n staging_columns=get_ticket_comment_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_comment_id,\n _fivetran_synced,\n body,\n cast(created as {{ dbt.type_timestamp() }}) as created_at,\n public as is_public,\n ticket_id,\n user_id,\n facebook_comment as is_facebook_comment,\n tweet as is_tweet,\n voice_comment as is_voice_comment\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_comment_tmp", "package": null, "version": null}, {"name": "stg_zendesk__ticket_comment_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_ticket_comment_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_comment_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__ticket_comment.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_comment_tmp`\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n body\n \n as \n \n body\n \n, \n cast(null as INT64) as \n \n call_duration\n \n , \n cast(null as INT64) as \n \n call_id\n \n , \n \n \n created\n \n as \n \n created\n \n, \n \n \n facebook_comment\n \n as \n \n facebook_comment\n \n, \n \n \n id\n \n as \n \n id\n \n, \n cast(null as INT64) as \n \n location\n \n , \n \n \n public\n \n as \n \n public\n \n, \n cast(null as INT64) as \n \n recording_url\n \n , \n cast(null as timestamp) as \n \n started_at\n \n , \n \n \n ticket_id\n \n as \n \n ticket_id\n \n, \n cast(null as INT64) as \n \n transcription_status\n \n , \n cast(null as INT64) as \n \n transcription_text\n \n , \n cast(null as INT64) as \n \n trusted\n \n , \n \n \n tweet\n \n as \n \n tweet\n \n, \n \n \n user_id\n \n as \n \n user_id\n \n, \n \n \n voice_comment\n \n as \n \n voice_comment\n \n, \n cast(null as INT64) as \n \n voice_comment_transcription_visible\n \n \n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_comment_id,\n _fivetran_synced,\n body,\n cast(created as timestamp) as created_at,\n public as is_public,\n ticket_id,\n user_id,\n facebook_comment as is_facebook_comment,\n tweet as is_tweet,\n voice_comment as is_voice_comment\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, 
"deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_schedule": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__ticket_schedule", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__ticket_schedule.sql", "original_file_path": "models/stg_zendesk__ticket_schedule.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_schedule", "fqn": ["zendesk_source", "stg_zendesk__ticket_schedule"], "alias": "stg_zendesk__ticket_schedule", "checksum": {"name": "sha256", "checksum": "69d32ac51b73241f990f8c1a08309cb42e79d0c1b26b99a7060353bfee88066e"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "The schedules applied to tickets through a trigger.", "columns": {"ticket_id": {"name": "ticket_id", "description": "The ID of the ticket assigned to the schedule", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the schedule was assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "schedule_id": {"name": "schedule_id", "description": "The ID of the schedule applied to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1715700424.60732, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_schedule`", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__ticket_schedule_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__ticket_schedule_tmp')),\n staging_columns=get_ticket_schedule_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n cast(created_at as {{ dbt.type_timestamp() }}) as created_at,\n cast(schedule_id as {{ dbt.type_string() }}) as schedule_id --need to convert from numeric to string for downstream models to work properly\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_schedule_tmp", "package": null, "version": null}, {"name": "stg_zendesk__ticket_schedule_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_ticket_schedule_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp", "macro.dbt.type_string"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_schedule_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__ticket_schedule.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_schedule_tmp`\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n schedule_id\n \n as \n \n schedule_id\n \n, \n \n \n ticket_id\n \n as \n \n ticket_id\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n cast(created_at as timestamp) as created_at,\n cast(schedule_id as string) as schedule_id --need to convert from numeric to string for downstream models to work properly\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__schedule": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__schedule", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__schedule.sql", "original_file_path": "models/stg_zendesk__schedule.sql", "unique_id": "model.zendesk_source.stg_zendesk__schedule", "fqn": ["zendesk_source", "stg_zendesk__schedule"], "alias": "stg_zendesk__schedule", "checksum": {"name": "sha256", "checksum": "336dabaf980af5f08c6a5f43d04cdfd00146191b0927176fe4add5f65117c673"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "The support schedules created with different business hours and holidays.", "columns": {"schedule_id": {"name": "schedule_id", "description": "ID automatically assigned to the schedule upon creation", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "schedule_name": {"name": "schedule_name", "description": "Name of the schedule", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "Time the schedule was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "start_time": {"name": "start_time", "description": "Start time of the schedule, in the schedule's time zone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "end_time": {"name": "end_time", "description": "End time of the schedule, in the schedule's time zone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "time_zone": {"name": "time_zone", "description": "Timezone in which the schedule operates.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": 
"table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1715700424.607148, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__schedule`", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__schedule_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__schedule_tmp')),\n staging_columns=get_schedule_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n cast(id as {{ dbt.type_string() }}) as schedule_id, --need to convert from numeric to string for downstream models to work properly\n end_time,\n start_time,\n name as schedule_name,\n created_at,\n time_zone\n \n from fields\n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__schedule_tmp", "package": null, "version": null}, {"name": "stg_zendesk__schedule_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_schedule_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_string"], "nodes": ["model.zendesk_source.stg_zendesk__schedule_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__schedule.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__schedule_tmp`\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n end_time\n \n as \n \n end_time\n \n, \n \n \n end_time_utc\n \n as \n \n end_time_utc\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n start_time\n \n as \n \n start_time\n \n, \n \n \n start_time_utc\n \n as \n \n start_time_utc\n \n, \n \n \n time_zone\n \n as \n \n time_zone\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n cast(id as string) as schedule_id, --need to convert from numeric to string for downstream models to work properly\n end_time,\n start_time,\n name as schedule_name,\n created_at,\n time_zone\n \n from fields\n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__user": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__user", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__user.sql", "original_file_path": "models/stg_zendesk__user.sql", "unique_id": "model.zendesk_source.stg_zendesk__user", "fqn": ["zendesk_source", "stg_zendesk__user"], "alias": "stg_zendesk__user", "checksum": {"name": "sha256", "checksum": "0bdebead73baf5943015b93700be6c9a96569a4a6d96ec23f2da0327082dc351"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Zendesk Support has three types of users: end-users (your customers), agents, and administrators.", "columns": {"user_id": {"name": "user_id", "description": "Automatically assigned when the user is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "email": {"name": "email", "description": "The user's primary email address. Writeable on create only. On update, a secondary email is added. 
See Email Address", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The user's name", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_active": {"name": "is_active", "description": "false if the user has been deleted", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the user was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_id": {"name": "organization_id", "description": "The id of the user's organization. If the user has more than one organization memberships, the id of the user's default organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "phone": {"name": "phone", "description": "User's phone number.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "role": {"name": "role", "description": "The user's role. Possible values are \"end-user\", \"agent\", or \"admin\"", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "time_zone": {"name": "time_zone", "description": "The user's time zone. See Time Zone", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_restriction": {"name": "ticket_restriction", "description": "Specifies which tickets the user has access to. Possible values are organization, groups, assigned, requested and null", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_suspended": {"name": "is_suspended", "description": "Boolean representing whether the user has been suspended, meaning that they can no longer sign in and any new support requests you receive from them are sent to the suspended tickets queue.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "external_id": {"name": "external_id", "description": "A unique identifier from another system. The API treats the id as case insensitive.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "The time the user was last updated.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_login_at": {"name": "last_login_at", "description": "Last time the user signed in to Zendesk Support or made an API request using an API token or basic authentication.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "locale": {"name": "locale", "description": "The user's locale. A BCP-47 compliant tag for the locale. If both \"locale\" and \"locale_id\" are present on create or update, \"locale_id\" is ignored and only \"locale\" is used.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1715700424.606603, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user`", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__user_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__user_tmp')),\n staging_columns=get_user_columns()\n )\n }}\n \n from base\n),\n\nfinal as ( \n \n select \n id as user_id,\n external_id,\n _fivetran_synced,\n cast(last_login_at as {{ dbt.type_timestamp() }}) as last_login_at,\n cast(created_at as {{ dbt.type_timestamp() }}) as created_at,\n cast(updated_at as {{ dbt.type_timestamp() }}) as updated_at,\n email,\n name,\n organization_id,\n phone,\n {% if var('internal_user_criteria', false) -%}\n case \n when role in ('admin', 'agent') then role\n when {{ var('internal_user_criteria', false) }} then 'agent'\n else role end as role,\n {% else -%}\n role,\n {% endif -%}\n ticket_restriction,\n time_zone,\n locale,\n active as is_active,\n suspended as is_suspended\n\n {{ fivetran_utils.fill_pass_through_columns('zendesk__user_passthrough_columns') }}\n\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__user_tmp", "package": null, "version": null}, {"name": "stg_zendesk__user_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_user_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp", "macro.fivetran_utils.fill_pass_through_columns"], "nodes": ["model.zendesk_source.stg_zendesk__user_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__user.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user_tmp`\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n active\n \n as \n \n active\n \n, \n \n \n alias\n \n as \n \n alias\n \n, \n \n \n authenticity_token\n \n as \n \n authenticity_token\n \n, \n \n \n chat_only\n \n as \n \n chat_only\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n details\n \n as \n \n details\n \n, \n \n \n email\n \n as \n \n email\n \n, \n \n \n external_id\n \n as \n \n external_id\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n last_login_at\n \n as \n \n last_login_at\n \n, \n \n \n locale\n \n as \n \n locale\n \n, \n \n \n locale_id\n \n as \n \n locale_id\n \n, \n \n \n moderator\n \n as \n \n moderator\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n notes\n \n as \n \n notes\n \n, \n \n \n only_private_comments\n \n as \n \n only_private_comments\n \n, \n \n \n organization_id\n \n as \n \n organization_id\n \n, \n \n \n phone\n \n as \n \n phone\n \n, \n \n \n remote_photo_url\n \n as \n \n remote_photo_url\n \n, \n \n \n restricted_agent\n \n as \n \n restricted_agent\n \n, \n \n \n role\n \n as \n \n role\n \n, \n \n \n shared\n \n as \n \n shared\n \n, \n \n \n shared_agent\n \n as \n \n shared_agent\n \n, \n \n \n signature\n \n as \n \n signature\n \n, \n \n \n suspended\n \n as \n \n suspended\n \n, \n \n \n ticket_restriction\n \n as \n \n ticket_restriction\n \n, \n \n \n time_zone\n \n as \n \n time_zone\n \n, \n \n \n two_factor_auth_enabled\n \n as \n \n two_factor_auth_enabled\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n, \n \n \n url\n \n as \n \n url\n \n, \n \n \n verified\n \n as \n \n verified\n \n\n\n\n \n from base\n),\n\nfinal as ( \n \n select \n id as user_id,\n external_id,\n _fivetran_synced,\n cast(last_login_at as timestamp) as last_login_at,\n cast(created_at as timestamp) as created_at,\n cast(updated_at as timestamp) as updated_at,\n email,\n name,\n organization_id,\n phone,\n role,\n ticket_restriction,\n time_zone,\n locale,\n active as is_active,\n suspended as is_suspended\n\n \n\n\n\n\n\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__brand": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__brand", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__brand.sql", "original_file_path": "models/stg_zendesk__brand.sql", "unique_id": "model.zendesk_source.stg_zendesk__brand", "fqn": ["zendesk_source", "stg_zendesk__brand"], "alias": "stg_zendesk__brand", "checksum": {"name": "sha256", "checksum": "106699200d371f2fac9fe94ce084a357331b215d4130195e1e94d2d07c6d169c"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", 
"on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Brands are your customer-facing identities. They might represent multiple products or services, or they might literally be multiple brands owned and represented by your company.\n", "columns": {"brand_id": {"name": "brand_id", "description": "The ID automatically assigned when the brand is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "brand_url": {"name": "brand_url", "description": "The url of the brand", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The name of the brand", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "subdomain": {"name": "subdomain", "description": "The subdomain of the brand", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "active": {"name": "active", "description": "If the brand is set as active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1715700424.60338, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__brand`", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__brand_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__brand_tmp')),\n staging_columns=get_brand_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n id as brand_id,\n brand_url,\n name,\n subdomain,\n active as is_active\n from fields\n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__brand_tmp", "package": null, "version": null}, {"name": "stg_zendesk__brand_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_brand_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__brand_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__brand.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__brand_tmp`\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n active\n \n as \n \n active\n \n, \n \n \n brand_url\n \n as \n \n brand_url\n \n, \n \n \n has_help_center\n \n as \n \n has_help_center\n \n, \n \n \n help_center_state\n \n as \n \n help_center_state\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n logo_content_type\n \n as \n \n logo_content_type\n \n, \n \n \n logo_content_url\n \n as \n \n logo_content_url\n \n, \n \n \n logo_deleted\n \n as \n \n logo_deleted\n \n, \n \n \n logo_file_name\n \n as \n \n logo_file_name\n \n, \n \n \n logo_height\n \n as \n \n logo_height\n \n, \n \n \n logo_id\n \n as \n \n logo_id\n \n, \n \n \n logo_inline\n \n as \n \n logo_inline\n \n, \n \n \n logo_mapped_content_url\n \n as \n \n logo_mapped_content_url\n \n, \n \n \n logo_size\n \n as \n \n logo_size\n \n, \n \n \n logo_url\n \n as \n \n logo_url\n \n, \n \n \n logo_width\n \n as \n \n logo_width\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n subdomain\n \n as \n \n subdomain\n \n, \n \n \n url\n \n as \n \n url\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as brand_id,\n brand_url,\n name,\n subdomain,\n active as is_active\n from fields\n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_form_history": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__ticket_form_history", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__ticket_form_history.sql", "original_file_path": "models/stg_zendesk__ticket_form_history.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_form_history", "fqn": ["zendesk_source", "stg_zendesk__ticket_form_history"], "alias": "stg_zendesk__ticket_form_history", "checksum": {"name": "sha256", "checksum": "1e70e9a0b2dfce82e649a8a0507d59d6f3f2832429191ea67988ba0dfd1017cf"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Ticket forms allow an admin to define a subset of ticket fields for display to both agents and end users.", "columns": {"ticket_form_id": {"name": "ticket_form_id", "description": "Automatically assigned when creating ticket form", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the ticket form was created", "meta": {}, "data_type": null, 
"constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "The time of the last update of the ticket form", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "display_name": {"name": "display_name", "description": "The name of the form that is displayed to an end user", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "active": {"name": "active", "description": "If the form is set as active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The name of the form", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1715700424.6081111, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_form_history`", "raw_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_ticket_form_history', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__ticket_form_history_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__ticket_form_history_tmp')),\n staging_columns=get_ticket_form_history_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_form_id,\n cast(created_at as {{ dbt.type_timestamp() }}) as created_at,\n cast(updated_at as {{ dbt.type_timestamp() }}) as updated_at,\n display_name,\n active as is_active,\n name\n from fields\n where not coalesce(_fivetran_deleted, false)\n \n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_form_history_tmp", "package": null, "version": null}, {"name": "stg_zendesk__ticket_form_history_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_ticket_form_history_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_form_history_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__ticket_form_history.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_form_history_tmp`\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n active\n \n as \n \n active\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n display_name\n \n as \n \n display_name\n \n, \n \n \n end_user_visible\n \n as \n \n end_user_visible\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_form_id,\n cast(created_at as timestamp) as created_at,\n cast(updated_at as timestamp) as updated_at,\n display_name,\n active as is_active,\n name\n from fields\n where not coalesce(_fivetran_deleted, false)\n \n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__domain_name": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__domain_name", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__domain_name.sql", "original_file_path": "models/stg_zendesk__domain_name.sql", "unique_id": "model.zendesk_source.stg_zendesk__domain_name", "fqn": ["zendesk_source", "stg_zendesk__domain_name"], "alias": "stg_zendesk__domain_name", "checksum": {"name": "sha256", "checksum": "8c3a4735e0cdea5a463eefc3c6820d15d622857af45dab942410dc64a0ac4bda"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Domain names associated with an organization. 
An organization may have multiple domain names.", "columns": {"organization_id": {"name": "organization_id", "description": "Reference to the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "domain_name": {"name": "domain_name", "description": "The name of the domain associated with the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "index": {"name": "index", "description": "Index number of the domain name associated with the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1715700424.603811, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__domain_name`", "raw_code": "--To disable this model, set the using_domain_names variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_domain_names', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__domain_name_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__domain_name_tmp')),\n staging_columns=get_domain_name_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n organization_id,\n domain_name,\n index\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__domain_name_tmp", "package": null, "version": null}, {"name": "stg_zendesk__domain_name_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_domain_name_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__domain_name_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__domain_name.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_domain_names variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__domain_name_tmp`\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n domain_name\n \n as \n \n domain_name\n \n, \n \n \n index\n \n as \n \n index\n \n, \n \n \n organization_id\n \n as \n \n organization_id\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n organization_id,\n domain_name,\n index\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__organization_tag": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__organization_tag", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__organization_tag.sql", "original_file_path": "models/stg_zendesk__organization_tag.sql", "unique_id": "model.zendesk_source.stg_zendesk__organization_tag", "fqn": ["zendesk_source", "stg_zendesk__organization_tag"], "alias": "stg_zendesk__organization_tag", "checksum": {"name": "sha256", "checksum": "15f1f4014e4ba78ae7992f28c61e3926b7cd12c6bb32efc7b516db93c1e20d82"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "The tags associated with an organization. An organization may have multiple tags.", "columns": {"organization_id": {"name": "organization_id", "description": "Reference to the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tag": {"name": "tag", "description": "Tag associated with the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1715700424.6042519, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__organization_tag`", "raw_code": "--To disable this model, set the using_organization_tags variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_organization_tags', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__organization_tag_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__organization_tag_tmp')),\n staging_columns=get_organization_tag_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n organization_id,\n {% if target.type == 'redshift' %}\n 'tag'\n {% else %}\n tag\n {% endif %}\n as tags\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__organization_tag_tmp", "package": null, "version": null}, {"name": "stg_zendesk__organization_tag_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_organization_tag_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__organization_tag_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__organization_tag.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_organization_tags variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__organization_tag_tmp`\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n organization_id\n \n as \n \n organization_id\n \n, \n \n \n tag\n \n as \n \n tag\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n organization_id,\n \n tag\n \n as tags\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__ticket", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__ticket.sql", "original_file_path": "models/stg_zendesk__ticket.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket", "fqn": ["zendesk_source", "stg_zendesk__ticket"], "alias": "stg_zendesk__ticket", "checksum": {"name": "sha256", "checksum": "618e84a2e5a55edffebd745bc81183a58912db69c48150d984c3cd582e0a41dd"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, 
"alias_types": true}, "access": "protected"}, "tags": [], "description": "Tickets are the means through which your end users (customers) communicate with agents in Zendesk Support. Tickets can originate from a number of channels, including email, Help Center, chat, phone call, Twitter, Facebook, or the API.\n", "columns": {"ticket_id": {"name": "ticket_id", "description": "Automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "url": {"name": "url", "description": "The API url of this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_id": {"name": "assignee_id", "description": "The agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "brand_id": {"name": "brand_id", "description": "Enterprise only. The id of the brand this ticket is associated with", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "When this record was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "type": {"name": "type", "description": "The type of this ticket, possible values are problem, incident, question or task", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "subject": {"name": "subject", "description": "The value of the subject field for this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "description": {"name": "description", "description": "Read-only first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "priority": {"name": "priority", "description": "The urgency with which the ticket should be addressed, possible values are urgent, high, normal and low", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "The state of the ticket, possible values are new, open, pending, hold, solved and closed", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "recipient": {"name": "recipient", "description": "The original recipient e-mail address of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_id": {"name": "requester_id", "description": "The user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_id": {"name": "submitter_id", "description": "The user who submitted the ticket. The submitter always becomes the author of the first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_id": {"name": "organization_id", "description": "The organization of the requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_id": {"name": "group_id", "description": "The group this ticket is assigned to", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "due_at": {"name": "due_at", "description": "If this is a ticket of type \"task\" it has a due date. Due date format uses ISO 8601 format.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_form_id": {"name": "ticket_form_id", "description": "Enterprise only. 
The id of the ticket form to render for the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_public": {"name": "is_public", "description": "Is true if any comments are public, false otherwise", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "When this record last got updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_channel": {"name": "created_channel", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_from_id": {"name": "source_from_id", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_from_title": {"name": "source_from_title", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_rel": {"name": "source_rel", "description": "The rel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_to_address": {"name": "source_to_address", "description": "The address of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_to_name": {"name": "source_to_name", "description": "The name of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1715700424.602742, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket`", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__ticket_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__ticket_tmp')),\n staging_columns=get_ticket_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_id,\n _fivetran_synced,\n assignee_id,\n brand_id,\n cast(created_at as {{ dbt.type_timestamp() }}) as created_at,\n cast(updated_at as {{ dbt.type_timestamp() }}) as updated_at,\n description,\n due_at,\n group_id,\n external_id,\n is_public,\n organization_id,\n priority,\n recipient,\n requester_id,\n status,\n subject,\n problem_id,\n submitter_id,\n ticket_form_id,\n type,\n url,\n via_channel as created_channel,\n via_source_from_id as source_from_id,\n via_source_from_title as source_from_title,\n via_source_rel as source_rel,\n via_source_to_address as source_to_address,\n via_source_to_name as source_to_name\n\n {{ fivetran_utils.fill_pass_through_columns('zendesk__ticket_passthrough_columns') }}\n\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_tmp", "package": null, "version": null}, {"name": "stg_zendesk__ticket_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_ticket_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp", "macro.fivetran_utils.fill_pass_through_columns"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__ticket.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_tmp`\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n allow_channelback\n \n as \n \n allow_channelback\n \n, \n \n \n assignee_id\n \n as \n \n assignee_id\n \n, \n \n \n brand_id\n \n as \n \n brand_id\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n description\n \n as \n \n description\n \n, \n \n \n due_at\n \n as \n \n due_at\n \n, \n \n \n external_id\n \n as \n \n external_id\n \n, \n \n \n forum_topic_id\n \n as \n \n forum_topic_id\n \n, \n \n \n group_id\n \n as \n \n group_id\n \n, \n \n \n has_incidents\n \n as \n \n has_incidents\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n is_public\n \n as \n \n is_public\n \n, \n \n \n merged_ticket_ids\n \n as \n \n merged_ticket_ids\n \n, \n \n \n organization_id\n \n as \n \n organization_id\n \n, \n \n \n priority\n \n as \n \n priority\n \n, \n \n \n problem_id\n \n as \n \n problem_id\n \n, \n \n \n recipient\n \n as \n \n recipient\n \n, \n \n \n requester_id\n \n as \n \n requester_id\n \n, \n \n \n status\n \n as \n \n status\n \n, \n \n \n subject\n \n as \n \n subject\n \n, \n \n \n submitter_id\n \n as \n \n submitter_id\n \n, \n cast(null as INT64) as \n \n system_ccs\n \n , \n \n \n system_client\n \n as \n \n system_client\n \n, \n cast(null as string) as \n \n system_ip_address\n \n , \n cast(null as INT64) as \n \n system_json_email_identifier\n \n , \n cast(null as FLOAT64) as \n \n system_latitude\n \n , \n cast(null as string) as \n \n system_location\n \n , \n cast(null as FLOAT64) as \n \n system_longitude\n \n , \n cast(null as INT64) as \n \n system_machine_generated\n \n , \n cast(null as INT64) as \n \n system_message_id\n \n , \n cast(null as INT64) as \n \n system_raw_email_identifier\n \n , \n \n \n ticket_form_id\n \n as \n \n ticket_form_id\n \n, \n \n \n type\n \n as \n \n type\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n, \n \n \n url\n \n as \n \n url\n \n, \n \n \n via_channel\n \n as \n \n via_channel\n \n, \n \n \n via_source_from_address\n \n as \n \n via_source_from_address\n \n, \n \n \n via_source_from_id\n \n as \n \n via_source_from_id\n \n, \n \n \n via_source_from_title\n \n as \n \n via_source_from_title\n \n, \n \n \n via_source_rel\n \n as \n \n via_source_rel\n \n, \n \n \n via_source_to_address\n \n as \n \n via_source_to_address\n \n, \n \n \n via_source_to_name\n \n as \n \n via_source_to_name\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_id,\n _fivetran_synced,\n assignee_id,\n brand_id,\n cast(created_at as timestamp) as created_at,\n cast(updated_at as timestamp) as updated_at,\n description,\n due_at,\n group_id,\n external_id,\n is_public,\n organization_id,\n priority,\n recipient,\n requester_id,\n status,\n subject,\n problem_id,\n submitter_id,\n ticket_form_id,\n type,\n url,\n via_channel as created_channel,\n via_source_from_id as source_from_id,\n via_source_from_title as source_from_title,\n via_source_rel as source_rel,\n via_source_to_address as source_to_address,\n via_source_to_name as source_to_name\n\n \n\n\n\n\n\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": 
null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__daylight_time_tmp": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__daylight_time_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__daylight_time_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__daylight_time_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__daylight_time_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__daylight_time_tmp"], "alias": "stg_zendesk__daylight_time_tmp", "checksum": {"name": "sha256", "checksum": "01afb893cce2ef776ef8c4c64dbd2cf3e40fe1f73986fdc4b78fd99ff0948ac8"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1715700424.434627, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__daylight_time_tmp`", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk', 'daylight_time')) }}\nfrom {{ source('zendesk', 'daylight_time') }} as daylight_time_table", "language": "sql", "refs": [], "sources": [["zendesk", "daylight_time"], ["zendesk", "daylight_time"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.daylight_time"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__daylight_time_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nselect `time_zone`,\n `year`,\n `_fivetran_synced`,\n `daylight_end_utc`,\n `daylight_offset`,\n `daylight_start_utc`\nfrom `dbt-package-testing`.`zendesk_integration_tests_50`.`daylight_time_data` as daylight_time_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__user_tmp": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__user_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__user_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__user_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__user_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__user_tmp"], "alias": "stg_zendesk__user_tmp", "checksum": {"name": "sha256", "checksum": 
"606364c3b138f68707d75a04f859f28d4b0f17f99966b27a8f6087adfa091042"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1715700424.493857, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user_tmp`", "raw_code": "select {{ dbt_utils.star(source('zendesk','user')) }} \nfrom {{ source('zendesk','user') }} as user_table", "language": "sql", "refs": [], "sources": [["zendesk", "user"], ["zendesk", "user"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.user"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__user_tmp.sql", "compiled": true, "compiled_code": "select `id`,\n `_fivetran_synced`,\n `active`,\n `alias`,\n `authenticity_token`,\n `chat_only`,\n `created_at`,\n `details`,\n `email`,\n `external_id`,\n `last_login_at`,\n `locale`,\n `locale_id`,\n `moderator`,\n `name`,\n `notes`,\n `only_private_comments`,\n `organization_id`,\n `phone`,\n `remote_photo_url`,\n `restricted_agent`,\n `role`,\n `shared`,\n `shared_agent`,\n `signature`,\n `suspended`,\n `ticket_restriction`,\n `time_zone`,\n `two_factor_auth_enabled`,\n `updated_at`,\n `url`,\n `verified` \nfrom `dbt-package-testing`.`zendesk_integration_tests_50`.`user_data` as user_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__group_tmp": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__group_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__group_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__group_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__group_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__group_tmp"], "alias": "stg_zendesk__group_tmp", "checksum": {"name": "sha256", "checksum": "dc91ce1ab4b5ce5fec29b74b8f999d04fa063ab6354b7387d5875997f4db7e11"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, 
"patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1715700424.496018, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__group_tmp`", "raw_code": "select {{ dbt_utils.star(source('zendesk','group')) }} \nfrom {{ source('zendesk','group') }} as group_table", "language": "sql", "refs": [], "sources": [["zendesk", "group"], ["zendesk", "group"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.group"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__group_tmp.sql", "compiled": true, "compiled_code": "select `id`,\n `_fivetran_deleted`,\n `_fivetran_synced`,\n `created_at`,\n `name`,\n `updated_at`,\n `url` \nfrom `dbt-package-testing`.`zendesk_integration_tests_50`.`group_data` as group_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_tmp": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__ticket_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__ticket_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__ticket_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__ticket_tmp"], "alias": "stg_zendesk__ticket_tmp", "checksum": {"name": "sha256", "checksum": "b90132a6d22e753a066ebeaaea0bc164376837b702d7886ad0d1bb1a993e6e9a"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1715700424.498267, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_tmp`", "raw_code": "select {{ dbt_utils.star(source('zendesk', 'ticket')) }}\nfrom {{ source('zendesk', 'ticket') }} as ticket_table", "language": "sql", "refs": [], "sources": [["zendesk", "ticket"], ["zendesk", "ticket"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.ticket"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__ticket_tmp.sql", "compiled": true, "compiled_code": "select `id`,\n `_fivetran_synced`,\n `allow_channelback`,\n `assignee_id`,\n `brand_id`,\n `created_at`,\n `description`,\n `due_at`,\n `external_id`,\n `forum_topic_id`,\n `group_id`,\n `has_incidents`,\n `is_public`,\n `organization_id`,\n `priority`,\n `problem_id`,\n `recipient`,\n `requester_id`,\n `status`,\n `subject`,\n 
`submitter_id`,\n `system_client`,\n `ticket_form_id`,\n `type`,\n `updated_at`,\n `url`,\n `via_channel`,\n `via_source_from_id`,\n `via_source_from_title`,\n `via_source_rel`,\n `via_source_to_address`,\n `via_source_to_name`,\n `merged_ticket_ids`,\n `via_source_from_address`,\n `followup_ids`,\n `via_followup_source_id`\nfrom `dbt-package-testing`.`zendesk_integration_tests_50`.`ticket_data` as ticket_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__brand_tmp": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__brand_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__brand_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__brand_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__brand_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__brand_tmp"], "alias": "stg_zendesk__brand_tmp", "checksum": {"name": "sha256", "checksum": "9658c9bd90fda5610067615a971eff98dc7c7b8c04827b9ab04da65f28630381"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1715700424.500581, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__brand_tmp`", "raw_code": "select {{ dbt_utils.star(source('zendesk','brand')) }} \nfrom {{ source('zendesk','brand') }} as brand_table", "language": "sql", "refs": [], "sources": [["zendesk", "brand"], ["zendesk", "brand"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.brand"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__brand_tmp.sql", "compiled": true, "compiled_code": "select `id`,\n `_fivetran_deleted`,\n `_fivetran_synced`,\n `active`,\n `brand_url`,\n `default`,\n `has_help_center`,\n `help_center_state`,\n `logo_content_type`,\n `logo_content_url`,\n `logo_deleted`,\n `logo_file_name`,\n `logo_height`,\n `logo_id`,\n `logo_inline`,\n `logo_mapped_content_url`,\n `logo_size`,\n `logo_url`,\n `logo_width`,\n `name`,\n `subdomain`,\n `url` \nfrom `dbt-package-testing`.`zendesk_integration_tests_50`.`brand_data` as brand_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_tag_tmp": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__ticket_tag_tmp", "resource_type": 
"model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__ticket_tag_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__ticket_tag_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_tag_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__ticket_tag_tmp"], "alias": "stg_zendesk__ticket_tag_tmp", "checksum": {"name": "sha256", "checksum": "d88425c9db1a948768fa8683e58654de3aab9ffc2966d829b6707c12afd94283"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1715700424.5025249, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_tag_tmp`", "raw_code": "select {{ dbt_utils.star(source('zendesk', 'ticket_tag')) }}\nfrom {{ source('zendesk', 'ticket_tag') }} as ticket_tag_table", "language": "sql", "refs": [], "sources": [["zendesk", "ticket_tag"], ["zendesk", "ticket_tag"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.ticket_tag"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__ticket_tag_tmp.sql", "compiled": true, "compiled_code": "select `tag`,\n `ticket_id`,\n `_fivetran_synced`\nfrom `dbt-package-testing`.`zendesk_integration_tests_50`.`ticket_tag_data` as ticket_tag_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__schedule_holiday_tmp": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__schedule_holiday_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__schedule_holiday_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__schedule_holiday_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__schedule_holiday_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__schedule_holiday_tmp"], "alias": "stg_zendesk__schedule_holiday_tmp", "checksum": {"name": "sha256", "checksum": "9cd5e53ebcb7f11e55f772a7826b78a7f5f6b27ba975834c28c504181a548a3b"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": 
{"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1715700424.504506, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__schedule_holiday_tmp`", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk', 'schedule_holiday')) }}\nfrom {{ source('zendesk', 'schedule_holiday') }} as schedule_holiday_table", "language": "sql", "refs": [], "sources": [["zendesk", "schedule_holiday"], ["zendesk", "schedule_holiday"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.schedule_holiday"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__schedule_holiday_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nselect `id`,\n `schedule_id`,\n `_fivetran_deleted`,\n `_fivetran_synced`,\n `end_date`,\n `name`,\n `start_date`\nfrom `dbt-package-testing`.`zendesk_integration_tests_50`.`schedule_holiday_data` as schedule_holiday_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__user_tag_tmp": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__user_tag_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__user_tag_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__user_tag_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__user_tag_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__user_tag_tmp"], "alias": "stg_zendesk__user_tag_tmp", "checksum": {"name": "sha256", "checksum": "7ee78431bec698af41296439428c74a8d5f8fa607c55e9b5a9b97de8b777f490"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1715700424.5081651, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user_tag_tmp`", "raw_code": "--To disable this model, set the using_user_tags variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_user_tags', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk','user_tag')) }} \nfrom {{ source('zendesk','user_tag') }} as user_tag_table", "language": "sql", "refs": [], "sources": [["zendesk", 
"user_tag"], ["zendesk", "user_tag"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.user_tag"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__user_tag_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_user_tags variable within your dbt_project.yml file to False.\n\n\nselect `tag`,\n `user_id`,\n `_fivetran_synced` \nfrom `dbt-package-testing`.`zendesk_integration_tests_50`.`user_tag_data` as user_tag_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_field_history_tmp": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__ticket_field_history_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__ticket_field_history_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__ticket_field_history_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_field_history_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__ticket_field_history_tmp"], "alias": "stg_zendesk__ticket_field_history_tmp", "checksum": {"name": "sha256", "checksum": "9dbb7257a2998c6e0d0d7a572aa7b0d301c777cea8e7085abfa42809b9312aa7"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1715700424.511159, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_field_history_tmp`", "raw_code": "select {{ dbt_utils.star(source('zendesk', 'ticket_field_history')) }}\nfrom {{ source('zendesk', 'ticket_field_history') }} as ticket_field_history_table", "language": "sql", "refs": [], "sources": [["zendesk", "ticket_field_history"], ["zendesk", "ticket_field_history"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.ticket_field_history"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__ticket_field_history_tmp.sql", "compiled": true, "compiled_code": "select `field_name`,\n `ticket_id`,\n `updated`,\n `_fivetran_synced`,\n `user_id`,\n `value`\nfrom `dbt-package-testing`.`zendesk_integration_tests_50`.`ticket_field_history_data` as ticket_field_history_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_form_history_tmp": {"database": "dbt-package-testing", "schema": 
"zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__ticket_form_history_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__ticket_form_history_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__ticket_form_history_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_form_history_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__ticket_form_history_tmp"], "alias": "stg_zendesk__ticket_form_history_tmp", "checksum": {"name": "sha256", "checksum": "0e95f65a6932c12231ef9419574fd09b287a70ca20612cce228a7fb642fe1609"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1715700424.513407, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_form_history_tmp`", "raw_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_ticket_form_history', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk', 'ticket_form_history')) }}\nfrom {{ source('zendesk', 'ticket_form_history') }} as ticket_form_history_table", "language": "sql", "refs": [], "sources": [["zendesk", "ticket_form_history"], ["zendesk", "ticket_form_history"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.ticket_form_history"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__ticket_form_history_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n\n\nselect `id`,\n `updated_at`,\n `_fivetran_deleted`,\n `_fivetran_synced`,\n `active`,\n `created_at`,\n `display_name`,\n `end_user_visible`,\n `name`\nfrom `dbt-package-testing`.`zendesk_integration_tests_50`.`ticket_form_history_data` as ticket_form_history_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_comment_tmp": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__ticket_comment_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__ticket_comment_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__ticket_comment_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_comment_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__ticket_comment_tmp"], "alias": "stg_zendesk__ticket_comment_tmp", "checksum": {"name": "sha256", "checksum": 
"756209cf9e8c53e873cd7ac7a2dce2bdbafbd5a9d416e503c628b3ee57603c86"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1715700424.5156212, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_comment_tmp`", "raw_code": "select {{ dbt_utils.star(source('zendesk', 'ticket_comment')) }}\nfrom {{ source('zendesk', 'ticket_comment') }} as ticket_comment_table", "language": "sql", "refs": [], "sources": [["zendesk", "ticket_comment"], ["zendesk", "ticket_comment"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.ticket_comment"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__ticket_comment_tmp.sql", "compiled": true, "compiled_code": "select `id`,\n `_fivetran_synced`,\n `body`,\n `created`,\n `facebook_comment`,\n `public`,\n `ticket_id`,\n `tweet`,\n `user_id`,\n `voice_comment`\nfrom `dbt-package-testing`.`zendesk_integration_tests_50`.`ticket_comment_data` as ticket_comment_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__organization_tag_tmp": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__organization_tag_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__organization_tag_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__organization_tag_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__organization_tag_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__organization_tag_tmp"], "alias": "stg_zendesk__organization_tag_tmp", "checksum": {"name": "sha256", "checksum": "b917812c188e64cda849a61d784cd95507c1c9187fc0ef2e083f2eee61c58231"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 
1715700424.517756, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__organization_tag_tmp`", "raw_code": "--To disable this model, set the using_organization_tags variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_organization_tags', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk','organization_tag')) }} \nfrom {{ source('zendesk','organization_tag') }} as organization_tag_table", "language": "sql", "refs": [], "sources": [["zendesk", "organization_tag"], ["zendesk", "organization_tag"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.organization_tag"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__organization_tag_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_organization_tags variable within your dbt_project.yml file to False.\n\n\nselect `organization_id`,\n `tag`,\n `_fivetran_synced` \nfrom `dbt-package-testing`.`zendesk_integration_tests_50`.`organization_tag_data` as organization_tag_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__schedule_tmp": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__schedule_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__schedule_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__schedule_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__schedule_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__schedule_tmp"], "alias": "stg_zendesk__schedule_tmp", "checksum": {"name": "sha256", "checksum": "7d55acbaaa3cc93868bcd3fe4f945b1ecb4871da7b8bed7bf04714ce3fc11eef"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1715700424.5199108, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__schedule_tmp`", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk', 'schedule')) }}\nfrom {{ source('zendesk', 'schedule') }} as schedule_table", "language": "sql", "refs": [], "sources": [["zendesk", "schedule"], ["zendesk", "schedule"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.schedule"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__schedule_tmp.sql", 
"compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nselect `end_time`,\n `id`,\n `start_time`,\n `_fivetran_deleted`,\n `_fivetran_synced`,\n `end_time_utc`,\n `name`,\n `start_time_utc`,\n `time_zone`,\n `created_at`\nfrom `dbt-package-testing`.`zendesk_integration_tests_50`.`schedule_data` as schedule_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__organization_tmp": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__organization_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__organization_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__organization_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__organization_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__organization_tmp"], "alias": "stg_zendesk__organization_tmp", "checksum": {"name": "sha256", "checksum": "f2b39377f97f3a1a71fee168330c6971c06292c4ea702091a978eb64af9bd28f"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1715700424.522144, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__organization_tmp`", "raw_code": "select {{ dbt_utils.star(source('zendesk', 'organization')) }}\nfrom {{ source('zendesk','organization') }} as organization_table", "language": "sql", "refs": [], "sources": [["zendesk", "organization"], ["zendesk", "organization"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.organization"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__organization_tmp.sql", "compiled": true, "compiled_code": "select `id`,\n `_fivetran_synced`,\n `created_at`,\n `details`,\n `external_id`,\n `group_id`,\n `name`,\n `notes`,\n `shared_comments`,\n `shared_tickets`,\n `updated_at`,\n `url`\nfrom `dbt-package-testing`.`zendesk_integration_tests_50`.`organization_data` as organization_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_schedule_tmp": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__ticket_schedule_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": 
"tmp/stg_zendesk__ticket_schedule_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__ticket_schedule_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_schedule_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__ticket_schedule_tmp"], "alias": "stg_zendesk__ticket_schedule_tmp", "checksum": {"name": "sha256", "checksum": "59d017b8bb4285288bd47b79a1cb1afdb64faca436f52a718f6c8051d24cf6f1"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1715700424.524933, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_schedule_tmp`", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\n{%- set source_relation = adapter.get_relation(\n database=source('zendesk', 'ticket_schedule').database,\n schema=source('zendesk', 'ticket_schedule').schema,\n identifier=source('zendesk', 'ticket_schedule').name) -%}\n\n{% set table_exists=source_relation is not none %}\n\n{% if table_exists %}\n\nselect {{ dbt_utils.star(source('zendesk', 'ticket_schedule')) }}\nfrom {{ source('zendesk', 'ticket_schedule') }} as ticket_schedule_table\n\n{% else %}\n\nselect\n cast(null as {{ dbt.type_timestamp() }}) as _fivetran_synced,\n cast(null as {{ dbt.type_timestamp() }}) as created_at,\n cast(null as {{ dbt.type_int() }}) as schedule_id,\n cast(null as {{ dbt.type_int() }}) as ticket_id\n\n{% endif %}", "language": "sql", "refs": [], "sources": [["zendesk", "ticket_schedule"], ["zendesk", "ticket_schedule"], ["zendesk", "ticket_schedule"]], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.ticket_schedule"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__ticket_schedule_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\n\n\nselect `created_at`,\n `ticket_id`,\n `_fivetran_synced`,\n `schedule_id`\nfrom `dbt-package-testing`.`zendesk_integration_tests_50`.`ticket_schedule_data` as ticket_schedule_table\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__domain_name_tmp": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__domain_name_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__domain_name_tmp.sql", 
"original_file_path": "models/tmp/stg_zendesk__domain_name_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__domain_name_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__domain_name_tmp"], "alias": "stg_zendesk__domain_name_tmp", "checksum": {"name": "sha256", "checksum": "58ba804a3f1cf2e7abe29a28cc9064e9be0355e6b358cca9e714e5777ff11b4b"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1715700424.528176, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__domain_name_tmp`", "raw_code": "--To disable this model, set the using_domain_names variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_domain_names', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk', 'domain_name')) }} \nfrom {{ source('zendesk', 'domain_name') }} as domain_name_table", "language": "sql", "refs": [], "sources": [["zendesk", "domain_name"], ["zendesk", "domain_name"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.domain_name"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__domain_name_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_domain_names variable within your dbt_project.yml file to False.\n\n\nselect `index`,\n `organization_id`,\n `_fivetran_synced`,\n `domain_name` \nfrom `dbt-package-testing`.`zendesk_integration_tests_50`.`domain_name_data` as domain_name_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__time_zone_tmp": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_zendesk_dev", "name": "stg_zendesk__time_zone_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__time_zone_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__time_zone_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__time_zone_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__time_zone_tmp"], "alias": "stg_zendesk__time_zone_tmp", "checksum": {"name": "sha256", "checksum": "b2a214af27259564121fd0c977a7d7388bd644f797f972ed48575a4979819ec2"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": 
true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1715700424.530321, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__time_zone_tmp`", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk', 'time_zone')) }} \nfrom {{ source('zendesk', 'time_zone') }} as time_zone_table", "language": "sql", "refs": [], "sources": [["zendesk", "time_zone"], ["zendesk", "time_zone"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.time_zone"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__time_zone_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nselect `time_zone`,\n `_fivetran_synced`,\n `standard_offset` \nfrom `dbt-package-testing`.`zendesk_integration_tests_50`.`time_zone_data` as time_zone_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "test.zendesk.unique_zendesk__ticket_enriched_ticket_id.7c3c6ca9ef": {"test_metadata": {"name": "unique", "kwargs": {"column_name": "ticket_id", "model": "{{ get_where_subquery(ref('zendesk__ticket_enriched')) }}"}, "namespace": null}, "database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_dbt_test__audit", "name": "unique_zendesk__ticket_enriched_ticket_id", "resource_type": "test", "package_name": "zendesk", "path": "unique_zendesk__ticket_enriched_ticket_id.sql", "original_file_path": "models/zendesk.yml", "unique_id": "test.zendesk.unique_zendesk__ticket_enriched_ticket_id.7c3c6ca9ef", "fqn": ["zendesk", "unique_zendesk__ticket_enriched_ticket_id"], "alias": "unique_zendesk__ticket_enriched_ticket_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1715700424.594639, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "zendesk__ticket_enriched", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk.zendesk__ticket_enriched"]}, "compiled_path": "target/compiled/zendesk/models/zendesk.yml/unique_zendesk__ticket_enriched_ticket_id.sql", "compiled": true, "compiled_code": "\n \n \n\nwith dbt_test__target as 
(\n\n select ticket_id as unique_field\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`zendesk__ticket_enriched`\n where ticket_id is not null\n\n)\n\nselect\n unique_field,\n count(*) as n_records\n\nfrom dbt_test__target\ngroup by unique_field\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_id", "file_key_name": "models.zendesk__ticket_enriched", "attached_node": "model.zendesk.zendesk__ticket_enriched"}, "test.zendesk.not_null_zendesk__ticket_enriched_ticket_id.e3efc5bf0a": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "ticket_id", "model": "{{ get_where_subquery(ref('zendesk__ticket_enriched')) }}"}, "namespace": null}, "database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_dbt_test__audit", "name": "not_null_zendesk__ticket_enriched_ticket_id", "resource_type": "test", "package_name": "zendesk", "path": "not_null_zendesk__ticket_enriched_ticket_id.sql", "original_file_path": "models/zendesk.yml", "unique_id": "test.zendesk.not_null_zendesk__ticket_enriched_ticket_id.e3efc5bf0a", "fqn": ["zendesk", "not_null_zendesk__ticket_enriched_ticket_id"], "alias": "not_null_zendesk__ticket_enriched_ticket_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1715700424.595465, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "zendesk__ticket_enriched", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk.zendesk__ticket_enriched"]}, "compiled_path": "target/compiled/zendesk/models/zendesk.yml/not_null_zendesk__ticket_enriched_ticket_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_id\nfrom `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`zendesk__ticket_enriched`\nwhere ticket_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_id", "file_key_name": "models.zendesk__ticket_enriched", "attached_node": "model.zendesk.zendesk__ticket_enriched"}, "test.zendesk.unique_zendesk__sla_policies_sla_event_id.5daff4d2bd": {"test_metadata": {"name": "unique", "kwargs": {"column_name": "sla_event_id", "model": "{{ get_where_subquery(ref('zendesk__sla_policies')) }}"}, "namespace": null}, "database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_dbt_test__audit", "name": "unique_zendesk__sla_policies_sla_event_id", "resource_type": "test", "package_name": "zendesk", "path": "unique_zendesk__sla_policies_sla_event_id.sql", "original_file_path": "models/zendesk.yml", "unique_id": "test.zendesk.unique_zendesk__sla_policies_sla_event_id.5daff4d2bd", "fqn": ["zendesk", "unique_zendesk__sla_policies_sla_event_id"], "alias": 
"unique_zendesk__sla_policies_sla_event_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1715700424.596076, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "zendesk__sla_policies", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk.zendesk__sla_policies"]}, "compiled_path": "target/compiled/zendesk/models/zendesk.yml/unique_zendesk__sla_policies_sla_event_id.sql", "compiled": true, "compiled_code": "\n \n \n\nwith dbt_test__target as (\n\n select sla_event_id as unique_field\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`zendesk__sla_policies`\n where sla_event_id is not null\n\n)\n\nselect\n unique_field,\n count(*) as n_records\n\nfrom dbt_test__target\ngroup by unique_field\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "sla_event_id", "file_key_name": "models.zendesk__sla_policies", "attached_node": "model.zendesk.zendesk__sla_policies"}, "test.zendesk.unique_zendesk__ticket_metrics_ticket_id.f3dc8eba5c": {"test_metadata": {"name": "unique", "kwargs": {"column_name": "ticket_id", "model": "{{ get_where_subquery(ref('zendesk__ticket_metrics')) }}"}, "namespace": null}, "database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_dbt_test__audit", "name": "unique_zendesk__ticket_metrics_ticket_id", "resource_type": "test", "package_name": "zendesk", "path": "unique_zendesk__ticket_metrics_ticket_id.sql", "original_file_path": "models/zendesk.yml", "unique_id": "test.zendesk.unique_zendesk__ticket_metrics_ticket_id.f3dc8eba5c", "fqn": ["zendesk", "unique_zendesk__ticket_metrics_ticket_id"], "alias": "unique_zendesk__ticket_metrics_ticket_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1715700424.596683, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "zendesk__ticket_metrics", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk.zendesk__ticket_metrics"]}, "compiled_path": "target/compiled/zendesk/models/zendesk.yml/unique_zendesk__ticket_metrics_ticket_id.sql", "compiled": true, "compiled_code": 
"\n \n \n\nwith dbt_test__target as (\n\n select ticket_id as unique_field\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`zendesk__ticket_metrics`\n where ticket_id is not null\n\n)\n\nselect\n unique_field,\n count(*) as n_records\n\nfrom dbt_test__target\ngroup by unique_field\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_id", "file_key_name": "models.zendesk__ticket_metrics", "attached_node": "model.zendesk.zendesk__ticket_metrics"}, "test.zendesk.not_null_zendesk__ticket_metrics_ticket_id.3466b76bbd": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "ticket_id", "model": "{{ get_where_subquery(ref('zendesk__ticket_metrics')) }}"}, "namespace": null}, "database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_dbt_test__audit", "name": "not_null_zendesk__ticket_metrics_ticket_id", "resource_type": "test", "package_name": "zendesk", "path": "not_null_zendesk__ticket_metrics_ticket_id.sql", "original_file_path": "models/zendesk.yml", "unique_id": "test.zendesk.not_null_zendesk__ticket_metrics_ticket_id.3466b76bbd", "fqn": ["zendesk", "not_null_zendesk__ticket_metrics_ticket_id"], "alias": "not_null_zendesk__ticket_metrics_ticket_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1715700424.597258, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "zendesk__ticket_metrics", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk.zendesk__ticket_metrics"]}, "compiled_path": "target/compiled/zendesk/models/zendesk.yml/not_null_zendesk__ticket_metrics_ticket_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_id\nfrom `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`zendesk__ticket_metrics`\nwhere ticket_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_id", "file_key_name": "models.zendesk__ticket_metrics", "attached_node": "model.zendesk.zendesk__ticket_metrics"}, "test.zendesk_source.unique_stg_zendesk__ticket_ticket_id.4be7124521": {"test_metadata": {"name": "unique", "kwargs": {"column_name": "ticket_id", "model": "{{ get_where_subquery(ref('stg_zendesk__ticket')) }}"}, "namespace": null}, "database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_dbt_test__audit", "name": "unique_stg_zendesk__ticket_ticket_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__ticket_ticket_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__ticket_ticket_id.4be7124521", "fqn": ["zendesk_source", "unique_stg_zendesk__ticket_ticket_id"], "alias": 
"unique_stg_zendesk__ticket_ticket_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1715700424.610858, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__ticket_ticket_id.sql", "compiled": true, "compiled_code": "\n \n \n\nwith dbt_test__target as (\n\n select ticket_id as unique_field\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket`\n where ticket_id is not null\n\n)\n\nselect\n unique_field,\n count(*) as n_records\n\nfrom dbt_test__target\ngroup by unique_field\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_id", "file_key_name": "models.stg_zendesk__ticket", "attached_node": "model.zendesk_source.stg_zendesk__ticket"}, "test.zendesk_source.not_null_stg_zendesk__ticket_ticket_id.a8229e6981": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "ticket_id", "model": "{{ get_where_subquery(ref('stg_zendesk__ticket')) }}"}, "namespace": null}, "database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_dbt_test__audit", "name": "not_null_stg_zendesk__ticket_ticket_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__ticket_ticket_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__ticket_ticket_id.a8229e6981", "fqn": ["zendesk_source", "not_null_stg_zendesk__ticket_ticket_id"], "alias": "not_null_stg_zendesk__ticket_ticket_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1715700424.6115148, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__ticket_ticket_id.sql", 
"compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_id\nfrom `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket`\nwhere ticket_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_id", "file_key_name": "models.stg_zendesk__ticket", "attached_node": "model.zendesk_source.stg_zendesk__ticket"}, "test.zendesk_source.unique_stg_zendesk__brand_brand_id.fdf8e23c9e": {"test_metadata": {"name": "unique", "kwargs": {"column_name": "brand_id", "model": "{{ get_where_subquery(ref('stg_zendesk__brand')) }}"}, "namespace": null}, "database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_dbt_test__audit", "name": "unique_stg_zendesk__brand_brand_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__brand_brand_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__brand_brand_id.fdf8e23c9e", "fqn": ["zendesk_source", "unique_stg_zendesk__brand_brand_id"], "alias": "unique_stg_zendesk__brand_brand_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1715700424.612087, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__brand", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__brand"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__brand_brand_id.sql", "compiled": true, "compiled_code": "\n \n \n\nwith dbt_test__target as (\n\n select brand_id as unique_field\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__brand`\n where brand_id is not null\n\n)\n\nselect\n unique_field,\n count(*) as n_records\n\nfrom dbt_test__target\ngroup by unique_field\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "brand_id", "file_key_name": "models.stg_zendesk__brand", "attached_node": "model.zendesk_source.stg_zendesk__brand"}, "test.zendesk_source.not_null_stg_zendesk__brand_brand_id.a2419e1741": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "brand_id", "model": "{{ get_where_subquery(ref('stg_zendesk__brand')) }}"}, "namespace": null}, "database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_dbt_test__audit", "name": "not_null_stg_zendesk__brand_brand_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__brand_brand_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__brand_brand_id.a2419e1741", "fqn": ["zendesk_source", "not_null_stg_zendesk__brand_brand_id"], "alias": 
"not_null_stg_zendesk__brand_brand_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1715700424.61265, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__brand", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__brand"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__brand_brand_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect brand_id\nfrom `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__brand`\nwhere brand_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "brand_id", "file_key_name": "models.stg_zendesk__brand", "attached_node": "model.zendesk_source.stg_zendesk__brand"}, "test.zendesk_source.not_null_stg_zendesk__domain_name_organization_id.a2b5ff8fd3": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "organization_id", "model": "{{ get_where_subquery(ref('stg_zendesk__domain_name')) }}"}, "namespace": null}, "database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_dbt_test__audit", "name": "not_null_stg_zendesk__domain_name_organization_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__domain_name_organization_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__domain_name_organization_id.a2b5ff8fd3", "fqn": ["zendesk_source", "not_null_stg_zendesk__domain_name_organization_id"], "alias": "not_null_stg_zendesk__domain_name_organization_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1715700424.6134279, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__domain_name", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__domain_name"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__domain_name_organization_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect organization_id\nfrom 
`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__domain_name`\nwhere organization_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "organization_id", "file_key_name": "models.stg_zendesk__domain_name", "attached_node": "model.zendesk_source.stg_zendesk__domain_name"}, "test.zendesk_source.unique_stg_zendesk__group_group_id.f0658dabcd": {"test_metadata": {"name": "unique", "kwargs": {"column_name": "group_id", "model": "{{ get_where_subquery(ref('stg_zendesk__group')) }}"}, "namespace": null}, "database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_dbt_test__audit", "name": "unique_stg_zendesk__group_group_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__group_group_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__group_group_id.f0658dabcd", "fqn": ["zendesk_source", "unique_stg_zendesk__group_group_id"], "alias": "unique_stg_zendesk__group_group_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1715700424.614103, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__group", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__group"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__group_group_id.sql", "compiled": true, "compiled_code": "\n \n \n\nwith dbt_test__target as (\n\n select group_id as unique_field\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__group`\n where group_id is not null\n\n)\n\nselect\n unique_field,\n count(*) as n_records\n\nfrom dbt_test__target\ngroup by unique_field\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "group_id", "file_key_name": "models.stg_zendesk__group", "attached_node": "model.zendesk_source.stg_zendesk__group"}, "test.zendesk_source.not_null_stg_zendesk__group_group_id.7659ed83ec": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "group_id", "model": "{{ get_where_subquery(ref('stg_zendesk__group')) }}"}, "namespace": null}, "database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_dbt_test__audit", "name": "not_null_stg_zendesk__group_group_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__group_group_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__group_group_id.7659ed83ec", "fqn": ["zendesk_source", "not_null_stg_zendesk__group_group_id"], "alias": "not_null_stg_zendesk__group_group_id", "checksum": {"name": 
"none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1715700424.614702, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__group", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__group"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__group_group_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect group_id\nfrom `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__group`\nwhere group_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "group_id", "file_key_name": "models.stg_zendesk__group", "attached_node": "model.zendesk_source.stg_zendesk__group"}, "test.zendesk_source.unique_stg_zendesk__organization_organization_id.152be1ab31": {"test_metadata": {"name": "unique", "kwargs": {"column_name": "organization_id", "model": "{{ get_where_subquery(ref('stg_zendesk__organization')) }}"}, "namespace": null}, "database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_dbt_test__audit", "name": "unique_stg_zendesk__organization_organization_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__organization_organization_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__organization_organization_id.152be1ab31", "fqn": ["zendesk_source", "unique_stg_zendesk__organization_organization_id"], "alias": "unique_stg_zendesk__organization_organization_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1715700424.615276, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__organization", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__organization"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__organization_organization_id.sql", "compiled": true, "compiled_code": "\n \n \n\nwith dbt_test__target as (\n\n select organization_id as unique_field\n from 
`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__organization`\n where organization_id is not null\n\n)\n\nselect\n unique_field,\n count(*) as n_records\n\nfrom dbt_test__target\ngroup by unique_field\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "organization_id", "file_key_name": "models.stg_zendesk__organization", "attached_node": "model.zendesk_source.stg_zendesk__organization"}, "test.zendesk_source.not_null_stg_zendesk__organization_organization_id.de7b98c06a": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "organization_id", "model": "{{ get_where_subquery(ref('stg_zendesk__organization')) }}"}, "namespace": null}, "database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_dbt_test__audit", "name": "not_null_stg_zendesk__organization_organization_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__organization_organization_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__organization_organization_id.de7b98c06a", "fqn": ["zendesk_source", "not_null_stg_zendesk__organization_organization_id"], "alias": "not_null_stg_zendesk__organization_organization_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1715700424.615867, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__organization", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__organization"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__organization_organization_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect organization_id\nfrom `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__organization`\nwhere organization_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "organization_id", "file_key_name": "models.stg_zendesk__organization", "attached_node": "model.zendesk_source.stg_zendesk__organization"}, "test.zendesk_source.unique_stg_zendesk__ticket_comment_ticket_comment_id.ba353330cd": {"test_metadata": {"name": "unique", "kwargs": {"column_name": "ticket_comment_id", "model": "{{ get_where_subquery(ref('stg_zendesk__ticket_comment')) }}"}, "namespace": null}, "database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_dbt_test__audit", "name": "unique_stg_zendesk__ticket_comment_ticket_comment_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__ticket_comment_ticket_comment_id.sql", "original_file_path": "models/stg_zendesk.yml", 
"unique_id": "test.zendesk_source.unique_stg_zendesk__ticket_comment_ticket_comment_id.ba353330cd", "fqn": ["zendesk_source", "unique_stg_zendesk__ticket_comment_ticket_comment_id"], "alias": "unique_stg_zendesk__ticket_comment_ticket_comment_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1715700424.61661, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_comment", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_comment"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__ticket_comment_ticket_comment_id.sql", "compiled": true, "compiled_code": "\n \n \n\nwith dbt_test__target as (\n\n select ticket_comment_id as unique_field\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_comment`\n where ticket_comment_id is not null\n\n)\n\nselect\n unique_field,\n count(*) as n_records\n\nfrom dbt_test__target\ngroup by unique_field\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_comment_id", "file_key_name": "models.stg_zendesk__ticket_comment", "attached_node": "model.zendesk_source.stg_zendesk__ticket_comment"}, "test.zendesk_source.not_null_stg_zendesk__ticket_comment_ticket_comment_id.b821f4a606": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "ticket_comment_id", "model": "{{ get_where_subquery(ref('stg_zendesk__ticket_comment')) }}"}, "namespace": null}, "database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_dbt_test__audit", "name": "not_null_stg_zendesk__ticket_comment_ticket_comment_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__ticket_comment_ticket_comment_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__ticket_comment_ticket_comment_id.b821f4a606", "fqn": ["zendesk_source", "not_null_stg_zendesk__ticket_comment_ticket_comment_id"], "alias": "not_null_stg_zendesk__ticket_comment_ticket_comment_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1715700424.617177, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) 
}}", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_comment", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_comment"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__ticket_comment_ticket_comment_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_comment_id\nfrom `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_comment`\nwhere ticket_comment_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_comment_id", "file_key_name": "models.stg_zendesk__ticket_comment", "attached_node": "model.zendesk_source.stg_zendesk__ticket_comment"}, "test.zendesk_source.unique_stg_zendesk__user_user_id.3d3e346b11": {"test_metadata": {"name": "unique", "kwargs": {"column_name": "user_id", "model": "{{ get_where_subquery(ref('stg_zendesk__user')) }}"}, "namespace": null}, "database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_dbt_test__audit", "name": "unique_stg_zendesk__user_user_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__user_user_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__user_user_id.3d3e346b11", "fqn": ["zendesk_source", "unique_stg_zendesk__user_user_id"], "alias": "unique_stg_zendesk__user_user_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1715700424.617749, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__user", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__user"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__user_user_id.sql", "compiled": true, "compiled_code": "\n \n \n\nwith dbt_test__target as (\n\n select user_id as unique_field\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user`\n where user_id is not null\n\n)\n\nselect\n unique_field,\n count(*) as n_records\n\nfrom dbt_test__target\ngroup by unique_field\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "user_id", "file_key_name": "models.stg_zendesk__user", "attached_node": "model.zendesk_source.stg_zendesk__user"}, "test.zendesk_source.not_null_stg_zendesk__user_user_id.102d572926": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "user_id", "model": "{{ get_where_subquery(ref('stg_zendesk__user')) }}"}, "namespace": null}, "database": "dbt-package-testing", 
"schema": "zendesk_integration_tests_50_dbt_test__audit", "name": "not_null_stg_zendesk__user_user_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__user_user_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__user_user_id.102d572926", "fqn": ["zendesk_source", "not_null_stg_zendesk__user_user_id"], "alias": "not_null_stg_zendesk__user_user_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1715700424.6183171, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__user", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__user"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__user_user_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect user_id\nfrom `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user`\nwhere user_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "user_id", "file_key_name": "models.stg_zendesk__user", "attached_node": "model.zendesk_source.stg_zendesk__user"}, "test.zendesk_source.not_null_stg_zendesk__ticket_form_history_ticket_form_id.1afe781a17": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "ticket_form_id", "model": "{{ get_where_subquery(ref('stg_zendesk__ticket_form_history')) }}"}, "namespace": null}, "database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_dbt_test__audit", "name": "not_null_stg_zendesk__ticket_form_history_ticket_form_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__ticket_form_history_ticket_form_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__ticket_form_history_ticket_form_id.1afe781a17", "fqn": ["zendesk_source", "not_null_stg_zendesk__ticket_form_history_ticket_form_id"], "alias": "not_null_stg_zendesk__ticket_form_history_ticket_form_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1715700424.618878, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": 
"stg_zendesk__ticket_form_history", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_form_history"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__ticket_form_history_ticket_form_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_form_id\nfrom `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_form_history`\nwhere ticket_form_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_form_id", "file_key_name": "models.stg_zendesk__ticket_form_history", "attached_node": "model.zendesk_source.stg_zendesk__ticket_form_history"}, "test.zendesk_source.dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year.88227aef3d": {"test_metadata": {"name": "unique_combination_of_columns", "kwargs": {"combination_of_columns": ["time_zone", "year"], "model": "{{ get_where_subquery(ref('stg_zendesk__daylight_time')) }}"}, "namespace": "dbt_utils"}, "database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_dbt_test__audit", "name": "dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year", "resource_type": "test", "package_name": "zendesk_source", "path": "dbt_utils_unique_combination_o_54ab42208165c9c38d3147cec984eab9.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year.88227aef3d", "fqn": ["zendesk_source", "dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year"], "alias": "dbt_utils_unique_combination_o_54ab42208165c9c38d3147cec984eab9", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": "dbt_utils_unique_combination_o_54ab42208165c9c38d3147cec984eab9", "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"alias": "dbt_utils_unique_combination_o_54ab42208165c9c38d3147cec984eab9"}, "created_at": 1715700424.61945, "relation_name": null, "raw_code": "{{ dbt_utils.test_unique_combination_of_columns(**_dbt_generic_test_kwargs) }}{{ config(alias=\"dbt_utils_unique_combination_o_54ab42208165c9c38d3147cec984eab9\") }}", "language": "sql", "refs": [{"name": "stg_zendesk__daylight_time", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.test_unique_combination_of_columns", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__daylight_time"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/dbt_utils_unique_combination_o_54ab42208165c9c38d3147cec984eab9.sql", "compiled": true, "compiled_code": "\n\n\n\n\n\nwith validation_errors as (\n\n select\n time_zone, year\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__daylight_time`\n group by time_zone, year\n having 
count(*) > 1\n\n)\n\nselect *\nfrom validation_errors\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": null, "file_key_name": "models.stg_zendesk__daylight_time", "attached_node": "model.zendesk_source.stg_zendesk__daylight_time"}, "test.zendesk_source.unique_stg_zendesk__time_zone_time_zone.67995adbaf": {"test_metadata": {"name": "unique", "kwargs": {"column_name": "time_zone", "model": "{{ get_where_subquery(ref('stg_zendesk__time_zone')) }}"}, "namespace": null}, "database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_dbt_test__audit", "name": "unique_stg_zendesk__time_zone_time_zone", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__time_zone_time_zone.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__time_zone_time_zone.67995adbaf", "fqn": ["zendesk_source", "unique_stg_zendesk__time_zone_time_zone"], "alias": "unique_stg_zendesk__time_zone_time_zone", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1715700424.6243699, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__time_zone", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__time_zone"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__time_zone_time_zone.sql", "compiled": true, "compiled_code": "\n \n \n\nwith dbt_test__target as (\n\n select time_zone as unique_field\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__time_zone`\n where time_zone is not null\n\n)\n\nselect\n unique_field,\n count(*) as n_records\n\nfrom dbt_test__target\ngroup by unique_field\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "time_zone", "file_key_name": "models.stg_zendesk__time_zone", "attached_node": "model.zendesk_source.stg_zendesk__time_zone"}, "test.zendesk_source.not_null_stg_zendesk__time_zone_time_zone.b25b3452b1": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "time_zone", "model": "{{ get_where_subquery(ref('stg_zendesk__time_zone')) }}"}, "namespace": null}, "database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_dbt_test__audit", "name": "not_null_stg_zendesk__time_zone_time_zone", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__time_zone_time_zone.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__time_zone_time_zone.b25b3452b1", "fqn": ["zendesk_source", "not_null_stg_zendesk__time_zone_time_zone"], "alias": "not_null_stg_zendesk__time_zone_time_zone", 
"checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1715700424.625049, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__time_zone", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__time_zone"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__time_zone_time_zone.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect time_zone\nfrom `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__time_zone`\nwhere time_zone is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "time_zone", "file_key_name": "models.stg_zendesk__time_zone", "attached_node": "model.zendesk_source.stg_zendesk__time_zone"}, "test.zendesk_source.unique_stg_zendesk__schedule_holiday_holiday_id.0341d5635a": {"test_metadata": {"name": "unique", "kwargs": {"column_name": "holiday_id", "model": "{{ get_where_subquery(ref('stg_zendesk__schedule_holiday')) }}"}, "namespace": null}, "database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_dbt_test__audit", "name": "unique_stg_zendesk__schedule_holiday_holiday_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__schedule_holiday_holiday_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__schedule_holiday_holiday_id.0341d5635a", "fqn": ["zendesk_source", "unique_stg_zendesk__schedule_holiday_holiday_id"], "alias": "unique_stg_zendesk__schedule_holiday_holiday_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1715700424.625628, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__schedule_holiday", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__schedule_holiday"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__schedule_holiday_holiday_id.sql", "compiled": true, "compiled_code": "\n \n \n\nwith dbt_test__target as (\n\n select holiday_id as 
unique_field\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__schedule_holiday`\n where holiday_id is not null\n\n)\n\nselect\n unique_field,\n count(*) as n_records\n\nfrom dbt_test__target\ngroup by unique_field\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "holiday_id", "file_key_name": "models.stg_zendesk__schedule_holiday", "attached_node": "model.zendesk_source.stg_zendesk__schedule_holiday"}, "test.zendesk_source.not_null_stg_zendesk__schedule_holiday_holiday_id.52eb08f782": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "holiday_id", "model": "{{ get_where_subquery(ref('stg_zendesk__schedule_holiday')) }}"}, "namespace": null}, "database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_dbt_test__audit", "name": "not_null_stg_zendesk__schedule_holiday_holiday_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__schedule_holiday_holiday_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__schedule_holiday_holiday_id.52eb08f782", "fqn": ["zendesk_source", "not_null_stg_zendesk__schedule_holiday_holiday_id"], "alias": "not_null_stg_zendesk__schedule_holiday_holiday_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1715700424.626206, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__schedule_holiday", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__schedule_holiday"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__schedule_holiday_holiday_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect holiday_id\nfrom `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__schedule_holiday`\nwhere holiday_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "holiday_id", "file_key_name": "models.stg_zendesk__schedule_holiday", "attached_node": "model.zendesk_source.stg_zendesk__schedule_holiday"}}, "sources": {"source.zendesk_source.zendesk.ticket": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "ticket", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.ticket", "fqn": ["zendesk_source", "zendesk", "ticket"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "ticket_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, 
"loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "Tickets are the means through which your end users (customers) communicate with agents in Zendesk Support. Tickets can originate from a number of channels, including email, Help Center, chat, phone call, Twitter, Facebook, or the API.\n", "columns": {"id": {"name": "id", "description": "Automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "url": {"name": "url", "description": "The API url of this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_id": {"name": "assignee_id", "description": "The agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "brand_id": {"name": "brand_id", "description": "Enterprise only. The id of the brand this ticket is associated with", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "When this record was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "type": {"name": "type", "description": "The type of this ticket, possible values are problem, incident, question or task", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "subject": {"name": "subject", "description": "The value of the subject field for this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "description": {"name": "description", "description": "Read-only first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "priority": {"name": "priority", "description": "The urgency with which the ticket should be addressed, possible values are urgent, high, normal and low", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "The state of the ticket, possible values are new, open, pending, hold, solved and closed", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "recipient": {"name": "recipient", "description": "The original recipient e-mail address of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_id": {"name": "requester_id", "description": "The user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_id": {"name": "submitter_id", "description": "The user who submitted the ticket. The submitter always becomes the author of the first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_id": {"name": "organization_id", "description": "The organization of the requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_id": {"name": "group_id", "description": "The group this ticket is assigned to", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "due_at": {"name": "due_at", "description": "If this is a ticket of type \"task\" it has a due date. Due date format uses ISO 8601 format.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_form_id": {"name": "ticket_form_id", "description": "Enterprise only. 
The id of the ticket form to render for the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_public": {"name": "is_public", "description": "Is true if any comments are public, false otherwise", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "When this record last got updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "via_channel": {"name": "via_channel", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "via_source_from_id": {"name": "via_source_from_id", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "via_source_from_title": {"name": "via_source_from_title", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "via_source_rel": {"name": "via_source_rel", "description": "The rel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "via_source_to_address": {"name": "via_source_to_address", "description": "The address of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "via_source_to_name": {"name": "via_source_to_name", "description": "The name of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`ticket_data`", "created_at": 1715700424.658519}, "source.zendesk_source.zendesk.brand": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "brand", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.brand", "fqn": ["zendesk_source", "zendesk", "brand"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "brand_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "Brands are your customer-facing identities. 
They might represent multiple products or services, or they might literally be multiple brands owned and represented by your company.\n", "columns": {"id": {"name": "id", "description": "The ID automatically assigned when the brand is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "brand_url": {"name": "brand_url", "description": "The url of the brand", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The name of the brand", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "subdomain": {"name": "subdomain", "description": "The subdomain of the brand", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "active": {"name": "active", "description": "If the brand is set as active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`brand_data`", "created_at": 1715700424.658607}, "source.zendesk_source.zendesk.domain_name": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "domain_name", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.domain_name", "fqn": ["zendesk_source", "zendesk", "domain_name"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "domain_name_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "Domain names associated with an organization. 
An organization may have multiple domain names.", "columns": {"organization_id": {"name": "organization_id", "description": "Reference to the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "domain_name": {"name": "domain_name", "description": "The name of the domain associated with the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "index": {"name": "index", "description": "Index number of the domain name associated with the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`domain_name_data`", "created_at": 1715700424.658676}, "source.zendesk_source.zendesk.group": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "group", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.group", "fqn": ["zendesk_source", "zendesk", "group"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "group_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "When support requests arrive in Zendesk Support, they can be assigned to a Group. Groups serve as the core element of ticket workflow; support agents are organized into Groups and tickets can be assigned to a Group only, or to an assigned agent within a Group. A ticket can never be assigned to an agent without also being assigned to a Group.\n", "columns": {"id": {"name": "id", "description": "Automatically assigned when creating groups", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The name of the group", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`group_data`", "created_at": 1715700424.658723}, "source.zendesk_source.zendesk.organization_tag": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "organization_tag", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.organization_tag", "fqn": ["zendesk_source", "zendesk", "organization_tag"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "organization_tag_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "The tags associated with an organization. 
An organization may have multiple tags.", "columns": {"organization_id": {"name": "organization_id", "description": "Reference to the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tag": {"name": "tag", "description": "Tag associated with the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`organization_tag_data`", "created_at": 1715700424.658776}, "source.zendesk_source.zendesk.organization": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "organization", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.organization", "fqn": ["zendesk_source", "zendesk", "organization"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "organization_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "Just as agents can be segmented into groups in Zendesk Support, your customers (end-users) can be segmented into organizations. You can manually assign customers to an organization or automatically assign them to an organization by their email address domain. Organizations can be used in business rules to route tickets to groups of agents or to send email notifications.\n", "columns": {"id": {"name": "id", "description": "Automatically assigned when the organization is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "A unique name for the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "details": {"name": "details", "description": "Any details about the organization, such as the address", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "url": {"name": "url", "description": "The API url of this organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "external_id": {"name": "external_id", "description": "A unique external id to associate organizations to an external record", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the organization was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "The time of the last update of the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "domain_names": {"name": "domain_names", "description": "An array of domain names associated with this organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "notes": {"name": "notes", "description": "Any notes you have about the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_id": {"name": "group_id", "description": "New tickets from users in this organization are automatically put in this group", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "shared_tickets": {"name":
"shared_tickets", "description": "End users in this organization are able to see each other's tickets", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "shared_comments": {"name": "shared_comments", "description": "End users in this organization are able to see each other's comments on tickets", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tags": {"name": "tags", "description": "The tags of the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_fields": {"name": "organization_fields", "description": "Custom fields for this organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`organization_data`", "created_at": 1715700424.658829}, "source.zendesk_source.zendesk.ticket_comment": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "ticket_comment", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.ticket_comment", "fqn": ["zendesk_source", "zendesk", "ticket_comment"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "ticket_comment_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "Ticket comments represent the conversation between requesters, collaborators, and agents. 
Comments can be public or private.", "columns": {"id": {"name": "id", "description": "Automatically assigned when the comment is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "body": {"name": "body", "description": "The comment string", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created": {"name": "created", "description": "The time the comment was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "public": {"name": "public", "description": "Boolean field indicating if the comment is public (true), or if it is an internal note (false)", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_id": {"name": "ticket_id", "description": "The ticket id associated with this comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "user_id": {"name": "user_id", "description": "The id of the comment author", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "facebook_comment": {"name": "facebook_comment", "description": "Boolean field indicating if the comment is a facebook comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tweet": {"name": "tweet", "description": "Boolean field indicating if the comment is a twitter tweet", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "voice_comment": {"name": "voice_comment", "description": "Boolean field indicating if the comment is a voice comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`ticket_comment_data`", "created_at": 1715700424.658883}, "source.zendesk_source.zendesk.user_tag": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "user_tag", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.user_tag", "fqn": ["zendesk_source", "zendesk", "user_tag"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "user_tag_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "Table containing all tags associated with a user. 
Only present if your account has user tagging enabled.", "columns": {"user_id": {"name": "user_id", "description": "Reference to the user", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tag": {"name": "tag", "description": "Tag associated with the user", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`user_tag_data`", "created_at": 1715700424.658931}, "source.zendesk_source.zendesk.user": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "user", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.user", "fqn": ["zendesk_source", "zendesk", "user"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "user_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "Zendesk Support has three types of users, end-users (your customers), agents, and administrators.", "columns": {"id": {"name": "id", "description": "Automatically assigned when the user is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "email": {"name": "email", "description": "The user's primary email address. *Writeable on create only. On update, a secondary email is added. See Email Address", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The user's name", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "active": {"name": "active", "description": "false if the user has been deleted", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the user was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_id": {"name": "organization_id", "description": "The id of the user's organization. If the user has more than one organization membership, the id of the user's default organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "phone": {"name": "phone", "description": "User's phone number.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "role": {"name": "role", "description": "The user's role. Possible values are \"end-user\", \"agent\", or \"admin\"", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "time_zone": {"name": "time_zone", "description": "The user's time zone. See Time Zone", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_restriction": {"name": "ticket_restriction", "description": "Specifies which tickets the user has access to. 
Possible values are organization, groups, assigned, requested and null", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "suspended": {"name": "suspended", "description": "Boolean representing whether the user has been suspended, meaning that they can no longer sign in and any new support requests you receive from them are sent to the suspended tickets queue.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "external_id": {"name": "external_id", "description": "A unique identifier from another system. The API treats the id as case insensitive.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "The time the user was last updated.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_login_at": {"name": "last_login_at", "description": "Last time the user signed in to Zendesk Support or made an API request using an API token or basic authentication.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "locale": {"name": "locale", "description": "The user's locale. A BCP-47 compliant tag for the locale. If both \"locale\" and \"locale_id\" are present on create or update, \"locale_id\" is ignored and only \"locale\" is used.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`user_data`", "created_at": 1715700424.658983}, "source.zendesk_source.zendesk.schedule": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "schedule", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.schedule", "fqn": ["zendesk_source", "zendesk", "schedule"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "schedule_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "The support schedules created with different business hours and holidays.", "columns": {"id": {"name": "id", "description": "ID automatically assigned to the schedule upon creation", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "Name of the schedule", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "Time the schedule was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "start_time": {"name": "start_time", "description": "Start time of the schedule, in the schedule's time zone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "end_time": {"name": "end_time", "description": "End time of the schedule, in the schedule's time zone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "time_zone": {"name": "time_zone", "description": "Timezone in which the schedule operates.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": 
{"enabled": true}, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`schedule_data`", "created_at": 1715700424.659034}, "source.zendesk_source.zendesk.ticket_schedule": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "ticket_schedule", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.ticket_schedule", "fqn": ["zendesk_source", "zendesk", "ticket_schedule"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "ticket_schedule_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "The schedules applied to tickets through a trigger.", "columns": {"ticket_id": {"name": "ticket_id", "description": "The ID of the ticket assigned to the schedule", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the schedule was assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "schedule_id": {"name": "schedule_id", "description": "The ID of the schedule applied to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`ticket_schedule_data`", "created_at": 1715700424.6590762}, "source.zendesk_source.zendesk.ticket_form_history": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "ticket_form_history", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.ticket_form_history", "fqn": ["zendesk_source", "zendesk", "ticket_form_history"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "ticket_form_history_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "Ticket forms allow an admin to define a subset of ticket fields for display to both agents and end users.", "columns": {"id": {"name": "id", "description": "Automatically assigned when creating ticket form", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the ticket form was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "The time of the last update of the ticket form", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "display_name": {"name": "display_name", "description": "The name of the form that is displayed to an end user", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "active": {"name": "active", "description": "If the form is set as active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": 
"The name of the form", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`ticket_form_history_data`", "created_at": 1715700424.659168}, "source.zendesk_source.zendesk.ticket_tag": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "ticket_tag", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.ticket_tag", "fqn": ["zendesk_source", "zendesk", "ticket_tag"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "ticket_tag_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "Tags are words, or combinations of words, you can use to add more context to tickets. The table lists all tags currently associated with a ticket.\n", "columns": {"ticket_id": {"name": "ticket_id", "description": "The ID of the ticket associated with the tag", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tags": {"name": "tags", "description": "The tag, or word(s), associated with the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`ticket_tag_data`", "created_at": 1715700424.659212}, "source.zendesk_source.zendesk.ticket_field_history": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "ticket_field_history", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.ticket_field_history", "fqn": ["zendesk_source", "zendesk", "ticket_field_history"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "ticket_field_history_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "All fields and field values associated with tickets.", "columns": {"ticket_id": {"name": "ticket_id", "description": "The ID of the ticket associated with the field", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "field_name": {"name": "field_name", "description": "The name of the ticket field", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated": {"name": "updated", "description": "The time the ticket field value was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "value": {"name": "value", "description": "The value of the field", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "user_id": {"name": "user_id", "description": "The id of the user who made the update", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": 
"`dbt-package-testing`.`zendesk_integration_tests_50`.`ticket_field_history_data`", "created_at": 1715700424.6592572}, "source.zendesk_source.zendesk.daylight_time": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "daylight_time", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.daylight_time", "fqn": ["zendesk_source", "zendesk", "daylight_time"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "daylight_time_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "Appropriate offsets (from UTC) for timezones that engage or have engaged with Daylight Savings at some point since 1970.\n", "columns": {"daylight_end_utc": {"name": "daylight_end_utc", "description": "UTC timestamp of when Daylight Time ended in this year.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "daylight_offset": {"name": "daylight_offset", "description": "Number of **hours** added during Daylight Savings Time.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "daylight_start_utc": {"name": "daylight_start_utc", "description": "UTC timestamp of when Daylight Time began in this year.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "time_zone": {"name": "time_zone", "description": "Name of the timezone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "year": {"name": "year", "description": "Year in which daylight savings occurred.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`daylight_time_data`", "created_at": 1715700424.6592991}, "source.zendesk_source.zendesk.time_zone": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "time_zone", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.time_zone", "fqn": ["zendesk_source", "zendesk", "time_zone"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "time_zone_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "Offsets (from UTC) for each timezone.", "columns": {"time_zone": {"name": "time_zone", "description": "Name of the time zone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "standard_offset": {"name": "standard_offset", "description": "Standard offset of the timezone (non-daylight savings hours). 
In `+/-hh:mm` format.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`time_zone_data`", "created_at": 1715700424.659339}, "source.zendesk_source.zendesk.schedule_holiday": {"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "schedule_holiday", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.schedule_holiday", "fqn": ["zendesk_source", "zendesk", "schedule_holiday"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "schedule_holiday_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "Information about holidays for each specified schedule.", "columns": {"end_date": {"name": "end_date", "description": "ISO 8601 representation of the holiday end date.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "id": {"name": "id", "description": "The ID of the scheduled holiday.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "Name of the holiday.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "schedule_id": {"name": "schedule_id", "description": "The ID of the schedule.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "start_date": {"name": "start_date", "description": "ISO 8601 representation of the holiday start date.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`schedule_holiday_data`", "created_at": 1715700424.6593862}}, "macros": {"macro.zendesk_integration_tests.snowflake_seed_data": {"name": "snowflake_seed_data", "resource_type": "macro", "package_name": "zendesk_integration_tests", "path": "macros/snowflake_seed_data.sql", "original_file_path": "macros/snowflake_seed_data.sql", "unique_id": "macro.zendesk_integration_tests.snowflake_seed_data", "macro_sql": "{% macro snowflake_seed_data(seed_name) %}\n\n{% if target.type == 'snowflake' %}\n{{ return(ref(seed_name ~ '_snowflake')) }}\n{% else %}\n{{ return(ref(seed_name)) }}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.329157, "supported_languages": null}, "macro.dbt_bigquery.date_sharded_table": {"name": "date_sharded_table", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/etc.sql", "original_file_path": "macros/etc.sql", "unique_id": "macro.dbt_bigquery.date_sharded_table", "macro_sql": "{% macro date_sharded_table(base_name) %}\n {{ return(base_name ~ \"[DBT__PARTITION_DATE]\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.329401, "supported_languages": null}, 
"macro.dbt_bigquery.grant_access_to": {"name": "grant_access_to", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/etc.sql", "original_file_path": "macros/etc.sql", "unique_id": "macro.dbt_bigquery.grant_access_to", "macro_sql": "{% macro grant_access_to(entity, entity_type, role, grant_target_dict) -%}\n {% do adapter.grant_access_to(entity, entity_type, role, grant_target_dict) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.329542, "supported_languages": null}, "macro.dbt_bigquery.get_partitions_metadata": {"name": "get_partitions_metadata", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/etc.sql", "original_file_path": "macros/etc.sql", "unique_id": "macro.dbt_bigquery.get_partitions_metadata", "macro_sql": "\n\n{%- macro get_partitions_metadata(table) -%}\n {%- if execute -%}\n {%- set res = adapter.get_partitions_metadata(table) -%}\n {{- return(res) -}}\n {%- endif -%}\n {{- return(None) -}}\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.329718, "supported_languages": null}, "macro.dbt_bigquery.bigquery__create_table_as": {"name": "bigquery__create_table_as", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_bigquery.bigquery__create_table_as", "macro_sql": "{% macro bigquery__create_table_as(temporary, relation, compiled_code, language='sql') -%}\n {%- if language == 'sql' -%}\n {%- set raw_partition_by = config.get('partition_by', none) -%}\n {%- set raw_cluster_by = config.get('cluster_by', none) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {%- set partition_config = adapter.parse_partition_by(raw_partition_by) -%}\n {%- if partition_config.time_ingestion_partitioning -%}\n {%- set columns = get_columns_with_types_in_query_sql(sql) -%}\n {%- set table_dest_columns_csv = columns_without_partition_fields_csv(partition_config, columns) -%}\n {%- set columns = '(' ~ table_dest_columns_csv ~ ')' -%}\n {%- endif -%}\n\n {{ sql_header if sql_header is not none }}\n\n create or replace table {{ relation }}\n {%- set contract_config = config.get('contract') -%}\n {%- if contract_config.enforced -%}\n {{ get_assert_columns_equivalent(compiled_code) }}\n {{ get_table_columns_and_constraints() }}\n {%- set compiled_code = get_select_subquery(compiled_code) %}\n {% else %}\n {#-- cannot do contracts at the same time as time ingestion partitioning -#}\n {{ columns }}\n {% endif %}\n {{ partition_by(partition_config) }}\n {{ cluster_by(raw_cluster_by) }}\n\n {{ bigquery_table_options(config, model, temporary) }}\n\n {#-- PARTITION BY cannot be used with the AS query_statement clause.\n https://cloud.google.com/bigquery/docs/reference/standard-sql/data-definition-language#partition_expression\n -#}\n {%- if not partition_config.time_ingestion_partitioning %}\n as (\n {{ compiled_code }}\n );\n {%- endif %}\n {%- elif language == 'python' -%}\n {#--\n N.B. Python models _can_ write to temp views HOWEVER they use a different session\n and have already expired by the time they need to be used (I.E. 
in merges for incremental models)\n\n TODO: Deep dive into spark sessions to see if we can reuse a single session for an entire\n dbt invocation.\n --#}\n\n {#-- when a user wants to change the schema of an existing relation, they must intentionally drop the table in the dataset --#}\n {%- set old_relation = adapter.get_relation(database=relation.database, schema=relation.schema, identifier=relation.identifier) -%}\n {%- if (old_relation.is_table and (should_full_refresh())) -%}\n {% do adapter.drop_relation(relation) %}\n {%- endif -%}\n {{ py_write_table(compiled_code=compiled_code, target_relation=relation.quote(database=False, schema=False, identifier=False)) }}\n {%- else -%}\n {% do exceptions.raise_compiler_error(\"bigquery__create_table_as macro didn't get supported language, it got %s\" % language) %}\n {%- endif -%}\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_bigquery.get_columns_with_types_in_query_sql", "macro.dbt_bigquery.columns_without_partition_fields_csv", "macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.get_select_subquery", "macro.dbt_bigquery.partition_by", "macro.dbt_bigquery.cluster_by", "macro.dbt_bigquery.bigquery_table_options", "macro.dbt.should_full_refresh", "macro.dbt_bigquery.py_write_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.332542, "supported_languages": null}, "macro.dbt_bigquery.bigquery__create_view_as": {"name": "bigquery__create_view_as", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_bigquery.bigquery__create_view_as", "macro_sql": "{% macro bigquery__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create or replace view {{ relation }}\n {{ bigquery_view_options(config, model) }}\n {%- set contract_config = config.get('contract') -%}\n {%- if contract_config.enforced -%}\n {{ get_assert_columns_equivalent(sql) }}\n {%- endif %}\n as {{ sql }};\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery_view_options", "macro.dbt.get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3328588, "supported_languages": null}, "macro.dbt_bigquery.bigquery__drop_schema": {"name": "bigquery__drop_schema", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_bigquery.bigquery__drop_schema", "macro_sql": "{% macro bigquery__drop_schema(relation) -%}\n {{ adapter.drop_schema(relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.33295, "supported_languages": null}, "macro.dbt_bigquery.bigquery__get_columns_in_relation": {"name": "bigquery__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_bigquery.bigquery__get_columns_in_relation", "macro_sql": "{% macro bigquery__get_columns_in_relation(relation) -%}\n {{ return(adapter.get_columns_in_relation(relation)) }}\n{% endmacro %}", 
"depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.333044, "supported_languages": null}, "macro.dbt_bigquery.bigquery__list_relations_without_caching": {"name": "bigquery__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_bigquery.bigquery__list_relations_without_caching", "macro_sql": "{% macro bigquery__list_relations_without_caching(schema_relation) -%}\n {{ return(adapter.list_relations_without_caching(schema_relation)) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3331351, "supported_languages": null}, "macro.dbt_bigquery.bigquery__list_schemas": {"name": "bigquery__list_schemas", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_bigquery.bigquery__list_schemas", "macro_sql": "{% macro bigquery__list_schemas(database) -%}\n {{ return(adapter.list_schemas(database)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.333225, "supported_languages": null}, "macro.dbt_bigquery.bigquery__check_schema_exists": {"name": "bigquery__check_schema_exists", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_bigquery.bigquery__check_schema_exists", "macro_sql": "{% macro bigquery__check_schema_exists(information_schema, schema) %}\n {{ return(adapter.check_schema_exists(information_schema.database, schema)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.333339, "supported_languages": null}, "macro.dbt_bigquery.bigquery__persist_docs": {"name": "bigquery__persist_docs", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_bigquery.bigquery__persist_docs", "macro_sql": "{% macro bigquery__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do alter_column_comment(relation, model.columns) %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.333518, "supported_languages": null}, "macro.dbt_bigquery.bigquery__alter_column_comment": {"name": "bigquery__alter_column_comment", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_bigquery.bigquery__alter_column_comment", "macro_sql": "{% macro bigquery__alter_column_comment(relation, column_dict) -%}\n {% do adapter.update_columns(relation, column_dict) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": 
[], "created_at": 1715700423.333621, "supported_languages": null}, "macro.dbt_bigquery.bigquery__alter_relation_add_columns": {"name": "bigquery__alter_relation_add_columns", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_bigquery.bigquery__alter_relation_add_columns", "macro_sql": "{% macro bigquery__alter_relation_add_columns(relation, add_columns) %}\n\n {% set sql -%}\n\n alter {{ relation.type }} {{ relation }}\n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}\n\n {%- endset -%}\n\n {{ return(run_query(sql)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.333866, "supported_languages": null}, "macro.dbt_bigquery.bigquery__alter_relation_drop_columns": {"name": "bigquery__alter_relation_drop_columns", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_bigquery.bigquery__alter_relation_drop_columns", "macro_sql": "{% macro bigquery__alter_relation_drop_columns(relation, drop_columns) %}\n\n {% set sql -%}\n\n alter {{ relation.type }} {{ relation }}\n\n {% for column in drop_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n\n {%- endset -%}\n\n {{ return(run_query(sql)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3340878, "supported_languages": null}, "macro.dbt_bigquery.bigquery__alter_column_type": {"name": "bigquery__alter_column_type", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_bigquery.bigquery__alter_column_type", "macro_sql": "{% macro bigquery__alter_column_type(relation, column_name, new_column_type) -%}\n {#-- Changing a column's data type using a query requires you to scan the entire table.\n The query charges can be significant if the table is very large.\n\n https://cloud.google.com/bigquery/docs/manually-changing-schemas#changing_a_columns_data_type\n #}\n {% set relation_columns = get_columns_in_relation(relation) %}\n\n {% set sql %}\n select\n {%- for col in relation_columns -%}\n {% if col.column == column_name %}\n CAST({{ col.quoted }} AS {{ new_column_type }}) AS {{ col.quoted }}\n {%- else %}\n {{ col.quoted }}\n {%- endif %}\n {%- if not loop.last %},{% endif -%}\n {%- endfor %}\n from {{ relation }}\n {% endset %}\n\n {% call statement('alter_column_type') %}\n {{ create_table_as(False, relation, sql)}}\n {%- endcall %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_columns_in_relation", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.334486, "supported_languages": null}, "macro.dbt_bigquery.bigquery__test_unique": {"name": "bigquery__test_unique", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_bigquery.bigquery__test_unique", 
"macro_sql": "{% macro bigquery__test_unique(model, column_name) %}\n\nwith dbt_test__target as (\n\n select {{ column_name }} as unique_field\n from {{ model }}\n where {{ column_name }} is not null\n\n)\n\nselect\n unique_field,\n count(*) as n_records\n\nfrom dbt_test__target\ngroup by unique_field\nhaving count(*) > 1\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.334592, "supported_languages": null}, "macro.dbt_bigquery.bigquery__upload_file": {"name": "bigquery__upload_file", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_bigquery.bigquery__upload_file", "macro_sql": "{% macro bigquery__upload_file(local_file_path, database, table_schema, table_name) %}\n\n {{ log(\"kwargs: \" ~ kwargs) }}\n\n {% do adapter.upload_file(local_file_path, database, table_schema, table_name, kwargs=kwargs) %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.334754, "supported_languages": null}, "macro.dbt_bigquery.bigquery__create_csv_table": {"name": "bigquery__create_csv_table", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/materializations/seed.sql", "original_file_path": "macros/materializations/seed.sql", "unique_id": "macro.dbt_bigquery.bigquery__create_csv_table", "macro_sql": "{% macro bigquery__create_csv_table(model, agate_table) %}\n -- no-op\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.334985, "supported_languages": null}, "macro.dbt_bigquery.bigquery__reset_csv_table": {"name": "bigquery__reset_csv_table", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/materializations/seed.sql", "original_file_path": "macros/materializations/seed.sql", "unique_id": "macro.dbt_bigquery.bigquery__reset_csv_table", "macro_sql": "{% macro bigquery__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.335078, "supported_languages": null}, "macro.dbt_bigquery.bigquery__load_csv_rows": {"name": "bigquery__load_csv_rows", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/materializations/seed.sql", "original_file_path": "macros/materializations/seed.sql", "unique_id": "macro.dbt_bigquery.bigquery__load_csv_rows", "macro_sql": "{% macro bigquery__load_csv_rows(model, agate_table) %}\n\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {{ adapter.load_dataframe(model['database'], model['schema'], model['alias'],\n \t\t\t\t\t\t\tagate_table, column_override) }}\n\n {% call statement() %}\n alter table {{ this.render() }} set {{ bigquery_table_options(config, model) }}\n {% endcall %}\n\n {% if config.persist_relation_docs() and 'description' in model %}\n\n \t{{ adapter.update_table_description(model['database'], model['schema'], model['alias'], model['description']) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", 
"macro.dbt_bigquery.bigquery_table_options"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.335507, "supported_languages": null}, "macro.dbt_bigquery.bigquery__handle_existing_table": {"name": "bigquery__handle_existing_table", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/materializations/view.sql", "original_file_path": "macros/materializations/view.sql", "unique_id": "macro.dbt_bigquery.bigquery__handle_existing_table", "macro_sql": "{% macro bigquery__handle_existing_table(full_refresh, old_relation) %}\n {%- if full_refresh -%}\n {{ adapter.drop_relation(old_relation) }}\n {%- else -%}\n {{ exceptions.relation_wrong_type(old_relation, 'view') }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.335846, "supported_languages": null}, "macro.dbt_bigquery.materialization_view_bigquery": {"name": "materialization_view_bigquery", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/materializations/view.sql", "original_file_path": "macros/materializations/view.sql", "unique_id": "macro.dbt_bigquery.materialization_view_bigquery", "macro_sql": "{% materialization view, adapter='bigquery' -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n {% set to_return = bigquery__create_or_replace_view() %}\n\n {% set target_relation = this.incorporate(type='view') %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if config.get('grant_access_to') %}\n {% for grant_target_dict in config.get('grant_access_to') %}\n {% do adapter.grant_access_to(this, 'view', None, grant_target_dict) %}\n {% endfor %}\n {% endif %}\n\n {% do return(to_return) %}\n\n{%- endmaterialization %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__create_or_replace_view", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3363268, "supported_languages": ["sql"]}, "macro.dbt_bigquery.materialization_table_bigquery": {"name": "materialization_table_bigquery", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/materializations/table.sql", "original_file_path": "macros/materializations/table.sql", "unique_id": "macro.dbt_bigquery.materialization_table_bigquery", "macro_sql": "{% materialization table, adapter='bigquery', supported_languages=['sql', 'python']-%}\n\n {%- set language = model['language'] -%}\n {%- set identifier = model['alias'] -%}\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set exists_not_as_table = (old_relation is not none and not old_relation.is_table) -%}\n {%- set target_relation = api.Relation.create(database=database, schema=schema, identifier=identifier, type='table') -%}\n\n -- grab current tables grants config for comparision later on\n {%- set grant_config = config.get('grants') -%}\n\n {{ run_hooks(pre_hooks) }}\n\n {#\n We only need to drop this thing if it is not a table.\n If it _is_ already a table, then we can overwrite it without downtime\n Unlike table -> view, no need for `--full-refresh`: dropping a view is no big deal\n #}\n {%- if exists_not_as_table -%}\n {{ adapter.drop_relation(old_relation) }}\n {%- endif -%}\n\n -- 
build model\n {%- set raw_partition_by = config.get('partition_by', none) -%}\n {%- set partition_by = adapter.parse_partition_by(raw_partition_by) -%}\n {%- set cluster_by = config.get('cluster_by', none) -%}\n {% if not adapter.is_replaceable(old_relation, partition_by, cluster_by) %}\n {% do log(\"Hard refreshing \" ~ old_relation ~ \" because it is not replaceable\") %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n\n -- build model\n {%- call statement('main', language=language) -%}\n {{ create_table_as(False, target_relation, compiled_code, language) }}\n {%- endcall -%}\n\n {{ run_hooks(post_hooks) }}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.create_table_as", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.338623, "supported_languages": ["sql", "python"]}, "macro.dbt_bigquery.py_write_table": {"name": "py_write_table", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/materializations/table.sql", "original_file_path": "macros/materializations/table.sql", "unique_id": "macro.dbt_bigquery.py_write_table", "macro_sql": "{% macro py_write_table(compiled_code, target_relation) %}\nfrom pyspark.sql import SparkSession\n{%- set raw_partition_by = config.get('partition_by', none) -%}\n{%- set raw_cluster_by = config.get('cluster_by', none) -%}\n{%- set partition_config = adapter.parse_partition_by(raw_partition_by) %}\n\nspark = SparkSession.builder.appName('smallTest').getOrCreate()\n\nspark.conf.set(\"viewsEnabled\",\"true\")\nspark.conf.set(\"temporaryGcsBucket\",\"{{target.gcs_bucket}}\")\n\n{{ compiled_code }}\ndbt = dbtObj(spark.read.format(\"bigquery\").load)\ndf = model(dbt, spark)\n\n# COMMAND ----------\n# this is materialization code dbt generated, please do not modify\n\nimport pyspark\n# make sure pandas exists before using it\ntry:\n import pandas\n pandas_available = True\nexcept ImportError:\n pandas_available = False\n\n# make sure pyspark.pandas exists before using it\ntry:\n import pyspark.pandas\n pyspark_pandas_api_available = True\nexcept ImportError:\n pyspark_pandas_api_available = False\n\n# make sure databricks.koalas exists before using it\ntry:\n import databricks.koalas\n koalas_available = True\nexcept ImportError:\n koalas_available = False\n\n# preferentially convert pandas DataFrames to pandas-on-Spark or Koalas DataFrames first\n# since they know how to convert pandas DataFrames better than `spark.createDataFrame(df)`\n# and converting from pandas-on-Spark to Spark DataFrame has no overhead\nif pyspark_pandas_api_available and pandas_available and isinstance(df, pandas.core.frame.DataFrame):\n df = pyspark.pandas.frame.DataFrame(df)\nelif koalas_available and pandas_available and isinstance(df, pandas.core.frame.DataFrame):\n df = databricks.koalas.frame.DataFrame(df)\n\n# convert to pyspark.sql.dataframe.DataFrame\nif isinstance(df, pyspark.sql.dataframe.DataFrame):\n pass # since it is already a Spark DataFrame\nelif pyspark_pandas_api_available and isinstance(df, pyspark.pandas.frame.DataFrame):\n df = df.to_spark()\nelif 
koalas_available and isinstance(df, databricks.koalas.frame.DataFrame):\n df = df.to_spark()\nelif pandas_available and isinstance(df, pandas.core.frame.DataFrame):\n df = spark.createDataFrame(df)\nelse:\n msg = f\"{type(df)} is not a supported type for dbt Python materialization\"\n raise Exception(msg)\n\ndf.write \\\n .mode(\"overwrite\") \\\n .format(\"bigquery\") \\\n .option(\"writeMethod\", \"indirect\").option(\"writeDisposition\", 'WRITE_TRUNCATE') \\\n {%- if partition_config is not none %}\n {%- if partition_config.data_type | lower in ('date','timestamp','datetime') %}\n .option(\"partitionField\", \"{{- partition_config.field -}}\") \\\n {%- if partition_config.granularity is not none %}\n .option(\"partitionType\", \"{{- partition_config.granularity| upper -}}\") \\\n {%- endif %}\n {%- endif %}\n {%- endif %}\n {%- if raw_cluster_by is not none %}\n .option(\"clusteredFields\", \"{{- raw_cluster_by | join(',') -}}\") \\\n {%- endif %}\n .save(\"{{target_relation}}\")\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3392398, "supported_languages": null}, "macro.dbt_bigquery.materialization_copy_bigquery": {"name": "materialization_copy_bigquery", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/materializations/copy.sql", "original_file_path": "macros/materializations/copy.sql", "unique_id": "macro.dbt_bigquery.materialization_copy_bigquery", "macro_sql": "{% materialization copy, adapter='bigquery' -%}\n\n {# Setup #}\n {{ run_hooks(pre_hooks) }}\n\n {% set destination = this.incorporate(type='table') %}\n\n {# there can be several ref() or source() according to BQ copy API docs #}\n {# cycle over ref() and source() to create source tables array #}\n {% set source_array = [] %}\n {% for ref_table in model.refs %}\n {{ source_array.append(ref(ref_table.get('package'), ref_table.name, version=ref_table.get('version'))) }}\n {% endfor %}\n\n {% for src_table in model.sources %}\n {{ source_array.append(source(*src_table)) }}\n {% endfor %}\n\n {# Call adapter copy_table function #}\n {%- set result_str = adapter.copy_table(\n source_array,\n destination,\n config.get('copy_materialization', default = 'table')) -%}\n\n {{ store_result('main', response=result_str) }}\n\n {# Clean up #}\n {{ run_hooks(post_hooks) }}\n {%- do apply_grants(target_relation, grant_config) -%}\n {{ adapter.commit() }}\n\n {{ return({'relations': [destination]}) }}\n{%- endmaterialization %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.34011, "supported_languages": ["sql"]}, "macro.dbt_bigquery.dbt_bigquery_validate_get_incremental_strategy": {"name": "dbt_bigquery_validate_get_incremental_strategy", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/materializations/incremental.sql", "original_file_path": "macros/materializations/incremental.sql", "unique_id": "macro.dbt_bigquery.dbt_bigquery_validate_get_incremental_strategy", "macro_sql": "{% macro dbt_bigquery_validate_get_incremental_strategy(config) %}\n {#-- Find and validate the incremental strategy #}\n {%- set strategy = config.get(\"incremental_strategy\") or 'merge' -%}\n\n {% set invalid_strategy_msg -%}\n Invalid incremental strategy provided: {{ strategy }}\n Expected one of: 'merge', 
'insert_overwrite'\n {%- endset %}\n {% if strategy not in ['merge', 'insert_overwrite'] %}\n {% do exceptions.raise_compiler_error(invalid_strategy_msg) %}\n {% endif %}\n\n {% do return(strategy) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.341589, "supported_languages": null}, "macro.dbt_bigquery.source_sql_with_partition": {"name": "source_sql_with_partition", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/materializations/incremental.sql", "original_file_path": "macros/materializations/incremental.sql", "unique_id": "macro.dbt_bigquery.source_sql_with_partition", "macro_sql": "{% macro source_sql_with_partition(partition_by, source_sql) %}\n\n {%- if partition_by.time_ingestion_partitioning %}\n {{ return(wrap_with_time_ingestion_partitioning_sql(partition_by, source_sql, False)) }}\n {% else %}\n {{ return(source_sql) }}\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.wrap_with_time_ingestion_partitioning_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.34177, "supported_languages": null}, "macro.dbt_bigquery.bq_create_table_as": {"name": "bq_create_table_as", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/materializations/incremental.sql", "original_file_path": "macros/materializations/incremental.sql", "unique_id": "macro.dbt_bigquery.bq_create_table_as", "macro_sql": "{% macro bq_create_table_as(partition_by, temporary, relation, compiled_code, language='sql') %}\n {%- set _dbt_max_partition = declare_dbt_max_partition(this, partition_by, compiled_code, language) -%}\n {% if partition_by.time_ingestion_partitioning and language == 'python' %}\n {% do exceptions.raise_compiler_error(\n \"Python models do not support ingestion time partitioning\"\n ) %}\n {% elif partition_by.time_ingestion_partitioning and language == 'sql' %}\n {#-- Create the table before inserting data as ingestion time partitioned tables can't be created with the transformed data --#}\n {% do run_query(create_table_as(temporary, relation, compiled_code)) %}\n {{ return(_dbt_max_partition + bq_insert_into_ingestion_time_partitioned_table_sql(relation, compiled_code)) }}\n {% else %}\n {{ return(_dbt_max_partition + create_table_as(temporary, relation, compiled_code, language)) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.declare_dbt_max_partition", "macro.dbt.run_query", "macro.dbt.create_table_as", "macro.dbt_bigquery.bq_insert_into_ingestion_time_partitioned_table_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.342203, "supported_languages": null}, "macro.dbt_bigquery.bq_generate_incremental_build_sql": {"name": "bq_generate_incremental_build_sql", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/materializations/incremental.sql", "original_file_path": "macros/materializations/incremental.sql", "unique_id": "macro.dbt_bigquery.bq_generate_incremental_build_sql", "macro_sql": "{% macro bq_generate_incremental_build_sql(\n strategy, tmp_relation, target_relation, sql, unique_key, partition_by, partitions, dest_columns, tmp_relation_exists, copy_partitions, incremental_predicates\n) %}\n {#-- if partitioned, use BQ scripting to get the 
range of partition values to be updated --#}\n {% if strategy == 'insert_overwrite' %}\n\n {% set build_sql = bq_generate_incremental_insert_overwrite_build_sql(\n tmp_relation, target_relation, sql, unique_key, partition_by, partitions, dest_columns, tmp_relation_exists, copy_partitions\n ) %}\n\n {% else %} {# strategy == 'merge' #}\n\n {% set build_sql = bq_generate_incremental_merge_build_sql(\n tmp_relation, target_relation, sql, unique_key, partition_by, dest_columns, tmp_relation_exists, incremental_predicates\n ) %}\n\n {% endif %}\n\n {{ return(build_sql) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bq_generate_incremental_insert_overwrite_build_sql", "macro.dbt_bigquery.bq_generate_incremental_merge_build_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3425658, "supported_languages": null}, "macro.dbt_bigquery.materialization_incremental_bigquery": {"name": "materialization_incremental_bigquery", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/materializations/incremental.sql", "original_file_path": "macros/materializations/incremental.sql", "unique_id": "macro.dbt_bigquery.materialization_incremental_bigquery", "macro_sql": "{% materialization incremental, adapter='bigquery', supported_languages=['sql', 'python'] -%}\n\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n {%- set language = model['language'] %}\n\n {%- set target_relation = this %}\n {%- set existing_relation = load_relation(this) %}\n {%- set tmp_relation = make_temp_relation(this) %}\n\n {#-- Validate early so we don't run SQL if the strategy is invalid --#}\n {% set strategy = dbt_bigquery_validate_get_incremental_strategy(config) -%}\n\n {%- set raw_partition_by = config.get('partition_by', none) -%}\n {%- set partition_by = adapter.parse_partition_by(raw_partition_by) -%}\n {%- set partitions = config.get('partitions', none) -%}\n {%- set cluster_by = config.get('cluster_by', none) -%}\n\n {% set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') %}\n {% set incremental_predicates = config.get('predicates', default=none) or config.get('incremental_predicates', default=none) %}\n\n -- grab current tables grants config for comparison later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks) }}\n\n {% if partition_by.copy_partitions is true and strategy != 'insert_overwrite' %} {#-- We can't copy partitions with merge strategy --#}\n {% set wrong_strategy_msg -%}\n The 'copy_partitions' option requires the 'incremental_strategy' option to be set to 'insert_overwrite'.\n {%- endset %}\n {% do exceptions.raise_compiler_error(wrong_strategy_msg) %}\n\n {% elif existing_relation is none %}\n {%- call statement('main', language=language) -%}\n {{ bq_create_table_as(partition_by, False, target_relation, compiled_code, language) }}\n {%- endcall -%}\n\n {% elif existing_relation.is_view %}\n {#-- There's no way to atomically replace a view with a table on BQ --#}\n {{ adapter.drop_relation(existing_relation) }}\n {%- call statement('main', language=language) -%}\n {{ bq_create_table_as(partition_by, False, target_relation, compiled_code, language) }}\n {%- endcall -%}\n\n {% elif full_refresh_mode %}\n {#-- If the partition/cluster config has changed, then we must drop and recreate --#}\n {% if not 
adapter.is_replaceable(existing_relation, partition_by, cluster_by) %}\n {% do log(\"Hard refreshing \" ~ existing_relation ~ \" because it is not replaceable\") %}\n {{ adapter.drop_relation(existing_relation) }}\n {% endif %}\n {%- call statement('main', language=language) -%}\n {{ bq_create_table_as(partition_by, False, target_relation, compiled_code, language) }}\n {%- endcall -%}\n\n {% else %}\n {%- if language == 'python' and strategy == 'insert_overwrite' -%}\n {#-- This lets us move forward assuming no python will be directly templated into a query --#}\n {%- set python_unsupported_msg -%}\n The 'insert_overwrite' strategy is not yet supported for python models.\n {%- endset %}\n {% do exceptions.raise_compiler_error(python_unsupported_msg) %}\n {%- endif -%}\n\n {% set tmp_relation_exists = false %}\n {% if on_schema_change != 'ignore' or language == 'python' %}\n {#-- Check first, since otherwise we may not build a temp table --#}\n {#-- Python always needs to create a temp table --#}\n {%- call statement('create_tmp_relation', language=language) -%}\n {{ bq_create_table_as(partition_by, True, tmp_relation, compiled_code, language) }}\n {%- endcall -%}\n {% set tmp_relation_exists = true %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, tmp_relation, existing_relation) %}\n {% endif %}\n\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n {#-- Add time ingestion pseudo column to destination column as not part of the 'schema' but still need it for actual data insertion --#}\n {% if partition_by.time_ingestion_partitioning %}\n {% set dest_columns = adapter.add_time_ingestion_partition_column(partition_by, dest_columns) %}\n {% endif %}\n\n {% set build_sql = bq_generate_incremental_build_sql(\n strategy, tmp_relation, target_relation, compiled_code, unique_key, partition_by, partitions, dest_columns, tmp_relation_exists, partition_by.copy_partitions, incremental_predicates\n ) %}\n\n {%- call statement('main') -%}\n {{ build_sql }}\n {% endcall %}\n\n {%- if language == 'python' and tmp_relation -%}\n {{ adapter.drop_relation(tmp_relation) }}\n {%- endif -%}\n\n {% endif %}\n\n {{ run_hooks(post_hooks) }}\n\n {% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.load_relation", "macro.dbt.make_temp_relation", "macro.dbt_bigquery.dbt_bigquery_validate_get_incremental_strategy", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt_bigquery.bq_create_table_as", "macro.dbt.process_schema_changes", "macro.dbt_bigquery.bq_generate_incremental_build_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3448598, "supported_languages": ["sql", "python"]}, "macro.dbt_bigquery.bigquery__snapshot_hash_arguments": {"name": "bigquery__snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt_bigquery", 
"path": "macros/materializations/snapshot.sql", "original_file_path": "macros/materializations/snapshot.sql", "unique_id": "macro.dbt_bigquery.bigquery__snapshot_hash_arguments", "macro_sql": "{% macro bigquery__snapshot_hash_arguments(args) -%}\n to_hex(md5(concat({%- for arg in args -%}\n coalesce(cast({{ arg }} as string), ''){% if not loop.last %}, '|',{% endif -%}\n {%- endfor -%}\n )))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.345113, "supported_languages": null}, "macro.dbt_bigquery.bigquery__create_columns": {"name": "bigquery__create_columns", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/materializations/snapshot.sql", "original_file_path": "macros/materializations/snapshot.sql", "unique_id": "macro.dbt_bigquery.bigquery__create_columns", "macro_sql": "{% macro bigquery__create_columns(relation, columns) %}\n {{ adapter.alter_table_add_columns(relation, columns) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.345205, "supported_languages": null}, "macro.dbt_bigquery.bigquery__post_snapshot": {"name": "bigquery__post_snapshot", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/materializations/snapshot.sql", "original_file_path": "macros/materializations/snapshot.sql", "unique_id": "macro.dbt_bigquery.bigquery__post_snapshot", "macro_sql": "{% macro bigquery__post_snapshot(staging_relation) %}\n -- Clean up the snapshot temp table\n {% do drop_relation(staging_relation) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3452868, "supported_languages": null}, "macro.dbt_bigquery.bigquery__can_clone_table": {"name": "bigquery__can_clone_table", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/materializations/clone.sql", "original_file_path": "macros/materializations/clone.sql", "unique_id": "macro.dbt_bigquery.bigquery__can_clone_table", "macro_sql": "{% macro bigquery__can_clone_table() %}\n {{ return(True) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.345404, "supported_languages": null}, "macro.dbt_bigquery.bigquery__create_or_replace_clone": {"name": "bigquery__create_or_replace_clone", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/materializations/clone.sql", "original_file_path": "macros/materializations/clone.sql", "unique_id": "macro.dbt_bigquery.bigquery__create_or_replace_clone", "macro_sql": "{% macro bigquery__create_or_replace_clone(this_relation, defer_relation) %}\n create or replace\n table {{ this_relation }}\n clone {{ defer_relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.345483, "supported_languages": null}, "macro.dbt_bigquery.bq_generate_incremental_merge_build_sql": {"name": "bq_generate_incremental_merge_build_sql", "resource_type": "macro", "package_name": "dbt_bigquery", "path": 
"macros/materializations/incremental_strategy/merge.sql", "original_file_path": "macros/materializations/incremental_strategy/merge.sql", "unique_id": "macro.dbt_bigquery.bq_generate_incremental_merge_build_sql", "macro_sql": "{% macro bq_generate_incremental_merge_build_sql(\n tmp_relation, target_relation, sql, unique_key, partition_by, dest_columns, tmp_relation_exists, incremental_predicates\n) %}\n {%- set source_sql -%}\n {%- if tmp_relation_exists -%}\n (\n select\n {% if partition_by.time_ingestion_partitioning -%}\n {{ partition_by.insertable_time_partitioning_field() }},\n {%- endif -%}\n * from {{ tmp_relation }}\n )\n {%- else -%} {#-- wrap sql in parens to make it a subquery --#}\n (\n {%- if partition_by.time_ingestion_partitioning -%}\n {{ wrap_with_time_ingestion_partitioning_sql(partition_by, sql, True) }}\n {%- else -%}\n {{sql}}\n {%- endif %}\n )\n {%- endif -%}\n {%- endset -%}\n\n {% set build_sql = get_merge_sql(target_relation, source_sql, unique_key, dest_columns, incremental_predicates) %}\n\n {{ return(build_sql) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.wrap_with_time_ingestion_partitioning_sql", "macro.dbt.get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3460531, "supported_languages": null}, "macro.dbt_bigquery.declare_dbt_max_partition": {"name": "declare_dbt_max_partition", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/materializations/incremental_strategy/common.sql", "original_file_path": "macros/materializations/incremental_strategy/common.sql", "unique_id": "macro.dbt_bigquery.declare_dbt_max_partition", "macro_sql": "{% macro declare_dbt_max_partition(relation, partition_by, compiled_code, language='sql') %}\n\n {#-- TODO: revisit partitioning with python models --#}\n {%- if '_dbt_max_partition' in compiled_code and language == 'sql' -%}\n\n declare _dbt_max_partition {{ partition_by.data_type_for_partition() }} default (\n select max({{ partition_by.field }}) from {{ this }}\n where {{ partition_by.field }} is not null\n );\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.346329, "supported_languages": null}, "macro.dbt_bigquery.bq_generate_incremental_insert_overwrite_build_sql": {"name": "bq_generate_incremental_insert_overwrite_build_sql", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/materializations/incremental_strategy/insert_overwrite.sql", "original_file_path": "macros/materializations/incremental_strategy/insert_overwrite.sql", "unique_id": "macro.dbt_bigquery.bq_generate_incremental_insert_overwrite_build_sql", "macro_sql": "{% macro bq_generate_incremental_insert_overwrite_build_sql(\n tmp_relation, target_relation, sql, unique_key, partition_by, partitions, dest_columns, tmp_relation_exists, copy_partitions\n) %}\n {% if partition_by is none %}\n {% set missing_partition_msg -%}\n The 'insert_overwrite' strategy requires the `partition_by` config.\n {%- endset %}\n {% do exceptions.raise_compiler_error(missing_partition_msg) %}\n {% endif %}\n\n {% set build_sql = bq_insert_overwrite_sql(\n tmp_relation, target_relation, sql, unique_key, partition_by, partitions, dest_columns, tmp_relation_exists, copy_partitions\n ) %}\n\n {{ return(build_sql) }}\n\n{% endmacro %}", "depends_on": {"macros": 
["macro.dbt_bigquery.bq_insert_overwrite_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.34828, "supported_languages": null}, "macro.dbt_bigquery.bq_copy_partitions": {"name": "bq_copy_partitions", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/materializations/incremental_strategy/insert_overwrite.sql", "original_file_path": "macros/materializations/incremental_strategy/insert_overwrite.sql", "unique_id": "macro.dbt_bigquery.bq_copy_partitions", "macro_sql": "{% macro bq_copy_partitions(tmp_relation, target_relation, partitions, partition_by) %}\n\n {% for partition in partitions %}\n {% if partition_by.data_type == 'int64' %}\n {% set partition = partition | as_text %}\n {% elif partition_by.granularity == 'hour' %}\n {% set partition = partition.strftime(\"%Y%m%d%H\") %}\n {% elif partition_by.granularity == 'day' %}\n {% set partition = partition.strftime(\"%Y%m%d\") %}\n {% elif partition_by.granularity == 'month' %}\n {% set partition = partition.strftime(\"%Y%m\") %}\n {% elif partition_by.granularity == 'year' %}\n {% set partition = partition.strftime(\"%Y\") %}\n {% endif %}\n {% set tmp_relation_partitioned = api.Relation.create(database=tmp_relation.database, schema=tmp_relation.schema, identifier=tmp_relation.table ~ '$' ~ partition, type=tmp_relation.type) %}\n {% set target_relation_partitioned = api.Relation.create(database=target_relation.database, schema=target_relation.schema, identifier=target_relation.table ~ '$' ~ partition, type=target_relation.type) %}\n {% do adapter.copy_table(tmp_relation_partitioned, target_relation_partitioned, \"table\") %}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.349024, "supported_languages": null}, "macro.dbt_bigquery.bq_insert_overwrite_sql": {"name": "bq_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/materializations/incremental_strategy/insert_overwrite.sql", "original_file_path": "macros/materializations/incremental_strategy/insert_overwrite.sql", "unique_id": "macro.dbt_bigquery.bq_insert_overwrite_sql", "macro_sql": "{% macro bq_insert_overwrite_sql(\n tmp_relation, target_relation, sql, unique_key, partition_by, partitions, dest_columns, tmp_relation_exists, copy_partitions\n) %}\n {% if partitions is not none and partitions != [] %} {# static #}\n {{ bq_static_insert_overwrite_sql(tmp_relation, target_relation, sql, partition_by, partitions, dest_columns, tmp_relation_exists, copy_partitions) }}\n {% else %} {# dynamic #}\n {{ bq_dynamic_insert_overwrite_sql(tmp_relation, target_relation, sql, unique_key, partition_by, dest_columns, tmp_relation_exists, copy_partitions) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bq_static_insert_overwrite_sql", "macro.dbt_bigquery.bq_dynamic_insert_overwrite_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.349361, "supported_languages": null}, "macro.dbt_bigquery.bq_static_insert_overwrite_sql": {"name": "bq_static_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/materializations/incremental_strategy/insert_overwrite.sql", "original_file_path": 
"macros/materializations/incremental_strategy/insert_overwrite.sql", "unique_id": "macro.dbt_bigquery.bq_static_insert_overwrite_sql", "macro_sql": "{% macro bq_static_insert_overwrite_sql(\n tmp_relation, target_relation, sql, partition_by, partitions, dest_columns, tmp_relation_exists, copy_partitions\n) %}\n\n {% set predicate -%}\n {{ partition_by.render_wrapped(alias='DBT_INTERNAL_DEST') }} in (\n {{ partitions | join (', ') }}\n )\n {%- endset %}\n\n {%- set source_sql -%}\n (\n {% if partition_by.time_ingestion_partitioning and tmp_relation_exists -%}\n select\n {{ partition_by.insertable_time_partitioning_field() }},\n * from {{ tmp_relation }}\n {% elif tmp_relation_exists -%}\n select\n * from {{ tmp_relation }}\n {%- elif partition_by.time_ingestion_partitioning -%}\n {{ wrap_with_time_ingestion_partitioning_sql(partition_by, sql, True) }}\n {%- else -%}\n {{sql}}\n {%- endif -%}\n\n )\n {%- endset -%}\n\n {% if copy_partitions %}\n {% do bq_copy_partitions(tmp_relation, target_relation, partitions, partition_by) %}\n {% else %}\n\n {#-- In case we're putting the model SQL _directly_ into the MERGE statement,\n we need to prepend the MERGE statement with the user-configured sql_header,\n which may be needed to resolve that model SQL (e.g. referencing a variable or UDF in the header)\n in the \"temporary table exists\" case, we save the model SQL result as a temp table first, wherein the\n sql_header is included by the create_table_as macro.\n #}\n -- 1. run the merge statement\n {{ get_insert_overwrite_merge_sql(target_relation, source_sql, dest_columns, [predicate], include_sql_header = not tmp_relation_exists) }};\n\n {%- if tmp_relation_exists -%}\n -- 2. clean up the temp table\n drop table if exists {{ tmp_relation }};\n {%- endif -%}\n\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.wrap_with_time_ingestion_partitioning_sql", "macro.dbt_bigquery.bq_copy_partitions", "macro.dbt.get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.349958, "supported_languages": null}, "macro.dbt_bigquery.bq_dynamic_copy_partitions_insert_overwrite_sql": {"name": "bq_dynamic_copy_partitions_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/materializations/incremental_strategy/insert_overwrite.sql", "original_file_path": "macros/materializations/incremental_strategy/insert_overwrite.sql", "unique_id": "macro.dbt_bigquery.bq_dynamic_copy_partitions_insert_overwrite_sql", "macro_sql": "{% macro bq_dynamic_copy_partitions_insert_overwrite_sql(\n tmp_relation, target_relation, sql, unique_key, partition_by, dest_columns, tmp_relation_exists, copy_partitions\n ) %}\n {%- if tmp_relation_exists is false -%}\n {# We run temp table creation in a separated script to move to partitions copy if it doesn't already exist #}\n {%- call statement('create_tmp_relation_for_copy', language='sql') -%}\n {{ bq_create_table_as(partition_by, True, tmp_relation, sql, 'sql')\n }}\n {%- endcall %}\n {%- endif -%}\n {%- set partitions_sql -%}\n select distinct {{ partition_by.render_wrapped() }}\n from {{ tmp_relation }}\n {%- endset -%}\n {%- set partitions = run_query(partitions_sql).columns[0].values() -%}\n {# We copy the partitions #}\n {%- do bq_copy_partitions(tmp_relation, target_relation, partitions, partition_by) -%}\n -- Clean up the temp table\n drop table if exists {{ tmp_relation }}\n{% endmacro %}", 
"depends_on": {"macros": ["macro.dbt.statement", "macro.dbt_bigquery.bq_create_table_as", "macro.dbt.run_query", "macro.dbt_bigquery.bq_copy_partitions"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.35037, "supported_languages": null}, "macro.dbt_bigquery.bq_dynamic_insert_overwrite_sql": {"name": "bq_dynamic_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/materializations/incremental_strategy/insert_overwrite.sql", "original_file_path": "macros/materializations/incremental_strategy/insert_overwrite.sql", "unique_id": "macro.dbt_bigquery.bq_dynamic_insert_overwrite_sql", "macro_sql": "{% macro bq_dynamic_insert_overwrite_sql(tmp_relation, target_relation, sql, unique_key, partition_by, dest_columns, tmp_relation_exists, copy_partitions) %}\n {%- if copy_partitions is true %}\n {{ bq_dynamic_copy_partitions_insert_overwrite_sql(tmp_relation, target_relation, sql, unique_key, partition_by, dest_columns, tmp_relation_exists, copy_partitions) }}\n {% else -%}\n {% set predicate -%}\n {{ partition_by.render_wrapped(alias='DBT_INTERNAL_DEST') }} in unnest(dbt_partitions_for_replacement)\n {%- endset %}\n\n {%- set source_sql -%}\n (\n select\n {% if partition_by.time_ingestion_partitioning -%}\n {{ partition_by.insertable_time_partitioning_field() }},\n {%- endif -%}\n * from {{ tmp_relation }}\n )\n {%- endset -%}\n\n -- generated script to merge partitions into {{ target_relation }}\n declare dbt_partitions_for_replacement array<{{ partition_by.data_type_for_partition() }}>;\n\n {# have we already created the temp table to check for schema changes? #}\n {% if not tmp_relation_exists %}\n -- 1. create a temp table with model data\n {{ bq_create_table_as(partition_by, True, tmp_relation, sql, 'sql') }}\n {% else %}\n -- 1. temp table already exists, we used it to check for schema changes\n {% endif %}\n {%- set partition_field = partition_by.time_partitioning_field() if partition_by.time_ingestion_partitioning else partition_by.render_wrapped() -%}\n\n -- 2. define partitions to update\n set (dbt_partitions_for_replacement) = (\n select as struct\n -- IGNORE NULLS: this needs to be aligned to _dbt_max_partition, which ignores null\n array_agg(distinct {{ partition_field }} IGNORE NULLS)\n from {{ tmp_relation }}\n );\n\n -- 3. run the merge statement\n {{ get_insert_overwrite_merge_sql(target_relation, source_sql, dest_columns, [predicate]) }};\n\n -- 4. 
clean up the temp table\n drop table if exists {{ tmp_relation }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bq_dynamic_copy_partitions_insert_overwrite_sql", "macro.dbt_bigquery.bq_create_table_as", "macro.dbt.get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.351008, "supported_languages": null}, "macro.dbt_bigquery.wrap_with_time_ingestion_partitioning_sql": {"name": "wrap_with_time_ingestion_partitioning_sql", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/materializations/incremental_strategy/time_ingestion_tables.sql", "original_file_path": "macros/materializations/incremental_strategy/time_ingestion_tables.sql", "unique_id": "macro.dbt_bigquery.wrap_with_time_ingestion_partitioning_sql", "macro_sql": "{% macro wrap_with_time_ingestion_partitioning_sql(partition_by, sql, is_nested) %}\n\n select TIMESTAMP({{ partition_by.field }}) as {{ partition_by.insertable_time_partitioning_field() }}, * EXCEPT({{ partition_by.field }}) from (\n {{ sql }}\n ){%- if not is_nested -%};{%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.351885, "supported_languages": null}, "macro.dbt_bigquery.get_quoted_with_types_csv": {"name": "get_quoted_with_types_csv", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/materializations/incremental_strategy/time_ingestion_tables.sql", "original_file_path": "macros/materializations/incremental_strategy/time_ingestion_tables.sql", "unique_id": "macro.dbt_bigquery.get_quoted_with_types_csv", "macro_sql": "{% macro get_quoted_with_types_csv(columns) %}\n {% set quoted = [] %}\n {% for col in columns -%}\n {%- do quoted.append(adapter.quote(col.name) ~ \" \" ~ col.data_type) -%}\n {%- endfor %}\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.352127, "supported_languages": null}, "macro.dbt_bigquery.columns_without_partition_fields_csv": {"name": "columns_without_partition_fields_csv", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/materializations/incremental_strategy/time_ingestion_tables.sql", "original_file_path": "macros/materializations/incremental_strategy/time_ingestion_tables.sql", "unique_id": "macro.dbt_bigquery.columns_without_partition_fields_csv", "macro_sql": "{% macro columns_without_partition_fields_csv(partition_config, columns) -%}\n {%- set columns_no_partition = partition_config.reject_partition_field_column(columns) -%}\n {% set columns_names = get_quoted_with_types_csv(columns_no_partition) %}\n {{ return(columns_names) }}\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_bigquery.get_quoted_with_types_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.352278, "supported_languages": null}, "macro.dbt_bigquery.bq_insert_into_ingestion_time_partitioned_table_sql": {"name": "bq_insert_into_ingestion_time_partitioned_table_sql", "resource_type": "macro", "package_name": "dbt_bigquery", "path": 
"macros/materializations/incremental_strategy/time_ingestion_tables.sql", "original_file_path": "macros/materializations/incremental_strategy/time_ingestion_tables.sql", "unique_id": "macro.dbt_bigquery.bq_insert_into_ingestion_time_partitioned_table_sql", "macro_sql": "{% macro bq_insert_into_ingestion_time_partitioned_table_sql(target_relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n {{ sql_header if sql_header is not none }}\n {%- set raw_partition_by = config.get('partition_by', none) -%}\n {%- set partition_by = adapter.parse_partition_by(raw_partition_by) -%}\n {% set dest_columns = adapter.get_columns_in_relation(target_relation) %}\n {%- set dest_columns_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n insert into {{ target_relation }} ({{ partition_by.insertable_time_partitioning_field() }}, {{ dest_columns_csv }})\n {{ wrap_with_time_ingestion_partitioning_sql(partition_by, sql, False) }}\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.get_quoted_csv", "macro.dbt_bigquery.wrap_with_time_ingestion_partitioning_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.352669, "supported_languages": null}, "macro.dbt_bigquery.get_columns_with_types_in_query_sql": {"name": "get_columns_with_types_in_query_sql", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/materializations/incremental_strategy/time_ingestion_tables.sql", "original_file_path": "macros/materializations/incremental_strategy/time_ingestion_tables.sql", "unique_id": "macro.dbt_bigquery.get_columns_with_types_in_query_sql", "macro_sql": "{% macro get_columns_with_types_in_query_sql(select_sql) %}\n {% set sql %}\n {%- set sql_header = config.get('sql_header', none) -%}\n {{ sql_header if sql_header is not none }}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n {% endset %}\n {{ return(adapter.get_columns_in_select_sql(sql)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.352884, "supported_languages": null}, "macro.dbt_bigquery.bigquery__drop_relation": {"name": "bigquery__drop_relation", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt_bigquery.bigquery__drop_relation", "macro_sql": "{% macro bigquery__drop_relation(relation) -%}\n {% do adapter.drop_relation(relation) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3529952, "supported_languages": null}, "macro.dbt_bigquery.bigquery_options": {"name": "bigquery_options", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/relations/options.sql", "original_file_path": "macros/relations/options.sql", "unique_id": "macro.dbt_bigquery.bigquery_options", "macro_sql": "{% macro bigquery_options(opts) %}\n {% set options -%}\n OPTIONS({% for opt_key, opt_val in opts.items() %}\n {{ opt_key }}={{ opt_val }}{{ \",\" if not loop.last }}\n {% endfor %})\n {%- endset %}\n {%- do return(options) -%}\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], 
"created_at": 1715700423.3532548, "supported_languages": null}, "macro.dbt_bigquery.cluster_by": {"name": "cluster_by", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/relations/cluster.sql", "original_file_path": "macros/relations/cluster.sql", "unique_id": "macro.dbt_bigquery.cluster_by", "macro_sql": "{% macro cluster_by(raw_cluster_by) %}\n {%- if raw_cluster_by is not none -%}\n cluster by {% if raw_cluster_by is string -%}\n {% set raw_cluster_by = [raw_cluster_by] %}\n {%- endif -%}\n {%- for cluster in raw_cluster_by -%}\n {{ cluster }}\n {%- if not loop.last -%}, {% endif -%}\n {%- endfor -%}\n\n {% endif %}\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.353539, "supported_languages": null}, "macro.dbt_bigquery.bigquery__rename_relation": {"name": "bigquery__rename_relation", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt_bigquery.bigquery__rename_relation", "macro_sql": "{% macro bigquery__rename_relation(from_relation, to_relation) -%}\n {% do adapter.rename_relation(from_relation, to_relation) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.353656, "supported_languages": null}, "macro.dbt_bigquery.partition_by": {"name": "partition_by", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/relations/partition.sql", "original_file_path": "macros/relations/partition.sql", "unique_id": "macro.dbt_bigquery.partition_by", "macro_sql": "{% macro partition_by(partition_config) -%}\n {%- if partition_config is none -%}\n {% do return('') %}\n {%- elif partition_config.time_ingestion_partitioning -%}\n partition by {{ partition_config.render_wrapped() }}\n {%- elif partition_config.data_type | lower in ('date','timestamp','datetime') -%}\n partition by {{ partition_config.render() }}\n {%- elif partition_config.data_type | lower in ('int64') -%}\n {%- set range = partition_config.range -%}\n partition by range_bucket(\n {{ partition_config.field }},\n generate_array({{ range.start}}, {{ range.end }}, {{ range.interval }})\n )\n {%- endif -%}\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3541481, "supported_languages": null}, "macro.dbt_bigquery.bigquery__drop_materialized_view": {"name": "bigquery__drop_materialized_view", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt_bigquery.bigquery__drop_materialized_view", "macro_sql": "{% macro bigquery__drop_materialized_view(relation) %}\n drop materialized view if exists {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.354238, "supported_languages": null}, "macro.dbt_bigquery.bigquery__get_replace_materialized_view_as_sql": {"name": "bigquery__get_replace_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_bigquery", "path": 
"macros/relations/materialized_view/replace.sql", "original_file_path": "macros/relations/materialized_view/replace.sql", "unique_id": "macro.dbt_bigquery.bigquery__get_replace_materialized_view_as_sql", "macro_sql": "{% macro bigquery__get_replace_materialized_view_as_sql(relation, sql) %}\n\n {%- set materialized_view = adapter.Relation.materialized_view_from_model_node(config.model) -%}\n\n create or replace materialized view if not exists {{ relation }}\n {% if materialized_view.partition %}{{ partition_by(materialized_view.partition) }}{% endif %}\n {% if materialized_view.cluster %}{{ cluster_by(materialized_view.cluster.fields) }}{% endif %}\n {{ bigquery_options(materialized_view.options.as_ddl_dict()) }}\n as {{ sql }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.partition_by", "macro.dbt_bigquery.cluster_by", "macro.dbt_bigquery.bigquery_options"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3546839, "supported_languages": null}, "macro.dbt_bigquery.bigquery__refresh_materialized_view": {"name": "bigquery__refresh_materialized_view", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt_bigquery.bigquery__refresh_materialized_view", "macro_sql": "{% macro bigquery__refresh_materialized_view(relation) %}\n call bq.refresh_materialized_view('{{ relation.database }}.{{ relation.schema }}.{{ relation.identifier }}')\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3548129, "supported_languages": null}, "macro.dbt_bigquery.bigquery__get_alter_materialized_view_as_sql": {"name": "bigquery__get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_bigquery.bigquery__get_alter_materialized_view_as_sql", "macro_sql": "{% macro bigquery__get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n\n {% if configuration_changes.requires_full_refresh %}\n {{ get_replace_sql(existing_relation, relation, sql) }}\n {% else %}\n\n alter materialized view {{ relation }}\n set {{ bigquery_options(configuration_changes.options.context.as_ddl_dict()) }}\n\n {%- endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_replace_sql", "macro.dbt_bigquery.bigquery_options"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.355167, "supported_languages": null}, "macro.dbt_bigquery.bigquery__get_materialized_view_configuration_changes": {"name": "bigquery__get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_bigquery.bigquery__get_materialized_view_configuration_changes", "macro_sql": "{% macro bigquery__get_materialized_view_configuration_changes(existing_relation, new_config) %}\n {% set _existing_materialized_view = 
adapter.describe_relation(existing_relation) %}\n {% set _configuration_changes = existing_relation.materialized_view_config_changeset(_existing_materialized_view, new_config) %}\n {% do return(_configuration_changes) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.355338, "supported_languages": null}, "macro.dbt_bigquery.bigquery__get_create_materialized_view_as_sql": {"name": "bigquery__get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/relations/materialized_view/create.sql", "original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt_bigquery.bigquery__get_create_materialized_view_as_sql", "macro_sql": "{% macro bigquery__get_create_materialized_view_as_sql(relation, sql) %}\n\n {%- set materialized_view = adapter.Relation.materialized_view_from_model_node(config.model) -%}\n\n create materialized view if not exists {{ relation }}\n {% if materialized_view.partition %}{{ partition_by(materialized_view.partition) }}{% endif %}\n {% if materialized_view.cluster %}{{ cluster_by(materialized_view.cluster.fields) }}{% endif %}\n {{ bigquery_options(materialized_view.options.as_ddl_dict()) }}\n as {{ sql }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.partition_by", "macro.dbt_bigquery.cluster_by", "macro.dbt_bigquery.bigquery_options"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3557508, "supported_languages": null}, "macro.dbt_bigquery.bigquery__drop_table": {"name": "bigquery__drop_table", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt_bigquery.bigquery__drop_table", "macro_sql": "{% macro bigquery__drop_table(relation) %}\n drop table if exists {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3558452, "supported_languages": null}, "macro.dbt_bigquery.bigquery_table_options": {"name": "bigquery_table_options", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/relations/table/options.sql", "original_file_path": "macros/relations/table/options.sql", "unique_id": "macro.dbt_bigquery.bigquery_table_options", "macro_sql": "{% macro bigquery_table_options(config, node, temporary) %}\n {% set opts = adapter.get_table_options(config, node, temporary) %}\n {%- do return(bigquery_options(opts)) -%}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery_options"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.356052, "supported_languages": null}, "macro.dbt_bigquery.bigquery__get_rename_table_sql": {"name": "bigquery__get_rename_table_sql", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt_bigquery.bigquery__get_rename_table_sql", "macro_sql": "{%- macro bigquery__get_rename_table_sql(relation, new_name) -%}\n alter table {{ relation }} rename to {{ new_name }}\n{%- endmacro -%}", "depends_on": {"macros": 
[]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3561668, "supported_languages": null}, "macro.dbt_bigquery.bigquery__drop_view": {"name": "bigquery__drop_view", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt_bigquery.bigquery__drop_view", "macro_sql": "{% macro bigquery__drop_view(relation) %}\n drop view if exists {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3562582, "supported_languages": null}, "macro.dbt_bigquery.bigquery__create_or_replace_view": {"name": "bigquery__create_or_replace_view", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt_bigquery.bigquery__create_or_replace_view", "macro_sql": "{% macro bigquery__create_or_replace_view() %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and not old_relation.is_view -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(target_relation, sql) }}\n {%- endcall %}\n\n {% set should_revoke = should_revoke(exists_as_view, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {{ run_hooks(post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt_bigquery.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3573818, "supported_languages": null}, "macro.dbt_bigquery.handle_existing_table": {"name": "handle_existing_table", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt_bigquery.handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3575091, "supported_languages": null}, 
"macro.dbt_bigquery.default__handle_existing_table": {"name": "default__handle_existing_table", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt_bigquery.default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.357654, "supported_languages": null}, "macro.dbt_bigquery.bigquery_view_options": {"name": "bigquery_view_options", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/relations/view/options.sql", "original_file_path": "macros/relations/view/options.sql", "unique_id": "macro.dbt_bigquery.bigquery_view_options", "macro_sql": "{% macro bigquery_view_options(config, node) %}\n {% set opts = adapter.get_view_options(config, node) %}\n {%- do return(bigquery_options(opts)) -%}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery_options"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3578382, "supported_languages": null}, "macro.dbt_bigquery.bigquery__get_rename_view_sql": {"name": "bigquery__get_rename_view_sql", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt_bigquery.bigquery__get_rename_view_sql", "macro_sql": "{%- macro bigquery__get_rename_view_sql(relation, new_name) -%}\n alter view {{ relation }} rename to {{ new_name }}\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3579562, "supported_languages": null}, "macro.dbt_bigquery.bigquery__get_catalog_relations": {"name": "bigquery__get_catalog_relations", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/catalog/by_relation.sql", "original_file_path": "macros/catalog/by_relation.sql", "unique_id": "macro.dbt_bigquery.bigquery__get_catalog_relations", "macro_sql": "{% macro bigquery__get_catalog_relations(information_schema, relations) -%}\n\n {%- if (relations | length) == 0 -%}\n {# Hopefully nothing cares about the columns we return when there are no rows #}\n {%- set query = \"select 1 as id limit 0\" -%}\n\n {%- else -%}\n {%- set query -%}\n with\n table_shards_stage as ({{ _bigquery__get_table_shards_sql(information_schema) }}),\n table_shards as (\n select * from table_shards_stage\n where (\n {%- for relation in relations -%}\n (\n upper(table_schema) = upper('{{ relation.schema }}')\n and upper(table_name) = upper('{{ relation.identifier }}')\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n ),\n tables as ({{ _bigquery__get_tables_sql() }}),\n table_stats as ({{ _bigquery__get_table_stats_sql() }}),\n\n columns as ({{ _bigquery__get_columns_sql(information_schema) }}),\n column_stats as ({{ _bigquery__get_column_stats_sql() }})\n\n {{ _bigquery__get_extended_catalog_sql() }}\n {%- endset -%}\n\n {%- endif -%}\n\n {{ return(run_query(query)) }}\n\n{%- 
endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery._bigquery__get_table_shards_sql", "macro.dbt_bigquery._bigquery__get_tables_sql", "macro.dbt_bigquery._bigquery__get_table_stats_sql", "macro.dbt_bigquery._bigquery__get_columns_sql", "macro.dbt_bigquery._bigquery__get_column_stats_sql", "macro.dbt_bigquery._bigquery__get_extended_catalog_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3587341, "supported_languages": null}, "macro.dbt_bigquery._bigquery__get_table_shards_sql": {"name": "_bigquery__get_table_shards_sql", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/catalog/catalog.sql", "original_file_path": "macros/catalog/catalog.sql", "unique_id": "macro.dbt_bigquery._bigquery__get_table_shards_sql", "macro_sql": "{% macro _bigquery__get_table_shards_sql(information_schema) %}\n select\n tables.project_id as table_catalog,\n tables.dataset_id as table_schema,\n coalesce(REGEXP_EXTRACT(tables.table_id, '^(.+)[0-9]{8}$'), tables.table_id) as table_name,\n tables.table_id as shard_name,\n REGEXP_EXTRACT(tables.table_id, '^.+([0-9]{8})$') as shard_index,\n REGEXP_CONTAINS(tables.table_id, '^.+[0-9]{8}$') and tables.type = 1 as is_date_shard,\n case\n when materialized_views.table_name is not null then 'materialized view'\n when tables.type = 1 then 'table'\n when tables.type = 2 then 'view'\n else 'external'\n end as table_type,\n tables.type = 1 as is_table,\n JSON_VALUE(table_description.option_value) as table_comment,\n tables.size_bytes,\n tables.row_count\n from {{ information_schema.replace(information_schema_view='__TABLES__') }} tables\n left join {{ information_schema.replace(information_schema_view='MATERIALIZED_VIEWS') }} materialized_views\n on materialized_views.table_catalog = tables.project_id\n and materialized_views.table_schema = tables.dataset_id\n and materialized_views.table_name = tables.table_id\n left join {{ information_schema.replace(information_schema_view='TABLE_OPTIONS') }} table_description\n on table_description.table_catalog = tables.project_id\n and table_description.table_schema = tables.dataset_id\n and table_description.table_name = tables.table_id\n and table_description.option_name = 'description'\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.replace"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.360476, "supported_languages": null}, "macro.dbt_bigquery._bigquery__get_tables_sql": {"name": "_bigquery__get_tables_sql", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/catalog/catalog.sql", "original_file_path": "macros/catalog/catalog.sql", "unique_id": "macro.dbt_bigquery._bigquery__get_tables_sql", "macro_sql": "{% macro _bigquery__get_tables_sql() %}\n select distinct\n table_catalog,\n table_schema,\n table_name,\n is_date_shard,\n table_type,\n is_table,\n table_comment\n from table_shards\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.360533, "supported_languages": null}, "macro.dbt_bigquery._bigquery__get_table_stats_sql": {"name": "_bigquery__get_table_stats_sql", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/catalog/catalog.sql", "original_file_path": "macros/catalog/catalog.sql", "unique_id": 
"macro.dbt_bigquery._bigquery__get_table_stats_sql", "macro_sql": "{% macro _bigquery__get_table_stats_sql() %}\n select\n table_catalog,\n table_schema,\n table_name,\n max(shard_name) as latest_shard_name,\n min(shard_index) as shard_min,\n max(shard_index) as shard_max,\n count(shard_index) as shard_count,\n sum(size_bytes) as size_bytes,\n sum(row_count) as row_count\n from table_shards\n group by 1, 2, 3\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3605912, "supported_languages": null}, "macro.dbt_bigquery._bigquery__get_columns_sql": {"name": "_bigquery__get_columns_sql", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/catalog/catalog.sql", "original_file_path": "macros/catalog/catalog.sql", "unique_id": "macro.dbt_bigquery._bigquery__get_columns_sql", "macro_sql": "{% macro _bigquery__get_columns_sql(information_schema) %}\n select\n columns.table_catalog,\n columns.table_schema,\n columns.table_name as shard_name,\n coalesce(paths.field_path, '') as column_name,\n -- invent a row number to account for nested fields\n -- BQ does not treat these nested properties as independent fields\n row_number() over (\n partition by\n columns.table_catalog,\n columns.table_schema,\n columns.table_name\n order by\n columns.ordinal_position,\n paths.field_path\n ) as column_index,\n coalesce(paths.data_type, '') as column_type,\n paths.description as column_comment,\n case when columns.is_partitioning_column = 'YES' then 1 else 0 end as is_partitioning_column,\n case when columns.is_partitioning_column = 'YES' then paths.field_path end as partition_column,\n case when columns.clustering_ordinal_position is not null then 1 else 0 end as is_clustering_column,\n case when columns.clustering_ordinal_position is not null then paths.field_path end as cluster_column,\n columns.clustering_ordinal_position\n from {{ information_schema.replace(information_schema_view='COLUMNS') }} columns\n join {{ information_schema.replace(information_schema_view='COLUMN_FIELD_PATHS') }} paths\n on paths.table_catalog = columns.table_catalog\n and paths.table_schema = columns.table_schema\n and paths.table_name = columns.table_name\n and paths.column_name = columns.column_name\n where columns.ordinal_position is not null\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.replace"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.360769, "supported_languages": null}, "macro.dbt_bigquery._bigquery__get_column_stats_sql": {"name": "_bigquery__get_column_stats_sql", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/catalog/catalog.sql", "original_file_path": "macros/catalog/catalog.sql", "unique_id": "macro.dbt_bigquery._bigquery__get_column_stats_sql", "macro_sql": "{% macro _bigquery__get_column_stats_sql() %}\n select\n table_catalog,\n table_schema,\n shard_name,\n max(is_partitioning_column) = 1 as is_partitioned,\n max(partition_column) as partition_column,\n max(is_clustering_column) = 1 as is_clustered,\n array_to_string(\n array_agg(\n cluster_column ignore nulls\n order by clustering_ordinal_position\n ), ', '\n ) as clustering_columns\n from columns\n group by 1, 2, 3\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], 
"created_at": 1715700423.3608441, "supported_languages": null}, "macro.dbt_bigquery._bigquery__get_extended_catalog_sql": {"name": "_bigquery__get_extended_catalog_sql", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/catalog/catalog.sql", "original_file_path": "macros/catalog/catalog.sql", "unique_id": "macro.dbt_bigquery._bigquery__get_extended_catalog_sql", "macro_sql": "{% macro _bigquery__get_extended_catalog_sql() %}\n select\n tables.table_catalog as table_database,\n tables.table_schema,\n case\n when tables.is_date_shard then concat(tables.table_name, '*')\n else tables.table_name\n end as table_name,\n tables.table_type,\n tables.table_comment,\n -- coalesce column metadata fields to ensure they are non-null for catalog generation\n -- external table columns are not present in COLUMN_FIELD_PATHS\n coalesce(columns.column_name, '') as column_name,\n coalesce(columns.column_index, 1) as column_index,\n coalesce(columns.column_type, '') as column_type,\n coalesce(columns.column_comment, '') as column_comment,\n\n 'Shard count' as `stats__date_shards__label`,\n table_stats.shard_count as `stats__date_shards__value`,\n 'The number of date shards in this table' as `stats__date_shards__description`,\n tables.is_date_shard as `stats__date_shards__include`,\n\n 'Shard (min)' as `stats__date_shard_min__label`,\n table_stats.shard_min as `stats__date_shard_min__value`,\n 'The first date shard in this table' as `stats__date_shard_min__description`,\n tables.is_date_shard as `stats__date_shard_min__include`,\n\n 'Shard (max)' as `stats__date_shard_max__label`,\n table_stats.shard_max as `stats__date_shard_max__value`,\n 'The last date shard in this table' as `stats__date_shard_max__description`,\n tables.is_date_shard as `stats__date_shard_max__include`,\n\n '# Rows' as `stats__num_rows__label`,\n table_stats.row_count as `stats__num_rows__value`,\n 'Approximate count of rows in this table' as `stats__num_rows__description`,\n tables.is_table as `stats__num_rows__include`,\n\n 'Approximate Size' as `stats__num_bytes__label`,\n table_stats.size_bytes as `stats__num_bytes__value`,\n 'Approximate size of table as reported by BigQuery' as `stats__num_bytes__description`,\n tables.is_table as `stats__num_bytes__include`,\n\n 'Partitioned By' as `stats__partitioning_type__label`,\n column_stats.partition_column as `stats__partitioning_type__value`,\n 'The partitioning column for this table' as `stats__partitioning_type__description`,\n column_stats.is_partitioned as `stats__partitioning_type__include`,\n\n 'Clustered By' as `stats__clustering_fields__label`,\n column_stats.clustering_columns as `stats__clustering_fields__value`,\n 'The clustering columns for this table' as `stats__clustering_fields__description`,\n column_stats.is_clustered as `stats__clustering_fields__include`\n\n from tables\n join table_stats\n on table_stats.table_catalog = tables.table_catalog\n and table_stats.table_schema = tables.table_schema\n and table_stats.table_name = tables.table_name\n left join column_stats\n on column_stats.table_catalog = tables.table_catalog\n and column_stats.table_schema = tables.table_schema\n and column_stats.shard_name = table_stats.latest_shard_name\n left join columns\n on columns.table_catalog = tables.table_catalog\n and columns.table_schema = tables.table_schema\n and columns.shard_name = table_stats.latest_shard_name\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1715700423.361038, "supported_languages": null}, "macro.dbt_bigquery.bigquery__get_catalog": {"name": "bigquery__get_catalog", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/catalog/by_schema.sql", "original_file_path": "macros/catalog/by_schema.sql", "unique_id": "macro.dbt_bigquery.bigquery__get_catalog", "macro_sql": "{% macro bigquery__get_catalog(information_schema, schemas) -%}\n\n {%- if (schemas | length) == 0 -%}\n {# Hopefully nothing cares about the columns we return when there are no rows #}\n {%- set query = \"select 1 as id limit 0\" -%}\n\n {%- else -%}\n {%- set query -%}\n with\n table_shards as (\n {{ _bigquery__get_table_shards_sql(information_schema) }}\n where (\n {%- for schema in schemas -%}\n upper(tables.dataset_id) = upper('{{ schema }}')\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n ),\n tables as ({{ _bigquery__get_tables_sql() }}),\n table_stats as ({{ _bigquery__get_table_stats_sql() }}),\n\n columns as ({{ _bigquery__get_columns_sql(information_schema) }}),\n column_stats as ({{ _bigquery__get_column_stats_sql() }})\n\n {{ _bigquery__get_extended_catalog_sql() }}\n {%- endset -%}\n\n {%- endif -%}\n\n {{ return(run_query(query)) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery._bigquery__get_table_shards_sql", "macro.dbt_bigquery._bigquery__get_tables_sql", "macro.dbt_bigquery._bigquery__get_table_stats_sql", "macro.dbt_bigquery._bigquery__get_columns_sql", "macro.dbt_bigquery._bigquery__get_column_stats_sql", "macro.dbt_bigquery._bigquery__get_extended_catalog_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3617122, "supported_languages": null}, "macro.dbt_bigquery.bigquery__except": {"name": "bigquery__except", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt_bigquery.bigquery__except", "macro_sql": "{% macro bigquery__except() %}\n\n except distinct\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3617768, "supported_languages": null}, "macro.dbt_bigquery.bigquery__dateadd": {"name": "bigquery__dateadd", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt_bigquery.bigquery__dateadd", "macro_sql": "{% macro bigquery__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n datetime_add(\n cast( {{ from_date_or_timestamp }} as datetime),\n interval {{ interval }} {{ datepart }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.361909, "supported_languages": null}, "macro.dbt_bigquery.bigquery__current_timestamp": {"name": "bigquery__current_timestamp", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/utils/timestamps.sql", "original_file_path": "macros/utils/timestamps.sql", "unique_id": "macro.dbt_bigquery.bigquery__current_timestamp", "macro_sql": "{% macro bigquery__current_timestamp() -%}\n current_timestamp()\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, 
"node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.36201, "supported_languages": null}, "macro.dbt_bigquery.bigquery__snapshot_string_as_time": {"name": "bigquery__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/utils/timestamps.sql", "original_file_path": "macros/utils/timestamps.sql", "unique_id": "macro.dbt_bigquery.bigquery__snapshot_string_as_time", "macro_sql": "{% macro bigquery__snapshot_string_as_time(timestamp) -%}\n {%- set result = 'TIMESTAMP(\"' ~ timestamp ~ '\")' -%}\n {{ return(result) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3621151, "supported_languages": null}, "macro.dbt_bigquery.bigquery__current_timestamp_backcompat": {"name": "bigquery__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/utils/timestamps.sql", "original_file_path": "macros/utils/timestamps.sql", "unique_id": "macro.dbt_bigquery.bigquery__current_timestamp_backcompat", "macro_sql": "{% macro bigquery__current_timestamp_backcompat() -%}\n current_timestamp\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3621602, "supported_languages": null}, "macro.dbt_bigquery.bigquery__intersect": {"name": "bigquery__intersect", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt_bigquery.bigquery__intersect", "macro_sql": "{% macro bigquery__intersect() %}\n\n intersect distinct\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3622231, "supported_languages": null}, "macro.dbt_bigquery.bigquery__escape_single_quotes": {"name": "bigquery__escape_single_quotes", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt_bigquery.bigquery__escape_single_quotes", "macro_sql": "{% macro bigquery__escape_single_quotes(expression) -%}\n{{ expression | replace(\"'\", \"\\\\'\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3624709, "supported_languages": null}, "macro.dbt_bigquery.bigquery__format_column": {"name": "bigquery__format_column", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/utils/get_columns_spec_ddl.sql", "original_file_path": "macros/utils/get_columns_spec_ddl.sql", "unique_id": "macro.dbt_bigquery.bigquery__format_column", "macro_sql": "{% macro bigquery__format_column(column) -%}\n {% set data_type = column.data_type %}\n {% set formatted = column.column.lower() ~ \" \" ~ data_type %}\n {{ return({'name': column.name, 'data_type': data_type, 'formatted': formatted}) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.363008, "supported_languages": null}, 
"macro.dbt_bigquery.bigquery__get_empty_schema_sql": {"name": "bigquery__get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/utils/get_columns_spec_ddl.sql", "original_file_path": "macros/utils/get_columns_spec_ddl.sql", "unique_id": "macro.dbt_bigquery.bigquery__get_empty_schema_sql", "macro_sql": "{% macro bigquery__get_empty_schema_sql(columns) %}\n {%- set col_err = [] -%}\n {% for col in columns.values() %}\n {%- if col['data_type'] is not defined -%}\n {{ col_err.append(col['name']) }}\n {%- endif -%}\n {%- endfor -%}\n {%- if (col_err | length) > 0 -%}\n {{ exceptions.column_type_missing(column_names=col_err) }}\n {%- endif -%}\n\n {%- set columns = adapter.nest_column_data_types(columns) -%}\n {{ return(dbt.default__get_empty_schema_sql(columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_schema_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3633778, "supported_languages": null}, "macro.dbt_bigquery.bigquery__get_select_subquery": {"name": "bigquery__get_select_subquery", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/utils/get_columns_spec_ddl.sql", "original_file_path": "macros/utils/get_columns_spec_ddl.sql", "unique_id": "macro.dbt_bigquery.bigquery__get_select_subquery", "macro_sql": "{% macro bigquery__get_select_subquery(sql) %}\n select {{ adapter.dispatch('get_column_names')() }}\n from (\n {{ sql }}\n ) as model_subq\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__get_column_names"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3635042, "supported_languages": null}, "macro.dbt_bigquery.bigquery__get_column_names": {"name": "bigquery__get_column_names", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/utils/get_columns_spec_ddl.sql", "original_file_path": "macros/utils/get_columns_spec_ddl.sql", "unique_id": "macro.dbt_bigquery.bigquery__get_column_names", "macro_sql": "{% macro bigquery__get_column_names() %}\n {#- loop through nested user_provided_columns to get column names -#}\n {%- set user_provided_columns = adapter.nest_column_data_types(model['columns']) -%}\n {%- for i in user_provided_columns %}\n {%- set col = user_provided_columns[i] -%}\n {%- set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] -%}\n {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.363845, "supported_languages": null}, "macro.dbt_bigquery.bigquery__right": {"name": "bigquery__right", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt_bigquery.bigquery__right", "macro_sql": "{% macro bigquery__right(string_text, length_expression) %}\n\n case when {{ length_expression }} = 0\n then ''\n else\n substr(\n {{ string_text }},\n -1 * ({{ length_expression }})\n )\n end\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.364012, "supported_languages": null}, 
"macro.dbt_bigquery.bigquery__listagg": {"name": "bigquery__listagg", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt_bigquery.bigquery__listagg", "macro_sql": "{% macro bigquery__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n string_agg(\n {{ measure }},\n {{ delimiter_text }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n {% if limit_num -%}\n limit {{ limit_num }}\n {%- endif %}\n )\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3642821, "supported_languages": null}, "macro.dbt_bigquery.bigquery__datediff": {"name": "bigquery__datediff", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt_bigquery.bigquery__datediff", "macro_sql": "{% macro bigquery__datediff(first_date, second_date, datepart) -%}\n\n {% if dbt_version[0] == 1 and dbt_version[2] >= 2 %}\n {{ return(dbt.datediff(first_date, second_date, datepart)) }}\n {% else %}\n\n datetime_diff(\n cast({{second_date}} as datetime),\n cast({{first_date}} as datetime),\n {{datepart}}\n )\n\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3646138, "supported_languages": null}, "macro.dbt_bigquery.bigquery__safe_cast": {"name": "bigquery__safe_cast", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt_bigquery.bigquery__safe_cast", "macro_sql": "{% macro bigquery__safe_cast(field, type) %}\n safe_cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3647392, "supported_languages": null}, "macro.dbt_bigquery.bigquery__hash": {"name": "bigquery__hash", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt_bigquery.bigquery__hash", "macro_sql": "{% macro bigquery__hash(field) -%}\n to_hex({{dbt.default__hash(field)}})\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__hash"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.364844, "supported_languages": null}, "macro.dbt_bigquery.bigquery__position": {"name": "bigquery__position", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt_bigquery.bigquery__position", "macro_sql": "{% macro bigquery__position(substring_text, string_text) %}\n\n strpos(\n {{ string_text }},\n {{ substring_text }}\n\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.364961, "supported_languages": null}, "macro.dbt_bigquery.bigquery__array_concat": {"name": "bigquery__array_concat", 
"resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt_bigquery.bigquery__array_concat", "macro_sql": "{% macro bigquery__array_concat(array_1, array_2) -%}\n array_concat({{ array_1 }}, {{ array_2 }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.365073, "supported_languages": null}, "macro.dbt_bigquery.bigquery__bool_or": {"name": "bigquery__bool_or", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt_bigquery.bigquery__bool_or", "macro_sql": "{% macro bigquery__bool_or(expression) -%}\n\n logical_or({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.365161, "supported_languages": null}, "macro.dbt_bigquery.bigquery__split_part": {"name": "bigquery__split_part", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt_bigquery.bigquery__split_part", "macro_sql": "{% macro bigquery__split_part(string_text, delimiter_text, part_number) %}\n\n {% if part_number >= 0 %}\n split(\n {{ string_text }},\n {{ delimiter_text }}\n )[safe_offset({{ part_number - 1 }})]\n {% else %}\n split(\n {{ string_text }},\n {{ delimiter_text }}\n )[safe_offset(\n length({{ string_text }})\n - length(\n replace({{ string_text }}, {{ delimiter_text }}, '')\n ) + 1 + {{ part_number }}\n )]\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.365531, "supported_languages": null}, "macro.dbt_bigquery.bigquery__date_trunc": {"name": "bigquery__date_trunc", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt_bigquery.bigquery__date_trunc", "macro_sql": "{% macro bigquery__date_trunc(datepart, date) -%}\n timestamp_trunc(\n cast({{date}} as timestamp),\n {{datepart}}\n )\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.365634, "supported_languages": null}, "macro.dbt_bigquery.bigquery__array_construct": {"name": "bigquery__array_construct", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt_bigquery.bigquery__array_construct", "macro_sql": "{% macro bigquery__array_construct(inputs, data_type) -%}\n {% if inputs|length > 0 %}\n [ {{ inputs|join(' , ') }} ]\n {% else %}\n ARRAY<{{data_type}}>[]\n {% endif %}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.365813, "supported_languages": null}, "macro.dbt_bigquery.bigquery__array_append": {"name": "bigquery__array_append", "resource_type": "macro", "package_name": 
"dbt_bigquery", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt_bigquery.bigquery__array_append", "macro_sql": "{% macro bigquery__array_append(array, new_element) -%}\n {{ array_concat(array, array_construct([new_element])) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.array_concat", "macro.dbt.array_construct"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.365932, "supported_languages": null}, "macro.dbt_bigquery.bigquery__get_show_grant_sql": {"name": "bigquery__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt_bigquery.bigquery__get_show_grant_sql", "macro_sql": "{% macro bigquery__get_show_grant_sql(relation) %}\n {% set location = adapter.get_dataset_location(relation) %}\n {% set relation = relation.incorporate(location=location) %}\n\n select privilege_type, grantee\n from {{ relation.information_schema(\"OBJECT_PRIVILEGES\") }}\n where object_schema = \"{{ relation.dataset }}\"\n and object_name = \"{{ relation.identifier }}\"\n -- filter out current user\n and split(grantee, ':')[offset(1)] != session_user()\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.366326, "supported_languages": null}, "macro.dbt_bigquery.bigquery__get_grant_sql": {"name": "bigquery__get_grant_sql", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt_bigquery.bigquery__get_grant_sql", "macro_sql": "\n\n\n{%- macro bigquery__get_grant_sql(relation, privilege, grantee) -%}\n grant `{{ privilege }}` on {{ relation.type }} {{ relation }} to {{ '\\\"' + grantee|join('\\\", \\\"') + '\\\"' }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3664708, "supported_languages": null}, "macro.dbt_bigquery.bigquery__get_revoke_sql": {"name": "bigquery__get_revoke_sql", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt_bigquery.bigquery__get_revoke_sql", "macro_sql": "{%- macro bigquery__get_revoke_sql(relation, privilege, grantee) -%}\n revoke `{{ privilege }}` on {{ relation.type }} {{ relation }} from {{ '\\\"' + grantee|join('\\\", \\\"') + '\\\"' }}\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.366613, "supported_languages": null}, "macro.dbt_bigquery.bigquery__get_empty_subquery_sql": {"name": "bigquery__get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt_bigquery.bigquery__get_empty_subquery_sql", "macro_sql": "{% macro bigquery__get_empty_subquery_sql(select_sql, select_sql_header=none) %}\n {%- if select_sql_header is not none -%}\n {{ select_sql_header }}\n {%- endif 
-%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false and current_timestamp() = current_timestamp()\n limit 0\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.366787, "supported_languages": null}, "macro.dbt_bigquery.bigquery__resolve_model_name": {"name": "bigquery__resolve_model_name", "resource_type": "macro", "package_name": "dbt_bigquery", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt_bigquery.bigquery__resolve_model_name", "macro_sql": "{% macro bigquery__resolve_model_name(input_model_name) -%}\n {{ input_model_name | string | replace('`', '') | replace('\"', '\\\"') }}\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3669279, "supported_languages": null}, "macro.dbt.run_hooks": {"name": "run_hooks", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3675961, "supported_languages": null}, "macro.dbt.make_hook_config": {"name": "make_hook_config", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.367711, "supported_languages": null}, "macro.dbt.before_begin": {"name": "before_begin", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3677928, "supported_languages": null}, "macro.dbt.in_transaction": {"name": "in_transaction", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, 
inside_transaction=True) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3678741, "supported_languages": null}, "macro.dbt.after_commit": {"name": "after_commit", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3679519, "supported_languages": null}, "macro.dbt.set_sql_header": {"name": "set_sql_header", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.36824, "supported_languages": null}, "macro.dbt.should_full_refresh": {"name": "should_full_refresh", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3684149, "supported_languages": null}, "macro.dbt.should_store_failures": {"name": "should_store_failures", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_store_failures", "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3685918, "supported_languages": null}, "macro.dbt.snapshot_merge_sql": {"name": "snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql', 'dbt')(target, source, insert_cols) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, 
"node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.368854, "supported_languages": null}, "macro.dbt.default__snapshot_merge_sql": {"name": "default__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.369018, "supported_languages": null}, "macro.dbt.strategy_dispatch": {"name": "strategy_dispatch", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.strategy_dispatch", "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.371236, "supported_languages": null}, "macro.dbt.snapshot_hash_arguments": {"name": "snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.371337, "supported_languages": null}, "macro.dbt.default__snapshot_hash_arguments": {"name": "default__snapshot_hash_arguments", 
"resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.371467, "supported_languages": null}, "macro.dbt.snapshot_timestamp_strategy": {"name": "snapshot_timestamp_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/dbt-labs/dbt-core/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.371884, "supported_languages": null}, "macro.dbt.snapshot_string_as_time": {"name": "snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.371996, "supported_languages": null}, "macro.dbt.default__snapshot_string_as_time": {"name": "default__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_string_as_time", "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for 
adapter '+adapter.type()\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3721101, "supported_languages": null}, "macro.dbt.snapshot_check_all_get_existing_columns": {"name": "snapshot_check_all_get_existing_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) -%}\n {%- if not target_exists -%}\n {#-- no table yet -> return whatever the query does --#}\n {{ return((false, query_columns)) }}\n {%- endif -%}\n\n {#-- handle any schema changes --#}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=node.alias) -%}\n\n {% if check_cols_config == 'all' %}\n {%- set query_columns = get_columns_in_query(node['compiled_code']) -%}\n\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {#-- query for proper casing/quoting, to support comparison below --#}\n {%- set select_check_cols_from_target -%}\n {#-- N.B. The whitespace below is necessary to avoid edge case issue with comments --#}\n {#-- See: https://github.com/dbt-labs/dbt-core/issues/6781 --#}\n select {{ check_cols_config | join(', ') }} from (\n {{ node['compiled_code'] }}\n ) subq\n {%- endset -%}\n {% set query_columns = get_columns_in_query(select_check_cols_from_target) %}\n\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set existing_cols = adapter.get_columns_in_relation(target_relation) | map(attribute = 'name') | list -%}\n {%- set ns = namespace() -%} {#-- handle for-loop scoping with a namespace --#}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(adapter.quote(col)) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return((ns.column_added, intersection)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.373048, "supported_languages": null}, "macro.dbt.snapshot_check_strategy": {"name": "snapshot_check_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n {% set updated_at = config.get('updated_at', snapshot_get_time()) %}\n\n {% set column_added = false %}\n\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n {{ get_true_sql() }}\n {%- else -%}\n {%- for col in check_cols -%}\n {{ 
snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.get_true_sql", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.373872, "supported_languages": null}, "macro.dbt.create_columns": {"name": "create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ adapter.dispatch('create_columns', 'dbt')(relation, columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.376503, "supported_languages": null}, "macro.dbt.default__create_columns": {"name": "default__create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.37668, "supported_languages": null}, "macro.dbt.post_snapshot": {"name": "post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.376786, "supported_languages": null}, "macro.dbt.default__post_snapshot": {"name": "default__post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "depends_on": {"macros": []}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.376847, "supported_languages": null}, "macro.dbt.get_true_sql": {"name": "get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.get_true_sql", "macro_sql": "{% macro get_true_sql() %}\n {{ adapter.dispatch('get_true_sql', 'dbt')() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_true_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.376939, "supported_languages": null}, "macro.dbt.default__get_true_sql": {"name": "default__get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__get_true_sql", "macro_sql": "{% macro default__get_true_sql() %}\n {{ return('TRUE') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.377011, "supported_languages": null}, "macro.dbt.snapshot_staging_table": {"name": "snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n {{ adapter.dispatch('snapshot_staging_table', 'dbt')(strategy, source_sql, target_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_staging_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3771331, "supported_languages": null}, "macro.dbt.default__snapshot_staging_table": {"name": "default__snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__snapshot_staging_table", "macro_sql": "{% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as 
(\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n\n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n\n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.377673, "supported_languages": null}, "macro.dbt.build_snapshot_table": {"name": "build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) -%}\n {{ adapter.dispatch('build_snapshot_table', 'dbt')(strategy, sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__build_snapshot_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.377783, "supported_languages": null}, "macro.dbt.default__build_snapshot_table": {"name": "default__build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__build_snapshot_table", "macro_sql": "{% macro default__build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.377931, "supported_languages": null}, "macro.dbt.build_snapshot_staging_table": {"name": "build_snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set temp_relation = 
make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, temp_relation, select) }}\n {% endcall %}\n\n {% do return(temp_relation) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.378175, "supported_languages": null}, "macro.dbt.materialization_snapshot_default": {"name": "materialization_snapshot_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot.sql", "original_file_path": "macros/materializations/snapshots/snapshot.sql", "unique_id": "macro.dbt.materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparision later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = 
should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.382436, "supported_languages": ["sql"]}, "macro.dbt.materialization_test_default": {"name": "materialization_test_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/test.sql", "original_file_path": "macros/materializations/tests/test.sql", "unique_id": "macro.dbt.materialization_test_default", "macro_sql": "{%- materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% set store_failures_as = config.get('store_failures_as') %}\n -- if `--store-failures` is invoked via command line and `store_failures_as` is not set,\n -- config.get('store_failures_as', 'table') returns None, not 'table'\n {% if store_failures_as == none %}{% set store_failures_as = 'table' %}{% endif %}\n {% if store_failures_as not in ['table', 'view'] %}\n {{ exceptions.raise_compiler_error(\n \"'\" ~ store_failures_as ~ \"' is not a valid value for `store_failures_as`. 
\"\n \"Accepted values are: ['ephemeral', 'table', 'view']\"\n ) }}\n {% endif %}\n\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type=store_failures_as) -%} %}\n\n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n\n {% call statement(auto_begin=True) %}\n {{ get_create_sql(target_relation, sql) }}\n {% endcall %}\n\n {% do relations.append(target_relation) %}\n\n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n\n {{ adapter.commit() }}\n\n {% else %}\n\n {% set main_sql = sql %}\n\n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.should_store_failures", "macro.dbt.statement", "macro.dbt.get_create_sql", "macro.dbt.get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.384319, "supported_languages": ["sql"]}, "macro.dbt.get_test_sql": {"name": "get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.get_test_sql", "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.384647, "supported_languages": null}, "macro.dbt.default__get_test_sql": {"name": "default__get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.default__get_test_sql", "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.384872, "supported_languages": null}, "macro.dbt.get_where_subquery": {"name": "get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.get_where_subquery", "macro_sql": "{% macro get_where_subquery(relation) -%}\n {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_where_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3851452, "supported_languages": 
null}, "macro.dbt.default__get_where_subquery": {"name": "default__get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.default__get_where_subquery", "macro_sql": "{% macro default__get_where_subquery(relation) -%}\n {% set where = config.get('where', '') %}\n {% if where %}\n {%- set filtered -%}\n (select * from {{ relation }} where {{ where }}) dbt_subquery\n {%- endset -%}\n {% do return(filtered) %}\n {%- else -%}\n {% do return(relation) %}\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.385407, "supported_languages": null}, "macro.dbt.materialization_materialized_view_default": {"name": "materialization_materialized_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialization_materialized_view_default", "macro_sql": "{% materialization materialized_view, default %}\n {% set existing_relation = load_cached_relation(this) %}\n {% set target_relation = this.incorporate(type=this.MaterializedView) %}\n {% set intermediate_relation = make_intermediate_relation(target_relation) %}\n {% set backup_relation_type = target_relation.MaterializedView if existing_relation is none else existing_relation.type %}\n {% set backup_relation = make_backup_relation(target_relation, backup_relation_type) %}\n\n {{ materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) }}\n\n {% set build_sql = materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}\n\n {% if build_sql == '' %}\n {{ materialized_view_execute_no_op(target_relation) }}\n {% else %}\n {{ materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) }}\n {% endif %}\n\n {{ materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.materialized_view_setup", "macro.dbt.materialized_view_get_build_sql", "macro.dbt.materialized_view_execute_no_op", "macro.dbt.materialized_view_execute_build_sql", "macro.dbt.materialized_view_teardown"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.388793, "supported_languages": ["sql"]}, "macro.dbt.materialized_view_setup": {"name": "materialized_view_setup", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_setup", "macro_sql": "{% macro materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) %}\n\n -- backup_relation and intermediate_relation should not already exist in the database\n -- it's possible these exist because of a previous run that exited unexpectedly\n {% set preexisting_backup_relation = load_cached_relation(backup_relation) %}\n {% set 
preexisting_intermediate_relation = load_cached_relation(intermediate_relation) %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.389057, "supported_languages": null}, "macro.dbt.materialized_view_teardown": {"name": "materialized_view_teardown", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_teardown", "macro_sql": "{% macro materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) %}\n\n -- drop the temp relations if they exist to leave the database clean for the next run\n {{ drop_relation_if_exists(backup_relation) }}\n {{ drop_relation_if_exists(intermediate_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.389228, "supported_languages": null}, "macro.dbt.materialized_view_get_build_sql": {"name": "materialized_view_get_build_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_get_build_sql", "macro_sql": "{% macro materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}\n\n {% set full_refresh_mode = should_full_refresh() %}\n\n -- determine the scenario we're in: create, full_refresh, alter, refresh data\n {% if existing_relation is none %}\n {% set build_sql = get_create_materialized_view_as_sql(target_relation, sql) %}\n {% elif full_refresh_mode or not existing_relation.is_materialized_view %}\n {% set build_sql = get_replace_sql(existing_relation, target_relation, sql) %}\n {% else %}\n\n -- get config options\n {% set on_configuration_change = config.get('on_configuration_change') %}\n {% set configuration_changes = get_materialized_view_configuration_changes(existing_relation, config) %}\n\n {% if configuration_changes is none %}\n {% set build_sql = refresh_materialized_view(target_relation) %}\n\n {% elif on_configuration_change == 'apply' %}\n {% set build_sql = get_alter_materialized_view_as_sql(target_relation, configuration_changes, sql, existing_relation, backup_relation, intermediate_relation) %}\n {% elif on_configuration_change == 'continue' %}\n {% set build_sql = '' %}\n {{ exceptions.warn(\"Configuration changes were identified and `on_configuration_change` was set to `continue` for `\" ~ target_relation ~ \"`\") }}\n {% elif on_configuration_change == 'fail' %}\n {{ exceptions.raise_fail_fast_error(\"Configuration changes were identified and `on_configuration_change` was set to `fail` for `\" ~ target_relation ~ \"`\") }}\n\n {% else %}\n -- this only happens if the user provides a 
value other than `apply`, 'skip', 'fail'\n {{ exceptions.raise_compiler_error(\"Unexpected configuration scenario\") }}\n\n {% endif %}\n\n {% endif %}\n\n {% do return(build_sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.get_create_materialized_view_as_sql", "macro.dbt.get_replace_sql", "macro.dbt.get_materialized_view_configuration_changes", "macro.dbt.refresh_materialized_view", "macro.dbt.get_alter_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.39029, "supported_languages": null}, "macro.dbt.materialized_view_execute_no_op": {"name": "materialized_view_execute_no_op", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_execute_no_op", "macro_sql": "{% macro materialized_view_execute_no_op(target_relation) %}\n {% do store_raw_result(\n name=\"main\",\n message=\"skip \" ~ target_relation,\n code=\"skip\",\n rows_affected=\"-1\"\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3904939, "supported_languages": null}, "macro.dbt.materialized_view_execute_build_sql": {"name": "materialized_view_execute_build_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_execute_build_sql", "macro_sql": "{% macro materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) %}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set grant_config = config.get('grants') %}\n\n {% call statement(name=\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3910139, "supported_languages": null}, "macro.dbt.materialization_view_default": {"name": "materialization_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view.sql", "original_file_path": "macros/materializations/models/view.sql", "unique_id": "macro.dbt.materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. 
Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped\n since the variable was first set. 
*/\n {% set existing_relation = load_cached_relation(existing_relation) %}\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.run_hooks", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.392965, "supported_languages": ["sql"]}, "macro.dbt.materialization_table_default": {"name": "materialization_table_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table.sql", "original_file_path": "macros/materializations/models/table.sql", "unique_id": "macro.dbt.materialization_table_default", "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparison later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped\n since the variable was first set. 
*/\n {% set existing_relation = load_cached_relation(existing_relation) %}\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.get_create_table_as_sql", "macro.dbt.create_indexes", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.39487, "supported_languages": ["sql"]}, "macro.dbt.get_quoted_csv": {"name": "get_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.396085, "supported_languages": null}, "macro.dbt.diff_columns": {"name": "diff_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_columns", "macro_sql": "{% macro diff_columns(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% set source_names = source_columns | map(attribute = 'column') | list %}\n {% set target_names = target_columns | map(attribute = 'column') | list %}\n\n {# --check whether the name attribute exists in the target - this does not perform a data type check #}\n {% for sc in source_columns %}\n {% if sc.name not in target_names %}\n {{ result.append(sc) }}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.396471, "supported_languages": null}, "macro.dbt.diff_column_data_types": {"name": "diff_column_data_types", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": 
"macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_column_data_types", "macro_sql": "{% macro diff_column_data_types(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% for sc in source_columns %}\n {% set tc = target_columns | selectattr(\"name\", \"equalto\", sc.name) | list | first %}\n {% if tc %}\n {% if sc.data_type != tc.data_type and not sc.can_expand_to(other_column=tc) %}\n {{ result.append( { 'column_name': tc.name, 'new_type': sc.data_type } ) }}\n {% endif %}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.396905, "supported_languages": null}, "macro.dbt.get_merge_update_columns": {"name": "get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_merge_update_columns", "macro_sql": "{% macro get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {{ return(adapter.dispatch('get_merge_update_columns', 'dbt')(merge_update_columns, merge_exclude_columns, dest_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.3970618, "supported_languages": null}, "macro.dbt.default__get_merge_update_columns": {"name": "default__get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.default__get_merge_update_columns", "macro_sql": "{% macro default__get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {%- set default_cols = dest_columns | map(attribute=\"quoted\") | list -%}\n\n {%- if merge_update_columns and merge_exclude_columns -%}\n {{ exceptions.raise_compiler_error(\n 'Model cannot specify merge_update_columns and merge_exclude_columns. 
Please update model to use only one config'\n )}}\n {%- elif merge_update_columns -%}\n {%- set update_columns = merge_update_columns -%}\n {%- elif merge_exclude_columns -%}\n {%- set update_columns = [] -%}\n {%- for column in dest_columns -%}\n {% if column.column | lower not in merge_exclude_columns | map(\"lower\") | list %}\n {%- do update_columns.append(column.quoted) -%}\n {% endif %}\n {%- endfor -%}\n {%- else -%}\n {%- set update_columns = default_cols -%}\n {%- endif -%}\n\n {{ return(update_columns) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.397532, "supported_languages": null}, "macro.dbt.get_merge_sql": {"name": "get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n -- back compat for old kwarg name\n {% set incremental_predicates = kwargs.get('predicates', incremental_predicates) %}\n {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.401933, "supported_languages": null}, "macro.dbt.default__get_merge_sql": {"name": "default__get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_merge_sql", "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n {%- set predicates = [] if incremental_predicates is none else [] + incremental_predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set merge_update_columns = config.get('merge_update_columns') -%}\n {%- set merge_exclude_columns = config.get('merge_exclude_columns') -%}\n {%- set update_columns = get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not mapping and unique_key is not string %}\n {% for key in unique_key %}\n {% set this_key_match %}\n DBT_INTERNAL_SOURCE.{{ key }} = DBT_INTERNAL_DEST.{{ key }}\n {% endset %}\n {% do predicates.append(this_key_match) %}\n {% endfor %}\n {% else %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% endif %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{\"(\" ~ predicates | join(\") and (\") ~ \")\"}}\n\n {% if unique_key %}\n when matched then update set\n {% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n {% 
endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv", "macro.dbt.get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.403091, "supported_languages": null}, "macro.dbt.get_delete_insert_merge_sql": {"name": "get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.403276, "supported_languages": null}, "macro.dbt.default__get_delete_insert_merge_sql": {"name": "default__get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not string %}\n delete from {{target }}\n using {{ source }}\n where (\n {% for key in unique_key %}\n {{ source }}.{{ key }} = {{ target }}.{{ key }}\n {{ \"and \" if not loop.last}}\n {% endfor %}\n {% if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {% endif %}\n );\n {% else %}\n delete from {{ target }}\n where (\n {{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n )\n {%- if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {%- endif -%};\n\n {% endif %}\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4039302, "supported_languages": null}, "macro.dbt.get_insert_overwrite_merge_sql": {"name": "get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql', 'dbt')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "depends_on": {"macros": 
["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.404104, "supported_languages": null}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"name": "default__get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {#-- The only time include_sql_header is True: --#}\n {#-- BigQuery + insert_overwrite strategy + \"static\" partitions config --#}\n {#-- We should consider including the sql header at the materialization level instead --#}\n\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4044979, "supported_languages": null}, "macro.dbt.is_incremental": {"name": "is_incremental", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/is_incremental.sql", "original_file_path": "macros/materializations/models/incremental/is_incremental.sql", "unique_id": "macro.dbt.is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4049358, "supported_languages": null}, "macro.dbt.get_incremental_append_sql": {"name": "get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_append_sql", "macro_sql": "{% macro get_incremental_append_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_append_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.405555, "supported_languages": null}, 
"macro.dbt.default__get_incremental_append_sql": {"name": "default__get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_append_sql", "macro_sql": "{% macro default__get_incremental_append_sql(arg_dict) %}\n\n {% do return(get_insert_into_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_into_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4057112, "supported_languages": null}, "macro.dbt.get_incremental_delete_insert_sql": {"name": "get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_delete_insert_sql", "macro_sql": "{% macro get_incremental_delete_insert_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_delete_insert_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_delete_insert_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.405837, "supported_languages": null}, "macro.dbt.default__get_incremental_delete_insert_sql": {"name": "default__get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_delete_insert_sql", "macro_sql": "{% macro default__get_incremental_delete_insert_sql(arg_dict) %}\n\n {% do return(get_delete_insert_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.406028, "supported_languages": null}, "macro.dbt.get_incremental_merge_sql": {"name": "get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_merge_sql", "macro_sql": "{% macro get_incremental_merge_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_merge_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.406158, "supported_languages": null}, "macro.dbt.default__get_incremental_merge_sql": {"name": "default__get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": 
"macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_merge_sql", "macro_sql": "{% macro default__get_incremental_merge_sql(arg_dict) %}\n\n {% do return(get_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.406351, "supported_languages": null}, "macro.dbt.get_incremental_insert_overwrite_sql": {"name": "get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_insert_overwrite_sql", "macro_sql": "{% macro get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_insert_overwrite_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_insert_overwrite_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4064791, "supported_languages": null}, "macro.dbt.default__get_incremental_insert_overwrite_sql": {"name": "default__get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_insert_overwrite_sql", "macro_sql": "{% macro default__get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {% do return(get_insert_overwrite_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.406653, "supported_languages": null}, "macro.dbt.get_incremental_default_sql": {"name": "get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_default_sql", "macro_sql": "{% macro get_incremental_default_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_default_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_default_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.40678, "supported_languages": null}, "macro.dbt.default__get_incremental_default_sql": {"name": "default__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_default_sql", "macro_sql": "{% macro default__get_incremental_default_sql(arg_dict) %}\n\n 
{% do return(get_incremental_append_sql(arg_dict)) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.40688, "supported_languages": null}, "macro.dbt.get_insert_into_sql": {"name": "get_insert_into_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_insert_into_sql", "macro_sql": "{% macro get_insert_into_sql(target_relation, temp_relation, dest_columns) %}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n insert into {{ target_relation }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ temp_relation }}\n )\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.407064, "supported_languages": null}, "macro.dbt.materialization_incremental_default": {"name": "materialization_incremental_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/incremental.sql", "original_file_path": "macros/materializations/models/incremental/incremental.sql", "unique_id": "macro.dbt.materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. 
This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparison later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('predicates', none) or config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'incremental_predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_temp_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.get_create_table_as_sql", "macro.dbt.run_query", "macro.dbt.process_schema_changes", 
"macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4103198, "supported_languages": ["sql"]}, "macro.dbt.incremental_validate_on_schema_change": {"name": "incremental_validate_on_schema_change", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.incremental_validate_on_schema_change", "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n\n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n\n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}\n {% do log(log_message) %}\n\n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.414165, "supported_languages": null}, "macro.dbt.check_for_schema_changes": {"name": "check_for_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.check_for_schema_changes", "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n\n {% set schema_changed = False %}\n\n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n\n {% set new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n\n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'source_columns': source_columns,\n 'target_columns': target_columns,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New column types: {{ new_target_types }}\n {% endset %}\n\n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.diff_columns", "macro.dbt.diff_column_data_types"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4149592, "supported_languages": null}, "macro.dbt.sync_column_schemas": {"name": "sync_column_schemas", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.sync_column_schemas", "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n\n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n\n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n\n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types changed: {{ new_target_types }}\n {% endset %}\n\n {% do log(schema_change_message) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.alter_relation_add_remove_columns", "macro.dbt.alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.415735, "supported_languages": null}, "macro.dbt.process_schema_changes": {"name": "process_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.process_schema_changes", "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n\n {% if on_schema_change == 'ignore' %}\n\n {{ return({}) }}\n\n {% else %}\n\n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n\n {% if schema_changes_dict['schema_changed'] %}\n\n {% if on_schema_change == 'fail' %}\n\n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways:\n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n\n Additional troubleshooting context:\n Source columns not in target: {{ schema_changes_dict['source_not_in_target'] }}\n Target columns not in source: {{ schema_changes_dict['target_not_in_source'] }}\n New column types: {{ schema_changes_dict['new_target_types'] }}\n {% endset %}\n\n {% do exceptions.raise_compiler_error(fail_msg) %}\n\n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n\n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {% endif %}\n\n {% endif %}\n\n {{ 
return(schema_changes_dict['source_columns']) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.check_for_schema_changes", "macro.dbt.sync_column_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.416289, "supported_languages": null}, "macro.dbt.can_clone_table": {"name": "can_clone_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/can_clone_table.sql", "original_file_path": "macros/materializations/models/clone/can_clone_table.sql", "unique_id": "macro.dbt.can_clone_table", "macro_sql": "{% macro can_clone_table() %}\n {{ return(adapter.dispatch('can_clone_table', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__can_clone_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.416459, "supported_languages": null}, "macro.dbt.default__can_clone_table": {"name": "default__can_clone_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/can_clone_table.sql", "original_file_path": "macros/materializations/models/clone/can_clone_table.sql", "unique_id": "macro.dbt.default__can_clone_table", "macro_sql": "{% macro default__can_clone_table() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4165409, "supported_languages": null}, "macro.dbt.create_or_replace_clone": {"name": "create_or_replace_clone", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/create_or_replace_clone.sql", "original_file_path": "macros/materializations/models/clone/create_or_replace_clone.sql", "unique_id": "macro.dbt.create_or_replace_clone", "macro_sql": "{% macro create_or_replace_clone(this_relation, defer_relation) %}\n {{ return(adapter.dispatch('create_or_replace_clone', 'dbt')(this_relation, defer_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__create_or_replace_clone"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.416734, "supported_languages": null}, "macro.dbt.default__create_or_replace_clone": {"name": "default__create_or_replace_clone", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/create_or_replace_clone.sql", "original_file_path": "macros/materializations/models/clone/create_or_replace_clone.sql", "unique_id": "macro.dbt.default__create_or_replace_clone", "macro_sql": "{% macro default__create_or_replace_clone(this_relation, defer_relation) %}\n create or replace table {{ this_relation }} clone {{ defer_relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.416826, "supported_languages": null}, "macro.dbt.materialization_clone_default": {"name": "materialization_clone_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/clone.sql", "original_file_path": "macros/materializations/models/clone/clone.sql", "unique_id": "macro.dbt.materialization_clone_default", "macro_sql": "{%- materialization 
clone, default -%}\n\n {%- set relations = {'relations': []} -%}\n\n {%- if not defer_relation -%}\n -- nothing to do\n {{ log(\"No relation found in state manifest for \" ~ model.unique_id, info=True) }}\n {{ return(relations) }}\n {%- endif -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n\n {%- if existing_relation and not flags.FULL_REFRESH -%}\n -- noop!\n {{ log(\"Relation \" ~ existing_relation ~ \" already exists\", info=True) }}\n {{ return(relations) }}\n {%- endif -%}\n\n {%- set other_existing_relation = load_cached_relation(defer_relation) -%}\n\n -- If this is a database that can do zero-copy cloning of tables, and the other relation is a table, then this will be a table\n -- Otherwise, this will be a view\n\n {% set can_clone_table = can_clone_table() %}\n\n {%- if other_existing_relation and other_existing_relation.type == 'table' and can_clone_table -%}\n\n {%- set target_relation = this.incorporate(type='table') -%}\n {% if existing_relation is not none and not existing_relation.is_table %}\n {{ log(\"Dropping relation \" ~ existing_relation ~ \" because it is of type \" ~ existing_relation.type) }}\n {{ drop_relation_if_exists(existing_relation) }}\n {% endif %}\n\n -- as a general rule, data platforms that can clone tables can also do atomic 'create or replace'\n {% call statement('main') %}\n {% if target_relation and defer_relation and target_relation == defer_relation %}\n {{ log(\"Target relation and defer relation are the same, skipping clone for relation: \" ~ target_relation) }}\n {% else %}\n {{ create_or_replace_clone(target_relation, defer_relation) }}\n {% endif %}\n\n {% endcall %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n {% do persist_docs(target_relation, model) %}\n\n {{ return({'relations': [target_relation]}) }}\n\n {%- else -%}\n\n {%- set target_relation = this.incorporate(type='view') -%}\n\n -- reuse the view materialization\n -- TODO: support actual dispatch for materialization macros\n -- Tracking ticket: https://github.com/dbt-labs/dbt-core/issues/7799\n {% set search_name = \"materialization_view_\" ~ adapter.type() %}\n {% if not search_name in context %}\n {% set search_name = \"materialization_view_default\" %}\n {% endif %}\n {% set materialization_macro = context[search_name] %}\n {% set relations = materialization_macro() %}\n {{ return(relations) }}\n\n {%- endif -%}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.can_clone_table", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.create_or_replace_clone", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.418997, "supported_languages": ["sql"]}, "macro.dbt.materialization_seed_default": {"name": "materialization_seed_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/seed.sql", "original_file_path": "macros/materializations/seeds/seed.sql", "unique_id": "macro.dbt.materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n 
{%- set exists_as_table = (old_relation is not none and old_relation.is_table) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set grant_config = config.get('grants') -%}\n {%- set agate_table = load_agate_table() -%}\n -- grab current tables grants config for comparison later on\n\n {%- do store_result('agate_table', response='OK', agate_table=agate_table) -%}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% set create_table_sql = \"\" %}\n {% if exists_as_view %}\n {{ exceptions.raise_compiler_error(\"Cannot seed to '{}', it is a view\".format(old_relation)) }}\n {% elif exists_as_table %}\n {% set create_table_sql = reset_csv_table(model, full_refresh_mode, old_relation, agate_table) %}\n {% else %}\n {% set create_table_sql = create_csv_table(model, agate_table) %}\n {% endif %}\n\n {% set code = 'CREATE' if full_refresh_mode else 'INSERT' %}\n {% set rows_affected = (agate_table.rows | length) %}\n {% set sql = load_csv_rows(model, agate_table) %}\n\n {% call noop_statement('main', code ~ ' ' ~ rows_affected, code, rows_affected) %}\n {{ get_csv_sql(create_table_sql, sql) }};\n {% endcall %}\n\n {% set target_relation = this.incorporate(type='table') %}\n\n {% set should_revoke = should_revoke(old_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if full_refresh_mode or not exists_as_table %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.run_hooks", "macro.dbt.reset_csv_table", "macro.dbt.create_csv_table", "macro.dbt.load_csv_rows", "macro.dbt.noop_statement", "macro.dbt.get_csv_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.420942, "supported_languages": ["sql"]}, "macro.dbt.create_csv_table": {"name": "create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.create_csv_table", "macro_sql": "{% macro create_csv_table(model, agate_table) -%}\n {{ adapter.dispatch('create_csv_table', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.424428, "supported_languages": null}, "macro.dbt.default__create_csv_table": {"name": "default__create_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__create_csv_table", "macro_sql": "{% macro default__create_csv_table(model, agate_table) %}\n {%- set column_override = model['config'].get('column_types', {}) -%}\n {%- set 
quote_seed_column = model['config'].get('quote_columns', None) -%}\n\n {% set sql %}\n create table {{ this.render() }} (\n {%- for col_name in agate_table.column_names -%}\n {%- set inferred_type = adapter.convert_type(agate_table, loop.index0) -%}\n {%- set type = column_override.get(col_name, inferred_type) -%}\n {%- set column_name = (col_name | string) -%}\n {{ adapter.quote_seed_column(column_name, quote_seed_column) }} {{ type }} {%- if not loop.last -%}, {%- endif -%}\n {%- endfor -%}\n )\n {% endset %}\n\n {% call statement('_') -%}\n {{ sql }}\n {%- endcall %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4249358, "supported_languages": null}, "macro.dbt.reset_csv_table": {"name": "reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.reset_csv_table", "macro_sql": "{% macro reset_csv_table(model, full_refresh, old_relation, agate_table) -%}\n {{ adapter.dispatch('reset_csv_table', 'dbt')(model, full_refresh, old_relation, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__reset_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4250722, "supported_languages": null}, "macro.dbt.default__reset_csv_table": {"name": "default__reset_csv_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__reset_csv_table", "macro_sql": "{% macro default__reset_csv_table(model, full_refresh, old_relation, agate_table) %}\n {% set sql = \"\" %}\n {% if full_refresh %}\n {{ adapter.drop_relation(old_relation) }}\n {% set sql = create_csv_table(model, agate_table) %}\n {% else %}\n {{ adapter.truncate_relation(old_relation) }}\n {% set sql = \"truncate table \" ~ old_relation %}\n {% endif %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_csv_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.425347, "supported_languages": null}, "macro.dbt.get_csv_sql": {"name": "get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_csv_sql", "macro_sql": "{% macro get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ adapter.dispatch('get_csv_sql', 'dbt')(create_or_truncate_sql, insert_sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_csv_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4254599, "supported_languages": null}, "macro.dbt.default__get_csv_sql": {"name": "default__get_csv_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_csv_sql", "macro_sql": "{% macro default__get_csv_sql(create_or_truncate_sql, insert_sql) %}\n {{ 
create_or_truncate_sql }};\n -- dbt seed --\n {{ insert_sql }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.425537, "supported_languages": null}, "macro.dbt.get_binding_char": {"name": "get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_binding_char", "macro_sql": "{% macro get_binding_char() -%}\n {{ adapter.dispatch('get_binding_char', 'dbt')() }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4256241, "supported_languages": null}, "macro.dbt.default__get_binding_char": {"name": "default__get_binding_char", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_binding_char", "macro_sql": "{% macro default__get_binding_char() %}\n {{ return('%s') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4256918, "supported_languages": null}, "macro.dbt.get_batch_size": {"name": "get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_batch_size", "macro_sql": "{% macro get_batch_size() -%}\n {{ return(adapter.dispatch('get_batch_size', 'dbt')()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_batch_size"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4257832, "supported_languages": null}, "macro.dbt.default__get_batch_size": {"name": "default__get_batch_size", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__get_batch_size", "macro_sql": "{% macro default__get_batch_size() %}\n {{ return(10000) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.425856, "supported_languages": null}, "macro.dbt.get_seed_column_quoted_csv": {"name": "get_seed_column_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.get_seed_column_quoted_csv", "macro_sql": "{% macro get_seed_column_quoted_csv(model, column_names) %}\n {%- set quote_seed_column = model['config'].get('quote_columns', None) -%}\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote_seed_column(col, quote_seed_column)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, 
"patch_path": null, "arguments": [], "created_at": 1715700423.426124, "supported_languages": null}, "macro.dbt.load_csv_rows": {"name": "load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.load_csv_rows", "macro_sql": "{% macro load_csv_rows(model, agate_table) -%}\n {{ adapter.dispatch('load_csv_rows', 'dbt')(model, agate_table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__load_csv_rows"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.426234, "supported_languages": null}, "macro.dbt.default__load_csv_rows": {"name": "default__load_csv_rows", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/helpers.sql", "original_file_path": "macros/materializations/seeds/helpers.sql", "unique_id": "macro.dbt.default__load_csv_rows", "macro_sql": "{% macro default__load_csv_rows(model, agate_table) %}\n\n {% set batch_size = get_batch_size() %}\n\n {% set cols_sql = get_seed_column_quoted_csv(model, agate_table.column_names) %}\n {% set bindings = [] %}\n\n {% set statements = [] %}\n\n {% for chunk in agate_table.rows | batch(batch_size) %}\n {% set bindings = [] %}\n\n {% for row in chunk %}\n {% do bindings.extend(row) %}\n {% endfor %}\n\n {% set sql %}\n insert into {{ this.render() }} ({{ cols_sql }}) values\n {% for row in chunk -%}\n ({%- for column in agate_table.column_names -%}\n {{ get_binding_char() }}\n {%- if not loop.last%},{%- endif %}\n {%- endfor -%})\n {%- if not loop.last%},{%- endif %}\n {%- endfor %}\n {% endset %}\n\n {% do adapter.add_query(sql, bindings=bindings, abridge_sql_log=True) %}\n\n {% if loop.index0 == 0 %}\n {% do statements.append(sql) %}\n {% endif %}\n {% endfor %}\n\n {# Return SQL so we can render it out into the compiled files #}\n {{ return(statements[0]) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_batch_size", "macro.dbt.get_seed_column_quoted_csv", "macro.dbt.get_binding_char"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.427006, "supported_languages": null}, "macro.dbt.generate_alias_name": {"name": "generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.generate_alias_name", "macro_sql": "{% macro generate_alias_name(custom_alias_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_alias_name', 'dbt')(custom_alias_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_alias_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.427263, "supported_languages": null}, "macro.dbt.default__generate_alias_name": {"name": "default__generate_alias_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_alias.sql", "original_file_path": "macros/get_custom_name/get_custom_alias.sql", "unique_id": "macro.dbt.default__generate_alias_name", "macro_sql": "{% macro default__generate_alias_name(custom_alias_name=none, node=none) -%}\n\n {%- if custom_alias_name -%}\n\n {{ custom_alias_name | trim }}\n\n {%- 
elif node.version -%}\n\n {{ return(node.name ~ \"_v\" ~ (node.version | replace(\".\", \"_\"))) }}\n\n {%- else -%}\n\n {{ node.name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4274812, "supported_languages": null}, "macro.dbt.generate_schema_name": {"name": "generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name", "macro_sql": "{% macro generate_schema_name(custom_schema_name=none, node=none) -%}\n {{ return(adapter.dispatch('generate_schema_name', 'dbt')(custom_schema_name, node)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.427813, "supported_languages": null}, "macro.dbt.default__generate_schema_name": {"name": "default__generate_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.default__generate_schema_name", "macro_sql": "{% macro default__generate_schema_name(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if custom_schema_name is none -%}\n\n {{ default_schema }}\n\n {%- else -%}\n\n {{ default_schema }}_{{ custom_schema_name | trim }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.427968, "supported_languages": null}, "macro.dbt.generate_schema_name_for_env": {"name": "generate_schema_name_for_env", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_schema.sql", "original_file_path": "macros/get_custom_name/get_custom_schema.sql", "unique_id": "macro.dbt.generate_schema_name_for_env", "macro_sql": "{% macro generate_schema_name_for_env(custom_schema_name, node) -%}\n\n {%- set default_schema = target.schema -%}\n {%- if target.name == 'prod' and custom_schema_name is not none -%}\n\n {{ custom_schema_name | trim }}\n\n {%- else -%}\n\n {{ default_schema }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.428138, "supported_languages": null}, "macro.dbt.generate_database_name": {"name": "generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.generate_database_name", "macro_sql": "{% macro generate_database_name(custom_database_name=none, node=none) -%}\n {% do return(adapter.dispatch('generate_database_name', 'dbt')(custom_database_name, node)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_database_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4283679, "supported_languages": null}, 
"macro.dbt.default__generate_database_name": {"name": "default__generate_database_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/get_custom_name/get_custom_database.sql", "original_file_path": "macros/get_custom_name/get_custom_database.sql", "unique_id": "macro.dbt.default__generate_database_name", "macro_sql": "{% macro default__generate_database_name(custom_database_name=none, node=none) -%}\n {%- set default_database = target.database -%}\n {%- if custom_database_name is none -%}\n\n {{ default_database }}\n\n {%- else -%}\n\n {{ custom_database_name }}\n\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.428512, "supported_languages": null}, "macro.dbt.get_drop_sql": {"name": "get_drop_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.get_drop_sql", "macro_sql": "{%- macro get_drop_sql(relation) -%}\n {{- log('Applying DROP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_drop_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.428948, "supported_languages": null}, "macro.dbt.default__get_drop_sql": {"name": "default__get_drop_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.default__get_drop_sql", "macro_sql": "{%- macro default__get_drop_sql(relation) -%}\n\n {%- if relation.is_view -%}\n {{ drop_view(relation) }}\n\n {%- elif relation.is_table -%}\n {{ drop_table(relation) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ drop_materialized_view(relation) }}\n\n {%- else -%}\n drop {{ relation.type }} if exists {{ relation }} cascade\n\n {%- endif -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.drop_view", "macro.dbt.drop_table", "macro.dbt.drop_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.429159, "supported_languages": null}, "macro.dbt.drop_relation": {"name": "drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.drop_relation", "macro_sql": "{% macro drop_relation(relation) -%}\n {{ return(adapter.dispatch('drop_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__drop_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.429263, "supported_languages": null}, "macro.dbt.default__drop_relation": {"name": "default__drop_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.default__drop_relation", "macro_sql": "{% macro default__drop_relation(relation) -%}\n {% call statement('drop_relation', auto_begin=False) -%}\n {{ get_drop_sql(relation) }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.get_drop_sql"]}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.429374, "supported_languages": null}, "macro.dbt.drop_relation_if_exists": {"name": "drop_relation_if_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop.sql", "original_file_path": "macros/relations/drop.sql", "unique_id": "macro.dbt.drop_relation_if_exists", "macro_sql": "{% macro drop_relation_if_exists(relation) %}\n {% if relation is not none %}\n {{ adapter.drop_relation(relation) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4294882, "supported_languages": null}, "macro.dbt.get_replace_sql": {"name": "get_replace_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/replace.sql", "original_file_path": "macros/relations/replace.sql", "unique_id": "macro.dbt.get_replace_sql", "macro_sql": "{% macro get_replace_sql(existing_relation, target_relation, sql) %}\n {{- log('Applying REPLACE to: ' ~ existing_relation) -}}\n {{- adapter.dispatch('get_replace_sql', 'dbt')(existing_relation, target_relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_replace_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4299881, "supported_languages": null}, "macro.dbt.default__get_replace_sql": {"name": "default__get_replace_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/replace.sql", "original_file_path": "macros/relations/replace.sql", "unique_id": "macro.dbt.default__get_replace_sql", "macro_sql": "{% macro default__get_replace_sql(existing_relation, target_relation, sql) %}\n\n {# /* use a create or replace statement if possible */ #}\n\n {% set is_replaceable = existing_relation.type == target_relation_type and existing_relation.can_be_replaced %}\n\n {% if is_replaceable and existing_relation.is_view %}\n {{ get_replace_view_sql(target_relation, sql) }}\n\n {% elif is_replaceable and existing_relation.is_table %}\n {{ get_replace_table_sql(target_relation, sql) }}\n\n {% elif is_replaceable and existing_relation.is_materialized_view %}\n {{ get_replace_materialized_view_sql(target_relation, sql) }}\n\n {# /* a create or replace statement is not possible, so try to stage and/or backup to be safe */ #}\n\n {# /* create target_relation as an intermediate relation, then swap it out with the existing one using a backup */ #}\n {%- elif target_relation.can_be_renamed and existing_relation.can_be_renamed -%}\n {{ get_create_intermediate_sql(target_relation, sql) }};\n {{ get_create_backup_sql(existing_relation) }};\n {{ get_rename_intermediate_sql(target_relation) }};\n {{ get_drop_backup_sql(existing_relation) }}\n\n {# /* create target_relation as an intermediate relation, then swap it out with the existing one without using a backup */ #}\n {%- elif target_relation.can_be_renamed -%}\n {{ get_create_intermediate_sql(target_relation, sql) }};\n {{ get_drop_sql(existing_relation) }};\n {{ get_rename_intermediate_sql(target_relation) }}\n\n {# /* create target_relation in place by first backing up the existing relation */ #}\n {%- elif existing_relation.can_be_renamed -%}\n {{ get_create_backup_sql(existing_relation) }};\n {{ get_create_sql(target_relation, sql) }};\n {{ 
get_drop_backup_sql(existing_relation) }}\n\n {# /* no renaming is allowed, so just drop and create */ #}\n {%- else -%}\n {{ get_drop_sql(existing_relation) }};\n {{ get_create_sql(target_relation, sql) }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_replace_view_sql", "macro.dbt.get_replace_table_sql", "macro.dbt.get_replace_materialized_view_sql", "macro.dbt.get_create_intermediate_sql", "macro.dbt.get_create_backup_sql", "macro.dbt.get_rename_intermediate_sql", "macro.dbt.get_drop_backup_sql", "macro.dbt.get_drop_sql", "macro.dbt.get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.430784, "supported_languages": null}, "macro.dbt.get_create_intermediate_sql": {"name": "get_create_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_intermediate.sql", "original_file_path": "macros/relations/create_intermediate.sql", "unique_id": "macro.dbt.get_create_intermediate_sql", "macro_sql": "{%- macro get_create_intermediate_sql(relation, sql) -%}\n {{- log('Applying CREATE INTERMEDIATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_intermediate_sql', 'dbt')(relation, sql) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_intermediate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4310348, "supported_languages": null}, "macro.dbt.default__get_create_intermediate_sql": {"name": "default__get_create_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_intermediate.sql", "original_file_path": "macros/relations/create_intermediate.sql", "unique_id": "macro.dbt.default__get_create_intermediate_sql", "macro_sql": "{%- macro default__get_create_intermediate_sql(relation, sql) -%}\n\n -- get the standard intermediate name\n {% set intermediate_relation = make_intermediate_relation(relation) %}\n\n -- drop any pre-existing intermediate\n {{ get_drop_sql(intermediate_relation) }};\n\n {{ get_create_sql(intermediate_relation, sql) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_intermediate_relation", "macro.dbt.get_drop_sql", "macro.dbt.get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.431179, "supported_languages": null}, "macro.dbt.get_drop_backup_sql": {"name": "get_drop_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop_backup.sql", "original_file_path": "macros/relations/drop_backup.sql", "unique_id": "macro.dbt.get_drop_backup_sql", "macro_sql": "{%- macro get_drop_backup_sql(relation) -%}\n {{- log('Applying DROP BACKUP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_drop_backup_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_drop_backup_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4313679, "supported_languages": null}, "macro.dbt.default__get_drop_backup_sql": {"name": "default__get_drop_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/drop_backup.sql", "original_file_path": "macros/relations/drop_backup.sql", "unique_id": "macro.dbt.default__get_drop_backup_sql", 
"macro_sql": "{%- macro default__get_drop_backup_sql(relation) -%}\n\n -- get the standard backup name\n {% set backup_relation = make_backup_relation(relation, relation.type) %}\n\n {{ get_drop_sql(backup_relation) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_backup_relation", "macro.dbt.get_drop_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4314911, "supported_languages": null}, "macro.dbt.get_rename_sql": {"name": "get_rename_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.get_rename_sql", "macro_sql": "{%- macro get_rename_sql(relation, new_name) -%}\n {{- log('Applying RENAME to: ' ~ relation) -}}\n {{- adapter.dispatch('get_rename_sql', 'dbt')(relation, new_name) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.431915, "supported_languages": null}, "macro.dbt.default__get_rename_sql": {"name": "default__get_rename_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.default__get_rename_sql", "macro_sql": "{%- macro default__get_rename_sql(relation, new_name) -%}\n\n {%- if relation.is_view -%}\n {{ get_rename_view_sql(relation, new_name) }}\n\n {%- elif relation.is_table -%}\n {{ get_rename_table_sql(relation, new_name) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ get_rename_materialized_view_sql(relation, new_name) }}\n\n {%- else -%}\n {{- exceptions.raise_compiler_error(\"`get_rename_sql` has not been implemented for: \" ~ relation.type ) -}}\n\n {%- endif -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.get_rename_view_sql", "macro.dbt.get_rename_table_sql", "macro.dbt.get_rename_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.432158, "supported_languages": null}, "macro.dbt.rename_relation": {"name": "rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.rename_relation", "macro_sql": "{% macro rename_relation(from_relation, to_relation) -%}\n {{ return(adapter.dispatch('rename_relation', 'dbt')(from_relation, to_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__rename_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4322848, "supported_languages": null}, "macro.dbt.default__rename_relation": {"name": "default__rename_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename.sql", "original_file_path": "macros/relations/rename.sql", "unique_id": "macro.dbt.default__rename_relation", "macro_sql": "{% macro default__rename_relation(from_relation, to_relation) -%}\n {% set target_name = adapter.quote_as_configured(to_relation.identifier, 'identifier') %}\n {% call statement('rename_relation') -%}\n alter table {{ from_relation }} rename to {{ target_name }}\n {%- endcall %}\n{% endmacro %}", "depends_on": 
{"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.432448, "supported_languages": null}, "macro.dbt.get_create_backup_sql": {"name": "get_create_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_backup.sql", "original_file_path": "macros/relations/create_backup.sql", "unique_id": "macro.dbt.get_create_backup_sql", "macro_sql": "{%- macro get_create_backup_sql(relation) -%}\n {{- log('Applying CREATE BACKUP to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_backup_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_backup_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.432655, "supported_languages": null}, "macro.dbt.default__get_create_backup_sql": {"name": "default__get_create_backup_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create_backup.sql", "original_file_path": "macros/relations/create_backup.sql", "unique_id": "macro.dbt.default__get_create_backup_sql", "macro_sql": "{%- macro default__get_create_backup_sql(relation) -%}\n\n -- get the standard backup name\n {% set backup_relation = make_backup_relation(relation, relation.type) %}\n\n -- drop any pre-existing backup\n {{ get_drop_sql(backup_relation) }};\n\n {{ get_rename_sql(relation, backup_relation.identifier) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_backup_relation", "macro.dbt.get_drop_sql", "macro.dbt.get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.43281, "supported_languages": null}, "macro.dbt.get_create_sql": {"name": "get_create_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create.sql", "original_file_path": "macros/relations/create.sql", "unique_id": "macro.dbt.get_create_sql", "macro_sql": "{%- macro get_create_sql(relation, sql) -%}\n {{- log('Applying CREATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_create_sql', 'dbt')(relation, sql) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_create_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.433093, "supported_languages": null}, "macro.dbt.default__get_create_sql": {"name": "default__get_create_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/create.sql", "original_file_path": "macros/relations/create.sql", "unique_id": "macro.dbt.default__get_create_sql", "macro_sql": "{%- macro default__get_create_sql(relation, sql) -%}\n\n {%- if relation.is_view -%}\n {{ get_create_view_as_sql(relation, sql) }}\n\n {%- elif relation.is_table -%}\n {{ get_create_table_as_sql(False, relation, sql) }}\n\n {%- elif relation.is_materialized_view -%}\n {{ get_create_materialized_view_as_sql(relation, sql) }}\n\n {%- else -%}\n {{- exceptions.raise_compiler_error(\"`get_create_sql` has not been implemented for: \" ~ relation.type ) -}}\n\n {%- endif -%}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.get_create_view_as_sql", "macro.dbt.get_create_table_as_sql", "macro.dbt.get_create_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, 
"patch_path": null, "arguments": [], "created_at": 1715700423.43342, "supported_languages": null}, "macro.dbt.get_rename_intermediate_sql": {"name": "get_rename_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename_intermediate.sql", "original_file_path": "macros/relations/rename_intermediate.sql", "unique_id": "macro.dbt.get_rename_intermediate_sql", "macro_sql": "{%- macro get_rename_intermediate_sql(relation) -%}\n {{- log('Applying RENAME INTERMEDIATE to: ' ~ relation) -}}\n {{- adapter.dispatch('get_rename_intermediate_sql', 'dbt')(relation) -}}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt.default__get_rename_intermediate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4336152, "supported_languages": null}, "macro.dbt.default__get_rename_intermediate_sql": {"name": "default__get_rename_intermediate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/rename_intermediate.sql", "original_file_path": "macros/relations/rename_intermediate.sql", "unique_id": "macro.dbt.default__get_rename_intermediate_sql", "macro_sql": "{%- macro default__get_rename_intermediate_sql(relation) -%}\n\n -- get the standard intermediate name\n {% set intermediate_relation = make_intermediate_relation(relation) %}\n\n {{ get_rename_sql(intermediate_relation, relation.identifier) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.make_intermediate_relation", "macro.dbt.get_rename_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4337332, "supported_languages": null}, "macro.dbt.drop_materialized_view": {"name": "drop_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt.drop_materialized_view", "macro_sql": "{% macro drop_materialized_view(relation) -%}\n {{ return(adapter.dispatch('drop_materialized_view', 'dbt')(relation)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__drop_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4338932, "supported_languages": null}, "macro.dbt.default__drop_materialized_view": {"name": "default__drop_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt.default__drop_materialized_view", "macro_sql": "{% macro default__drop_materialized_view(relation) -%}\n drop materialized view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.433955, "supported_languages": null}, "macro.dbt.get_replace_materialized_view_sql": {"name": "get_replace_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/replace.sql", "original_file_path": "macros/relations/materialized_view/replace.sql", "unique_id": "macro.dbt.get_replace_materialized_view_sql", "macro_sql": "{% macro get_replace_materialized_view_sql(relation, sql) %}\n 
{{- adapter.dispatch('get_replace_materialized_view_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_replace_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4341152, "supported_languages": null}, "macro.dbt.default__get_replace_materialized_view_sql": {"name": "default__get_replace_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/replace.sql", "original_file_path": "macros/relations/materialized_view/replace.sql", "unique_id": "macro.dbt.default__get_replace_materialized_view_sql", "macro_sql": "{% macro default__get_replace_materialized_view_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_materialized_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4342022, "supported_languages": null}, "macro.dbt.refresh_materialized_view": {"name": "refresh_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt.refresh_materialized_view", "macro_sql": "{% macro refresh_materialized_view(relation) %}\n {{- log('Applying REFRESH to: ' ~ relation) -}}\n {{- adapter.dispatch('refresh_materialized_view', 'dbt')(relation) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__refresh_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4343832, "supported_languages": null}, "macro.dbt.default__refresh_materialized_view": {"name": "default__refresh_materialized_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt.default__refresh_materialized_view", "macro_sql": "{% macro default__refresh_materialized_view(relation) %}\n {{ exceptions.raise_compiler_error(\"`refresh_materialized_view` has not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.434463, "supported_languages": null}, "macro.dbt.get_rename_materialized_view_sql": {"name": "get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt.get_rename_materialized_view_sql", "macro_sql": "{% macro get_rename_materialized_view_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_materialized_view_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_rename_materialized_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.434623, "supported_languages": null}, "macro.dbt.default__get_rename_materialized_view_sql": {"name": 
"default__get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt.default__get_rename_materialized_view_sql", "macro_sql": "{% macro default__get_rename_materialized_view_sql(relation, new_name) %}\n {{ exceptions.raise_compiler_error(\n \"`get_rename_materialized_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4347079, "supported_languages": null}, "macro.dbt.get_alter_materialized_view_as_sql": {"name": "get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.get_alter_materialized_view_as_sql", "macro_sql": "{% macro get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n {{- log('Applying ALTER to: ' ~ relation) -}}\n {{- adapter.dispatch('get_alter_materialized_view_as_sql', 'dbt')(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n ) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__get_alter_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.43507, "supported_languages": null}, "macro.dbt.default__get_alter_materialized_view_as_sql": {"name": "default__get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.default__get_alter_materialized_view_as_sql", "macro_sql": "{% macro default__get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.435179, "supported_languages": null}, "macro.dbt.get_materialized_view_configuration_changes": {"name": "get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.get_materialized_view_configuration_changes", "macro_sql": "{% macro get_materialized_view_configuration_changes(existing_relation, new_config) %}\n /* {#\n It's recommended that configuration changes be formatted as follows:\n {\"\": [{\"action\": \"\", \"context\": ...}]}\n\n For example:\n {\n \"indexes\": [\n {\"action\": \"drop\", \"context\": \"index_abc\"},\n {\"action\": \"create\", \"context\": {\"columns\": [\"column_1\", \"column_2\"], \"type\": \"hash\", \"unique\": True}},\n ],\n }\n\n Either way, `get_materialized_view_configuration_changes` needs to align with 
`get_alter_materialized_view_as_sql`.\n #} */\n {{- log('Determining configuration changes on: ' ~ existing_relation) -}}\n {%- do return(adapter.dispatch('get_materialized_view_configuration_changes', 'dbt')(existing_relation, new_config)) -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__get_materialized_view_configuration_changes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.435353, "supported_languages": null}, "macro.dbt.default__get_materialized_view_configuration_changes": {"name": "default__get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt.default__get_materialized_view_configuration_changes", "macro_sql": "{% macro default__get_materialized_view_configuration_changes(existing_relation, new_config) %}\n {{ exceptions.raise_compiler_error(\"Materialized views have not been implemented for this adapter.\") }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4354382, "supported_languages": null}, "macro.dbt.get_create_materialized_view_as_sql": {"name": "get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/create.sql", "original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt.get_create_materialized_view_as_sql", "macro_sql": "{% macro get_create_materialized_view_as_sql(relation, sql) -%}\n {{- adapter.dispatch('get_create_materialized_view_as_sql', 'dbt')(relation, sql) -}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__get_create_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.435601, "supported_languages": null}, "macro.dbt.default__get_create_materialized_view_as_sql": {"name": "default__get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/materialized_view/create.sql", "original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt.default__get_create_materialized_view_as_sql", "macro_sql": "{% macro default__get_create_materialized_view_as_sql(relation, sql) -%}\n {{ exceptions.raise_compiler_error(\n \"`get_create_materialized_view_as_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4356842, "supported_languages": null}, "macro.dbt.get_table_columns_and_constraints": {"name": "get_table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_table_columns_and_constraints", "macro_sql": "{%- macro get_table_columns_and_constraints() -%}\n {{ adapter.dispatch('get_table_columns_and_constraints', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_table_columns_and_constraints"]}, "description": 
"", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.436316, "supported_languages": null}, "macro.dbt.default__get_table_columns_and_constraints": {"name": "default__get_table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_table_columns_and_constraints", "macro_sql": "{% macro default__get_table_columns_and_constraints() -%}\n {{ return(table_columns_and_constraints()) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.table_columns_and_constraints"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4363892, "supported_languages": null}, "macro.dbt.table_columns_and_constraints": {"name": "table_columns_and_constraints", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.table_columns_and_constraints", "macro_sql": "{% macro table_columns_and_constraints() %}\n {# loop through user_provided_columns to create DDL with data types and constraints #}\n {%- set raw_column_constraints = adapter.render_raw_columns_constraints(raw_columns=model['columns']) -%}\n {%- set raw_model_constraints = adapter.render_raw_model_constraints(raw_constraints=model['constraints']) -%}\n (\n {% for c in raw_column_constraints -%}\n {{ c }}{{ \",\" if not loop.last or raw_model_constraints }}\n {% endfor %}\n {% for c in raw_model_constraints -%}\n {{ c }}{{ \",\" if not loop.last }}\n {% endfor -%}\n )\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.43671, "supported_languages": null}, "macro.dbt.get_assert_columns_equivalent": {"name": "get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.get_assert_columns_equivalent", "macro_sql": "\n\n{%- macro get_assert_columns_equivalent(sql) -%}\n {{ adapter.dispatch('get_assert_columns_equivalent', 'dbt')(sql) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.436807, "supported_languages": null}, "macro.dbt.default__get_assert_columns_equivalent": {"name": "default__get_assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__get_assert_columns_equivalent", "macro_sql": "{% macro default__get_assert_columns_equivalent(sql) -%}\n {{ return(assert_columns_equivalent(sql)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.436886, "supported_languages": null}, "macro.dbt.assert_columns_equivalent": {"name": 
"assert_columns_equivalent", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.assert_columns_equivalent", "macro_sql": "{% macro assert_columns_equivalent(sql) %}\n\n {#-- First ensure the user has defined 'columns' in yaml specification --#}\n {%- set user_defined_columns = model['columns'] -%}\n {%- if not user_defined_columns -%}\n {{ exceptions.raise_contract_error([], []) }}\n {%- endif -%}\n\n {#-- Obtain the column schema provided by sql file. #}\n {%- set sql_file_provided_columns = get_column_schema_from_query(sql, config.get('sql_header', none)) -%}\n {#--Obtain the column schema provided by the schema file by generating an 'empty schema' query from the model's columns. #}\n {%- set schema_file_provided_columns = get_column_schema_from_query(get_empty_schema_sql(user_defined_columns)) -%}\n\n {#-- create dictionaries with name and formatted data type and strings for exception #}\n {%- set sql_columns = format_columns(sql_file_provided_columns) -%}\n {%- set yaml_columns = format_columns(schema_file_provided_columns) -%}\n\n {%- if sql_columns|length != yaml_columns|length -%}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n\n {%- for sql_col in sql_columns -%}\n {%- set yaml_col = [] -%}\n {%- for this_col in yaml_columns -%}\n {%- if this_col['name'] == sql_col['name'] -%}\n {%- do yaml_col.append(this_col) -%}\n {%- break -%}\n {%- endif -%}\n {%- endfor -%}\n {%- if not yaml_col -%}\n {#-- Column with name not found in yaml #}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n {%- if sql_col['formatted'] != yaml_col[0]['formatted'] -%}\n {#-- Column data types don't match #}\n {%- do exceptions.raise_contract_error(yaml_columns, sql_columns) -%}\n {%- endif -%}\n {%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_column_schema_from_query", "macro.dbt.get_empty_schema_sql", "macro.dbt.format_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.437597, "supported_languages": null}, "macro.dbt.format_columns": {"name": "format_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.format_columns", "macro_sql": "{% macro format_columns(columns) %}\n {% set formatted_columns = [] %}\n {% for column in columns %}\n {%- set formatted_column = adapter.dispatch('format_column', 'dbt')(column) -%}\n {%- do formatted_columns.append(formatted_column) -%}\n {% endfor %}\n {{ return(formatted_columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__format_column"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.437876, "supported_languages": null}, "macro.dbt.default__format_column": {"name": "default__format_column", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/column/columns_spec_ddl.sql", "original_file_path": "macros/relations/column/columns_spec_ddl.sql", "unique_id": "macro.dbt.default__format_column", "macro_sql": "{% macro default__format_column(column) -%}\n {% set data_type = column.dtype %}\n {% set formatted = column.column.lower() ~ \" 
\" ~ data_type %}\n {{ return({'name': column.name, 'data_type': data_type, 'formatted': formatted}) }}\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.438072, "supported_languages": null}, "macro.dbt.drop_table": {"name": "drop_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt.drop_table", "macro_sql": "{% macro drop_table(relation) -%}\n {{ return(adapter.dispatch('drop_table', 'dbt')(relation)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__drop_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.438229, "supported_languages": null}, "macro.dbt.default__drop_table": {"name": "default__drop_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt.default__drop_table", "macro_sql": "{% macro default__drop_table(relation) -%}\n drop table if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4382899, "supported_languages": null}, "macro.dbt.get_replace_table_sql": {"name": "get_replace_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/replace.sql", "original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt.get_replace_table_sql", "macro_sql": "{% macro get_replace_table_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_table_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_replace_table_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4384599, "supported_languages": null}, "macro.dbt.default__get_replace_table_sql": {"name": "default__get_replace_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/replace.sql", "original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt.default__get_replace_table_sql", "macro_sql": "{% macro default__get_replace_table_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_table_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.43857, "supported_languages": null}, "macro.dbt.get_rename_table_sql": {"name": "get_rename_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt.get_rename_table_sql", "macro_sql": "{% macro get_rename_table_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_table_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__get_rename_table_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 
1715700423.43876, "supported_languages": null}, "macro.dbt.default__get_rename_table_sql": {"name": "default__get_rename_table_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt.default__get_rename_table_sql", "macro_sql": "{% macro default__get_rename_table_sql(relation, new_name) %}\n {{ exceptions.raise_compiler_error(\n \"`get_rename_table_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.438855, "supported_languages": null}, "macro.dbt.get_create_table_as_sql": {"name": "get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.get_create_table_as_sql", "macro_sql": "{% macro get_create_table_as_sql(temporary, relation, sql) -%}\n {{ adapter.dispatch('get_create_table_as_sql', 'dbt')(temporary, relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_table_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.439441, "supported_languages": null}, "macro.dbt.default__get_create_table_as_sql": {"name": "default__get_create_table_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_create_table_as_sql", "macro_sql": "{% macro default__get_create_table_as_sql(temporary, relation, sql) -%}\n {{ return(create_table_as(temporary, relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4395509, "supported_languages": null}, "macro.dbt.create_table_as": {"name": "create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.create_table_as", "macro_sql": "{% macro create_table_as(temporary, relation, compiled_code, language='sql') -%}\n {# backward compatibility for create_table_as that does not support language #}\n {% if language == \"sql\" %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code)}}\n {% else %}\n {{ adapter.dispatch('create_table_as', 'dbt')(temporary, relation, compiled_code, language) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4398, "supported_languages": null}, "macro.dbt.default__create_table_as": {"name": "default__create_table_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__create_table_as", "macro_sql": "{% macro default__create_table_as(temporary, relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is 
not none }}\n\n create {% if temporary: -%}temporary{%- endif %} table\n {{ relation.include(database=(not temporary), schema=(not temporary)) }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced and (not temporary) %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_table_columns_and_constraints() }}\n {%- set sql = get_select_subquery(sql) %}\n {% endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.440209, "supported_languages": null}, "macro.dbt.default__get_column_names": {"name": "default__get_column_names", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_column_names", "macro_sql": "{% macro default__get_column_names() %}\n {#- loop through user_provided_columns to get column names -#}\n {%- set user_provided_columns = model['columns'] -%}\n {%- for i in user_provided_columns %}\n {%- set col = user_provided_columns[i] -%}\n {%- set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] -%}\n {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.44049, "supported_languages": null}, "macro.dbt.get_select_subquery": {"name": "get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.get_select_subquery", "macro_sql": "{% macro get_select_subquery(sql) %}\n {{ return(adapter.dispatch('get_select_subquery', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.440602, "supported_languages": null}, "macro.dbt.default__get_select_subquery": {"name": "default__get_select_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/table/create.sql", "original_file_path": "macros/relations/table/create.sql", "unique_id": "macro.dbt.default__get_select_subquery", "macro_sql": "{% macro default__get_select_subquery(sql) %}\n select {{ adapter.dispatch('get_column_names', 'dbt')() }}\n from (\n {{ sql }}\n ) as model_subq\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__get_column_names"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.440703, "supported_languages": null}, "macro.dbt.drop_view": {"name": "drop_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt.drop_view", "macro_sql": "{% macro drop_view(relation) -%}\n {{ return(adapter.dispatch('drop_view', 'dbt')(relation)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__drop_view"]}, "description": "", "meta": {}, "docs": {"show": true, 
"node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.440865, "supported_languages": null}, "macro.dbt.default__drop_view": {"name": "default__drop_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt.default__drop_view", "macro_sql": "{% macro default__drop_view(relation) -%}\n drop view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.440928, "supported_languages": null}, "macro.dbt.get_replace_view_sql": {"name": "get_replace_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.get_replace_view_sql", "macro_sql": "{% macro get_replace_view_sql(relation, sql) %}\n {{- adapter.dispatch('get_replace_view_sql', 'dbt')(relation, sql) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_replace_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.441456, "supported_languages": null}, "macro.dbt.default__get_replace_view_sql": {"name": "default__get_replace_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.default__get_replace_view_sql", "macro_sql": "{% macro default__get_replace_view_sql(relation, sql) %}\n {{ exceptions.raise_compiler_error(\n \"`get_replace_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4415429, "supported_languages": null}, "macro.dbt.create_or_replace_view": {"name": "create_or_replace_view", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.create_or_replace_view", "macro_sql": "{% macro create_or_replace_view() %}\n {%- set identifier = model['alias'] -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n {%- set exists_as_view = (old_relation is not none and old_relation.is_view) -%}\n\n {%- set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database,\n type='view') -%}\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks) }}\n\n -- If there's a table with the same name and we weren't told to full refresh,\n -- that's an error. If we were told to full refresh, drop it. 
This behavior differs\n -- for Snowflake and BigQuery, so multiple dispatch is used.\n {%- if old_relation is not none and old_relation.is_table -%}\n {{ handle_existing_table(should_full_refresh(), old_relation) }}\n {%- endif -%}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(target_relation, sql) }}\n {%- endcall %}\n\n {% set should_revoke = should_revoke(exists_as_view, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {{ run_hooks(post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.handle_existing_table", "macro.dbt.should_full_refresh", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4421709, "supported_languages": null}, "macro.dbt.handle_existing_table": {"name": "handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.handle_existing_table", "macro_sql": "{% macro handle_existing_table(full_refresh, old_relation) %}\n {{ adapter.dispatch('handle_existing_table', 'dbt')(full_refresh, old_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__handle_existing_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4422872, "supported_languages": null}, "macro.dbt.default__handle_existing_table": {"name": "default__handle_existing_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt.default__handle_existing_table", "macro_sql": "{% macro default__handle_existing_table(full_refresh, old_relation) %}\n {{ log(\"Dropping relation \" ~ old_relation ~ \" because it is of type \" ~ old_relation.type) }}\n {{ adapter.drop_relation(old_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.442415, "supported_languages": null}, "macro.dbt.get_rename_view_sql": {"name": "get_rename_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt.get_rename_view_sql", "macro_sql": "{% macro get_rename_view_sql(relation, new_name) %}\n {{- adapter.dispatch('get_rename_view_sql', 'dbt')(relation, new_name) -}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__get_rename_view_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.442579, "supported_languages": null}, "macro.dbt.default__get_rename_view_sql": {"name": "default__get_rename_view_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt.default__get_rename_view_sql", "macro_sql": "{% macro default__get_rename_view_sql(relation, new_name) %}\n {{ 
exceptions.raise_compiler_error(\n \"`get_rename_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.442667, "supported_languages": null}, "macro.dbt.get_create_view_as_sql": {"name": "get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.get_create_view_as_sql", "macro_sql": "{% macro get_create_view_as_sql(relation, sql) -%}\n {{ adapter.dispatch('get_create_view_as_sql', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4429162, "supported_languages": null}, "macro.dbt.default__get_create_view_as_sql": {"name": "default__get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.default__get_create_view_as_sql", "macro_sql": "{% macro default__get_create_view_as_sql(relation, sql) -%}\n {{ return(create_view_as(relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.443006, "supported_languages": null}, "macro.dbt.create_view_as": {"name": "create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.443108, "supported_languages": null}, "macro.dbt.default__create_view_as": {"name": "default__create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {%- endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4434218, "supported_languages": null}, "macro.dbt.default__test_relationships": {"name": "default__test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/relationships.sql", "original_file_path": "macros/generic_test_sql/relationships.sql", "unique_id": "macro.dbt.default__test_relationships", "macro_sql": "{% macro 
default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.443636, "supported_languages": null}, "macro.dbt.default__test_not_null": {"name": "default__test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/not_null.sql", "original_file_path": "macros/generic_test_sql/not_null.sql", "unique_id": "macro.dbt.default__test_not_null", "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\n{% set column_list = '*' if should_store_failures() else column_name %}\n\nselect {{ column_list }}\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_store_failures"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.443807, "supported_languages": null}, "macro.dbt.default__test_unique": {"name": "default__test_unique", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/unique.sql", "original_file_path": "macros/generic_test_sql/unique.sql", "unique_id": "macro.dbt.default__test_unique", "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4439528, "supported_languages": null}, "macro.dbt.default__test_accepted_values": {"name": "default__test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/accepted_values.sql", "original_file_path": "macros/generic_test_sql/accepted_values.sql", "unique_id": "macro.dbt.default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by {{ column_name }}\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.444303, "supported_languages": null}, "macro.dbt.statement": {"name": "statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.statement", "macro_sql": "\n{%- macro statement(name=None, fetch_result=False, auto_begin=True, language='sql') -%}\n {%- if execute: -%}\n {%- set compiled_code = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime {} for 
node \"{}\"'.format(language, model['unique_id'])) }}\n {{ write(compiled_code) }}\n {%- endif -%}\n {%- if language == 'sql'-%}\n {%- set res, table = adapter.execute(compiled_code, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- elif language == 'python' -%}\n {%- set res = submit_python_job(model, compiled_code) -%}\n {#-- TODO: What should table be for python models? --#}\n {%- set table = None -%}\n {%- else -%}\n {% do exceptions.raise_compiler_error(\"statement macro didn't get supported language\") %}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.445153, "supported_languages": null}, "macro.dbt.noop_statement": {"name": "noop_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.445483, "supported_languages": null}, "macro.dbt.run_query": {"name": "run_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4456499, "supported_languages": null}, "macro.dbt.convert_datetime": {"name": "convert_datetime", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.446781, "supported_languages": null}, "macro.dbt.dates_in_range": {"name": "dates_in_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, 
end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.44755, "supported_languages": null}, "macro.dbt.partition_range": {"name": "partition_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. 
Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.447979, "supported_languages": null}, "macro.dbt.py_current_timestring": {"name": "py_current_timestring", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.448116, "supported_languages": null}, "macro.dbt.except": {"name": "except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.except", "macro_sql": "{% macro except() %}\n {{ return(adapter.dispatch('except', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.448246, "supported_languages": null}, "macro.dbt.default__except": {"name": "default__except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.default__except", "macro_sql": "{% macro default__except() %}\n\n except\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.448292, "supported_languages": null}, "macro.dbt.get_intervals_between": {"name": "get_intervals_between", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.get_intervals_between", "macro_sql": "{% macro get_intervals_between(start_date, end_date, datepart) -%}\n {{ return(adapter.dispatch('get_intervals_between', 'dbt')(start_date, end_date, datepart)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_intervals_between"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.448746, "supported_languages": null}, "macro.dbt.default__get_intervals_between": {"name": "default__get_intervals_between", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.default__get_intervals_between", "macro_sql": "{% macro default__get_intervals_between(start_date, end_date, datepart) -%}\n {%- call statement('get_intervals_between', fetch_result=True) %}\n\n select {{ dbt.datediff(start_date, end_date, datepart) }}\n\n {%- endcall -%}\n\n {%- set value_list = load_result('get_intervals_between') -%}\n\n {%- if value_list and value_list['data'] -%}\n {%- set values = value_list['data'] | map(attribute=0) | list %}\n {{ return(values[0]) }}\n {%- else -%}\n {{ return(1) }}\n {%- endif -%}\n\n{%- endmacro 
%}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.449085, "supported_languages": null}, "macro.dbt.date_spine": {"name": "date_spine", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.date_spine", "macro_sql": "{% macro date_spine(datepart, start_date, end_date) %}\n {{ return(adapter.dispatch('date_spine', 'dbt')(datepart, start_date, end_date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.449216, "supported_languages": null}, "macro.dbt.default__date_spine": {"name": "default__date_spine", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.default__date_spine", "macro_sql": "{% macro default__date_spine(datepart, start_date, end_date) %}\n\n\n {# call as follows:\n\n date_spine(\n \"day\",\n \"to_date('01/01/2016', 'mm/dd/yyyy')\",\n \"dbt.dateadd(week, 1, current_date)\"\n ) #}\n\n\n with rawdata as (\n\n {{dbt.generate_series(\n dbt.get_intervals_between(start_date, end_date, datepart)\n )}}\n\n ),\n\n all_periods as (\n\n select (\n {{\n dbt.dateadd(\n datepart,\n \"row_number() over (order by 1) - 1\",\n start_date\n )\n }}\n ) as date_{{datepart}}\n from rawdata\n\n ),\n\n filtered as (\n\n select *\n from all_periods\n where date_{{datepart}} <= {{ end_date }}\n\n )\n\n select * from filtered\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.generate_series", "macro.dbt.get_intervals_between", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.449431, "supported_languages": null}, "macro.dbt.replace": {"name": "replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.replace", "macro_sql": "{% macro replace(field, old_chars, new_chars) -%}\n {{ return(adapter.dispatch('replace', 'dbt') (field, old_chars, new_chars)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__replace"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.449636, "supported_languages": null}, "macro.dbt.default__replace": {"name": "default__replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.default__replace", "macro_sql": "{% macro default__replace(field, old_chars, new_chars) %}\n\n replace(\n {{ field }},\n {{ old_chars }},\n {{ new_chars }}\n )\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.449735, "supported_languages": null}, "macro.dbt.concat": {"name": "concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.concat", "macro_sql": "{% macro concat(fields) 
-%}\n {{ return(adapter.dispatch('concat', 'dbt')(fields)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4498801, "supported_languages": null}, "macro.dbt.default__concat": {"name": "default__concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.default__concat", "macro_sql": "{% macro default__concat(fields) -%}\n {{ fields|join(' || ') }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.449953, "supported_languages": null}, "macro.dbt.get_powers_of_two": {"name": "get_powers_of_two", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.get_powers_of_two", "macro_sql": "{% macro get_powers_of_two(upper_bound) %}\n {{ return(adapter.dispatch('get_powers_of_two', 'dbt')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.450476, "supported_languages": null}, "macro.dbt.default__get_powers_of_two": {"name": "default__get_powers_of_two", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.default__get_powers_of_two", "macro_sql": "{% macro default__get_powers_of_two(upper_bound) %}\n\n {% if upper_bound <= 0 %}\n {{ exceptions.raise_compiler_error(\"upper bound must be positive\") }}\n {% endif %}\n\n {% for _ in range(1, 100) %}\n {% if upper_bound <= 2 ** loop.index %}{{ return(loop.index) }}{% endif %}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.450722, "supported_languages": null}, "macro.dbt.generate_series": {"name": "generate_series", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.generate_series", "macro_sql": "{% macro generate_series(upper_bound) %}\n {{ return(adapter.dispatch('generate_series', 'dbt')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_series"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4508328, "supported_languages": null}, "macro.dbt.default__generate_series": {"name": "default__generate_series", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.default__generate_series", "macro_sql": "{% macro default__generate_series(upper_bound) %}\n\n {% set n = dbt.get_powers_of_two(upper_bound) %}\n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n {% for i in range(n) %}\n p{{i}}.generated_number * power(2, {{i}})\n {% if not loop.last %} + {% endif %}\n 
{% endfor %}\n + 1\n as generated_number\n\n from\n\n {% for i in range(n) %}\n p as p{{i}}\n {% if not loop.last %} cross join {% endif %}\n {% endfor %}\n\n )\n\n select *\n from unioned\n where generated_number <= {{upper_bound}}\n order by generated_number\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.451209, "supported_languages": null}, "macro.dbt.length": {"name": "length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.length", "macro_sql": "{% macro length(expression) -%}\n {{ return(adapter.dispatch('length', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__length"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.451361, "supported_languages": null}, "macro.dbt.default__length": {"name": "default__length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.default__length", "macro_sql": "{% macro default__length(expression) %}\n\n length(\n {{ expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.451425, "supported_languages": null}, "macro.dbt.dateadd": {"name": "dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.dateadd", "macro_sql": "{% macro dateadd(datepart, interval, from_date_or_timestamp) %}\n {{ return(adapter.dispatch('dateadd', 'dbt')(datepart, interval, from_date_or_timestamp)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.451623, "supported_languages": null}, "macro.dbt.default__dateadd": {"name": "default__dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.default__dateadd", "macro_sql": "{% macro default__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n dateadd(\n {{ datepart }},\n {{ interval }},\n {{ from_date_or_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.451721, "supported_languages": null}, "macro.dbt.intersect": {"name": "intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.intersect", "macro_sql": "{% macro intersect() %}\n {{ return(adapter.dispatch('intersect', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__intersect"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4518468, "supported_languages": null}, "macro.dbt.default__intersect": {"name": "default__intersect", 
"resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.default__intersect", "macro_sql": "{% macro default__intersect() %}\n\n intersect\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.451891, "supported_languages": null}, "macro.dbt.escape_single_quotes": {"name": "escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.escape_single_quotes", "macro_sql": "{% macro escape_single_quotes(expression) %}\n {{ return(adapter.dispatch('escape_single_quotes', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__escape_single_quotes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.452044, "supported_languages": null}, "macro.dbt.default__escape_single_quotes": {"name": "default__escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.default__escape_single_quotes", "macro_sql": "{% macro default__escape_single_quotes(expression) -%}\n{{ expression | replace(\"'\",\"''\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.45214, "supported_languages": null}, "macro.dbt.right": {"name": "right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.right", "macro_sql": "{% macro right(string_text, length_expression) -%}\n {{ return(adapter.dispatch('right', 'dbt') (string_text, length_expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__right"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4523141, "supported_languages": null}, "macro.dbt.default__right": {"name": "default__right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.default__right", "macro_sql": "{% macro default__right(string_text, length_expression) %}\n\n right(\n {{ string_text }},\n {{ length_expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.452391, "supported_languages": null}, "macro.dbt.listagg": {"name": "listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.listagg", "macro_sql": "{% macro listagg(measure, delimiter_text=\"','\", order_by_clause=none, limit_num=none) -%}\n {{ return(adapter.dispatch('listagg', 'dbt') (measure, delimiter_text, order_by_clause, limit_num)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__listagg"]}, "description": "", "meta": {}, "docs": {"show": true, 
"node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4527621, "supported_languages": null}, "macro.dbt.default__listagg": {"name": "default__listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.default__listagg", "macro_sql": "{% macro default__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n array_slice(\n array_agg(\n {{ measure }}\n ){% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n ,0\n ,{{ limit_num }}\n ),\n {{ delimiter_text }}\n )\n {%- else %}\n listagg(\n {{ measure }},\n {{ delimiter_text }}\n )\n {% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.452998, "supported_languages": null}, "macro.dbt.datediff": {"name": "datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.datediff", "macro_sql": "{% macro datediff(first_date, second_date, datepart) %}\n {{ return(adapter.dispatch('datediff', 'dbt')(first_date, second_date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.453195, "supported_languages": null}, "macro.dbt.default__datediff": {"name": "default__datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.default__datediff", "macro_sql": "{% macro default__datediff(first_date, second_date, datepart) -%}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4532921, "supported_languages": null}, "macro.dbt.safe_cast": {"name": "safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.safe_cast", "macro_sql": "{% macro safe_cast(field, type) %}\n {{ return(adapter.dispatch('safe_cast', 'dbt') (field, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__safe_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.453459, "supported_languages": null}, "macro.dbt.default__safe_cast": {"name": "default__safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.default__safe_cast", "macro_sql": "{% macro default__safe_cast(field, type) %}\n {# most databases don't support this function yet\n so we just need to use cast #}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.453542, 
"supported_languages": null}, "macro.dbt.hash": {"name": "hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.hash", "macro_sql": "{% macro hash(field) -%}\n {{ return(adapter.dispatch('hash', 'dbt') (field)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__hash"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4536881, "supported_languages": null}, "macro.dbt.default__hash": {"name": "default__hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.default__hash", "macro_sql": "{% macro default__hash(field) -%}\n md5(cast({{ field }} as {{ api.Column.translate_type('string') }}))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.45378, "supported_languages": null}, "macro.dbt.cast_bool_to_text": {"name": "cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.cast_bool_to_text", "macro_sql": "{% macro cast_bool_to_text(field) %}\n {{ adapter.dispatch('cast_bool_to_text', 'dbt') (field) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__cast_bool_to_text"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4539242, "supported_languages": null}, "macro.dbt.default__cast_bool_to_text": {"name": "default__cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.default__cast_bool_to_text", "macro_sql": "{% macro default__cast_bool_to_text(field) %}\n cast({{ field }} as {{ api.Column.translate_type('string') }})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.454015, "supported_languages": null}, "macro.dbt.any_value": {"name": "any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.any_value", "macro_sql": "{% macro any_value(expression) -%}\n {{ return(adapter.dispatch('any_value', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__any_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.454158, "supported_languages": null}, "macro.dbt.default__any_value": {"name": "default__any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.default__any_value", "macro_sql": "{% macro default__any_value(expression) -%}\n\n any_value({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.454218, 
"supported_languages": null}, "macro.dbt.position": {"name": "position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.position", "macro_sql": "{% macro position(substring_text, string_text) -%}\n {{ return(adapter.dispatch('position', 'dbt') (substring_text, string_text)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__position"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4543831, "supported_languages": null}, "macro.dbt.default__position": {"name": "default__position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.default__position", "macro_sql": "{% macro default__position(substring_text, string_text) %}\n\n position(\n {{ substring_text }} in {{ string_text }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.454467, "supported_languages": null}, "macro.dbt.string_literal": {"name": "string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.string_literal", "macro_sql": "{%- macro string_literal(value) -%}\n {{ return(adapter.dispatch('string_literal', 'dbt') (value)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.454606, "supported_languages": null}, "macro.dbt.default__string_literal": {"name": "default__string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.default__string_literal", "macro_sql": "{% macro default__string_literal(value) -%}\n '{{ value }}'\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.454667, "supported_languages": null}, "macro.dbt.type_string": {"name": "type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_string", "macro_sql": "\n\n{%- macro type_string() -%}\n {{ return(adapter.dispatch('type_string', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4552891, "supported_languages": null}, "macro.dbt.default__type_string": {"name": "default__type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_string", "macro_sql": "{% macro default__type_string() %}\n {{ return(api.Column.translate_type(\"string\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 
1715700423.455394, "supported_languages": null}, "macro.dbt.type_timestamp": {"name": "type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_timestamp", "macro_sql": "\n\n{%- macro type_timestamp() -%}\n {{ return(adapter.dispatch('type_timestamp', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4554932, "supported_languages": null}, "macro.dbt.default__type_timestamp": {"name": "default__type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_timestamp", "macro_sql": "{% macro default__type_timestamp() %}\n {{ return(api.Column.translate_type(\"timestamp\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.455592, "supported_languages": null}, "macro.dbt.type_float": {"name": "type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_float", "macro_sql": "\n\n{%- macro type_float() -%}\n {{ return(adapter.dispatch('type_float', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_float"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4557, "supported_languages": null}, "macro.dbt.default__type_float": {"name": "default__type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_float", "macro_sql": "{% macro default__type_float() %}\n {{ return(api.Column.translate_type(\"float\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.45581, "supported_languages": null}, "macro.dbt.type_numeric": {"name": "type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_numeric", "macro_sql": "\n\n{%- macro type_numeric() -%}\n {{ return(adapter.dispatch('type_numeric', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.455918, "supported_languages": null}, "macro.dbt.default__type_numeric": {"name": "default__type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_numeric", "macro_sql": "{% macro default__type_numeric() %}\n {{ return(api.Column.numeric_type(\"numeric\", 28, 6)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 
1715700423.456036, "supported_languages": null}, "macro.dbt.type_bigint": {"name": "type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_bigint", "macro_sql": "\n\n{%- macro type_bigint() -%}\n {{ return(adapter.dispatch('type_bigint', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4561322, "supported_languages": null}, "macro.dbt.default__type_bigint": {"name": "default__type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_bigint", "macro_sql": "{% macro default__type_bigint() %}\n {{ return(api.Column.translate_type(\"bigint\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.456219, "supported_languages": null}, "macro.dbt.type_int": {"name": "type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_int", "macro_sql": "\n\n{%- macro type_int() -%}\n {{ return(adapter.dispatch('type_int', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4563122, "supported_languages": null}, "macro.dbt.default__type_int": {"name": "default__type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_int", "macro_sql": "{%- macro default__type_int() -%}\n {{ return(api.Column.translate_type(\"integer\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.456404, "supported_languages": null}, "macro.dbt.type_boolean": {"name": "type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_boolean", "macro_sql": "\n\n{%- macro type_boolean() -%}\n {{ return(adapter.dispatch('type_boolean', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_boolean"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4565032, "supported_languages": null}, "macro.dbt.default__type_boolean": {"name": "default__type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_boolean", "macro_sql": "{%- macro default__type_boolean() -%}\n {{ return(api.Column.translate_type(\"boolean\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.456606, 
"supported_languages": null}, "macro.dbt.array_concat": {"name": "array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.array_concat", "macro_sql": "{% macro array_concat(array_1, array_2) -%}\n {{ return(adapter.dispatch('array_concat', 'dbt')(array_1, array_2)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__array_concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4568682, "supported_languages": null}, "macro.dbt.default__array_concat": {"name": "default__array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.default__array_concat", "macro_sql": "{% macro default__array_concat(array_1, array_2) -%}\n array_cat({{ array_1 }}, {{ array_2 }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.457029, "supported_languages": null}, "macro.dbt.bool_or": {"name": "bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.bool_or", "macro_sql": "{% macro bool_or(expression) -%}\n {{ return(adapter.dispatch('bool_or', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__bool_or"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4572248, "supported_languages": null}, "macro.dbt.default__bool_or": {"name": "default__bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.default__bool_or", "macro_sql": "{% macro default__bool_or(expression) -%}\n\n bool_or({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4573061, "supported_languages": null}, "macro.dbt.last_day": {"name": "last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.last_day", "macro_sql": "{% macro last_day(date, datepart) %}\n {{ return(adapter.dispatch('last_day', 'dbt') (date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.457695, "supported_languages": null}, "macro.dbt.default_last_day": {"name": "default_last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default_last_day", "macro_sql": "\n\n{%- macro default_last_day(date, datepart) -%}\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd(datepart, '1', dbt.date_trunc(datepart, date))\n )}}\n as date)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, 
"node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4579258, "supported_languages": null}, "macro.dbt.default__last_day": {"name": "default__last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default__last_day", "macro_sql": "{% macro default__last_day(date, datepart) -%}\n {{dbt.default_last_day(date, datepart)}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4580219, "supported_languages": null}, "macro.dbt.split_part": {"name": "split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.split_part", "macro_sql": "{% macro split_part(string_text, delimiter_text, part_number) %}\n {{ return(adapter.dispatch('split_part', 'dbt') (string_text, delimiter_text, part_number)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__split_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4583528, "supported_languages": null}, "macro.dbt.default__split_part": {"name": "default__split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.default__split_part", "macro_sql": "{% macro default__split_part(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.458462, "supported_languages": null}, "macro.dbt._split_part_negative": {"name": "_split_part_negative", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt._split_part_negative", "macro_sql": "{% macro _split_part_negative(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n length({{ string_text }})\n - length(\n replace({{ string_text }}, {{ delimiter_text }}, '')\n ) + 2 + {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4585888, "supported_languages": null}, "macro.dbt.date_trunc": {"name": "date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.date_trunc", "macro_sql": "{% macro date_trunc(datepart, date) -%}\n {{ return(adapter.dispatch('date_trunc', 'dbt') (datepart, date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4587479, "supported_languages": null}, "macro.dbt.default__date_trunc": {"name": "default__date_trunc", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.default__date_trunc", "macro_sql": "{% macro default__date_trunc(datepart, date) -%}\n date_trunc('{{datepart}}', {{date}})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4588218, "supported_languages": null}, "macro.dbt.array_construct": {"name": "array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.array_construct", "macro_sql": "{% macro array_construct(inputs=[], data_type=api.Column.translate_type('integer')) -%}\n {{ return(adapter.dispatch('array_construct', 'dbt')(inputs, data_type)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__array_construct"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.459041, "supported_languages": null}, "macro.dbt.default__array_construct": {"name": "default__array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.default__array_construct", "macro_sql": "{% macro default__array_construct(inputs, data_type) -%}\n {% if inputs|length > 0 %}\n array[ {{ inputs|join(' , ') }} ]\n {% else %}\n array[]::{{data_type}}[]\n {% endif %}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.459262, "supported_languages": null}, "macro.dbt.array_append": {"name": "array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.array_append", "macro_sql": "{% macro array_append(array, new_element) -%}\n {{ return(adapter.dispatch('array_append', 'dbt')(array, new_element)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__array_append"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4594262, "supported_languages": null}, "macro.dbt.default__array_append": {"name": "default__array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.default__array_append", "macro_sql": "{% macro default__array_append(array, new_element) -%}\n array_append({{ array }}, {{ new_element }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4595048, "supported_languages": null}, "macro.dbt.create_schema": {"name": "create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_schema"]}, "description": "", "meta": {}, "docs": 
{"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.459727, "supported_languages": null}, "macro.dbt.default__create_schema": {"name": "default__create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.459838, "supported_languages": null}, "macro.dbt.drop_schema": {"name": "drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.459934, "supported_languages": null}, "macro.dbt.default__drop_schema": {"name": "default__drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.460043, "supported_languages": null}, "macro.dbt.current_timestamp": {"name": "current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp", "macro_sql": "{%- macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.460381, "supported_languages": null}, "macro.dbt.default__current_timestamp": {"name": "default__current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter ' + adapter.type()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.460468, "supported_languages": null}, "macro.dbt.snapshot_get_time": {"name": "snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.snapshot_get_time", "macro_sql": "\n\n{%- macro snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4605498, "supported_languages": null}, "macro.dbt.default__snapshot_get_time": {"name": "default__snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() %}\n {{ current_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.460624, "supported_languages": null}, "macro.dbt.current_timestamp_backcompat": {"name": "current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_backcompat", "macro_sql": "{% macro current_timestamp_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.460717, "supported_languages": null}, "macro.dbt.default__current_timestamp_backcompat": {"name": "default__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_backcompat", "macro_sql": "{% macro default__current_timestamp_backcompat() %}\n current_timestamp::timestamp\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.460761, "supported_languages": null}, "macro.dbt.current_timestamp_in_utc_backcompat": {"name": "current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_in_utc_backcompat", "macro_sql": "{% macro current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_in_utc_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__current_timestamp_in_utc_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.460854, "supported_languages": null}, "macro.dbt.default__current_timestamp_in_utc_backcompat": {"name": "default__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro 
default__current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.dbt_bigquery.bigquery__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.460947, "supported_languages": null}, "macro.dbt.get_create_index_sql": {"name": "get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.461458, "supported_languages": null}, "macro.dbt.default__get_create_index_sql": {"name": "default__get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.461539, "supported_languages": null}, "macro.dbt.create_indexes": {"name": "create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.create_indexes", "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4616268, "supported_languages": null}, "macro.dbt.default__create_indexes": {"name": "default__create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__create_indexes", "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.461856, "supported_languages": null}, "macro.dbt.get_drop_index_sql": {"name": "get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_drop_index_sql", "macro_sql": "{% macro get_drop_index_sql(relation, index_name) -%}\n {{ 
adapter.dispatch('get_drop_index_sql', 'dbt')(relation, index_name) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_drop_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.461966, "supported_languages": null}, "macro.dbt.default__get_drop_index_sql": {"name": "default__get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_drop_index_sql", "macro_sql": "{% macro default__get_drop_index_sql(relation, index_name) -%}\n {{ exceptions.raise_compiler_error(\"`get_drop_index_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.462044, "supported_languages": null}, "macro.dbt.get_show_indexes_sql": {"name": "get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_show_indexes_sql", "macro_sql": "{% macro get_show_indexes_sql(relation) -%}\n {{ adapter.dispatch('get_show_indexes_sql', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_show_indexes_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4621341, "supported_languages": null}, "macro.dbt.default__get_show_indexes_sql": {"name": "default__get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_show_indexes_sql", "macro_sql": "{% macro default__get_show_indexes_sql(relation) -%}\n {{ exceptions.raise_compiler_error(\"`get_show_indexes_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.462206, "supported_languages": null}, "macro.dbt.make_intermediate_relation": {"name": "make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_intermediate_relation", "macro_sql": "{% macro make_intermediate_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_intermediate_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__make_intermediate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.463475, "supported_languages": null}, "macro.dbt.default__make_intermediate_relation": {"name": "default__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_intermediate_relation", "macro_sql": "{% macro default__make_intermediate_relation(base_relation, suffix) %}\n {{ return(default__make_temp_relation(base_relation, suffix)) }}\n{% endmacro %}", 
"depends_on": {"macros": ["macro.dbt.default__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.46359, "supported_languages": null}, "macro.dbt.make_temp_relation": {"name": "make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.46375, "supported_languages": null}, "macro.dbt.default__make_temp_relation": {"name": "default__make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {%- set temp_identifier = base_relation.identifier ~ suffix -%}\n {%- set temp_relation = base_relation.incorporate(\n path={\"identifier\": temp_identifier}) -%}\n\n {{ return(temp_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4641511, "supported_languages": null}, "macro.dbt.make_backup_relation": {"name": "make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_backup_relation", "macro_sql": "{% macro make_backup_relation(base_relation, backup_relation_type, suffix='__dbt_backup') %}\n {{ return(adapter.dispatch('make_backup_relation', 'dbt')(base_relation, backup_relation_type, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__make_backup_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.464291, "supported_languages": null}, "macro.dbt.default__make_backup_relation": {"name": "default__make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_backup_relation", "macro_sql": "{% macro default__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {%- set backup_identifier = base_relation.identifier ~ suffix -%}\n {%- set backup_relation = base_relation.incorporate(\n path={\"identifier\": backup_identifier},\n type=backup_relation_type\n ) -%}\n {{ return(backup_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.464472, "supported_languages": null}, "macro.dbt.truncate_relation": {"name": "truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.truncate_relation", "macro_sql": "{% macro 
truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.464573, "supported_languages": null}, "macro.dbt.default__truncate_relation": {"name": "default__truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.464664, "supported_languages": null}, "macro.dbt.get_or_create_relation": {"name": "get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) -%}\n {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_or_create_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.464799, "supported_languages": null}, "macro.dbt.default__get_or_create_relation": {"name": "default__get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__get_or_create_relation", "macro_sql": "{% macro default__get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.46511, "supported_languages": null}, "macro.dbt.load_cached_relation": {"name": "load_cached_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_cached_relation", "macro_sql": "{% macro load_cached_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4652438, "supported_languages": null}, "macro.dbt.load_relation": {"name": "load_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", 
"original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_relation", "macro_sql": "{% macro load_relation(relation) %}\n {{ return(load_cached_relation(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.465324, "supported_languages": null}, "macro.dbt.collect_freshness": {"name": "collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.465569, "supported_languages": null}, "macro.dbt.default__collect_freshness": {"name": "default__collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4657989, "supported_languages": null}, "macro.dbt.validate_sql": {"name": "validate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/validate_sql.sql", "original_file_path": "macros/adapters/validate_sql.sql", "unique_id": "macro.dbt.validate_sql", "macro_sql": "{% macro validate_sql(sql) -%}\n {{ return(adapter.dispatch('validate_sql', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__validate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4659631, "supported_languages": null}, "macro.dbt.default__validate_sql": {"name": "default__validate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/validate_sql.sql", "original_file_path": "macros/adapters/validate_sql.sql", "unique_id": "macro.dbt.default__validate_sql", "macro_sql": "{% macro default__validate_sql(sql) -%}\n {% call statement('validate_sql') -%}\n explain {{ sql }}\n {% endcall %}\n {{ return(load_result('validate_sql')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.466089, "supported_languages": null}, "macro.dbt.copy_grants": {"name": "copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": 
"macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.copy_grants", "macro_sql": "{% macro copy_grants() %}\n {{ return(adapter.dispatch('copy_grants', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.467128, "supported_languages": null}, "macro.dbt.default__copy_grants": {"name": "default__copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__copy_grants", "macro_sql": "{% macro default__copy_grants() %}\n {{ return(True) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4671931, "supported_languages": null}, "macro.dbt.support_multiple_grantees_per_dcl_statement": {"name": "support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.support_multiple_grantees_per_dcl_statement", "macro_sql": "{% macro support_multiple_grantees_per_dcl_statement() %}\n {{ return(adapter.dispatch('support_multiple_grantees_per_dcl_statement', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.467287, "supported_languages": null}, "macro.dbt.default__support_multiple_grantees_per_dcl_statement": {"name": "default__support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__support_multiple_grantees_per_dcl_statement", "macro_sql": "\n\n{%- macro default__support_multiple_grantees_per_dcl_statement() -%}\n {{ return(True) }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.46735, "supported_languages": null}, "macro.dbt.should_revoke": {"name": "should_revoke", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.should_revoke", "macro_sql": "{% macro should_revoke(existing_relation, full_refresh_mode=True) %}\n\n {% if not existing_relation %}\n {#-- The table doesn't already exist, so no grants to copy over --#}\n {{ return(False) }}\n {% elif full_refresh_mode %}\n {#-- The object is being REPLACED -- whether grants are copied over depends on the value of user config --#}\n {{ return(copy_grants()) }}\n {% else %}\n {#-- The table is being merged/upserted/inserted -- grants will be carried over --#}\n {{ return(True) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.467548, "supported_languages": null}, "macro.dbt.get_show_grant_sql": {"name": "get_show_grant_sql", 
"resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_show_grant_sql", "macro_sql": "{% macro get_show_grant_sql(relation) %}\n {{ return(adapter.dispatch(\"get_show_grant_sql\", \"dbt\")(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__get_show_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.467655, "supported_languages": null}, "macro.dbt.default__get_show_grant_sql": {"name": "default__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_show_grant_sql", "macro_sql": "{% macro default__get_show_grant_sql(relation) %}\n show grants on {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.467718, "supported_languages": null}, "macro.dbt.get_grant_sql": {"name": "get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_grant_sql", "macro_sql": "{% macro get_grant_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_grant_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__get_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.467844, "supported_languages": null}, "macro.dbt.default__get_grant_sql": {"name": "default__get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_grant_sql", "macro_sql": "\n\n{%- macro default__get_grant_sql(relation, privilege, grantees) -%}\n grant {{ privilege }} on {{ relation }} to {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.467957, "supported_languages": null}, "macro.dbt.get_revoke_sql": {"name": "get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_revoke_sql", "macro_sql": "{% macro get_revoke_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_revoke_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__get_revoke_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.468086, "supported_languages": null}, "macro.dbt.default__get_revoke_sql": {"name": "default__get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_revoke_sql", "macro_sql": "\n\n{%- macro default__get_revoke_sql(relation, 
privilege, grantees) -%}\n revoke {{ privilege }} on {{ relation }} from {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.468195, "supported_languages": null}, "macro.dbt.get_dcl_statement_list": {"name": "get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_dcl_statement_list", "macro_sql": "{% macro get_dcl_statement_list(relation, grant_config, get_dcl_macro) %}\n {{ return(adapter.dispatch('get_dcl_statement_list', 'dbt')(relation, grant_config, get_dcl_macro)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_dcl_statement_list"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.468323, "supported_languages": null}, "macro.dbt.default__get_dcl_statement_list": {"name": "default__get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_dcl_statement_list", "macro_sql": "\n\n{%- macro default__get_dcl_statement_list(relation, grant_config, get_dcl_macro) -%}\n {#\n -- Unpack grant_config into specific privileges and the set of users who need them granted/revoked.\n -- Depending on whether this database supports multiple grantees per statement, pass in the list of\n -- all grantees per privilege, or (if not) template one statement per privilege-grantee pair.\n -- `get_dcl_macro` will be either `get_grant_sql` or `get_revoke_sql`\n #}\n {%- set dcl_statements = [] -%}\n {%- for privilege, grantees in grant_config.items() %}\n {%- if support_multiple_grantees_per_dcl_statement() and grantees -%}\n {%- set dcl = get_dcl_macro(relation, privilege, grantees) -%}\n {%- do dcl_statements.append(dcl) -%}\n {%- else -%}\n {%- for grantee in grantees -%}\n {% set dcl = get_dcl_macro(relation, privilege, [grantee]) %}\n {%- do dcl_statements.append(dcl) -%}\n {% endfor -%}\n {%- endif -%}\n {%- endfor -%}\n {{ return(dcl_statements) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.468776, "supported_languages": null}, "macro.dbt.call_dcl_statements": {"name": "call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.call_dcl_statements", "macro_sql": "{% macro call_dcl_statements(dcl_statement_list) %}\n {{ return(adapter.dispatch(\"call_dcl_statements\", \"dbt\")(dcl_statement_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.468887, "supported_languages": null}, "macro.dbt.default__call_dcl_statements": {"name": "default__call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": 
"macro.dbt.default__call_dcl_statements", "macro_sql": "{% macro default__call_dcl_statements(dcl_statement_list) %}\n {#\n -- By default, supply all grant + revoke statements in a single semicolon-separated block,\n -- so that they're all processed together.\n\n -- Some databases do not support this. Those adapters will need to override this macro\n -- to run each statement individually.\n #}\n {% call statement('grants') %}\n {% for dcl_statement in dcl_statement_list %}\n {{ dcl_statement }};\n {% endfor %}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.469033, "supported_languages": null}, "macro.dbt.apply_grants": {"name": "apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.apply_grants", "macro_sql": "{% macro apply_grants(relation, grant_config, should_revoke) %}\n {{ return(adapter.dispatch(\"apply_grants\", \"dbt\")(relation, grant_config, should_revoke)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.469167, "supported_languages": null}, "macro.dbt.default__apply_grants": {"name": "default__apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__apply_grants", "macro_sql": "{% macro default__apply_grants(relation, grant_config, should_revoke=True) %}\n {#-- If grant_config is {} or None, this is a no-op --#}\n {% if grant_config %}\n {% if should_revoke %}\n {#-- We think previous grants may have carried over --#}\n {#-- Show current grants and calculate diffs --#}\n {% set current_grants_table = run_query(get_show_grant_sql(relation)) %}\n {% set current_grants_dict = adapter.standardize_grants_dict(current_grants_table) %}\n {% set needs_granting = diff_of_two_dicts(grant_config, current_grants_dict) %}\n {% set needs_revoking = diff_of_two_dicts(current_grants_dict, grant_config) %}\n {% if not (needs_granting or needs_revoking) %}\n {{ log('On ' ~ relation ~': All grants are in place, no revocation or granting needed.')}}\n {% endif %}\n {% else %}\n {#-- We don't think there's any chance of previous grants having carried over. --#}\n {#-- Jump straight to granting what the user has configured. 
--#}\n {% set needs_revoking = {} %}\n {% set needs_granting = grant_config %}\n {% endif %}\n {% if needs_granting or needs_revoking %}\n {% set revoke_statement_list = get_dcl_statement_list(relation, needs_revoking, get_revoke_sql) %}\n {% set grant_statement_list = get_dcl_statement_list(relation, needs_granting, get_grant_sql) %}\n {% set dcl_statement_list = revoke_statement_list + grant_statement_list %}\n {% if dcl_statement_list %}\n {{ call_dcl_statements(dcl_statement_list) }}\n {% endif %}\n {% endif %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_grant_sql", "macro.dbt.get_dcl_statement_list", "macro.dbt.call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.469808, "supported_languages": null}, "macro.dbt.get_show_sql": {"name": "get_show_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.get_show_sql", "macro_sql": "{% macro get_show_sql(compiled_code, sql_header, limit) -%}\n {%- if sql_header -%}\n {{ sql_header }}\n {%- endif -%}\n {%- if limit is not none -%}\n {{ get_limit_subquery_sql(compiled_code, limit) }}\n {%- else -%}\n {{ compiled_code }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_limit_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.470131, "supported_languages": null}, "macro.dbt.get_limit_subquery_sql": {"name": "get_limit_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.get_limit_subquery_sql", "macro_sql": "{% macro get_limit_subquery_sql(sql, limit) %}\n {{ adapter.dispatch('get_limit_subquery_sql', 'dbt')(sql, limit) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_limit_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.470243, "supported_languages": null}, "macro.dbt.default__get_limit_subquery_sql": {"name": "default__get_limit_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.default__get_limit_subquery_sql", "macro_sql": "{% macro default__get_limit_subquery_sql(sql, limit) %}\n select *\n from (\n {{ sql }}\n ) as model_limit_subq\n limit {{ limit }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.470332, "supported_languages": null}, "macro.dbt.alter_column_comment": {"name": "alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1715700423.470737, "supported_languages": null}, "macro.dbt.default__alter_column_comment": {"name": "default__alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.470834, "supported_languages": null}, "macro.dbt.alter_relation_comment": {"name": "alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.470947, "supported_languages": null}, "macro.dbt.default__alter_relation_comment": {"name": "default__alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.471046, "supported_languages": null}, "macro.dbt.persist_docs": {"name": "persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.471199, "supported_languages": null}, "macro.dbt.default__persist_docs": {"name": "default__persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do 
run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.471476, "supported_languages": null}, "macro.dbt.get_catalog_relations": {"name": "get_catalog_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog_relations", "macro_sql": "{% macro get_catalog_relations(information_schema, relations) -%}\n {{ return(adapter.dispatch('get_catalog_relations', 'dbt')(information_schema, relations)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__get_catalog_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.473578, "supported_languages": null}, "macro.dbt.default__get_catalog_relations": {"name": "default__get_catalog_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog_relations", "macro_sql": "{% macro default__get_catalog_relations(information_schema, relations) -%}\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog_relations not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.473762, "supported_languages": null}, "macro.dbt.get_catalog": {"name": "get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.473888, "supported_languages": null}, "macro.dbt.default__get_catalog": {"name": "default__get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.474045, "supported_languages": null}, "macro.dbt.information_schema_name": {"name": "information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.information_schema_name", "macro_sql": "{% macro 
information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.474155, "supported_languages": null}, "macro.dbt.default__information_schema_name": {"name": "default__information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.474255, "supported_languages": null}, "macro.dbt.list_schemas": {"name": "list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.474357, "supported_languages": null}, "macro.dbt.default__list_schemas": {"name": "default__list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.474496, "supported_languages": null}, "macro.dbt.check_schema_exists": {"name": "check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.474615, "supported_languages": null}, "macro.dbt.default__check_schema_exists": {"name": "default__check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n {% set 
sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.replace", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.474786, "supported_languages": null}, "macro.dbt.list_relations_without_caching": {"name": "list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.474893, "supported_languages": null}, "macro.dbt.default__list_relations_without_caching": {"name": "default__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.474987, "supported_languages": null}, "macro.dbt.get_relations": {"name": "get_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_relations", "macro_sql": "{% macro get_relations() %}\n {{ return(adapter.dispatch('get_relations', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.47508, "supported_languages": null}, "macro.dbt.default__get_relations": {"name": "default__get_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_relations", "macro_sql": "{% macro default__get_relations() %}\n {{ exceptions.raise_not_implemented(\n 'get_relations macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.475167, "supported_languages": null}, "macro.dbt.get_relation_last_modified": {"name": "get_relation_last_modified", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_relation_last_modified", "macro_sql": "{% macro 
get_relation_last_modified(information_schema, relations) %}\n {{ return(adapter.dispatch('get_relation_last_modified', 'dbt')(information_schema, relations)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_relation_last_modified"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4753711, "supported_languages": null}, "macro.dbt.default__get_relation_last_modified": {"name": "default__get_relation_last_modified", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_relation_last_modified", "macro_sql": "{% macro default__get_relation_last_modified(information_schema, relations) %}\n {{ exceptions.raise_not_implemented(\n 'get_relation_last_modified macro not implemented for adapter ' + adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.475477, "supported_languages": null}, "macro.dbt.get_columns_in_relation": {"name": "get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.476864, "supported_languages": null}, "macro.dbt.default__get_columns_in_relation": {"name": "default__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.476956, "supported_languages": null}, "macro.dbt.sql_convert_columns_in_relation": {"name": "sql_convert_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4771378, "supported_languages": null}, "macro.dbt.get_empty_subquery_sql": {"name": "get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_subquery_sql", "macro_sql": "{% 
macro get_empty_subquery_sql(select_sql, select_sql_header=none) -%}\n {{ return(adapter.dispatch('get_empty_subquery_sql', 'dbt')(select_sql, select_sql_header)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.477264, "supported_languages": null}, "macro.dbt.default__get_empty_subquery_sql": {"name": "default__get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_subquery_sql", "macro_sql": "{% macro default__get_empty_subquery_sql(select_sql, select_sql_header=none) %}\n {%- if select_sql_header is not none -%}\n {{ select_sql_header }}\n {%- endif -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.477385, "supported_languages": null}, "macro.dbt.get_empty_schema_sql": {"name": "get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_schema_sql", "macro_sql": "{% macro get_empty_schema_sql(columns) -%}\n {{ return(adapter.dispatch('get_empty_schema_sql', 'dbt')(columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__get_empty_schema_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.477489, "supported_languages": null}, "macro.dbt.default__get_empty_schema_sql": {"name": "default__get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_schema_sql", "macro_sql": "{% macro default__get_empty_schema_sql(columns) %}\n {%- set col_err = [] -%}\n {%- set col_naked_numeric = [] -%}\n select\n {% for i in columns %}\n {%- set col = columns[i] -%}\n {%- if col['data_type'] is not defined -%}\n {%- do col_err.append(col['name']) -%}\n {#-- If this column's type is just 'numeric' then it is missing precision/scale, raise a warning --#}\n {%- elif col['data_type'].strip().lower() in ('numeric', 'decimal', 'number') -%}\n {%- do col_naked_numeric.append(col['name']) -%}\n {%- endif -%}\n {% set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] %}\n cast(null as {{ col['data_type'] }}) as {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n {%- if (col_err | length) > 0 -%}\n {{ exceptions.column_type_missing(column_names=col_err) }}\n {%- elif (col_naked_numeric | length) > 0 -%}\n {{ exceptions.warn(\"Detected columns with numeric type and unspecified precision/scale, this can lead to unintended rounding: \" ~ col_naked_numeric ~ \"`\") }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.478144, "supported_languages": null}, "macro.dbt.get_column_schema_from_query": {"name": "get_column_schema_from_query", "resource_type": "macro", "package_name": 
"dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_column_schema_from_query", "macro_sql": "{% macro get_column_schema_from_query(select_sql, select_sql_header=none) -%}\n {% set columns = [] %}\n {# -- Using an 'empty subquery' here to get the same schema as the given select_sql statement, without necessitating a data scan.#}\n {% set sql = get_empty_subquery_sql(select_sql, select_sql_header) %}\n {% set column_schema = adapter.get_column_schema_from_query(sql) %}\n {{ return(column_schema) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.478347, "supported_languages": null}, "macro.dbt.get_columns_in_query": {"name": "get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.478453, "supported_languages": null}, "macro.dbt.default__get_columns_in_query": {"name": "default__get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n {{ get_empty_subquery_sql(select_sql) }}\n {% endcall %}\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.478652, "supported_languages": null}, "macro.dbt.alter_column_type": {"name": "alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4787788, "supported_languages": null}, "macro.dbt.default__alter_column_type": {"name": "default__alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. 
Drop the existing column (cascade!)\n 4. Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.479108, "supported_languages": null}, "macro.dbt.alter_relation_add_remove_columns": {"name": "alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_relation_add_remove_columns", "macro_sql": "{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_relation_add_remove_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.479257, "supported_languages": null}, "macro.dbt.default__alter_relation_add_remove_columns": {"name": "default__alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_relation_add_remove_columns", "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n\n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n\n {% set sql -%}\n\n alter {{ relation.type }} {{ relation }}\n\n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n\n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n\n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.479701, "supported_languages": null}, "macro.dbt.resolve_model_name": {"name": "resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.resolve_model_name", "macro_sql": "{% macro resolve_model_name(input_model_name) %}\n {{ return(adapter.dispatch('resolve_model_name', 'dbt')(input_model_name)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4808052, "supported_languages": null}, 
"macro.dbt.default__resolve_model_name": {"name": "default__resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.default__resolve_model_name", "macro_sql": "\n\n{%- macro default__resolve_model_name(input_model_name) -%}\n {{ input_model_name | string | replace('\"', '\\\"') }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.480897, "supported_languages": null}, "macro.dbt.build_ref_function": {"name": "build_ref_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_ref_function", "macro_sql": "{% macro build_ref_function(model) %}\n\n {%- set ref_dict = {} -%}\n {%- for _ref in model.refs -%}\n {% set _ref_args = [_ref.get('package'), _ref['name']] if _ref.get('package') else [_ref['name'],] %}\n {%- set resolved = ref(*_ref_args, v=_ref.get('version')) -%}\n {%- if _ref.get('version') -%}\n {% do _ref_args.extend([\"v\" ~ _ref['version']]) %}\n {%- endif -%}\n {%- do ref_dict.update({_ref_args | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef ref(*args, **kwargs):\n refs = {{ ref_dict | tojson }}\n key = '.'.join(args)\n version = kwargs.get(\"v\") or kwargs.get(\"version\")\n if version:\n key += f\".v{version}\"\n dbt_load_df_function = kwargs.get(\"dbt_load_df_function\")\n return dbt_load_df_function(refs[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.481364, "supported_languages": null}, "macro.dbt.build_source_function": {"name": "build_source_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_source_function", "macro_sql": "{% macro build_source_function(model) %}\n\n {%- set source_dict = {} -%}\n {%- for _source in model.sources -%}\n {%- set resolved = source(*_source) -%}\n {%- do source_dict.update({_source | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef source(*args, dbt_load_df_function):\n sources = {{ source_dict | tojson }}\n key = '.'.join(args)\n return dbt_load_df_function(sources[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.481601, "supported_languages": null}, "macro.dbt.build_config_dict": {"name": "build_config_dict", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_config_dict", "macro_sql": "{% macro build_config_dict(model) %}\n {%- set config_dict = {} -%}\n {% set config_dbt_used = zip(model.config.config_keys_used, model.config.config_keys_defaults) | list %}\n {%- for key, default in config_dbt_used -%}\n {# weird type testing with enum, would be much easier to write this logic in Python! 
#}\n {%- if key == \"language\" -%}\n {%- set value = \"python\" -%}\n {%- endif -%}\n {%- set value = model.config.get(key, default) -%}\n {%- do config_dict.update({key: value}) -%}\n {%- endfor -%}\nconfig_dict = {{ config_dict }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.481934, "supported_languages": null}, "macro.dbt.py_script_postfix": {"name": "py_script_postfix", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_postfix", "macro_sql": "{% macro py_script_postfix(model) %}\n# This part is user provided model code\n# you will need to copy the next section to run the code\n# COMMAND ----------\n# this part is dbt logic for get ref work, do not modify\n\n{{ build_ref_function(model ) }}\n{{ build_source_function(model ) }}\n{{ build_config_dict(model) }}\n\nclass config:\n def __init__(self, *args, **kwargs):\n pass\n\n @staticmethod\n def get(key, default=None):\n return config_dict.get(key, default)\n\nclass this:\n \"\"\"dbt.this() or dbt.this.identifier\"\"\"\n database = \"{{ this.database }}\"\n schema = \"{{ this.schema }}\"\n identifier = \"{{ this.identifier }}\"\n {% set this_relation_name = resolve_model_name(this) %}\n def __repr__(self):\n return '{{ this_relation_name }}'\n\n\nclass dbtObj:\n def __init__(self, load_df_function) -> None:\n self.source = lambda *args: source(*args, dbt_load_df_function=load_df_function)\n self.ref = lambda *args, **kwargs: ref(*args, **kwargs, dbt_load_df_function=load_df_function)\n self.config = config\n self.this = this()\n self.is_incremental = {{ is_incremental() }}\n\n# COMMAND ----------\n{{py_script_comment()}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.build_ref_function", "macro.dbt.build_source_function", "macro.dbt.build_config_dict", "macro.dbt.resolve_model_name", "macro.dbt.is_incremental", "macro.dbt.py_script_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.482213, "supported_languages": null}, "macro.dbt.py_script_comment": {"name": "py_script_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_comment", "macro_sql": "{%macro py_script_comment()%}\n{%endmacro%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4822578, "supported_languages": null}, "macro.dbt.test_unique": {"name": "test_unique", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_unique", "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_bigquery.bigquery__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.482573, "supported_languages": null}, "macro.dbt.test_not_null": {"name": "test_not_null", "resource_type": "macro", "package_name": "dbt", "path": 
"tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4827101, "supported_languages": null}, "macro.dbt.test_accepted_values": {"name": "test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.482871, "supported_languages": null}, "macro.dbt.test_relationships": {"name": "test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4830248, "supported_languages": null}, "macro.dbt_utils.get_url_host": {"name": "get_url_host", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/web/get_url_host.sql", "original_file_path": "macros/web/get_url_host.sql", "unique_id": "macro.dbt_utils.get_url_host", "macro_sql": "{% macro get_url_host(field) -%}\n {{ return(adapter.dispatch('get_url_host', 'dbt_utils')(field)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_url_host"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4832351, "supported_languages": null}, "macro.dbt_utils.default__get_url_host": {"name": "default__get_url_host", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/web/get_url_host.sql", "original_file_path": "macros/web/get_url_host.sql", "unique_id": "macro.dbt_utils.default__get_url_host", "macro_sql": "{% macro default__get_url_host(field) -%}\n\n{%- set parsed =\n dbt.split_part(\n dbt.split_part(\n dbt.replace(\n dbt.replace(\n dbt.replace(field, \"'android-app://'\", \"''\"\n ), \"'http://'\", \"''\"\n ), \"'https://'\", \"''\"\n ), \"'/'\", 1\n ), \"'?'\", 1\n )\n\n-%}\n\n\n {{ dbt.safe_cast(\n parsed,\n dbt.type_string()\n )}}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.split_part", "macro.dbt.replace", "macro.dbt.safe_cast", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4836009, "supported_languages": null}, "macro.dbt_utils.get_url_path": {"name": "get_url_path", 
"resource_type": "macro", "package_name": "dbt_utils", "path": "macros/web/get_url_path.sql", "original_file_path": "macros/web/get_url_path.sql", "unique_id": "macro.dbt_utils.get_url_path", "macro_sql": "{% macro get_url_path(field) -%}\n {{ return(adapter.dispatch('get_url_path', 'dbt_utils')(field)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_url_path"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.483891, "supported_languages": null}, "macro.dbt_utils.default__get_url_path": {"name": "default__get_url_path", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/web/get_url_path.sql", "original_file_path": "macros/web/get_url_path.sql", "unique_id": "macro.dbt_utils.default__get_url_path", "macro_sql": "{% macro default__get_url_path(field) -%}\n\n {%- set stripped_url =\n dbt.replace(\n dbt.replace(field, \"'http://'\", \"''\"), \"'https://'\", \"''\")\n -%}\n\n {%- set first_slash_pos -%}\n coalesce(\n nullif({{ dbt.position(\"'/'\", stripped_url) }}, 0),\n {{ dbt.position(\"'?'\", stripped_url) }} - 1\n )\n {%- endset -%}\n\n {%- set parsed_path =\n dbt.split_part(\n dbt.right(\n stripped_url,\n dbt.length(stripped_url) ~ \"-\" ~ first_slash_pos\n ),\n \"'?'\", 1\n )\n -%}\n\n {{ dbt.safe_cast(\n parsed_path,\n dbt.type_string()\n )}}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.replace", "macro.dbt.position", "macro.dbt.split_part", "macro.dbt.right", "macro.dbt.length", "macro.dbt.safe_cast", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4843059, "supported_languages": null}, "macro.dbt_utils.get_url_parameter": {"name": "get_url_parameter", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/web/get_url_parameter.sql", "original_file_path": "macros/web/get_url_parameter.sql", "unique_id": "macro.dbt_utils.get_url_parameter", "macro_sql": "{% macro get_url_parameter(field, url_parameter) -%}\n {{ return(adapter.dispatch('get_url_parameter', 'dbt_utils')(field, url_parameter)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_url_parameter"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.484492, "supported_languages": null}, "macro.dbt_utils.default__get_url_parameter": {"name": "default__get_url_parameter", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/web/get_url_parameter.sql", "original_file_path": "macros/web/get_url_parameter.sql", "unique_id": "macro.dbt_utils.default__get_url_parameter", "macro_sql": "{% macro default__get_url_parameter(field, url_parameter) -%}\n\n{%- set formatted_url_parameter = \"'\" + url_parameter + \"='\" -%}\n\n{%- set split = dbt.split_part(dbt.split_part(field, formatted_url_parameter, 2), \"'&'\", 1) -%}\n\nnullif({{ split }},'')\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.split_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.484683, "supported_languages": null}, "macro.dbt_utils.test_fewer_rows_than": {"name": "test_fewer_rows_than", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/fewer_rows_than.sql", "original_file_path": "macros/generic_tests/fewer_rows_than.sql", 
"unique_id": "macro.dbt_utils.test_fewer_rows_than", "macro_sql": "{% test fewer_rows_than(model, compare_model, group_by_columns = []) %}\n {{ return(adapter.dispatch('test_fewer_rows_than', 'dbt_utils')(model, compare_model, group_by_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_fewer_rows_than"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.485354, "supported_languages": null}, "macro.dbt_utils.default__test_fewer_rows_than": {"name": "default__test_fewer_rows_than", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/fewer_rows_than.sql", "original_file_path": "macros/generic_tests/fewer_rows_than.sql", "unique_id": "macro.dbt_utils.default__test_fewer_rows_than", "macro_sql": "{% macro default__test_fewer_rows_than(model, compare_model, group_by_columns) %}\n\n{{ config(fail_calc = 'sum(coalesce(row_count_delta, 0))') }}\n\n{% if group_by_columns|length() > 0 %}\n {% set select_gb_cols = group_by_columns|join(' ,') + ', ' %}\n {% set join_gb_cols %}\n {% for c in group_by_columns %}\n and a.{{c}} = b.{{c}}\n {% endfor %}\n {% endset %}\n {% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n{% endif %}\n\n{#-- We must add a fake join key in case additional grouping variables are not provided --#}\n{#-- Redshift does not allow for dynamically created join conditions (e.g. full join on 1 = 1 --#}\n{#-- The same logic is used in equal_rowcount. In case of changes, maintain consistent logic --#}\n{% set group_by_columns = ['id_dbtutils_test_fewer_rows_than'] + group_by_columns %}\n{% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n\n\nwith a as (\n\n select \n {{select_gb_cols}}\n 1 as id_dbtutils_test_fewer_rows_than,\n count(*) as count_our_model \n from {{ model }}\n {{ groupby_gb_cols }}\n\n),\nb as (\n\n select \n {{select_gb_cols}}\n 1 as id_dbtutils_test_fewer_rows_than,\n count(*) as count_comparison_model \n from {{ compare_model }}\n {{ groupby_gb_cols }}\n\n),\ncounts as (\n\n select\n\n {% for c in group_by_columns -%}\n a.{{c}} as {{c}}_a,\n b.{{c}} as {{c}}_b,\n {% endfor %}\n\n count_our_model,\n count_comparison_model\n from a\n full join b on \n a.id_dbtutils_test_fewer_rows_than = b.id_dbtutils_test_fewer_rows_than\n {{ join_gb_cols }}\n\n),\nfinal as (\n\n select *,\n case\n -- fail the test if we have more rows than the reference model and return the row count delta\n when count_our_model > count_comparison_model then (count_our_model - count_comparison_model)\n -- fail the test if they are the same number\n when count_our_model = count_comparison_model then 1\n -- pass the test if the delta is positive (i.e. 
return the number 0)\n else 0\n end as row_count_delta\n from counts\n\n)\n\nselect * from final\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.485955, "supported_languages": null}, "macro.dbt_utils.test_equal_rowcount": {"name": "test_equal_rowcount", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/equal_rowcount.sql", "original_file_path": "macros/generic_tests/equal_rowcount.sql", "unique_id": "macro.dbt_utils.test_equal_rowcount", "macro_sql": "{% test equal_rowcount(model, compare_model, group_by_columns = []) %}\n {{ return(adapter.dispatch('test_equal_rowcount', 'dbt_utils')(model, compare_model, group_by_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_equal_rowcount"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.486478, "supported_languages": null}, "macro.dbt_utils.default__test_equal_rowcount": {"name": "default__test_equal_rowcount", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/equal_rowcount.sql", "original_file_path": "macros/generic_tests/equal_rowcount.sql", "unique_id": "macro.dbt_utils.default__test_equal_rowcount", "macro_sql": "{% macro default__test_equal_rowcount(model, compare_model, group_by_columns) %}\n\n{#-- Needs to be set at parse time, before we return '' below --#}\n{{ config(fail_calc = 'sum(coalesce(diff_count, 0))') }}\n\n{#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. #}\n{%- if not execute -%}\n {{ return('') }}\n{% endif %}\n\n{% if group_by_columns|length() > 0 %}\n {% set select_gb_cols = group_by_columns|join(', ') + ', ' %}\n {% set join_gb_cols %}\n {% for c in group_by_columns %}\n and a.{{c}} = b.{{c}}\n {% endfor %}\n {% endset %}\n {% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n{% endif %}\n\n{#-- We must add a fake join key in case additional grouping variables are not provided --#}\n{#-- Redshift does not allow for dynamically created join conditions (e.g. full join on 1 = 1 --#}\n{#-- The same logic is used in fewer_rows_than. 
In case of changes, maintain consistent logic --#}\n{% set group_by_columns = ['id_dbtutils_test_equal_rowcount'] + group_by_columns %}\n{% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n\nwith a as (\n\n select \n {{select_gb_cols}}\n 1 as id_dbtutils_test_equal_rowcount,\n count(*) as count_a \n from {{ model }}\n {{groupby_gb_cols}}\n\n\n),\nb as (\n\n select \n {{select_gb_cols}}\n 1 as id_dbtutils_test_equal_rowcount,\n count(*) as count_b \n from {{ compare_model }}\n {{groupby_gb_cols}}\n\n),\nfinal as (\n\n select\n \n {% for c in group_by_columns -%}\n a.{{c}} as {{c}}_a,\n b.{{c}} as {{c}}_b,\n {% endfor %}\n\n count_a,\n count_b,\n abs(count_a - count_b) as diff_count\n\n from a\n full join b\n on\n a.id_dbtutils_test_equal_rowcount = b.id_dbtutils_test_equal_rowcount\n {{join_gb_cols}}\n\n\n)\n\nselect * from final\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4871008, "supported_languages": null}, "macro.dbt_utils.test_relationships_where": {"name": "test_relationships_where", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/relationships_where.sql", "original_file_path": "macros/generic_tests/relationships_where.sql", "unique_id": "macro.dbt_utils.test_relationships_where", "macro_sql": "{% test relationships_where(model, column_name, to, field, from_condition=\"1=1\", to_condition=\"1=1\") %}\n {{ return(adapter.dispatch('test_relationships_where', 'dbt_utils')(model, column_name, to, field, from_condition, to_condition)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_relationships_where"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.487519, "supported_languages": null}, "macro.dbt_utils.default__test_relationships_where": {"name": "default__test_relationships_where", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/relationships_where.sql", "original_file_path": "macros/generic_tests/relationships_where.sql", "unique_id": "macro.dbt_utils.default__test_relationships_where", "macro_sql": "{% macro default__test_relationships_where(model, column_name, to, field, from_condition=\"1=1\", to_condition=\"1=1\") %}\n\n{# T-SQL has no boolean data type so we use 1=1 which returns TRUE #}\n{# ref https://stackoverflow.com/a/7170753/3842610 #}\n\nwith left_table as (\n\n select\n {{column_name}} as id\n\n from {{model}}\n\n where {{column_name}} is not null\n and {{from_condition}}\n\n),\n\nright_table as (\n\n select\n {{field}} as id\n\n from {{to}}\n\n where {{field}} is not null\n and {{to_condition}}\n\n),\n\nexceptions as (\n\n select\n left_table.id,\n right_table.id as right_id\n\n from left_table\n\n left join right_table\n on left_table.id = right_table.id\n\n where right_table.id is null\n\n)\n\nselect * from exceptions\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4877498, "supported_languages": null}, "macro.dbt_utils.test_recency": {"name": "test_recency", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/recency.sql", "original_file_path": "macros/generic_tests/recency.sql", "unique_id": "macro.dbt_utils.test_recency", "macro_sql": "{% test 
recency(model, field, datepart, interval, ignore_time_component=False, group_by_columns = []) %}\n {{ return(adapter.dispatch('test_recency', 'dbt_utils')(model, field, datepart, interval, ignore_time_component, group_by_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_recency"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4882078, "supported_languages": null}, "macro.dbt_utils.default__test_recency": {"name": "default__test_recency", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/recency.sql", "original_file_path": "macros/generic_tests/recency.sql", "unique_id": "macro.dbt_utils.default__test_recency", "macro_sql": "{% macro default__test_recency(model, field, datepart, interval, ignore_time_component, group_by_columns) %}\n\n{% set threshold = 'cast(' ~ dbt.dateadd(datepart, interval * -1, dbt.current_timestamp()) ~ ' as ' ~ ('date' if ignore_time_component else dbt.type_timestamp()) ~ ')' %}\n\n{% if group_by_columns|length() > 0 %}\n {% set select_gb_cols = group_by_columns|join(' ,') + ', ' %}\n {% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n{% endif %}\n\n\nwith recency as (\n\n select \n\n {{ select_gb_cols }}\n {% if ignore_time_component %}\n cast(max({{ field }}) as date) as most_recent\n {%- else %}\n max({{ field }}) as most_recent\n {%- endif %}\n\n from {{ model }}\n\n {{ groupby_gb_cols }}\n\n)\n\nselect\n\n {{ select_gb_cols }}\n most_recent,\n {{ threshold }} as threshold\n\nfrom recency\nwhere most_recent < {{ threshold }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.current_timestamp", "macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4886909, "supported_languages": null}, "macro.dbt_utils.test_not_constant": {"name": "test_not_constant", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_constant.sql", "original_file_path": "macros/generic_tests/not_constant.sql", "unique_id": "macro.dbt_utils.test_not_constant", "macro_sql": "{% test not_constant(model, column_name, group_by_columns = []) %}\n {{ return(adapter.dispatch('test_not_constant', 'dbt_utils')(model, column_name, group_by_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_not_constant"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.488972, "supported_languages": null}, "macro.dbt_utils.default__test_not_constant": {"name": "default__test_not_constant", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_constant.sql", "original_file_path": "macros/generic_tests/not_constant.sql", "unique_id": "macro.dbt_utils.default__test_not_constant", "macro_sql": "{% macro default__test_not_constant(model, column_name, group_by_columns) %}\n\n{% if group_by_columns|length() > 0 %}\n {% set select_gb_cols = group_by_columns|join(' ,') + ', ' %}\n {% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n{% endif %}\n\n\nselect\n {# In TSQL, subquery aggregate columns need aliases #}\n {# thus: a filler col name, 'filler_column' #}\n {{select_gb_cols}}\n count(distinct {{ column_name }}) as filler_column\n\nfrom {{ model }}\n\n {{groupby_gb_cols}}\n\nhaving count(distinct {{ 
column_name }}) = 1\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.489253, "supported_languages": null}, "macro.dbt_utils.test_accepted_range": {"name": "test_accepted_range", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/accepted_range.sql", "original_file_path": "macros/generic_tests/accepted_range.sql", "unique_id": "macro.dbt_utils.test_accepted_range", "macro_sql": "{% test accepted_range(model, column_name, min_value=none, max_value=none, inclusive=true) %}\n {{ return(adapter.dispatch('test_accepted_range', 'dbt_utils')(model, column_name, min_value, max_value, inclusive)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_accepted_range"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.489644, "supported_languages": null}, "macro.dbt_utils.default__test_accepted_range": {"name": "default__test_accepted_range", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/accepted_range.sql", "original_file_path": "macros/generic_tests/accepted_range.sql", "unique_id": "macro.dbt_utils.default__test_accepted_range", "macro_sql": "{% macro default__test_accepted_range(model, column_name, min_value=none, max_value=none, inclusive=true) %}\n\nwith meet_condition as(\n select *\n from {{ model }}\n),\n\nvalidation_errors as (\n select *\n from meet_condition\n where\n -- never true, defaults to an empty result set. Exists to ensure any combo of the `or` clauses below succeeds\n 1 = 2\n\n {%- if min_value is not none %}\n -- records with a value >= min_value are permitted. The `not` flips this to find records that don't meet the rule.\n or not {{ column_name }} > {{- \"=\" if inclusive }} {{ min_value }}\n {%- endif %}\n\n {%- if max_value is not none %}\n -- records with a value <= max_value are permitted. 
The `not` flips this to find records that don't meet the rule.\n or not {{ column_name }} < {{- \"=\" if inclusive }} {{ max_value }}\n {%- endif %}\n)\n\nselect *\nfrom validation_errors\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.489923, "supported_languages": null}, "macro.dbt_utils.test_not_accepted_values": {"name": "test_not_accepted_values", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_accepted_values.sql", "original_file_path": "macros/generic_tests/not_accepted_values.sql", "unique_id": "macro.dbt_utils.test_not_accepted_values", "macro_sql": "{% test not_accepted_values(model, column_name, values, quote=True) %}\n {{ return(adapter.dispatch('test_not_accepted_values', 'dbt_utils')(model, column_name, values, quote)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_not_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.490241, "supported_languages": null}, "macro.dbt_utils.default__test_not_accepted_values": {"name": "default__test_not_accepted_values", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_accepted_values.sql", "original_file_path": "macros/generic_tests/not_accepted_values.sql", "unique_id": "macro.dbt_utils.default__test_not_accepted_values", "macro_sql": "{% macro default__test_not_accepted_values(model, column_name, values, quote=True) %}\nwith all_values as (\n\n select distinct\n {{ column_name }} as value_field\n\n from {{ model }}\n\n),\n\nvalidation_errors as (\n\n select\n value_field\n\n from all_values\n where value_field in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n )\n\n)\n\nselect *\nfrom validation_errors\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.490466, "supported_languages": null}, "macro.dbt_utils.test_at_least_one": {"name": "test_at_least_one", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/at_least_one.sql", "original_file_path": "macros/generic_tests/at_least_one.sql", "unique_id": "macro.dbt_utils.test_at_least_one", "macro_sql": "{% test at_least_one(model, column_name, group_by_columns = []) %}\n {{ return(adapter.dispatch('test_at_least_one', 'dbt_utils')(model, column_name, group_by_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_at_least_one"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4908679, "supported_languages": null}, "macro.dbt_utils.default__test_at_least_one": {"name": "default__test_at_least_one", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/at_least_one.sql", "original_file_path": "macros/generic_tests/at_least_one.sql", "unique_id": "macro.dbt_utils.default__test_at_least_one", "macro_sql": "{% macro default__test_at_least_one(model, column_name, group_by_columns) %}\n\n{% set pruned_cols = [column_name] %}\n\n{% if group_by_columns|length() > 0 %}\n\n {% set select_gb_cols = group_by_columns|join(' 
,') + ', ' %}\n {% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n {% set pruned_cols = group_by_columns %}\n\n {% if column_name not in pruned_cols %}\n {% do pruned_cols.append(column_name) %}\n {% endif %}\n\n{% endif %}\n\n{% set select_pruned_cols = pruned_cols|join(' ,') %}\n\nselect *\nfrom (\n with pruned_rows as (\n select\n {{ select_pruned_cols }}\n from {{ model }}\n where {{ column_name }} is not null\n limit 1\n )\n select\n {# In TSQL, subquery aggregate columns need aliases #}\n {# thus: a filler col name, 'filler_column' #}\n {{select_gb_cols}}\n count({{ column_name }}) as filler_column\n\n from pruned_rows\n\n {{groupby_gb_cols}}\n\n having count({{ column_name }}) = 0\n\n) validation_errors\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4913201, "supported_languages": null}, "macro.dbt_utils.test_unique_combination_of_columns": {"name": "test_unique_combination_of_columns", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/unique_combination_of_columns.sql", "original_file_path": "macros/generic_tests/unique_combination_of_columns.sql", "unique_id": "macro.dbt_utils.test_unique_combination_of_columns", "macro_sql": "{% test unique_combination_of_columns(model, combination_of_columns, quote_columns=false) %}\n {{ return(adapter.dispatch('test_unique_combination_of_columns', 'dbt_utils')(model, combination_of_columns, quote_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_unique_combination_of_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4916968, "supported_languages": null}, "macro.dbt_utils.default__test_unique_combination_of_columns": {"name": "default__test_unique_combination_of_columns", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/unique_combination_of_columns.sql", "original_file_path": "macros/generic_tests/unique_combination_of_columns.sql", "unique_id": "macro.dbt_utils.default__test_unique_combination_of_columns", "macro_sql": "{% macro default__test_unique_combination_of_columns(model, combination_of_columns, quote_columns=false) %}\n\n{% if not quote_columns %}\n {%- set column_list=combination_of_columns %}\n{% elif quote_columns %}\n {%- set column_list=[] %}\n {% for column in combination_of_columns -%}\n {% set column_list = column_list.append( adapter.quote(column) ) %}\n {%- endfor %}\n{% else %}\n {{ exceptions.raise_compiler_error(\n \"`quote_columns` argument for unique_combination_of_columns test must be one of [True, False] Got: '\" ~ quote ~\"'.'\"\n ) }}\n{% endif %}\n\n{%- set columns_csv=column_list | join(', ') %}\n\n\nwith validation_errors as (\n\n select\n {{ columns_csv }}\n from {{ model }}\n group by {{ columns_csv }}\n having count(*) > 1\n\n)\n\nselect *\nfrom validation_errors\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4920719, "supported_languages": null}, "macro.dbt_utils.test_cardinality_equality": {"name": "test_cardinality_equality", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/cardinality_equality.sql", "original_file_path": "macros/generic_tests/cardinality_equality.sql", 
"unique_id": "macro.dbt_utils.test_cardinality_equality", "macro_sql": "{% test cardinality_equality(model, column_name, to, field) %}\n {{ return(adapter.dispatch('test_cardinality_equality', 'dbt_utils')(model, column_name, to, field)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_cardinality_equality"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4924202, "supported_languages": null}, "macro.dbt_utils.default__test_cardinality_equality": {"name": "default__test_cardinality_equality", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/cardinality_equality.sql", "original_file_path": "macros/generic_tests/cardinality_equality.sql", "unique_id": "macro.dbt_utils.default__test_cardinality_equality", "macro_sql": "{% macro default__test_cardinality_equality(model, column_name, to, field) %}\n\n{# T-SQL does not let you use numbers as aliases for columns #}\n{# Thus, no \"GROUP BY 1\" #}\n\nwith table_a as (\nselect\n {{ column_name }},\n count(*) as num_rows\nfrom {{ model }}\ngroup by {{ column_name }}\n),\n\ntable_b as (\nselect\n {{ field }},\n count(*) as num_rows\nfrom {{ to }}\ngroup by {{ field }}\n),\n\nexcept_a as (\n select *\n from table_a\n {{ dbt.except() }}\n select *\n from table_b\n),\n\nexcept_b as (\n select *\n from table_b\n {{ dbt.except() }}\n select *\n from table_a\n),\n\nunioned as (\n select *\n from except_a\n union all\n select *\n from except_b\n)\n\nselect *\nfrom unioned\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4926379, "supported_languages": null}, "macro.dbt_utils.test_expression_is_true": {"name": "test_expression_is_true", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/expression_is_true.sql", "original_file_path": "macros/generic_tests/expression_is_true.sql", "unique_id": "macro.dbt_utils.test_expression_is_true", "macro_sql": "{% test expression_is_true(model, expression, column_name=None) %}\n {{ return(adapter.dispatch('test_expression_is_true', 'dbt_utils')(model, expression, column_name)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_expression_is_true"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.492887, "supported_languages": null}, "macro.dbt_utils.default__test_expression_is_true": {"name": "default__test_expression_is_true", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/expression_is_true.sql", "original_file_path": "macros/generic_tests/expression_is_true.sql", "unique_id": "macro.dbt_utils.default__test_expression_is_true", "macro_sql": "{% macro default__test_expression_is_true(model, expression, column_name) %}\n\n{% set column_list = '*' if should_store_failures() else \"1\" %}\n\nselect\n {{ column_list }}\nfrom {{ model }}\n{% if column_name is none %}\nwhere not({{ expression }})\n{%- else %}\nwhere not({{ column_name }} {{ expression }})\n{%- endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_store_failures"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4931798, "supported_languages": null}, 
"macro.dbt_utils.test_not_null_proportion": {"name": "test_not_null_proportion", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_null_proportion.sql", "original_file_path": "macros/generic_tests/not_null_proportion.sql", "unique_id": "macro.dbt_utils.test_not_null_proportion", "macro_sql": "{% macro test_not_null_proportion(model, group_by_columns = []) %}\n {{ return(adapter.dispatch('test_not_null_proportion', 'dbt_utils')(model, group_by_columns, **kwargs)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_not_null_proportion"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4935832, "supported_languages": null}, "macro.dbt_utils.default__test_not_null_proportion": {"name": "default__test_not_null_proportion", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_null_proportion.sql", "original_file_path": "macros/generic_tests/not_null_proportion.sql", "unique_id": "macro.dbt_utils.default__test_not_null_proportion", "macro_sql": "{% macro default__test_not_null_proportion(model, group_by_columns) %}\n\n{% set column_name = kwargs.get('column_name', kwargs.get('arg')) %}\n{% set at_least = kwargs.get('at_least', kwargs.get('arg')) %}\n{% set at_most = kwargs.get('at_most', kwargs.get('arg', 1)) %}\n\n{% if group_by_columns|length() > 0 %}\n {% set select_gb_cols = group_by_columns|join(' ,') + ', ' %}\n {% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n{% endif %}\n\nwith validation as (\n select\n {{select_gb_cols}}\n sum(case when {{ column_name }} is null then 0 else 1 end) / cast(count(*) as numeric) as not_null_proportion\n from {{ model }}\n {{groupby_gb_cols}}\n),\nvalidation_errors as (\n select\n {{select_gb_cols}}\n not_null_proportion\n from validation\n where not_null_proportion < {{ at_least }} or not_null_proportion > {{ at_most }}\n)\nselect\n *\nfrom validation_errors\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.494051, "supported_languages": null}, "macro.dbt_utils.test_sequential_values": {"name": "test_sequential_values", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/sequential_values.sql", "original_file_path": "macros/generic_tests/sequential_values.sql", "unique_id": "macro.dbt_utils.test_sequential_values", "macro_sql": "{% test sequential_values(model, column_name, interval=1, datepart=None, group_by_columns = []) %}\n\n {{ return(adapter.dispatch('test_sequential_values', 'dbt_utils')(model, column_name, interval, datepart, group_by_columns)) }}\n\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_sequential_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.494601, "supported_languages": null}, "macro.dbt_utils.default__test_sequential_values": {"name": "default__test_sequential_values", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/sequential_values.sql", "original_file_path": "macros/generic_tests/sequential_values.sql", "unique_id": "macro.dbt_utils.default__test_sequential_values", "macro_sql": "{% macro default__test_sequential_values(model, column_name, interval=1, datepart=None, group_by_columns = []) 
%}\n\n{% set previous_column_name = \"previous_\" ~ dbt_utils.slugify(column_name) %}\n\n{% if group_by_columns|length() > 0 %}\n {% set select_gb_cols = group_by_columns|join(',') + ', ' %}\n {% set partition_gb_cols = 'partition by ' + group_by_columns|join(',') %}\n{% endif %}\n\nwith windowed as (\n\n select\n {{ select_gb_cols }}\n {{ column_name }},\n lag({{ column_name }}) over (\n {{partition_gb_cols}}\n order by {{ column_name }}\n ) as {{ previous_column_name }}\n from {{ model }}\n),\n\nvalidation_errors as (\n select\n *\n from windowed\n {% if datepart %}\n where not(cast({{ column_name }} as {{ dbt.type_timestamp() }})= cast({{ dbt.dateadd(datepart, interval, previous_column_name) }} as {{ dbt.type_timestamp() }}))\n {% else %}\n where not({{ column_name }} = {{ previous_column_name }} + {{ interval }})\n {% endif %}\n)\n\nselect *\nfrom validation_errors\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.slugify", "macro.dbt.type_timestamp", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.495127, "supported_languages": null}, "macro.dbt_utils.test_equality": {"name": "test_equality", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/equality.sql", "original_file_path": "macros/generic_tests/equality.sql", "unique_id": "macro.dbt_utils.test_equality", "macro_sql": "{% test equality(model, compare_model, compare_columns=None) %}\n {{ return(adapter.dispatch('test_equality', 'dbt_utils')(model, compare_model, compare_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_equality"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.495608, "supported_languages": null}, "macro.dbt_utils.default__test_equality": {"name": "default__test_equality", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/equality.sql", "original_file_path": "macros/generic_tests/equality.sql", "unique_id": "macro.dbt_utils.default__test_equality", "macro_sql": "{% macro default__test_equality(model, compare_model, compare_columns=None) %}\n\n{% set set_diff %}\n count(*) + coalesce(abs(\n sum(case when which_diff = 'a_minus_b' then 1 else 0 end) -\n sum(case when which_diff = 'b_minus_a' then 1 else 0 end)\n ), 0)\n{% endset %}\n\n{#-- Needs to be set at parse time, before we return '' below --#}\n{{ config(fail_calc = set_diff) }}\n\n{#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. 
#}\n{%- if not execute -%}\n {{ return('') }}\n{% endif %}\n\n-- setup\n{%- do dbt_utils._is_relation(model, 'test_equality') -%}\n\n{#-\nIf the compare_cols arg is provided, we can run this test without querying the\ninformation schema\u00a0\u2014 this allows the model to be an ephemeral model\n-#}\n\n{%- if not compare_columns -%}\n {%- do dbt_utils._is_ephemeral(model, 'test_equality') -%}\n {%- set compare_columns = adapter.get_columns_in_relation(model) | map(attribute='quoted') -%}\n{%- endif -%}\n\n{% set compare_cols_csv = compare_columns | join(', ') %}\n\nwith a as (\n\n select * from {{ model }}\n\n),\n\nb as (\n\n select * from {{ compare_model }}\n\n),\n\na_minus_b as (\n\n select {{compare_cols_csv}} from a\n {{ dbt.except() }}\n select {{compare_cols_csv}} from b\n\n),\n\nb_minus_a as (\n\n select {{compare_cols_csv}} from b\n {{ dbt.except() }}\n select {{compare_cols_csv}} from a\n\n),\n\nunioned as (\n\n select 'a_minus_b' as which_diff, a_minus_b.* from a_minus_b\n union all\n select 'b_minus_a' as which_diff, b_minus_a.* from b_minus_a\n\n)\n\nselect * from unioned\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt_utils._is_ephemeral", "macro.dbt.except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.496139, "supported_languages": null}, "macro.dbt_utils.test_not_empty_string": {"name": "test_not_empty_string", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_empty_string.sql", "original_file_path": "macros/generic_tests/not_empty_string.sql", "unique_id": "macro.dbt_utils.test_not_empty_string", "macro_sql": "{% test not_empty_string(model, column_name, trim_whitespace=true) %}\n\n {{ return(adapter.dispatch('test_not_empty_string', 'dbt_utils')(model, column_name, trim_whitespace)) }}\n\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_not_empty_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.496454, "supported_languages": null}, "macro.dbt_utils.default__test_not_empty_string": {"name": "default__test_not_empty_string", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_empty_string.sql", "original_file_path": "macros/generic_tests/not_empty_string.sql", "unique_id": "macro.dbt_utils.default__test_not_empty_string", "macro_sql": "{% macro default__test_not_empty_string(model, column_name, trim_whitespace=true) %}\n\n with\n \n all_values as (\n\n select \n\n\n {% if trim_whitespace == true -%}\n\n trim({{ column_name }}) as {{ column_name }}\n\n {%- else -%}\n\n {{ column_name }}\n\n {%- endif %}\n \n from {{ model }}\n\n ),\n\n errors as (\n\n select * from all_values\n where {{ column_name }} = ''\n\n )\n\n select * from errors\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.496646, "supported_languages": null}, "macro.dbt_utils.test_mutually_exclusive_ranges": {"name": "test_mutually_exclusive_ranges", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/mutually_exclusive_ranges.sql", "original_file_path": "macros/generic_tests/mutually_exclusive_ranges.sql", "unique_id": "macro.dbt_utils.test_mutually_exclusive_ranges", "macro_sql": "{% test 
mutually_exclusive_ranges(model, lower_bound_column, upper_bound_column, partition_by=None, gaps='allowed', zero_length_range_allowed=False) %}\n {{ return(adapter.dispatch('test_mutually_exclusive_ranges', 'dbt_utils')(model, lower_bound_column, upper_bound_column, partition_by, gaps, zero_length_range_allowed)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_mutually_exclusive_ranges"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4988952, "supported_languages": null}, "macro.dbt_utils.default__test_mutually_exclusive_ranges": {"name": "default__test_mutually_exclusive_ranges", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/mutually_exclusive_ranges.sql", "original_file_path": "macros/generic_tests/mutually_exclusive_ranges.sql", "unique_id": "macro.dbt_utils.default__test_mutually_exclusive_ranges", "macro_sql": "{% macro default__test_mutually_exclusive_ranges(model, lower_bound_column, upper_bound_column, partition_by=None, gaps='allowed', zero_length_range_allowed=False) %}\n{% if gaps == 'not_allowed' %}\n {% set allow_gaps_operator='=' %}\n {% set allow_gaps_operator_in_words='equal_to' %}\n{% elif gaps == 'allowed' %}\n {% set allow_gaps_operator='<=' %}\n {% set allow_gaps_operator_in_words='less_than_or_equal_to' %}\n{% elif gaps == 'required' %}\n {% set allow_gaps_operator='<' %}\n {% set allow_gaps_operator_in_words='less_than' %}\n{% else %}\n {{ exceptions.raise_compiler_error(\n \"`gaps` argument for mutually_exclusive_ranges test must be one of ['not_allowed', 'allowed', 'required'] Got: '\" ~ gaps ~\"'.'\"\n ) }}\n{% endif %}\n{% if not zero_length_range_allowed %}\n {% set allow_zero_length_operator='<' %}\n {% set allow_zero_length_operator_in_words='less_than' %}\n{% elif zero_length_range_allowed %}\n {% set allow_zero_length_operator='<=' %}\n {% set allow_zero_length_operator_in_words='less_than_or_equal_to' %}\n{% else %}\n {{ exceptions.raise_compiler_error(\n \"`zero_length_range_allowed` argument for mutually_exclusive_ranges test must be one of [true, false] Got: '\" ~ zero_length_range_allowed ~\"'.'\"\n ) }}\n{% endif %}\n\n{% set partition_clause=\"partition by \" ~ partition_by if partition_by else '' %}\n\nwith window_functions as (\n\n select\n {% if partition_by %}\n {{ partition_by }} as partition_by_col,\n {% endif %}\n {{ lower_bound_column }} as lower_bound,\n {{ upper_bound_column }} as upper_bound,\n\n lead({{ lower_bound_column }}) over (\n {{ partition_clause }}\n order by {{ lower_bound_column }}, {{ upper_bound_column }}\n ) as next_lower_bound,\n\n row_number() over (\n {{ partition_clause }}\n order by {{ lower_bound_column }} desc, {{ upper_bound_column }} desc\n ) = 1 as is_last_record\n\n from {{ model }}\n\n),\n\ncalc as (\n -- We want to return records where one of our assumptions fails, so we'll use\n -- the `not` function with `and` statements so we can write our assumptions more cleanly\n select\n *,\n\n -- For each record: lower_bound should be < upper_bound.\n -- Coalesce it to return an error on the null case (implicit assumption\n -- these columns are not_null)\n coalesce(\n lower_bound {{ allow_zero_length_operator }} upper_bound,\n false\n ) as lower_bound_{{ allow_zero_length_operator_in_words }}_upper_bound,\n\n -- For each record: upper_bound {{ allow_gaps_operator }} the next lower_bound.\n -- Coalesce it to handle null cases for the last record.\n coalesce(\n 
upper_bound {{ allow_gaps_operator }} next_lower_bound,\n is_last_record,\n false\n ) as upper_bound_{{ allow_gaps_operator_in_words }}_next_lower_bound\n\n from window_functions\n\n),\n\nvalidation_errors as (\n\n select\n *\n from calc\n\n where not(\n -- THE FOLLOWING SHOULD BE TRUE --\n lower_bound_{{ allow_zero_length_operator_in_words }}_upper_bound\n and upper_bound_{{ allow_gaps_operator_in_words }}_next_lower_bound\n )\n)\n\nselect * from validation_errors\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.499777, "supported_languages": null}, "macro.dbt_utils.pretty_log_format": {"name": "pretty_log_format", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/pretty_log_format.sql", "original_file_path": "macros/jinja_helpers/pretty_log_format.sql", "unique_id": "macro.dbt_utils.pretty_log_format", "macro_sql": "{% macro pretty_log_format(message) %}\n {{ return(adapter.dispatch('pretty_log_format', 'dbt_utils')(message)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__pretty_log_format"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.4999502, "supported_languages": null}, "macro.dbt_utils.default__pretty_log_format": {"name": "default__pretty_log_format", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/pretty_log_format.sql", "original_file_path": "macros/jinja_helpers/pretty_log_format.sql", "unique_id": "macro.dbt_utils.default__pretty_log_format", "macro_sql": "{% macro default__pretty_log_format(message) %}\n {{ return( dbt_utils.pretty_time() ~ ' + ' ~ message) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.pretty_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.500051, "supported_languages": null}, "macro.dbt_utils._is_relation": {"name": "_is_relation", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/_is_relation.sql", "original_file_path": "macros/jinja_helpers/_is_relation.sql", "unique_id": "macro.dbt_utils._is_relation", "macro_sql": "{% macro _is_relation(obj, macro) %}\n {%- if not (obj is mapping and obj.get('metadata', {}).get('type', '').endswith('Relation')) -%}\n {%- do exceptions.raise_compiler_error(\"Macro \" ~ macro ~ \" expected a Relation but received the value: \" ~ obj) -%}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5003252, "supported_languages": null}, "macro.dbt_utils.pretty_time": {"name": "pretty_time", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/pretty_time.sql", "original_file_path": "macros/jinja_helpers/pretty_time.sql", "unique_id": "macro.dbt_utils.pretty_time", "macro_sql": "{% macro pretty_time(format='%H:%M:%S') %}\n {{ return(adapter.dispatch('pretty_time', 'dbt_utils')(format)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__pretty_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5004852, "supported_languages": null}, "macro.dbt_utils.default__pretty_time": 
{"name": "default__pretty_time", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/pretty_time.sql", "original_file_path": "macros/jinja_helpers/pretty_time.sql", "unique_id": "macro.dbt_utils.default__pretty_time", "macro_sql": "{% macro default__pretty_time(format='%H:%M:%S') %}\n {{ return(modules.datetime.datetime.now().strftime(format)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.500594, "supported_languages": null}, "macro.dbt_utils.log_info": {"name": "log_info", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/log_info.sql", "original_file_path": "macros/jinja_helpers/log_info.sql", "unique_id": "macro.dbt_utils.log_info", "macro_sql": "{% macro log_info(message) %}\n {{ return(adapter.dispatch('log_info', 'dbt_utils')(message)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__log_info"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.500737, "supported_languages": null}, "macro.dbt_utils.default__log_info": {"name": "default__log_info", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/log_info.sql", "original_file_path": "macros/jinja_helpers/log_info.sql", "unique_id": "macro.dbt_utils.default__log_info", "macro_sql": "{% macro default__log_info(message) %}\n {{ log(dbt_utils.pretty_log_format(message), info=True) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.pretty_log_format"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5008361, "supported_languages": null}, "macro.dbt_utils.slugify": {"name": "slugify", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/slugify.sql", "original_file_path": "macros/jinja_helpers/slugify.sql", "unique_id": "macro.dbt_utils.slugify", "macro_sql": "{% macro slugify(string) %}\n\n{#- Lower case the string -#}\n{% set string = string | lower %}\n{#- Replace spaces and dashes with underscores -#}\n{% set string = modules.re.sub('[ -]+', '_', string) %}\n{#- Only take letters, numbers, and underscores -#}\n{% set string = modules.re.sub('[^a-z0-9_]+', '', string) %}\n{#- Prepends \"_\" if string begins with a number -#}\n{% set string = modules.re.sub('^[0-9]', '_' + string[0], string) %}\n\n{{ return(string) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.501187, "supported_languages": null}, "macro.dbt_utils._is_ephemeral": {"name": "_is_ephemeral", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/_is_ephemeral.sql", "original_file_path": "macros/jinja_helpers/_is_ephemeral.sql", "unique_id": "macro.dbt_utils._is_ephemeral", "macro_sql": "{% macro _is_ephemeral(obj, macro) %}\n {%- if obj.is_cte -%}\n {% set ephemeral_prefix = api.Relation.add_ephemeral_prefix('') %}\n {% if obj.name.startswith(ephemeral_prefix) %}\n {% set model_name = obj.name[(ephemeral_prefix|length):] %}\n {% else %}\n {% set model_name = obj.name %}\n {%- endif -%}\n {% set error_message %}\nThe `{{ macro }}` macro cannot be used with ephemeral models, as it relies on the information schema.\n\n`{{ 
model_name }}` is an ephemeral model. Consider making it a view or table instead.\n {% endset %}\n {%- do exceptions.raise_compiler_error(error_message) -%}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.501684, "supported_languages": null}, "macro.dbt_utils.get_intervals_between": {"name": "get_intervals_between", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/date_spine.sql", "original_file_path": "macros/sql/date_spine.sql", "unique_id": "macro.dbt_utils.get_intervals_between", "macro_sql": "{% macro get_intervals_between(start_date, end_date, datepart) -%}\n {{ return(adapter.dispatch('get_intervals_between', 'dbt_utils')(start_date, end_date, datepart)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_intervals_between"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.502141, "supported_languages": null}, "macro.dbt_utils.default__get_intervals_between": {"name": "default__get_intervals_between", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/date_spine.sql", "original_file_path": "macros/sql/date_spine.sql", "unique_id": "macro.dbt_utils.default__get_intervals_between", "macro_sql": "{% macro default__get_intervals_between(start_date, end_date, datepart) -%}\n {%- call statement('get_intervals_between', fetch_result=True) %}\n\n select {{ dbt.datediff(start_date, end_date, datepart) }}\n\n {%- endcall -%}\n\n {%- set value_list = load_result('get_intervals_between') -%}\n\n {%- if value_list and value_list['data'] -%}\n {%- set values = value_list['data'] | map(attribute=0) | list %}\n {{ return(values[0]) }}\n {%- else -%}\n {{ return(1) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5025, "supported_languages": null}, "macro.dbt_utils.date_spine": {"name": "date_spine", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/date_spine.sql", "original_file_path": "macros/sql/date_spine.sql", "unique_id": "macro.dbt_utils.date_spine", "macro_sql": "{% macro date_spine(datepart, start_date, end_date) %}\n {{ return(adapter.dispatch('date_spine', 'dbt_utils')(datepart, start_date, end_date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.502642, "supported_languages": null}, "macro.dbt_utils.default__date_spine": {"name": "default__date_spine", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/date_spine.sql", "original_file_path": "macros/sql/date_spine.sql", "unique_id": "macro.dbt_utils.default__date_spine", "macro_sql": "{% macro default__date_spine(datepart, start_date, end_date) %}\n\n\n{# call as follows:\n\ndate_spine(\n \"day\",\n \"to_date('01/01/2016', 'mm/dd/yyyy')\",\n \"dbt.dateadd(week, 1, current_date)\"\n) #}\n\n\nwith rawdata as (\n\n {{dbt_utils.generate_series(\n dbt_utils.get_intervals_between(start_date, end_date, datepart)\n )}}\n\n),\n\nall_periods as (\n\n select (\n {{\n dbt.dateadd(\n datepart,\n \"row_number() over 
(order by 1) - 1\",\n start_date\n )\n }}\n ) as date_{{datepart}}\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_{{datepart}} <= {{ end_date }}\n\n)\n\nselect * from filtered\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.generate_series", "macro.dbt_utils.get_intervals_between", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.502857, "supported_languages": null}, "macro.dbt_utils.safe_subtract": {"name": "safe_subtract", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/safe_subtract.sql", "original_file_path": "macros/sql/safe_subtract.sql", "unique_id": "macro.dbt_utils.safe_subtract", "macro_sql": "{%- macro safe_subtract(field_list) -%}\n {{ return(adapter.dispatch('safe_subtract', 'dbt_utils')(field_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__safe_subtract"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5030882, "supported_languages": null}, "macro.dbt_utils.default__safe_subtract": {"name": "default__safe_subtract", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/safe_subtract.sql", "original_file_path": "macros/sql/safe_subtract.sql", "unique_id": "macro.dbt_utils.default__safe_subtract", "macro_sql": "\n\n{%- macro default__safe_subtract(field_list) -%}\n\n{%- if field_list is not iterable or field_list is string or field_list is mapping -%}\n\n{%- set error_message = '\nWarning: the `safe_subtract` macro takes a single list argument instead of \\\nstring arguments. The {}.{} model triggered this warning. 
\\\n'.format(model.package_name, model.name) -%}\n\n{%- do exceptions.raise_compiler_error(error_message) -%}\n\n{%- endif -%}\n\n{% set fields = [] %}\n\n{%- for field in field_list -%}\n\n {% do fields.append(\"coalesce(\" ~ field ~ \", 0)\") %}\n\n{%- endfor -%}\n\n{{ fields|join(' -\\n ') }}\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.503403, "supported_languages": null}, "macro.dbt_utils.nullcheck_table": {"name": "nullcheck_table", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/nullcheck_table.sql", "original_file_path": "macros/sql/nullcheck_table.sql", "unique_id": "macro.dbt_utils.nullcheck_table", "macro_sql": "{% macro nullcheck_table(relation) %}\n {{ return(adapter.dispatch('nullcheck_table', 'dbt_utils')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__nullcheck_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5035899, "supported_languages": null}, "macro.dbt_utils.default__nullcheck_table": {"name": "default__nullcheck_table", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/nullcheck_table.sql", "original_file_path": "macros/sql/nullcheck_table.sql", "unique_id": "macro.dbt_utils.default__nullcheck_table", "macro_sql": "{% macro default__nullcheck_table(relation) %}\n\n {%- do dbt_utils._is_relation(relation, 'nullcheck_table') -%}\n {%- do dbt_utils._is_ephemeral(relation, 'nullcheck_table') -%}\n {% set cols = adapter.get_columns_in_relation(relation) %}\n\n select {{ dbt_utils.nullcheck(cols) }}\n from {{relation}}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt_utils._is_ephemeral", "macro.dbt_utils.nullcheck"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.50384, "supported_languages": null}, "macro.dbt_utils.get_relations_by_pattern": {"name": "get_relations_by_pattern", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_relations_by_pattern.sql", "original_file_path": "macros/sql/get_relations_by_pattern.sql", "unique_id": "macro.dbt_utils.get_relations_by_pattern", "macro_sql": "{% macro get_relations_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database) %}\n {{ return(adapter.dispatch('get_relations_by_pattern', 'dbt_utils')(schema_pattern, table_pattern, exclude, database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_relations_by_pattern"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.504242, "supported_languages": null}, "macro.dbt_utils.default__get_relations_by_pattern": {"name": "default__get_relations_by_pattern", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_relations_by_pattern.sql", "original_file_path": "macros/sql/get_relations_by_pattern.sql", "unique_id": "macro.dbt_utils.default__get_relations_by_pattern", "macro_sql": "{% macro default__get_relations_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database) %}\n\n {%- call statement('get_tables', fetch_result=True) %}\n\n {{ dbt_utils.get_tables_by_pattern_sql(schema_pattern, table_pattern, exclude, database) 
}}\n\n {%- endcall -%}\n\n {%- set table_list = load_result('get_tables') -%}\n\n {%- if table_list and table_list['table'] -%}\n {%- set tbl_relations = [] -%}\n {%- for row in table_list['table'] -%}\n {%- set tbl_relation = api.Relation.create(\n database=database,\n schema=row.table_schema,\n identifier=row.table_name,\n type=row.table_type\n ) -%}\n {%- do tbl_relations.append(tbl_relation) -%}\n {%- endfor -%}\n\n {{ return(tbl_relations) }}\n {%- else -%}\n {{ return([]) }}\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt_utils.get_tables_by_pattern_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.504763, "supported_languages": null}, "macro.dbt_utils.get_powers_of_two": {"name": "get_powers_of_two", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/generate_series.sql", "original_file_path": "macros/sql/generate_series.sql", "unique_id": "macro.dbt_utils.get_powers_of_two", "macro_sql": "{% macro get_powers_of_two(upper_bound) %}\n {{ return(adapter.dispatch('get_powers_of_two', 'dbt_utils')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.505298, "supported_languages": null}, "macro.dbt_utils.default__get_powers_of_two": {"name": "default__get_powers_of_two", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/generate_series.sql", "original_file_path": "macros/sql/generate_series.sql", "unique_id": "macro.dbt_utils.default__get_powers_of_two", "macro_sql": "{% macro default__get_powers_of_two(upper_bound) %}\n\n {% if upper_bound <= 0 %}\n {{ exceptions.raise_compiler_error(\"upper bound must be positive\") }}\n {% endif %}\n\n {% for _ in range(1, 100) %}\n {% if upper_bound <= 2 ** loop.index %}{{ return(loop.index) }}{% endif %}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5055418, "supported_languages": null}, "macro.dbt_utils.generate_series": {"name": "generate_series", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/generate_series.sql", "original_file_path": "macros/sql/generate_series.sql", "unique_id": "macro.dbt_utils.generate_series", "macro_sql": "{% macro generate_series(upper_bound) %}\n {{ return(adapter.dispatch('generate_series', 'dbt_utils')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__generate_series"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.505676, "supported_languages": null}, "macro.dbt_utils.default__generate_series": {"name": "default__generate_series", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/generate_series.sql", "original_file_path": "macros/sql/generate_series.sql", "unique_id": "macro.dbt_utils.default__generate_series", "macro_sql": "{% macro default__generate_series(upper_bound) %}\n\n {% set n = dbt_utils.get_powers_of_two(upper_bound) %}\n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n {% for i in range(n) %}\n p{{i}}.generated_number * power(2, {{i}})\n {% if not 
loop.last %} + {% endif %}\n {% endfor %}\n + 1\n as generated_number\n\n from\n\n {% for i in range(n) %}\n p as p{{i}}\n {% if not loop.last %} cross join {% endif %}\n {% endfor %}\n\n )\n\n select *\n from unioned\n where generated_number <= {{upper_bound}}\n order by generated_number\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.506025, "supported_languages": null}, "macro.dbt_utils.get_relations_by_prefix": {"name": "get_relations_by_prefix", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_relations_by_prefix.sql", "original_file_path": "macros/sql/get_relations_by_prefix.sql", "unique_id": "macro.dbt_utils.get_relations_by_prefix", "macro_sql": "{% macro get_relations_by_prefix(schema, prefix, exclude='', database=target.database) %}\n {{ return(adapter.dispatch('get_relations_by_prefix', 'dbt_utils')(schema, prefix, exclude, database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_relations_by_prefix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.506436, "supported_languages": null}, "macro.dbt_utils.default__get_relations_by_prefix": {"name": "default__get_relations_by_prefix", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_relations_by_prefix.sql", "original_file_path": "macros/sql/get_relations_by_prefix.sql", "unique_id": "macro.dbt_utils.default__get_relations_by_prefix", "macro_sql": "{% macro default__get_relations_by_prefix(schema, prefix, exclude='', database=target.database) %}\n\n {%- call statement('get_tables', fetch_result=True) %}\n\n {{ dbt_utils.get_tables_by_prefix_sql(schema, prefix, exclude, database) }}\n\n {%- endcall -%}\n\n {%- set table_list = load_result('get_tables') -%}\n\n {%- if table_list and table_list['table'] -%}\n {%- set tbl_relations = [] -%}\n {%- for row in table_list['table'] -%}\n {%- set tbl_relation = api.Relation.create(\n database=database,\n schema=row.table_schema,\n identifier=row.table_name,\n type=row.table_type\n ) -%}\n {%- do tbl_relations.append(tbl_relation) -%}\n {%- endfor -%}\n\n {{ return(tbl_relations) }}\n {%- else -%}\n {{ return([]) }}\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt_utils.get_tables_by_prefix_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5069308, "supported_languages": null}, "macro.dbt_utils.get_tables_by_prefix_sql": {"name": "get_tables_by_prefix_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_prefix_sql.sql", "original_file_path": "macros/sql/get_tables_by_prefix_sql.sql", "unique_id": "macro.dbt_utils.get_tables_by_prefix_sql", "macro_sql": "{% macro get_tables_by_prefix_sql(schema, prefix, exclude='', database=target.database) %}\n {{ return(adapter.dispatch('get_tables_by_prefix_sql', 'dbt_utils')(schema, prefix, exclude, database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_tables_by_prefix_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5071821, "supported_languages": null}, 
"macro.dbt_utils.default__get_tables_by_prefix_sql": {"name": "default__get_tables_by_prefix_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_prefix_sql.sql", "original_file_path": "macros/sql/get_tables_by_prefix_sql.sql", "unique_id": "macro.dbt_utils.default__get_tables_by_prefix_sql", "macro_sql": "{% macro default__get_tables_by_prefix_sql(schema, prefix, exclude='', database=target.database) %}\n\n {{ dbt_utils.get_tables_by_pattern_sql(\n schema_pattern = schema,\n table_pattern = prefix ~ '%',\n exclude = exclude,\n database = database\n ) }}\n \n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.get_tables_by_pattern_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5073621, "supported_languages": null}, "macro.dbt_utils.star": {"name": "star", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/star.sql", "original_file_path": "macros/sql/star.sql", "unique_id": "macro.dbt_utils.star", "macro_sql": "{% macro star(from, relation_alias=False, except=[], prefix='', suffix='', quote_identifiers=True) -%}\r\n {{ return(adapter.dispatch('star', 'dbt_utils')(from, relation_alias, except, prefix, suffix, quote_identifiers)) }}\r\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__star"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.508139, "supported_languages": null}, "macro.dbt_utils.default__star": {"name": "default__star", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/star.sql", "original_file_path": "macros/sql/star.sql", "unique_id": "macro.dbt_utils.default__star", "macro_sql": "{% macro default__star(from, relation_alias=False, except=[], prefix='', suffix='', quote_identifiers=True) -%}\r\n {%- do dbt_utils._is_relation(from, 'star') -%}\r\n {%- do dbt_utils._is_ephemeral(from, 'star') -%}\r\n\r\n {#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. #}\r\n {%- if not execute -%}\r\n {% do return('*') %}\r\n {%- endif -%}\r\n\r\n {% set cols = dbt_utils.get_filtered_columns_in_relation(from, except) %}\r\n\r\n {%- if cols|length <= 0 -%}\r\n {% if flags.WHICH == 'compile' %}\r\n {% set response %}\r\n*\r\n/* No columns were returned. Maybe the relation doesn't exist yet \r\nor all columns were excluded. This star is only output during \r\ndbt compile, and exists to keep SQLFluff happy. 
*/\r\n {% endset %}\r\n {% do return(response) %}\r\n {% else %}\r\n {% do return(\"/* no columns returned from star() macro */\") %}\r\n {% endif %}\r\n {%- else -%}\r\n {%- for col in cols %}\r\n {%- if relation_alias %}{{ relation_alias }}.{% else %}{%- endif -%}\r\n {%- if quote_identifiers -%}\r\n {{ adapter.quote(col)|trim }} {%- if prefix!='' or suffix!='' %} as {{ adapter.quote(prefix ~ col ~ suffix)|trim }} {%- endif -%}\r\n {%- else -%}\r\n {{ col|trim }} {%- if prefix!='' or suffix!='' %} as {{ (prefix ~ col ~ suffix)|trim }} {%- endif -%}\r\n {% endif %}\r\n {%- if not loop.last %},{{ '\\n ' }}{%- endif -%}\r\n {%- endfor -%}\r\n {% endif %}\r\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt_utils._is_ephemeral", "macro.dbt_utils.get_filtered_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5089338, "supported_languages": null}, "macro.dbt_utils.unpivot": {"name": "unpivot", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/unpivot.sql", "original_file_path": "macros/sql/unpivot.sql", "unique_id": "macro.dbt_utils.unpivot", "macro_sql": "{% macro unpivot(relation=none, cast_to='varchar', exclude=none, remove=none, field_name='field_name', value_name='value') -%}\n {{ return(adapter.dispatch('unpivot', 'dbt_utils')(relation, cast_to, exclude, remove, field_name, value_name)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__unpivot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.509768, "supported_languages": null}, "macro.dbt_utils.default__unpivot": {"name": "default__unpivot", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/unpivot.sql", "original_file_path": "macros/sql/unpivot.sql", "unique_id": "macro.dbt_utils.default__unpivot", "macro_sql": "{% macro default__unpivot(relation=none, cast_to='varchar', exclude=none, remove=none, field_name='field_name', value_name='value') -%}\n\n {% if not relation %}\n {{ exceptions.raise_compiler_error(\"Error: argument `relation` is required for `unpivot` macro.\") }}\n {% endif %}\n\n {%- set exclude = exclude if exclude is not none else [] %}\n {%- set remove = remove if remove is not none else [] %}\n\n {%- set include_cols = [] %}\n\n {%- set table_columns = {} %}\n\n {%- do table_columns.update({relation: []}) %}\n\n {%- do dbt_utils._is_relation(relation, 'unpivot') -%}\n {%- do dbt_utils._is_ephemeral(relation, 'unpivot') -%}\n {%- set cols = adapter.get_columns_in_relation(relation) %}\n\n {%- for col in cols -%}\n {%- if col.column.lower() not in remove|map('lower') and col.column.lower() not in exclude|map('lower') -%}\n {% do include_cols.append(col) %}\n {%- endif %}\n {%- endfor %}\n\n\n {%- for col in include_cols -%}\n select\n {%- for exclude_col in exclude %}\n {{ exclude_col }},\n {%- endfor %}\n\n cast('{{ col.column }}' as {{ dbt.type_string() }}) as {{ field_name }},\n cast( {% if col.data_type == 'boolean' %}\n {{ dbt.cast_bool_to_text(col.column) }}\n {% else %}\n {{ col.column }}\n {% endif %}\n as {{ cast_to }}) as {{ value_name }}\n\n from {{ relation }}\n\n {% if not loop.last -%}\n union all\n {% endif -%}\n {%- endfor -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt_utils._is_ephemeral", "macro.dbt.type_string", "macro.dbt.cast_bool_to_text"]}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.51073, "supported_languages": null}, "macro.dbt_utils.safe_divide": {"name": "safe_divide", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/safe_divide.sql", "original_file_path": "macros/sql/safe_divide.sql", "unique_id": "macro.dbt_utils.safe_divide", "macro_sql": "{% macro safe_divide(numerator, denominator) -%}\n {{ return(adapter.dispatch('safe_divide', 'dbt_utils')(numerator, denominator)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__safe_divide"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.510902, "supported_languages": null}, "macro.dbt_utils.default__safe_divide": {"name": "default__safe_divide", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/safe_divide.sql", "original_file_path": "macros/sql/safe_divide.sql", "unique_id": "macro.dbt_utils.default__safe_divide", "macro_sql": "{% macro default__safe_divide(numerator, denominator) %}\n ( {{ numerator }} ) / nullif( ( {{ denominator }} ), 0)\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.510978, "supported_languages": null}, "macro.dbt_utils.union_relations": {"name": "union_relations", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/union.sql", "original_file_path": "macros/sql/union.sql", "unique_id": "macro.dbt_utils.union_relations", "macro_sql": "{%- macro union_relations(relations, column_override=none, include=[], exclude=[], source_column_name='_dbt_source_relation', where=none) -%}\n {{ return(adapter.dispatch('union_relations', 'dbt_utils')(relations, column_override, include, exclude, source_column_name, where)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__union_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.51294, "supported_languages": null}, "macro.dbt_utils.default__union_relations": {"name": "default__union_relations", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/union.sql", "original_file_path": "macros/sql/union.sql", "unique_id": "macro.dbt_utils.default__union_relations", "macro_sql": "\n\n{%- macro default__union_relations(relations, column_override=none, include=[], exclude=[], source_column_name='_dbt_source_relation', where=none) -%}\n\n {%- if exclude and include -%}\n {{ exceptions.raise_compiler_error(\"Both an exclude and include list were provided to the `union` macro. Only one is allowed\") }}\n {%- endif -%}\n\n {#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. 
-#}\n {%- if not execute %}\n {{ return('') }}\n {% endif -%}\n\n {%- set column_override = column_override if column_override is not none else {} -%}\n\n {%- set relation_columns = {} -%}\n {%- set column_superset = {} -%}\n {%- set all_excludes = [] -%}\n {%- set all_includes = [] -%}\n\n {%- if exclude -%}\n {%- for exc in exclude -%}\n {%- do all_excludes.append(exc | lower) -%}\n {%- endfor -%}\n {%- endif -%}\n\n {%- if include -%}\n {%- for inc in include -%}\n {%- do all_includes.append(inc | lower) -%}\n {%- endfor -%}\n {%- endif -%}\n\n {%- for relation in relations -%}\n\n {%- do relation_columns.update({relation: []}) -%}\n\n {%- do dbt_utils._is_relation(relation, 'union_relations') -%}\n {%- do dbt_utils._is_ephemeral(relation, 'union_relations') -%}\n {%- set cols = adapter.get_columns_in_relation(relation) -%}\n {%- for col in cols -%}\n\n {#- If an exclude list was provided and the column is in the list, do nothing -#}\n {%- if exclude and col.column | lower in all_excludes -%}\n\n {#- If an include list was provided and the column is not in the list, do nothing -#}\n {%- elif include and col.column | lower not in all_includes -%}\n\n {#- Otherwise add the column to the column superset -#}\n {%- else -%}\n\n {#- update the list of columns in this relation -#}\n {%- do relation_columns[relation].append(col.column) -%}\n\n {%- if col.column in column_superset -%}\n\n {%- set stored = column_superset[col.column] -%}\n {%- if col.is_string() and stored.is_string() and col.string_size() > stored.string_size() -%}\n\n {%- do column_superset.update({col.column: col}) -%}\n\n {%- endif %}\n\n {%- else -%}\n\n {%- do column_superset.update({col.column: col}) -%}\n\n {%- endif -%}\n\n {%- endif -%}\n\n {%- endfor -%}\n {%- endfor -%}\n\n {%- set ordered_column_names = column_superset.keys() -%}\n {%- set dbt_command = flags.WHICH -%}\n\n\n {% if dbt_command in ['run', 'build'] %}\n {% if (include | length > 0 or exclude | length > 0) and not column_superset.keys() %}\n {%- set relations_string -%}\n {%- for relation in relations -%}\n {{ relation.name }}\n {%- if not loop.last %}, {% endif -%}\n {%- endfor -%}\n {%- endset -%}\n\n {%- set error_message -%}\n There were no columns found to union for relations {{ relations_string }}\n {%- endset -%}\n\n {{ exceptions.raise_compiler_error(error_message) }}\n {%- endif -%}\n {%- endif -%}\n\n {%- for relation in relations %}\n\n (\n select\n\n {%- if source_column_name is not none %}\n cast({{ dbt.string_literal(relation) }} as {{ dbt.type_string() }}) as {{ source_column_name }},\n {%- endif %}\n\n {% for col_name in ordered_column_names -%}\n\n {%- set col = column_superset[col_name] %}\n {%- set col_type = column_override.get(col.column, col.data_type) %}\n {%- set col_name = adapter.quote(col_name) if col_name in relation_columns[relation] else 'null' %}\n cast({{ col_name }} as {{ col_type }}) as {{ col.quoted }} {% if not loop.last %},{% endif -%}\n\n {%- endfor %}\n\n from {{ relation }}\n\n {% if where -%}\n where {{ where }}\n {%- endif %}\n )\n\n {% if not loop.last -%}\n union all\n {% endif -%}\n\n {%- endfor -%}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt_utils._is_ephemeral", "macro.dbt.string_literal", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5149922, "supported_languages": null}, "macro.dbt_utils.group_by": {"name": "group_by", "resource_type": "macro", 
"package_name": "dbt_utils", "path": "macros/sql/groupby.sql", "original_file_path": "macros/sql/groupby.sql", "unique_id": "macro.dbt_utils.group_by", "macro_sql": "{%- macro group_by(n) -%}\n {{ return(adapter.dispatch('group_by', 'dbt_utils')(n)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__group_by"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5151842, "supported_languages": null}, "macro.dbt_utils.default__group_by": {"name": "default__group_by", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/groupby.sql", "original_file_path": "macros/sql/groupby.sql", "unique_id": "macro.dbt_utils.default__group_by", "macro_sql": "\n\n{%- macro default__group_by(n) -%}\n\n group by {% for i in range(1, n + 1) -%}\n {{ i }}{{ ',' if not loop.last }} \n {%- endfor -%}\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.515339, "supported_languages": null}, "macro.dbt_utils.deduplicate": {"name": "deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.deduplicate", "macro_sql": "{%- macro deduplicate(relation, partition_by, order_by) -%}\n {{ return(adapter.dispatch('deduplicate', 'dbt_utils')(relation, partition_by, order_by)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.bigquery__deduplicate"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5158348, "supported_languages": null}, "macro.dbt_utils.default__deduplicate": {"name": "default__deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.default__deduplicate", "macro_sql": "\n\n{%- macro default__deduplicate(relation, partition_by, order_by) -%}\n\n with row_numbered as (\n select\n _inner.*,\n row_number() over (\n partition by {{ partition_by }}\n order by {{ order_by }}\n ) as rn\n from {{ relation }} as _inner\n )\n\n select\n distinct data.*\n from {{ relation }} as data\n {#\n -- Not all DBs will support natural joins but the ones that do include:\n -- Oracle, MySQL, SQLite, Redshift, Teradata, Materialize, Databricks\n -- Apache Spark, SingleStore, Vertica\n -- Those that do not appear to support natural joins include:\n -- SQLServer, Trino, Presto, Rockset, Athena\n #}\n natural join row_numbered\n where row_numbered.rn = 1\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.515967, "supported_languages": null}, "macro.dbt_utils.redshift__deduplicate": {"name": "redshift__deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.redshift__deduplicate", "macro_sql": "{% macro redshift__deduplicate(relation, partition_by, order_by) -%}\n\n {{ return(dbt_utils.default__deduplicate(relation, partition_by, order_by=order_by)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__deduplicate"]}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5160809, "supported_languages": null}, "macro.dbt_utils.postgres__deduplicate": {"name": "postgres__deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.postgres__deduplicate", "macro_sql": "\n{%- macro postgres__deduplicate(relation, partition_by, order_by) -%}\n\n select\n distinct on ({{ partition_by }}) *\n from {{ relation }}\n order by {{ partition_by }}{{ ',' ~ order_by }}\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.516192, "supported_languages": null}, "macro.dbt_utils.snowflake__deduplicate": {"name": "snowflake__deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.snowflake__deduplicate", "macro_sql": "\n{%- macro snowflake__deduplicate(relation, partition_by, order_by) -%}\n\n select *\n from {{ relation }}\n qualify\n row_number() over (\n partition by {{ partition_by }}\n order by {{ order_by }}\n ) = 1\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.516288, "supported_languages": null}, "macro.dbt_utils.bigquery__deduplicate": {"name": "bigquery__deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.bigquery__deduplicate", "macro_sql": "\n{%- macro bigquery__deduplicate(relation, partition_by, order_by) -%}\n\n select unique.*\n from (\n select\n array_agg (\n original\n order by {{ order_by }}\n limit 1\n )[offset(0)] unique\n from {{ relation }} original\n group by {{ partition_by }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5163891, "supported_languages": null}, "macro.dbt_utils.surrogate_key": {"name": "surrogate_key", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/surrogate_key.sql", "original_file_path": "macros/sql/surrogate_key.sql", "unique_id": "macro.dbt_utils.surrogate_key", "macro_sql": "{%- macro surrogate_key(field_list) -%}\n {% set frustrating_jinja_feature = varargs %}\n {{ return(adapter.dispatch('surrogate_key', 'dbt_utils')(field_list, *varargs)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__surrogate_key"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.516618, "supported_languages": null}, "macro.dbt_utils.default__surrogate_key": {"name": "default__surrogate_key", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/surrogate_key.sql", "original_file_path": "macros/sql/surrogate_key.sql", "unique_id": "macro.dbt_utils.default__surrogate_key", "macro_sql": "\n\n{%- macro default__surrogate_key(field_list) -%}\n\n{%- set error_message = '\nWarning: `dbt_utils.surrogate_key` has been replaced by 
\\\n`dbt_utils.generate_surrogate_key`. The new macro treats null values \\\ndifferently to empty strings. To restore the behaviour of the original \\\nmacro, add a global variable in dbt_project.yml called \\\n`surrogate_key_treat_nulls_as_empty_strings` to your \\\ndbt_project.yml file with a value of True. \\\nThe {}.{} model triggered this warning. \\\n'.format(model.package_name, model.name) -%}\n\n{%- do exceptions.raise_compiler_error(error_message) -%}\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.516761, "supported_languages": null}, "macro.dbt_utils.safe_add": {"name": "safe_add", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/safe_add.sql", "original_file_path": "macros/sql/safe_add.sql", "unique_id": "macro.dbt_utils.safe_add", "macro_sql": "{%- macro safe_add(field_list) -%}\n {{ return(adapter.dispatch('safe_add', 'dbt_utils')(field_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__safe_add"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.516989, "supported_languages": null}, "macro.dbt_utils.default__safe_add": {"name": "default__safe_add", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/safe_add.sql", "original_file_path": "macros/sql/safe_add.sql", "unique_id": "macro.dbt_utils.default__safe_add", "macro_sql": "\n\n{%- macro default__safe_add(field_list) -%}\n\n{%- if field_list is not iterable or field_list is string or field_list is mapping -%}\n\n{%- set error_message = '\nWarning: the `safe_add` macro now takes a single list argument instead of \\\nstring arguments. The {}.{} model triggered this warning. 
\\\n'.format(model.package_name, model.name) -%}\n\n{%- do exceptions.warn(error_message) -%}\n\n{%- endif -%}\n\n{% set fields = [] %}\n\n{%- for field in field_list -%}\n\n {% do fields.append(\"coalesce(\" ~ field ~ \", 0)\") %}\n\n{%- endfor -%}\n\n{{ fields|join(' +\\n ') }}\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.517303, "supported_languages": null}, "macro.dbt_utils.nullcheck": {"name": "nullcheck", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/nullcheck.sql", "original_file_path": "macros/sql/nullcheck.sql", "unique_id": "macro.dbt_utils.nullcheck", "macro_sql": "{% macro nullcheck(cols) %}\n {{ return(adapter.dispatch('nullcheck', 'dbt_utils')(cols)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__nullcheck"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.51752, "supported_languages": null}, "macro.dbt_utils.default__nullcheck": {"name": "default__nullcheck", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/nullcheck.sql", "original_file_path": "macros/sql/nullcheck.sql", "unique_id": "macro.dbt_utils.default__nullcheck", "macro_sql": "{% macro default__nullcheck(cols) %}\n{%- for col in cols %}\n\n {% if col.is_string() -%}\n\n nullif({{col.name}},'') as {{col.name}}\n\n {%- else -%}\n\n {{col.name}}\n\n {%- endif -%}\n\n{%- if not loop.last -%} , {%- endif -%}\n\n{%- endfor -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5177228, "supported_languages": null}, "macro.dbt_utils.get_tables_by_pattern_sql": {"name": "get_tables_by_pattern_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_pattern_sql.sql", "original_file_path": "macros/sql/get_tables_by_pattern_sql.sql", "unique_id": "macro.dbt_utils.get_tables_by_pattern_sql", "macro_sql": "{% macro get_tables_by_pattern_sql(schema_pattern, table_pattern, exclude='', database=target.database) %}\n {{ return(adapter.dispatch('get_tables_by_pattern_sql', 'dbt_utils')\n (schema_pattern, table_pattern, exclude, database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.bigquery__get_tables_by_pattern_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.518771, "supported_languages": null}, "macro.dbt_utils.default__get_tables_by_pattern_sql": {"name": "default__get_tables_by_pattern_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_pattern_sql.sql", "original_file_path": "macros/sql/get_tables_by_pattern_sql.sql", "unique_id": "macro.dbt_utils.default__get_tables_by_pattern_sql", "macro_sql": "{% macro default__get_tables_by_pattern_sql(schema_pattern, table_pattern, exclude='', database=target.database) %}\n\n select distinct\n table_schema as {{ adapter.quote('table_schema') }},\n table_name as {{ adapter.quote('table_name') }},\n {{ dbt_utils.get_table_types_sql() }}\n from {{ database }}.information_schema.tables\n where table_schema ilike '{{ schema_pattern }}'\n and table_name ilike '{{ table_pattern }}'\n and table_name not ilike '{{ exclude }}'\n\n{% endmacro %}", "depends_on": 
{"macros": ["macro.dbt_utils.get_table_types_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.518983, "supported_languages": null}, "macro.dbt_utils.bigquery__get_tables_by_pattern_sql": {"name": "bigquery__get_tables_by_pattern_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_pattern_sql.sql", "original_file_path": "macros/sql/get_tables_by_pattern_sql.sql", "unique_id": "macro.dbt_utils.bigquery__get_tables_by_pattern_sql", "macro_sql": "{% macro bigquery__get_tables_by_pattern_sql(schema_pattern, table_pattern, exclude='', database=target.database) %}\n\n {% if '%' in schema_pattern %}\n {% set schemata=dbt_utils._bigquery__get_matching_schemata(schema_pattern, database) %}\n {% else %}\n {% set schemata=[schema_pattern] %}\n {% endif %}\n\n {% set sql %}\n {% for schema in schemata %}\n select distinct\n table_schema,\n table_name,\n {{ dbt_utils.get_table_types_sql() }}\n\n from {{ adapter.quote(database) }}.{{ schema }}.INFORMATION_SCHEMA.TABLES\n where lower(table_name) like lower ('{{ table_pattern }}')\n and lower(table_name) not like lower ('{{ exclude }}')\n\n {% if not loop.last %} union all {% endif %}\n\n {% endfor %}\n {% endset %}\n\n {{ return(sql) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._bigquery__get_matching_schemata", "macro.dbt_utils.get_table_types_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5193899, "supported_languages": null}, "macro.dbt_utils._bigquery__get_matching_schemata": {"name": "_bigquery__get_matching_schemata", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_pattern_sql.sql", "original_file_path": "macros/sql/get_tables_by_pattern_sql.sql", "unique_id": "macro.dbt_utils._bigquery__get_matching_schemata", "macro_sql": "{% macro _bigquery__get_matching_schemata(schema_pattern, database) %}\n {% if execute %}\n\n {% set sql %}\n select schema_name from {{ adapter.quote(database) }}.INFORMATION_SCHEMA.SCHEMATA\n where lower(schema_name) like lower('{{ schema_pattern }}')\n {% endset %}\n\n {% set results=run_query(sql) %}\n\n {% set schemata=results.columns['schema_name'].values() %}\n\n {{ return(schemata) }}\n\n {% else %}\n\n {{ return([]) }}\n\n {% endif %}\n\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.519679, "supported_languages": null}, "macro.dbt_utils.get_column_values": {"name": "get_column_values", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_column_values.sql", "original_file_path": "macros/sql/get_column_values.sql", "unique_id": "macro.dbt_utils.get_column_values", "macro_sql": "{% macro get_column_values(table, column, order_by='count(*) desc', max_records=none, default=none, where=none) -%}\n {{ return(adapter.dispatch('get_column_values', 'dbt_utils')(table, column, order_by, max_records, default, where)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_column_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5203788, "supported_languages": null}, "macro.dbt_utils.default__get_column_values": {"name": 
"default__get_column_values", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_column_values.sql", "original_file_path": "macros/sql/get_column_values.sql", "unique_id": "macro.dbt_utils.default__get_column_values", "macro_sql": "{% macro default__get_column_values(table, column, order_by='count(*) desc', max_records=none, default=none, where=none) -%}\n {#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. #}\n {%- if not execute -%}\n {% set default = [] if not default %}\n {{ return(default) }}\n {% endif %}\n\n {%- do dbt_utils._is_ephemeral(table, 'get_column_values') -%}\n\n {# Not all relations are tables. Renaming for internal clarity without breaking functionality for anyone using named arguments #}\n {# TODO: Change the method signature in a future 0.x.0 release #}\n {%- set target_relation = table -%}\n\n {# adapter.load_relation is a convenience wrapper to avoid building a Relation when we already have one #}\n {% set relation_exists = (load_relation(target_relation)) is not none %}\n\n {%- call statement('get_column_values', fetch_result=true) %}\n\n {%- if not relation_exists and default is none -%}\n\n {{ exceptions.raise_compiler_error(\"In get_column_values(): relation \" ~ target_relation ~ \" does not exist and no default value was provided.\") }}\n\n {%- elif not relation_exists and default is not none -%}\n\n {{ log(\"Relation \" ~ target_relation ~ \" does not exist. Returning the default value: \" ~ default) }}\n\n {{ return(default) }}\n\n {%- else -%}\n\n\n select\n {{ column }} as value\n\n from {{ target_relation }}\n\n {% if where is not none %}\n where {{ where }}\n {% endif %}\n\n group by {{ column }}\n order by {{ order_by }}\n\n {% if max_records is not none %}\n limit {{ max_records }}\n {% endif %}\n\n {% endif %}\n\n {%- endcall -%}\n\n {%- set value_list = load_result('get_column_values') -%}\n\n {%- if value_list and value_list['data'] -%}\n {%- set values = value_list['data'] | map(attribute=0) | list %}\n {{ return(values) }}\n {%- else -%}\n {{ return(default) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._is_ephemeral", "macro.dbt.load_relation", "macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.52126, "supported_languages": null}, "macro.dbt_utils.pivot": {"name": "pivot", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/pivot.sql", "original_file_path": "macros/sql/pivot.sql", "unique_id": "macro.dbt_utils.pivot", "macro_sql": "{% macro pivot(column,\n values,\n alias=True,\n agg='sum',\n cmp='=',\n prefix='',\n suffix='',\n then_value=1,\n else_value=0,\n quote_identifiers=True,\n distinct=False) %}\n {{ return(adapter.dispatch('pivot', 'dbt_utils')(column, values, alias, agg, cmp, prefix, suffix, then_value, else_value, quote_identifiers, distinct)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__pivot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.521919, "supported_languages": null}, "macro.dbt_utils.default__pivot": {"name": "default__pivot", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/pivot.sql", "original_file_path": "macros/sql/pivot.sql", "unique_id": "macro.dbt_utils.default__pivot", "macro_sql": "{% macro default__pivot(column,\n 
values,\n alias=True,\n agg='sum',\n cmp='=',\n prefix='',\n suffix='',\n then_value=1,\n else_value=0,\n quote_identifiers=True,\n distinct=False) %}\n {% for value in values %}\n {{ agg }}(\n {% if distinct %} distinct {% endif %}\n case\n when {{ column }} {{ cmp }} '{{ dbt.escape_single_quotes(value) }}'\n then {{ then_value }}\n else {{ else_value }}\n end\n )\n {% if alias %}\n {% if quote_identifiers %}\n as {{ adapter.quote(prefix ~ value ~ suffix) }}\n {% else %}\n as {{ dbt_utils.slugify(prefix ~ value ~ suffix) }}\n {% endif %}\n {% endif %}\n {% if not loop.last %},{% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.escape_single_quotes", "macro.dbt_utils.slugify"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.522434, "supported_languages": null}, "macro.dbt_utils.get_filtered_columns_in_relation": {"name": "get_filtered_columns_in_relation", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_filtered_columns_in_relation.sql", "original_file_path": "macros/sql/get_filtered_columns_in_relation.sql", "unique_id": "macro.dbt_utils.get_filtered_columns_in_relation", "macro_sql": "{% macro get_filtered_columns_in_relation(from, except=[]) -%}\n {{ return(adapter.dispatch('get_filtered_columns_in_relation', 'dbt_utils')(from, except)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_filtered_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.522708, "supported_languages": null}, "macro.dbt_utils.default__get_filtered_columns_in_relation": {"name": "default__get_filtered_columns_in_relation", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_filtered_columns_in_relation.sql", "original_file_path": "macros/sql/get_filtered_columns_in_relation.sql", "unique_id": "macro.dbt_utils.default__get_filtered_columns_in_relation", "macro_sql": "{% macro default__get_filtered_columns_in_relation(from, except=[]) -%}\n {%- do dbt_utils._is_relation(from, 'get_filtered_columns_in_relation') -%}\n {%- do dbt_utils._is_ephemeral(from, 'get_filtered_columns_in_relation') -%}\n\n {# -- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. 
#}\n    {%- if not execute -%}\n        {{ return('') }}\n    {% endif %}\n\n    {%- set include_cols = [] %}\n    {%- set cols = adapter.get_columns_in_relation(from) -%}\n    {%- set except = except | map(\"lower\") | list %}\n    {%- for col in cols -%}\n        {%- if col.column|lower not in except -%}\n            {% do include_cols.append(col.column) %}\n        {%- endif %}\n    {%- endfor %}\n\n    {{ return(include_cols) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt_utils._is_ephemeral"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.523139, "supported_languages": null}, "macro.dbt_utils.width_bucket": {"name": "width_bucket", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/width_bucket.sql", "original_file_path": "macros/sql/width_bucket.sql", "unique_id": "macro.dbt_utils.width_bucket", "macro_sql": "{% macro width_bucket(expr, min_value, max_value, num_buckets) %}\n    {{ return(adapter.dispatch('width_bucket', 'dbt_utils') (expr, min_value, max_value, num_buckets)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__width_bucket"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5236359, "supported_languages": null}, "macro.dbt_utils.default__width_bucket": {"name": "default__width_bucket", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/width_bucket.sql", "original_file_path": "macros/sql/width_bucket.sql", "unique_id": "macro.dbt_utils.default__width_bucket", "macro_sql": "{% macro default__width_bucket(expr, min_value, max_value, num_buckets) -%}\n\n    {% set bin_size -%}\n    (( {{ max_value }} - {{ min_value }} ) / {{ num_buckets }} )\n    {%- endset %}\n    (\n        -- to break ties when the amount is exactly at the bucket edge\n        case\n            when\n                mod(\n                    {{ dbt.safe_cast(expr, dbt.type_numeric() ) }},\n                    {{ dbt.safe_cast(bin_size, dbt.type_numeric() ) }}\n                ) = 0\n                then 1\n            else 0\n        end\n    ) +\n    -- Anything over max_value goes into the N+1 bucket\n    least(\n        ceil(\n            ({{ expr }} - {{ min_value }})/{{ bin_size }}\n        ),\n        {{ num_buckets }} + 1\n    )\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.safe_cast", "macro.dbt.type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.523971, "supported_languages": null}, "macro.dbt_utils.snowflake__width_bucket": {"name": "snowflake__width_bucket", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/width_bucket.sql", "original_file_path": "macros/sql/width_bucket.sql", "unique_id": "macro.dbt_utils.snowflake__width_bucket", "macro_sql": "{% macro snowflake__width_bucket(expr, min_value, max_value, num_buckets) %}\n    width_bucket({{ expr }}, {{ min_value }}, {{ max_value }}, {{ num_buckets }} )\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.524083, "supported_languages": null}, "macro.dbt_utils.get_query_results_as_dict": {"name": "get_query_results_as_dict", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_query_results_as_dict.sql", "original_file_path": "macros/sql/get_query_results_as_dict.sql", "unique_id": "macro.dbt_utils.get_query_results_as_dict", "macro_sql": "{% macro get_query_results_as_dict(query) %}\n    {{ 
return(adapter.dispatch('get_query_results_as_dict', 'dbt_utils')(query)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_query_results_as_dict"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.524313, "supported_languages": null}, "macro.dbt_utils.default__get_query_results_as_dict": {"name": "default__get_query_results_as_dict", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_query_results_as_dict.sql", "original_file_path": "macros/sql/get_query_results_as_dict.sql", "unique_id": "macro.dbt_utils.default__get_query_results_as_dict", "macro_sql": "{% macro default__get_query_results_as_dict(query) %}\n\n{# This macro returns a dictionary of the form {column_name: (tuple_of_results)} #}\n\n {%- call statement('get_query_results', fetch_result=True,auto_begin=false) -%}\n\n {{ query }}\n\n {%- endcall -%}\n\n {% set sql_results={} %}\n\n {%- if execute -%}\n {% set sql_results_table = load_result('get_query_results').table.columns %}\n {% for column_name, column in sql_results_table.items() %}\n {% do sql_results.update({column_name: column.values()}) %}\n {% endfor %}\n {%- endif -%}\n\n {{ return(sql_results) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.524656, "supported_languages": null}, "macro.dbt_utils.generate_surrogate_key": {"name": "generate_surrogate_key", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/generate_surrogate_key.sql", "original_file_path": "macros/sql/generate_surrogate_key.sql", "unique_id": "macro.dbt_utils.generate_surrogate_key", "macro_sql": "{%- macro generate_surrogate_key(field_list) -%}\n {{ return(adapter.dispatch('generate_surrogate_key', 'dbt_utils')(field_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__generate_surrogate_key"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.524949, "supported_languages": null}, "macro.dbt_utils.default__generate_surrogate_key": {"name": "default__generate_surrogate_key", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/generate_surrogate_key.sql", "original_file_path": "macros/sql/generate_surrogate_key.sql", "unique_id": "macro.dbt_utils.default__generate_surrogate_key", "macro_sql": "\n\n{%- macro default__generate_surrogate_key(field_list) -%}\n\n{%- if var('surrogate_key_treat_nulls_as_empty_strings', False) -%}\n {%- set default_null_value = \"\" -%}\n{%- else -%}\n {%- set default_null_value = '_dbt_utils_surrogate_key_null_' -%}\n{%- endif -%}\n\n{%- set fields = [] -%}\n\n{%- for field in field_list -%}\n\n {%- do fields.append(\n \"coalesce(cast(\" ~ field ~ \" as \" ~ dbt.type_string() ~ \"), '\" ~ default_null_value ~\"')\"\n ) -%}\n\n {%- if not loop.last %}\n {%- do fields.append(\"'-'\") -%}\n {%- endif -%}\n\n{%- endfor -%}\n\n{{ dbt.hash(dbt.concat(fields)) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.type_string", "macro.dbt.hash", "macro.dbt.concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.525337, "supported_languages": null}, "macro.dbt_utils.get_table_types_sql": {"name": "get_table_types_sql", 
"resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_table_types_sql.sql", "original_file_path": "macros/sql/get_table_types_sql.sql", "unique_id": "macro.dbt_utils.get_table_types_sql", "macro_sql": "{%- macro get_table_types_sql() -%}\n {{ return(adapter.dispatch('get_table_types_sql', 'dbt_utils')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_utils.default__get_table_types_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5256698, "supported_languages": null}, "macro.dbt_utils.default__get_table_types_sql": {"name": "default__get_table_types_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_table_types_sql.sql", "original_file_path": "macros/sql/get_table_types_sql.sql", "unique_id": "macro.dbt_utils.default__get_table_types_sql", "macro_sql": "{% macro default__get_table_types_sql() %}\n case table_type\n when 'BASE TABLE' then 'table'\n when 'EXTERNAL TABLE' then 'external'\n when 'MATERIALIZED VIEW' then 'materializedview'\n else lower(table_type)\n end as {{ adapter.quote('table_type') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5257611, "supported_languages": null}, "macro.dbt_utils.postgres__get_table_types_sql": {"name": "postgres__get_table_types_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_table_types_sql.sql", "original_file_path": "macros/sql/get_table_types_sql.sql", "unique_id": "macro.dbt_utils.postgres__get_table_types_sql", "macro_sql": "{% macro postgres__get_table_types_sql() %}\n case table_type\n when 'BASE TABLE' then 'table'\n when 'FOREIGN' then 'external'\n when 'MATERIALIZED VIEW' then 'materializedview'\n else lower(table_type)\n end as {{ adapter.quote('table_type') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.525843, "supported_languages": null}, "macro.dbt_utils.databricks__get_table_types_sql": {"name": "databricks__get_table_types_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_table_types_sql.sql", "original_file_path": "macros/sql/get_table_types_sql.sql", "unique_id": "macro.dbt_utils.databricks__get_table_types_sql", "macro_sql": "{% macro databricks__get_table_types_sql() %}\n case table_type\n when 'MANAGED' then 'table'\n when 'BASE TABLE' then 'table'\n when 'MATERIALIZED VIEW' then 'materializedview'\n else lower(table_type)\n end as {{ adapter.quote('table_type') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5259268, "supported_languages": null}, "macro.dbt_utils.get_single_value": {"name": "get_single_value", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_single_value.sql", "original_file_path": "macros/sql/get_single_value.sql", "unique_id": "macro.dbt_utils.get_single_value", "macro_sql": "{% macro get_single_value(query, default=none) %}\n {{ return(adapter.dispatch('get_single_value', 'dbt_utils')(query, default)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_single_value"]}, "description": "", "meta": {}, 
"docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.526228, "supported_languages": null}, "macro.dbt_utils.default__get_single_value": {"name": "default__get_single_value", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_single_value.sql", "original_file_path": "macros/sql/get_single_value.sql", "unique_id": "macro.dbt_utils.default__get_single_value", "macro_sql": "{% macro default__get_single_value(query, default) %}\n\n{# This macro returns the (0, 0) record in a query, i.e. the first row of the first column #}\n\n {%- call statement('get_query_result', fetch_result=True, auto_begin=false) -%}\n\n {{ query }}\n\n {%- endcall -%}\n\n {%- if execute -%}\n\n {% set r = load_result('get_query_result').table.columns[0].values() %}\n {% if r | length == 0 %}\n {% do print('Query `' ~ query ~ '` returned no rows. Using the default value: ' ~ default) %}\n {% set sql_result = default %}\n {% else %}\n {% set sql_result = r[0] %}\n {% endif %}\n \n {%- else -%}\n \n {% set sql_result = default %}\n \n {%- endif -%}\n\n {% do return(sql_result) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5266402, "supported_languages": null}, "macro.dbt_utils.degrees_to_radians": {"name": "degrees_to_radians", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/haversine_distance.sql", "original_file_path": "macros/sql/haversine_distance.sql", "unique_id": "macro.dbt_utils.degrees_to_radians", "macro_sql": "{% macro degrees_to_radians(degrees) -%}\n acos(-1) * {{degrees}} / 180\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.527363, "supported_languages": null}, "macro.dbt_utils.haversine_distance": {"name": "haversine_distance", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/haversine_distance.sql", "original_file_path": "macros/sql/haversine_distance.sql", "unique_id": "macro.dbt_utils.haversine_distance", "macro_sql": "{% macro haversine_distance(lat1, lon1, lat2, lon2, unit='mi') -%}\n {{ return(adapter.dispatch('haversine_distance', 'dbt_utils')(lat1,lon1,lat2,lon2,unit)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.bigquery__haversine_distance"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.527531, "supported_languages": null}, "macro.dbt_utils.default__haversine_distance": {"name": "default__haversine_distance", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/haversine_distance.sql", "original_file_path": "macros/sql/haversine_distance.sql", "unique_id": "macro.dbt_utils.default__haversine_distance", "macro_sql": "{% macro default__haversine_distance(lat1, lon1, lat2, lon2, unit='mi') -%}\n{%- if unit == 'mi' %}\n {% set conversion_rate = 1 %}\n{% elif unit == 'km' %}\n {% set conversion_rate = 1.60934 %}\n{% else %}\n {{ exceptions.raise_compiler_error(\"unit input must be one of 'mi' or 'km'. 
Got \" ~ unit) }}\n{% endif %}\n\n 2 * 3961 * asin(sqrt(power((sin(radians(({{ lat2 }} - {{ lat1 }}) / 2))), 2) +\n cos(radians({{lat1}})) * cos(radians({{lat2}})) *\n power((sin(radians(({{ lon2 }} - {{ lon1 }}) / 2))), 2))) * {{ conversion_rate }}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.52789, "supported_languages": null}, "macro.dbt_utils.bigquery__haversine_distance": {"name": "bigquery__haversine_distance", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/haversine_distance.sql", "original_file_path": "macros/sql/haversine_distance.sql", "unique_id": "macro.dbt_utils.bigquery__haversine_distance", "macro_sql": "{% macro bigquery__haversine_distance(lat1, lon1, lat2, lon2, unit='mi') -%}\n{% set radians_lat1 = dbt_utils.degrees_to_radians(lat1) %}\n{% set radians_lat2 = dbt_utils.degrees_to_radians(lat2) %}\n{% set radians_lon1 = dbt_utils.degrees_to_radians(lon1) %}\n{% set radians_lon2 = dbt_utils.degrees_to_radians(lon2) %}\n{%- if unit == 'mi' %}\n {% set conversion_rate = 1 %}\n{% elif unit == 'km' %}\n {% set conversion_rate = 1.60934 %}\n{% else %}\n {{ exceptions.raise_compiler_error(\"unit input must be one of 'mi' or 'km'. Got \" ~ unit) }}\n{% endif %}\n 2 * 3961 * asin(sqrt(power(sin(({{ radians_lat2 }} - {{ radians_lat1 }}) / 2), 2) +\n cos({{ radians_lat1 }}) * cos({{ radians_lat2 }}) *\n power(sin(({{ radians_lon2 }} - {{ radians_lon1 }}) / 2), 2))) * {{ conversion_rate }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.degrees_to_radians"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.528363, "supported_languages": null}, "macro.spark_utils.get_tables": {"name": "get_tables", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/maintenance_operation.sql", "original_file_path": "macros/maintenance_operation.sql", "unique_id": "macro.spark_utils.get_tables", "macro_sql": "{% macro get_tables(table_regex_pattern='.*') %}\n\n {% set tables = [] %}\n {% for database in spark__list_schemas('not_used') %}\n {% for table in spark__list_relations_without_caching(database[0]) %}\n {% set db_tablename = database[0] ~ \".\" ~ table[1] %}\n {% set is_match = modules.re.match(table_regex_pattern, db_tablename) %}\n {% if is_match %}\n {% call statement('table_detail', fetch_result=True) -%}\n describe extended {{ db_tablename }}\n {% endcall %}\n\n {% set table_type = load_result('table_detail').table|reverse|selectattr(0, 'in', ('type', 'TYPE', 'Type'))|first %}\n {% if table_type[1]|lower != 'view' %}\n {{ tables.append(db_tablename) }}\n {% endif %}\n {% endif %}\n {% endfor %}\n {% endfor %}\n {{ return(tables) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.531761, "supported_languages": null}, "macro.spark_utils.get_delta_tables": {"name": "get_delta_tables", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/maintenance_operation.sql", "original_file_path": "macros/maintenance_operation.sql", "unique_id": "macro.spark_utils.get_delta_tables", "macro_sql": "{% macro get_delta_tables(table_regex_pattern='.*') %}\n\n {% set delta_tables = [] %}\n {% for db_tablename in get_tables(table_regex_pattern) 
%}\n {% call statement('table_detail', fetch_result=True) -%}\n describe extended {{ db_tablename }}\n {% endcall %}\n\n {% set table_type = load_result('table_detail').table|reverse|selectattr(0, 'in', ('provider', 'PROVIDER', 'Provider'))|first %}\n {% if table_type[1]|lower == 'delta' %}\n {{ delta_tables.append(db_tablename) }}\n {% endif %}\n {% endfor %}\n {{ return(delta_tables) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.get_tables", "macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.532187, "supported_languages": null}, "macro.spark_utils.get_statistic_columns": {"name": "get_statistic_columns", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/maintenance_operation.sql", "original_file_path": "macros/maintenance_operation.sql", "unique_id": "macro.spark_utils.get_statistic_columns", "macro_sql": "{% macro get_statistic_columns(table) %}\n\n {% call statement('input_columns', fetch_result=True) %}\n SHOW COLUMNS IN {{ table }}\n {% endcall %}\n {% set input_columns = load_result('input_columns').table %}\n\n {% set output_columns = [] %}\n {% for column in input_columns %}\n {% call statement('column_information', fetch_result=True) %}\n DESCRIBE TABLE {{ table }} `{{ column[0] }}`\n {% endcall %}\n {% if not load_result('column_information').table[1][1].startswith('struct') and not load_result('column_information').table[1][1].startswith('array') %}\n {{ output_columns.append('`' ~ column[0] ~ '`') }}\n {% endif %}\n {% endfor %}\n {{ return(output_columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.532728, "supported_languages": null}, "macro.spark_utils.spark_optimize_delta_tables": {"name": "spark_optimize_delta_tables", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/maintenance_operation.sql", "original_file_path": "macros/maintenance_operation.sql", "unique_id": "macro.spark_utils.spark_optimize_delta_tables", "macro_sql": "{% macro spark_optimize_delta_tables(table_regex_pattern='.*') %}\n\n {% for table in get_delta_tables(table_regex_pattern) %}\n {% set start=modules.datetime.datetime.now() %}\n {% set message_prefix=loop.index ~ \" of \" ~ loop.length %}\n {{ dbt_utils.log_info(message_prefix ~ \" Optimizing \" ~ table) }}\n {% do run_query(\"optimize \" ~ table) %}\n {% set end=modules.datetime.datetime.now() %}\n {% set total_seconds = (end - start).total_seconds() | round(2) %}\n {{ dbt_utils.log_info(message_prefix ~ \" Finished \" ~ table ~ \" in \" ~ total_seconds ~ \"s\") }}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.get_delta_tables", "macro.dbt_utils.log_info", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5331788, "supported_languages": null}, "macro.spark_utils.spark_vacuum_delta_tables": {"name": "spark_vacuum_delta_tables", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/maintenance_operation.sql", "original_file_path": "macros/maintenance_operation.sql", "unique_id": "macro.spark_utils.spark_vacuum_delta_tables", "macro_sql": "{% macro spark_vacuum_delta_tables(table_regex_pattern='.*') %}\n\n {% for table in 
get_delta_tables(table_regex_pattern) %}\n {% set start=modules.datetime.datetime.now() %}\n {% set message_prefix=loop.index ~ \" of \" ~ loop.length %}\n {{ dbt_utils.log_info(message_prefix ~ \" Vacuuming \" ~ table) }}\n {% do run_query(\"vacuum \" ~ table) %}\n {% set end=modules.datetime.datetime.now() %}\n {% set total_seconds = (end - start).total_seconds() | round(2) %}\n {{ dbt_utils.log_info(message_prefix ~ \" Finished \" ~ table ~ \" in \" ~ total_seconds ~ \"s\") }}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.get_delta_tables", "macro.dbt_utils.log_info", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.533621, "supported_languages": null}, "macro.spark_utils.spark_analyze_tables": {"name": "spark_analyze_tables", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/maintenance_operation.sql", "original_file_path": "macros/maintenance_operation.sql", "unique_id": "macro.spark_utils.spark_analyze_tables", "macro_sql": "{% macro spark_analyze_tables(table_regex_pattern='.*') %}\n\n {% for table in get_tables(table_regex_pattern) %}\n {% set start=modules.datetime.datetime.now() %}\n {% set columns = get_statistic_columns(table) | join(',') %}\n {% set message_prefix=loop.index ~ \" of \" ~ loop.length %}\n {{ dbt_utils.log_info(message_prefix ~ \" Analyzing \" ~ table) }}\n {% if columns != '' %}\n {% do run_query(\"analyze table \" ~ table ~ \" compute statistics for columns \" ~ columns) %}\n {% endif %}\n {% set end=modules.datetime.datetime.now() %}\n {% set total_seconds = (end - start).total_seconds() | round(2) %}\n {{ dbt_utils.log_info(message_prefix ~ \" Finished \" ~ table ~ \" in \" ~ total_seconds ~ \"s\") }}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.get_tables", "macro.spark_utils.get_statistic_columns", "macro.dbt_utils.log_info", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5342011, "supported_languages": null}, "macro.spark_utils.spark__concat": {"name": "spark__concat", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/cross_db_utils/concat.sql", "original_file_path": "macros/dbt_utils/cross_db_utils/concat.sql", "unique_id": "macro.spark_utils.spark__concat", "macro_sql": "{% macro spark__concat(fields) -%}\n concat({{ fields|join(', ') }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.534313, "supported_languages": null}, "macro.spark_utils.spark__type_numeric": {"name": "spark__type_numeric", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/cross_db_utils/datatypes.sql", "original_file_path": "macros/dbt_utils/cross_db_utils/datatypes.sql", "unique_id": "macro.spark_utils.spark__type_numeric", "macro_sql": "{% macro spark__type_numeric() %}\n decimal(28, 6)\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.534382, "supported_languages": null}, "macro.spark_utils.spark__dateadd": {"name": "spark__dateadd", "resource_type": "macro", "package_name": "spark_utils", "path": 
"macros/dbt_utils/cross_db_utils/dateadd.sql", "original_file_path": "macros/dbt_utils/cross_db_utils/dateadd.sql", "unique_id": "macro.spark_utils.spark__dateadd", "macro_sql": "{% macro spark__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n {%- set clock_component -%}\n {# make sure the dates + timestamps are real, otherwise raise an error asap #}\n to_unix_timestamp({{ spark_utils.assert_not_null('to_timestamp', from_date_or_timestamp) }})\n - to_unix_timestamp({{ spark_utils.assert_not_null('date', from_date_or_timestamp) }})\n {%- endset -%}\n\n {%- if datepart in ['day', 'week'] -%}\n \n {%- set multiplier = 7 if datepart == 'week' else 1 -%}\n\n to_timestamp(\n to_unix_timestamp(\n date_add(\n {{ spark_utils.assert_not_null('date', from_date_or_timestamp) }},\n cast({{interval}} * {{multiplier}} as int)\n )\n ) + {{clock_component}}\n )\n\n {%- elif datepart in ['month', 'quarter', 'year'] -%}\n \n {%- set multiplier -%} \n {%- if datepart == 'month' -%} 1\n {%- elif datepart == 'quarter' -%} 3\n {%- elif datepart == 'year' -%} 12\n {%- endif -%}\n {%- endset -%}\n\n to_timestamp(\n to_unix_timestamp(\n add_months(\n {{ spark_utils.assert_not_null('date', from_date_or_timestamp) }},\n cast({{interval}} * {{multiplier}} as int)\n )\n ) + {{clock_component}}\n )\n\n {%- elif datepart in ('hour', 'minute', 'second', 'millisecond', 'microsecond') -%}\n \n {%- set multiplier -%} \n {%- if datepart == 'hour' -%} 3600\n {%- elif datepart == 'minute' -%} 60\n {%- elif datepart == 'second' -%} 1\n {%- elif datepart == 'millisecond' -%} (1/1000000)\n {%- elif datepart == 'microsecond' -%} (1/1000000)\n {%- endif -%}\n {%- endset -%}\n\n to_timestamp(\n {{ spark_utils.assert_not_null('to_unix_timestamp', from_date_or_timestamp) }}\n + cast({{interval}} * {{multiplier}} as int)\n )\n\n {%- else -%}\n\n {{ exceptions.raise_compiler_error(\"macro dateadd not implemented for datepart ~ '\" ~ datepart ~ \"' ~ on Spark\") }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.assert_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5360792, "supported_languages": null}, "macro.spark_utils.spark__datediff": {"name": "spark__datediff", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/cross_db_utils/datediff.sql", "original_file_path": "macros/dbt_utils/cross_db_utils/datediff.sql", "unique_id": "macro.spark_utils.spark__datediff", "macro_sql": "{% macro spark__datediff(first_date, second_date, datepart) %}\n\n {%- if datepart in ['day', 'week', 'month', 'quarter', 'year'] -%}\n \n {# make sure the dates are real, otherwise raise an error asap #}\n {% set first_date = spark_utils.assert_not_null('date', first_date) %}\n {% set second_date = spark_utils.assert_not_null('date', second_date) %}\n \n {%- endif -%}\n \n {%- if datepart == 'day' -%}\n \n datediff({{second_date}}, {{first_date}})\n \n {%- elif datepart == 'week' -%}\n \n case when {{first_date}} < {{second_date}}\n then floor(datediff({{second_date}}, {{first_date}})/7)\n else ceil(datediff({{second_date}}, {{first_date}})/7)\n end\n \n -- did we cross a week boundary (Sunday)?\n + case\n when {{first_date}} < {{second_date}} and dayofweek({{second_date}}) < dayofweek({{first_date}}) then 1\n when {{first_date}} > {{second_date}} and dayofweek({{second_date}}) > dayofweek({{first_date}}) then -1\n else 0 end\n\n {%- elif datepart == 'month' -%}\n\n case when 
{{first_date}} < {{second_date}}\n            then floor(months_between(date({{second_date}}), date({{first_date}})))\n            else ceil(months_between(date({{second_date}}), date({{first_date}})))\n            end\n        \n        -- did we cross a month boundary?\n        + case\n            when {{first_date}} < {{second_date}} and dayofmonth({{second_date}}) < dayofmonth({{first_date}}) then 1\n            when {{first_date}} > {{second_date}} and dayofmonth({{second_date}}) > dayofmonth({{first_date}}) then -1\n            else 0 end\n    \n    {%- elif datepart == 'quarter' -%}\n    \n        case when {{first_date}} < {{second_date}}\n            then floor(months_between(date({{second_date}}), date({{first_date}}))/3)\n            else ceil(months_between(date({{second_date}}), date({{first_date}}))/3)\n            end\n        \n        -- did we cross a quarter boundary?\n        + case\n            when {{first_date}} < {{second_date}} and (\n                (dayofyear({{second_date}}) - (quarter({{second_date}}) * 365/4))\n                < (dayofyear({{first_date}}) - (quarter({{first_date}}) * 365/4))\n            ) then 1\n            when {{first_date}} > {{second_date}} and (\n                (dayofyear({{second_date}}) - (quarter({{second_date}}) * 365/4))\n                > (dayofyear({{first_date}}) - (quarter({{first_date}}) * 365/4))\n            ) then -1\n            else 0 end\n\n    {%- elif datepart == 'year' -%}\n    \n        year({{second_date}}) - year({{first_date}})\n\n    {%- elif datepart in ('hour', 'minute', 'second', 'millisecond', 'microsecond') -%}\n    \n        {%- set divisor -%} \n            {%- if datepart == 'hour' -%} 3600\n            {%- elif datepart == 'minute' -%} 60\n            {%- elif datepart == 'second' -%} 1\n            {%- elif datepart == 'millisecond' -%} (1/1000)\n            {%- elif datepart == 'microsecond' -%} (1/1000000)\n            {%- endif -%}\n        {%- endset -%}\n\n        case when {{first_date}} < {{second_date}}\n            then ceil((\n                {# make sure the timestamps are real, otherwise raise an error asap #}\n                {{ spark_utils.assert_not_null('to_unix_timestamp', spark_utils.assert_not_null('to_timestamp', second_date)) }}\n                - {{ spark_utils.assert_not_null('to_unix_timestamp', spark_utils.assert_not_null('to_timestamp', first_date)) }}\n            ) / {{divisor}})\n            else floor((\n                {{ spark_utils.assert_not_null('to_unix_timestamp', spark_utils.assert_not_null('to_timestamp', second_date)) }}\n                - {{ spark_utils.assert_not_null('to_unix_timestamp', spark_utils.assert_not_null('to_timestamp', first_date)) }}\n            ) / {{divisor}})\n            end\n        \n        {% if datepart == 'millisecond' %}\n            + cast(date_format({{second_date}}, 'SSS') as int)\n            - cast(date_format({{first_date}}, 'SSS') as int)\n        {% endif %}\n        \n        {% if datepart == 'microsecond' %} \n            {% set capture_str = '[0-9]{4}-[0-9]{2}-[0-9]{2}.[0-9]{2}:[0-9]{2}:[0-9]{2}.([0-9]{6})' %}\n            -- Spark doesn't really support microseconds, so this is a massive hack!\n            -- It will only work if the timestamp-string is of the format\n            -- 'yyyy-MM-dd-HH mm.ss.SSSSSS'\n            + cast(regexp_extract({{second_date}}, '{{capture_str}}', 1) as int)\n            - cast(regexp_extract({{first_date}}, '{{capture_str}}', 1) as int)    \n        {% endif %}\n\n    {%- else -%}\n\n        {{ exceptions.raise_compiler_error(\"macro datediff not implemented for datepart '\" ~ datepart ~ \"' on Spark\") }}\n\n    {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.assert_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.540579, "supported_languages": null}, "macro.spark_utils.spark__current_timestamp": {"name": "spark__current_timestamp", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/cross_db_utils/current_timestamp.sql", "original_file_path": 
"macros/dbt_utils/cross_db_utils/current_timestamp.sql", "unique_id": "macro.spark_utils.spark__current_timestamp", "macro_sql": "{% macro spark__current_timestamp() %}\n current_timestamp()\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.540661, "supported_languages": null}, "macro.spark_utils.spark__current_timestamp_in_utc": {"name": "spark__current_timestamp_in_utc", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/cross_db_utils/current_timestamp.sql", "original_file_path": "macros/dbt_utils/cross_db_utils/current_timestamp.sql", "unique_id": "macro.spark_utils.spark__current_timestamp_in_utc", "macro_sql": "{% macro spark__current_timestamp_in_utc() %}\n unix_timestamp()\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5407062, "supported_languages": null}, "macro.spark_utils.spark__split_part": {"name": "spark__split_part", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/cross_db_utils/split_part.sql", "original_file_path": "macros/dbt_utils/cross_db_utils/split_part.sql", "unique_id": "macro.spark_utils.spark__split_part", "macro_sql": "{% macro spark__split_part(string_text, delimiter_text, part_number) %}\n\n {% set delimiter_expr %}\n \n -- escape if starts with a special character\n case when regexp_extract({{ delimiter_text }}, '([^A-Za-z0-9])(.*)', 1) != '_'\n then concat('\\\\', {{ delimiter_text }})\n else {{ delimiter_text }} end\n \n {% endset %}\n\n {% set split_part_expr %}\n \n split(\n {{ string_text }},\n {{ delimiter_expr }}\n )[({{ part_number - 1 }})]\n \n {% endset %}\n \n {{ return(split_part_expr) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5410602, "supported_languages": null}, "macro.spark_utils.spark__get_relations_by_pattern": {"name": "spark__get_relations_by_pattern", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "original_file_path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "unique_id": "macro.spark_utils.spark__get_relations_by_pattern", "macro_sql": "{% macro spark__get_relations_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database) %}\n\n {%- call statement('get_tables', fetch_result=True) %}\n\n show table extended in {{ schema_pattern }} like '{{ table_pattern }}'\n\n {%- endcall -%}\n\n {%- set table_list = load_result('get_tables') -%}\n\n {%- if table_list and table_list['table'] -%}\n {%- set tbl_relations = [] -%}\n {%- for row in table_list['table'] -%}\n {%- set tbl_relation = api.Relation.create(\n database=None,\n schema=row[0],\n identifier=row[1],\n type=('view' if 'Type: VIEW' in row[3] else 'table')\n ) -%}\n {%- do tbl_relations.append(tbl_relation) -%}\n {%- endfor -%}\n\n {{ return(tbl_relations) }}\n {%- else -%}\n {{ return([]) }}\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.542069, "supported_languages": null}, "macro.spark_utils.spark__get_relations_by_prefix": {"name": 
"spark__get_relations_by_prefix", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "original_file_path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "unique_id": "macro.spark_utils.spark__get_relations_by_prefix", "macro_sql": "{% macro spark__get_relations_by_prefix(schema_pattern, table_pattern, exclude='', database=target.database) %}\n {% set table_pattern = table_pattern ~ '*' %}\n {{ return(spark_utils.spark__get_relations_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.spark__get_relations_by_pattern"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5422661, "supported_languages": null}, "macro.spark_utils.spark__get_tables_by_pattern": {"name": "spark__get_tables_by_pattern", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "original_file_path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "unique_id": "macro.spark_utils.spark__get_tables_by_pattern", "macro_sql": "{% macro spark__get_tables_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database) %}\n {{ return(spark_utils.spark__get_relations_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.spark__get_relations_by_pattern"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.542422, "supported_languages": null}, "macro.spark_utils.spark__get_tables_by_prefix": {"name": "spark__get_tables_by_prefix", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "original_file_path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "unique_id": "macro.spark_utils.spark__get_tables_by_prefix", "macro_sql": "{% macro spark__get_tables_by_prefix(schema_pattern, table_pattern, exclude='', database=target.database) %}\n {{ return(spark_utils.spark__get_relations_by_prefix(schema_pattern, table_pattern, exclude='', database=target.database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.spark__get_relations_by_prefix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.542573, "supported_languages": null}, "macro.spark_utils.assert_not_null": {"name": "assert_not_null", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/etc/assert_not_null.sql", "original_file_path": "macros/etc/assert_not_null.sql", "unique_id": "macro.spark_utils.assert_not_null", "macro_sql": "{% macro assert_not_null(function, arg) -%}\n {{ return(adapter.dispatch('assert_not_null', 'spark_utils')(function, arg)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.spark_utils.default__assert_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.542763, "supported_languages": null}, "macro.spark_utils.default__assert_not_null": {"name": "default__assert_not_null", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/etc/assert_not_null.sql", "original_file_path": "macros/etc/assert_not_null.sql", "unique_id": 
"macro.spark_utils.default__assert_not_null", "macro_sql": "{% macro default__assert_not_null(function, arg) %}\n\n coalesce({{function}}({{arg}}), nvl2({{function}}({{arg}}), assert_true({{function}}({{arg}}) is not null), null))\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5428748, "supported_languages": null}, "macro.spark_utils.spark__convert_timezone": {"name": "spark__convert_timezone", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/snowplow/convert_timezone.sql", "original_file_path": "macros/snowplow/convert_timezone.sql", "unique_id": "macro.spark_utils.spark__convert_timezone", "macro_sql": "{% macro spark__convert_timezone(in_tz, out_tz, in_timestamp) %}\n from_utc_timestamp(to_utc_timestamp({{in_timestamp}}, {{in_tz}}), {{out_tz}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5429919, "supported_languages": null}, "macro.dbt_date.get_date_dimension": {"name": "get_date_dimension", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_date_dimension.sql", "original_file_path": "macros/get_date_dimension.sql", "unique_id": "macro.dbt_date.get_date_dimension", "macro_sql": "{% macro get_date_dimension(start_date, end_date) %}\n {{ adapter.dispatch('get_date_dimension', 'dbt_date') (start_date, end_date) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__get_date_dimension"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.548885, "supported_languages": null}, "macro.dbt_date.default__get_date_dimension": {"name": "default__get_date_dimension", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_date_dimension.sql", "original_file_path": "macros/get_date_dimension.sql", "unique_id": "macro.dbt_date.default__get_date_dimension", "macro_sql": "{% macro default__get_date_dimension(start_date, end_date) %}\nwith base_dates as (\n {{ dbt_date.get_base_dates(start_date, end_date) }}\n),\ndates_with_prior_year_dates as (\n\n select\n cast(d.date_day as date) as date_day,\n cast({{ dbt.dateadd('year', -1 , 'd.date_day') }} as date) as prior_year_date_day,\n cast({{ dbt.dateadd('day', -364 , 'd.date_day') }} as date) as prior_year_over_year_date_day\n from\n \tbase_dates d\n\n)\nselect\n d.date_day,\n {{ dbt_date.yesterday('d.date_day') }} as prior_date_day,\n {{ dbt_date.tomorrow('d.date_day') }} as next_date_day,\n d.prior_year_date_day as prior_year_date_day,\n d.prior_year_over_year_date_day,\n {{ dbt_date.day_of_week('d.date_day', isoweek=false) }} as day_of_week,\n {{ dbt_date.day_of_week('d.date_day', isoweek=true) }} as day_of_week_iso,\n {{ dbt_date.day_name('d.date_day', short=false) }} as day_of_week_name,\n {{ dbt_date.day_name('d.date_day', short=true) }} as day_of_week_name_short,\n {{ dbt_date.day_of_month('d.date_day') }} as day_of_month,\n {{ dbt_date.day_of_year('d.date_day') }} as day_of_year,\n\n {{ dbt_date.week_start('d.date_day') }} as week_start_date,\n {{ dbt_date.week_end('d.date_day') }} as week_end_date,\n {{ dbt_date.week_start('d.prior_year_over_year_date_day') }} as prior_year_week_start_date,\n {{ dbt_date.week_end('d.prior_year_over_year_date_day') }} as prior_year_week_end_date,\n {{ 
dbt_date.week_of_year('d.date_day') }} as week_of_year,\n\n {{ dbt_date.iso_week_start('d.date_day') }} as iso_week_start_date,\n {{ dbt_date.iso_week_end('d.date_day') }} as iso_week_end_date,\n {{ dbt_date.iso_week_start('d.prior_year_over_year_date_day') }} as prior_year_iso_week_start_date,\n {{ dbt_date.iso_week_end('d.prior_year_over_year_date_day') }} as prior_year_iso_week_end_date,\n {{ dbt_date.iso_week_of_year('d.date_day') }} as iso_week_of_year,\n\n {{ dbt_date.week_of_year('d.prior_year_over_year_date_day') }} as prior_year_week_of_year,\n {{ dbt_date.iso_week_of_year('d.prior_year_over_year_date_day') }} as prior_year_iso_week_of_year,\n\n cast({{ dbt_date.date_part('month', 'd.date_day') }} as {{ dbt.type_int() }}) as month_of_year,\n {{ dbt_date.month_name('d.date_day', short=false) }} as month_name,\n {{ dbt_date.month_name('d.date_day', short=true) }} as month_name_short,\n\n cast({{ dbt.date_trunc('month', 'd.date_day') }} as date) as month_start_date,\n cast({{ last_day('d.date_day', 'month') }} as date) as month_end_date,\n\n cast({{ dbt.date_trunc('month', 'd.prior_year_date_day') }} as date) as prior_year_month_start_date,\n cast({{ last_day('d.prior_year_date_day', 'month') }} as date) as prior_year_month_end_date,\n\n cast({{ dbt_date.date_part('quarter', 'd.date_day') }} as {{ dbt.type_int() }}) as quarter_of_year,\n cast({{ dbt.date_trunc('quarter', 'd.date_day') }} as date) as quarter_start_date,\n cast({{ last_day('d.date_day', 'quarter') }} as date) as quarter_end_date,\n\n cast({{ dbt_date.date_part('year', 'd.date_day') }} as {{ dbt.type_int() }}) as year_number,\n cast({{ dbt.date_trunc('year', 'd.date_day') }} as date) as year_start_date,\n cast({{ last_day('d.date_day', 'year') }} as date) as year_end_date\nfrom\n dates_with_prior_year_dates d\norder by 1\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.get_base_dates", "macro.dbt.dateadd", "macro.dbt_date.yesterday", "macro.dbt_date.tomorrow", "macro.dbt_date.day_of_week", "macro.dbt_date.day_name", "macro.dbt_date.day_of_month", "macro.dbt_date.day_of_year", "macro.dbt_date.week_start", "macro.dbt_date.week_end", "macro.dbt_date.week_of_year", "macro.dbt_date.iso_week_start", "macro.dbt_date.iso_week_end", "macro.dbt_date.iso_week_of_year", "macro.dbt_date.date_part", "macro.dbt.type_int", "macro.dbt_date.month_name", "macro.dbt.date_trunc", "macro.dbt.last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.550207, "supported_languages": null}, "macro.dbt_date.postgres__get_date_dimension": {"name": "postgres__get_date_dimension", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_date_dimension.sql", "original_file_path": "macros/get_date_dimension.sql", "unique_id": "macro.dbt_date.postgres__get_date_dimension", "macro_sql": "{% macro postgres__get_date_dimension(start_date, end_date) %}\nwith base_dates as (\n {{ dbt_date.get_base_dates(start_date, end_date) }}\n),\ndates_with_prior_year_dates as (\n\n select\n cast(d.date_day as date) as date_day,\n cast({{ dbt.dateadd('year', -1 , 'd.date_day') }} as date) as prior_year_date_day,\n cast({{ dbt.dateadd('day', -364 , 'd.date_day') }} as date) as prior_year_over_year_date_day\n from\n \tbase_dates d\n\n)\nselect\n d.date_day,\n {{ dbt_date.yesterday('d.date_day') }} as prior_date_day,\n {{ dbt_date.tomorrow('d.date_day') }} as next_date_day,\n d.prior_year_date_day as prior_year_date_day,\n d.prior_year_over_year_date_day,\n 
[... machine-generated dbt docs JSON elided: regenerated documentation artifact enumerating dbt_date package macro definitions (get_base_dates, date_spine, generate_series, get_powers_of_two, fiscal-date helpers, and calendar-date helpers such as day_name, week_start, iso_week_start, day_of_week, date_part, and from_unixtimestamp) ...]
-%}\n -- Sunday(1) to Saturday (7)\n cast({{ dbt_date.date_part(dow_part, date) }} + 1 as {{ dbt.type_int() }})\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.572414, "supported_languages": null}, "macro.dbt_date.redshift__day_of_week": {"name": "redshift__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.redshift__day_of_week", "macro_sql": "\n\n\n{%- macro redshift__day_of_week(date, isoweek) -%}\n\n {%- set dow = dbt_date.date_part('dayofweek', date) -%}\n\n {%- if isoweek -%}\n case\n -- Shift start of week from Sunday (0) to Monday (1)\n when {{ dow }} = 0 then 7\n else cast({{ dow }} as {{ dbt.type_bigint() }})\n end\n {%- else -%}\n cast({{ dow }} + 1 as {{ dbt.type_bigint() }})\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt.type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5726311, "supported_languages": null}, "macro.dbt_date.duckdb__day_of_week": {"name": "duckdb__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.duckdb__day_of_week", "macro_sql": "\n\n{%- macro duckdb__day_of_week(date, isoweek) -%}\n{{ return(dbt_date.postgres__day_of_week(date, isoweek)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__day_of_week"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.572733, "supported_languages": null}, "macro.dbt_date.spark__day_of_week": {"name": "spark__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.spark__day_of_week", "macro_sql": "\n\n\n{%- macro spark__day_of_week(date, isoweek) -%}\n\n {%- set dow = \"dayofweek_iso\" if isoweek else \"dayofweek\" -%}\n\n {{ dbt_date.date_part(dow, date) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5728588, "supported_languages": null}, "macro.dbt_date.trino__day_of_week": {"name": "trino__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.trino__day_of_week", "macro_sql": "\n\n\n{%- macro trino__day_of_week(date, isoweek) -%}\n\n {%- set dow = dbt_date.date_part('day_of_week', date) -%}\n\n {%- if isoweek -%}\n {{ dow }}\n {%- else -%}\n case\n when {{ dow }} = 7 then 1\n else {{ dow }} + 1\n end\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.573025, "supported_languages": null}, 
"macro.dbt_date.iso_week_end": {"name": "iso_week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_end.sql", "original_file_path": "macros/calendar_date/iso_week_end.sql", "unique_id": "macro.dbt_date.iso_week_end", "macro_sql": "{%- macro iso_week_end(date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{{ adapter.dispatch('iso_week_end', 'dbt_date') (dt) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt_date.default__iso_week_end"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.573298, "supported_languages": null}, "macro.dbt_date._iso_week_end": {"name": "_iso_week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_end.sql", "original_file_path": "macros/calendar_date/iso_week_end.sql", "unique_id": "macro.dbt_date._iso_week_end", "macro_sql": "{%- macro _iso_week_end(date, week_type) -%}\n{%- set dt = dbt_date.iso_week_start(date) -%}\n{{ dbt_date.n_days_away(6, dt) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.iso_week_start", "macro.dbt_date.n_days_away"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5734231, "supported_languages": null}, "macro.dbt_date.default__iso_week_end": {"name": "default__iso_week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_end.sql", "original_file_path": "macros/calendar_date/iso_week_end.sql", "unique_id": "macro.dbt_date.default__iso_week_end", "macro_sql": "\n\n{%- macro default__iso_week_end(date) -%}\n{{ dbt_date._iso_week_end(date, 'isoweek') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_end"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.573505, "supported_languages": null}, "macro.dbt_date.snowflake__iso_week_end": {"name": "snowflake__iso_week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_end.sql", "original_file_path": "macros/calendar_date/iso_week_end.sql", "unique_id": "macro.dbt_date.snowflake__iso_week_end", "macro_sql": "\n\n{%- macro snowflake__iso_week_end(date) -%}\n{{ dbt_date._iso_week_end(date, 'weekiso') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_end"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.573586, "supported_languages": null}, "macro.dbt_date.n_weeks_ago": {"name": "n_weeks_ago", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/n_weeks_ago.sql", "original_file_path": "macros/calendar_date/n_weeks_ago.sql", "unique_id": "macro.dbt_date.n_weeks_ago", "macro_sql": "{%- macro n_weeks_ago(n, tz=None) -%}\n{%- set n = n|int -%}\n{{ dbt.date_trunc('week',\n dbt.dateadd('week', -1 * n,\n dbt_date.today(tz)\n )\n ) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.date_trunc", "macro.dbt.dateadd", "macro.dbt_date.today"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.573813, "supported_languages": null}, "macro.dbt_date.month_name": {"name": "month_name", "resource_type": "macro", "package_name": 
"dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.month_name", "macro_sql": "{%- macro month_name(date, short=True) -%}\n {{ adapter.dispatch('month_name', 'dbt_date') (date, short) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.bigquery__month_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5742369, "supported_languages": null}, "macro.dbt_date.default__month_name": {"name": "default__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.default__month_name", "macro_sql": "\n\n{%- macro default__month_name(date, short) -%}\n{%- set f = 'MON' if short else 'MONTH' -%}\n to_char({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.574356, "supported_languages": null}, "macro.dbt_date.bigquery__month_name": {"name": "bigquery__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.bigquery__month_name", "macro_sql": "\n\n{%- macro bigquery__month_name(date, short) -%}\n{%- set f = '%b' if short else '%B' -%}\n format_date('{{ f }}', cast({{ date }} as date))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.574476, "supported_languages": null}, "macro.dbt_date.snowflake__month_name": {"name": "snowflake__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.snowflake__month_name", "macro_sql": "\n\n{%- macro snowflake__month_name(date, short) -%}\n{%- set f = 'MON' if short else 'MMMM' -%}\n to_char({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.574601, "supported_languages": null}, "macro.dbt_date.postgres__month_name": {"name": "postgres__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.postgres__month_name", "macro_sql": "\n\n{%- macro postgres__month_name(date, short) -%}\n{# FM = Fill mode, which suppresses padding blanks #}\n{%- set f = 'FMMon' if short else 'FMMonth' -%}\n to_char({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5747352, "supported_languages": null}, "macro.dbt_date.duckdb__month_name": {"name": "duckdb__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.duckdb__month_name", "macro_sql": "\n\n\n{%- macro 
duckdb__month_name(date, short) -%}\n {%- if short -%}\n substr(monthname({{ date }}), 1, 3)\n {%- else -%}\n monthname({{ date }})\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5748532, "supported_languages": null}, "macro.dbt_date.spark__month_name": {"name": "spark__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.spark__month_name", "macro_sql": "\n\n{%- macro spark__month_name(date, short) -%}\n{%- set f = 'MMM' if short else 'MMMM' -%}\n date_format({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.574977, "supported_languages": null}, "macro.dbt_date.trino__month_name": {"name": "trino__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.trino__month_name", "macro_sql": "\n\n{%- macro trino__month_name(date, short) -%}\n{%- set f = 'b' if short else 'M' -%}\n date_format({{ date }}, '%{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.575095, "supported_languages": null}, "macro.dbt_date.last_month_name": {"name": "last_month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/last_month_name.sql", "original_file_path": "macros/calendar_date/last_month_name.sql", "unique_id": "macro.dbt_date.last_month_name", "macro_sql": "{%- macro last_month_name(short=True, tz=None) -%}\n{{ dbt_date.month_name(dbt_date.last_month(tz), short=short) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.month_name", "macro.dbt_date.last_month"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.575237, "supported_languages": null}, "macro.dbt_date.week_of_year": {"name": "week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_of_year.sql", "original_file_path": "macros/calendar_date/week_of_year.sql", "unique_id": "macro.dbt_date.week_of_year", "macro_sql": "{%- macro week_of_year(date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{{ adapter.dispatch('week_of_year', 'dbt_date') (dt) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt_date.default__week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5755038, "supported_languages": null}, "macro.dbt_date.default__week_of_year": {"name": "default__week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_of_year.sql", "original_file_path": "macros/calendar_date/week_of_year.sql", "unique_id": "macro.dbt_date.default__week_of_year", "macro_sql": "{%- macro default__week_of_year(date) -%}\ncast({{ dbt_date.date_part('week', date) }} as {{ dbt.type_int() }})\n{%- endmacro %}", "depends_on": {"macros": 
["macro.dbt_date.date_part", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.575614, "supported_languages": null}, "macro.dbt_date.postgres__week_of_year": {"name": "postgres__week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_of_year.sql", "original_file_path": "macros/calendar_date/week_of_year.sql", "unique_id": "macro.dbt_date.postgres__week_of_year", "macro_sql": "\n\n{%- macro postgres__week_of_year(date) -%}\n{# postgresql 'week' returns isoweek. Use to_char instead.\n WW = the first week starts on the first day of the year #}\ncast(to_char({{ date }}, 'WW') as {{ dbt.type_int() }})\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.575705, "supported_languages": null}, "macro.dbt_date.duckdb__week_of_year": {"name": "duckdb__week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_of_year.sql", "original_file_path": "macros/calendar_date/week_of_year.sql", "unique_id": "macro.dbt_date.duckdb__week_of_year", "macro_sql": "\n\n{%- macro duckdb__week_of_year(date) -%}\ncast(ceil(dayofyear({{ date }}) / 7) as int)\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5757692, "supported_languages": null}, "macro.dbt_date.convert_timezone": {"name": "convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.convert_timezone", "macro_sql": "{%- macro convert_timezone(column, target_tz=None, source_tz=None) -%}\n{%- set source_tz = \"UTC\" if not source_tz else source_tz -%}\n{%- set target_tz = var(\"dbt_date:time_zone\") if not target_tz else target_tz -%}\n{{ adapter.dispatch('convert_timezone', 'dbt_date') (column, target_tz, source_tz) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.bigquery__convert_timezone"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.576628, "supported_languages": null}, "macro.dbt_date.default__convert_timezone": {"name": "default__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.default__convert_timezone", "macro_sql": "{% macro default__convert_timezone(column, target_tz, source_tz) -%}\nconvert_timezone('{{ source_tz }}', '{{ target_tz }}',\n cast({{ column }} as {{ dbt.type_timestamp() }})\n)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5767648, "supported_languages": null}, "macro.dbt_date.bigquery__convert_timezone": {"name": "bigquery__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": 
"macro.dbt_date.bigquery__convert_timezone", "macro_sql": "{%- macro bigquery__convert_timezone(column, target_tz, source_tz=None) -%}\ntimestamp(datetime({{ column }}, '{{ target_tz}}'))\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.576865, "supported_languages": null}, "macro.dbt_date.postgres__convert_timezone": {"name": "postgres__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.postgres__convert_timezone", "macro_sql": "{% macro postgres__convert_timezone(column, target_tz, source_tz) -%}\ncast(\n cast({{ column }} as {{ dbt.type_timestamp() }})\n at time zone '{{ source_tz }}' at time zone '{{ target_tz }}' as {{ dbt.type_timestamp() }}\n)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.577066, "supported_languages": null}, "macro.dbt_date.redshift__convert_timezone": {"name": "redshift__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.redshift__convert_timezone", "macro_sql": "{%- macro redshift__convert_timezone(column, target_tz, source_tz) -%}\n{{ return(dbt_date.default__convert_timezone(column, target_tz, source_tz)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.default__convert_timezone"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.577177, "supported_languages": null}, "macro.dbt_date.duckdb__convert_timezone": {"name": "duckdb__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.duckdb__convert_timezone", "macro_sql": "{% macro duckdb__convert_timezone(column, target_tz, source_tz) -%}\n{{ return(dbt_date.postgres__convert_timezone(column, target_tz, source_tz)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.postgres__convert_timezone"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.577284, "supported_languages": null}, "macro.dbt_date.spark__convert_timezone": {"name": "spark__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.spark__convert_timezone", "macro_sql": "{%- macro spark__convert_timezone(column, target_tz, source_tz) -%}\nfrom_utc_timestamp(\n to_utc_timestamp({{ column }}, '{{ source_tz }}'),\n '{{ target_tz }}'\n )\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5773811, "supported_languages": null}, "macro.dbt_date.trino__convert_timezone": {"name": "trino__convert_timezone", "resource_type": "macro", 
"package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.trino__convert_timezone", "macro_sql": "{%- macro trino__convert_timezone(column, target_tz, source_tz) -%}\n cast((at_timezone(with_timezone(cast({{ column }} as {{ dbt.type_timestamp() }}), '{{ source_tz }}'), '{{ target_tz }}')) as {{ dbt.type_timestamp() }})\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.577514, "supported_languages": null}, "macro.dbt_date.n_months_away": {"name": "n_months_away", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/n_months_away.sql", "original_file_path": "macros/calendar_date/n_months_away.sql", "unique_id": "macro.dbt_date.n_months_away", "macro_sql": "{%- macro n_months_away(n, tz=None) -%}\n{%- set n = n|int -%}\n{{ dbt.date_trunc('month',\n dbt.dateadd('month', n,\n dbt_date.today(tz)\n )\n ) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.date_trunc", "macro.dbt.dateadd", "macro.dbt_date.today"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5777159, "supported_languages": null}, "macro.dbt_date.iso_week_of_year": {"name": "iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.iso_week_of_year", "macro_sql": "{%- macro iso_week_of_year(date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{{ adapter.dispatch('iso_week_of_year', 'dbt_date') (dt) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt_date.default__iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.578154, "supported_languages": null}, "macro.dbt_date._iso_week_of_year": {"name": "_iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date._iso_week_of_year", "macro_sql": "{%- macro _iso_week_of_year(date, week_type) -%}\ncast({{ dbt_date.date_part(week_type, date) }} as {{ dbt.type_int() }})\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.578271, "supported_languages": null}, "macro.dbt_date.default__iso_week_of_year": {"name": "default__iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.default__iso_week_of_year", "macro_sql": "\n\n{%- macro default__iso_week_of_year(date) -%}\n{{ dbt_date._iso_week_of_year(date, 'isoweek') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.578357, "supported_languages": 
null}, "macro.dbt_date.snowflake__iso_week_of_year": {"name": "snowflake__iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.snowflake__iso_week_of_year", "macro_sql": "\n\n{%- macro snowflake__iso_week_of_year(date) -%}\n{{ dbt_date._iso_week_of_year(date, 'weekiso') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.578441, "supported_languages": null}, "macro.dbt_date.postgres__iso_week_of_year": {"name": "postgres__iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.postgres__iso_week_of_year", "macro_sql": "\n\n{%- macro postgres__iso_week_of_year(date) -%}\n-- postgresql week is isoweek, the first week of a year containing January 4 of that year.\n{{ dbt_date._iso_week_of_year(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.578532, "supported_languages": null}, "macro.dbt_date.duckdb__iso_week_of_year": {"name": "duckdb__iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.duckdb__iso_week_of_year", "macro_sql": "\n\n{%- macro duckdb__iso_week_of_year(date) -%}\n{{ return(dbt_date.postgres__iso_week_of_year(date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.578617, "supported_languages": null}, "macro.dbt_date.spark__iso_week_of_year": {"name": "spark__iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.spark__iso_week_of_year", "macro_sql": "\n\n{%- macro spark__iso_week_of_year(date) -%}\n{{ dbt_date._iso_week_of_year(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5787, "supported_languages": null}, "macro.dbt_date.trino__iso_week_of_year": {"name": "trino__iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.trino__iso_week_of_year", "macro_sql": "\n\n{%- macro trino__iso_week_of_year(date) -%}\n{{ dbt_date._iso_week_of_year(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.578783, "supported_languages": null}, "macro.dbt_date.week_end": 
{"name": "week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_end.sql", "original_file_path": "macros/calendar_date/week_end.sql", "unique_id": "macro.dbt_date.week_end", "macro_sql": "{%- macro week_end(date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{{ adapter.dispatch('week_end', 'dbt_date') (dt) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt_date.default__week_end"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.579123, "supported_languages": null}, "macro.dbt_date.default__week_end": {"name": "default__week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_end.sql", "original_file_path": "macros/calendar_date/week_end.sql", "unique_id": "macro.dbt_date.default__week_end", "macro_sql": "{%- macro default__week_end(date) -%}\n{{ last_day(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.579206, "supported_languages": null}, "macro.dbt_date.snowflake__week_end": {"name": "snowflake__week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_end.sql", "original_file_path": "macros/calendar_date/week_end.sql", "unique_id": "macro.dbt_date.snowflake__week_end", "macro_sql": "\n\n{%- macro snowflake__week_end(date) -%}\n{%- set dt = dbt_date.week_start(date) -%}\n{{ dbt_date.n_days_away(6, dt) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.week_start", "macro.dbt_date.n_days_away"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.579324, "supported_languages": null}, "macro.dbt_date.postgres__week_end": {"name": "postgres__week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_end.sql", "original_file_path": "macros/calendar_date/week_end.sql", "unique_id": "macro.dbt_date.postgres__week_end", "macro_sql": "\n\n{%- macro postgres__week_end(date) -%}\n{%- set dt = dbt_date.week_start(date) -%}\n{{ dbt_date.n_days_away(6, dt) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.week_start", "macro.dbt_date.n_days_away"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.579436, "supported_languages": null}, "macro.dbt_date.duckdb__week_end": {"name": "duckdb__week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_end.sql", "original_file_path": "macros/calendar_date/week_end.sql", "unique_id": "macro.dbt_date.duckdb__week_end", "macro_sql": "\n\n{%- macro duckdb__week_end(date) -%}\n{{ return(dbt_date.postgres__week_end(date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__week_end"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.579519, "supported_languages": null}, "macro.dbt_date.next_month_number": {"name": "next_month_number", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/next_month_number.sql", "original_file_path": "macros/calendar_date/next_month_number.sql", "unique_id": 
"macro.dbt_date.next_month_number", "macro_sql": "{%- macro next_month_number(tz=None) -%}\n{{ dbt_date.date_part('month', dbt_date.next_month(tz)) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt_date.next_month"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.579644, "supported_languages": null}, "macro.dbt_date.last_month_number": {"name": "last_month_number", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/last_month_number.sql", "original_file_path": "macros/calendar_date/last_month_number.sql", "unique_id": "macro.dbt_date.last_month_number", "macro_sql": "{%- macro last_month_number(tz=None) -%}\n{{ dbt_date.date_part('month', dbt_date.last_month(tz)) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt_date.last_month"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.579766, "supported_languages": null}, "macro.fivetran_utils.enabled_vars": {"name": "enabled_vars", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/enabled_vars.sql", "original_file_path": "macros/enabled_vars.sql", "unique_id": "macro.fivetran_utils.enabled_vars", "macro_sql": "{% macro enabled_vars(vars) %}\n\n{% for v in vars %}\n \n {% if var(v, True) == False %}\n {{ return(False) }}\n {% endif %}\n\n{% endfor %}\n\n{{ return(True) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.580003, "supported_languages": null}, "macro.fivetran_utils.percentile": {"name": "percentile", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/percentile.sql", "original_file_path": "macros/percentile.sql", "unique_id": "macro.fivetran_utils.percentile", "macro_sql": "{% macro percentile(percentile_field, partition_field, percent) -%}\n\n{{ adapter.dispatch('percentile', 'fivetran_utils') (percentile_field, partition_field, percent) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.bigquery__percentile"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.58059, "supported_languages": null}, "macro.fivetran_utils.default__percentile": {"name": "default__percentile", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/percentile.sql", "original_file_path": "macros/percentile.sql", "unique_id": "macro.fivetran_utils.default__percentile", "macro_sql": "{% macro default__percentile(percentile_field, partition_field, percent) %}\n\n percentile_cont( \n {{ percent }} )\n within group ( order by {{ percentile_field }} )\n over ( partition by {{ partition_field }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.580689, "supported_languages": null}, "macro.fivetran_utils.redshift__percentile": {"name": "redshift__percentile", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/percentile.sql", "original_file_path": "macros/percentile.sql", "unique_id": "macro.fivetran_utils.redshift__percentile", "macro_sql": "{% macro redshift__percentile(percentile_field, partition_field, 
percent) %}\n\n percentile_cont( \n {{ percent }} )\n within group ( order by {{ percentile_field }} )\n over ( partition by {{ partition_field }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.580786, "supported_languages": null}, "macro.fivetran_utils.bigquery__percentile": {"name": "bigquery__percentile", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/percentile.sql", "original_file_path": "macros/percentile.sql", "unique_id": "macro.fivetran_utils.bigquery__percentile", "macro_sql": "{% macro bigquery__percentile(percentile_field, partition_field, percent) %}\n\n percentile_cont( \n {{ percentile_field }}, \n {{ percent }}) \n over (partition by {{ partition_field }} \n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.580893, "supported_languages": null}, "macro.fivetran_utils.postgres__percentile": {"name": "postgres__percentile", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/percentile.sql", "original_file_path": "macros/percentile.sql", "unique_id": "macro.fivetran_utils.postgres__percentile", "macro_sql": "{% macro postgres__percentile(percentile_field, partition_field, percent) %}\n\n percentile_cont( \n {{ percent }} )\n within group ( order by {{ percentile_field }} )\n /* have to group by partition field */\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.580977, "supported_languages": null}, "macro.fivetran_utils.spark__percentile": {"name": "spark__percentile", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/percentile.sql", "original_file_path": "macros/percentile.sql", "unique_id": "macro.fivetran_utils.spark__percentile", "macro_sql": "{% macro spark__percentile(percentile_field, partition_field, percent) %}\n\n percentile( \n {{ percentile_field }}, \n {{ percent }}) \n over (partition by {{ partition_field }} \n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.581066, "supported_languages": null}, "macro.fivetran_utils.pivot_json_extract": {"name": "pivot_json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/pivot_json_extract.sql", "original_file_path": "macros/pivot_json_extract.sql", "unique_id": "macro.fivetran_utils.pivot_json_extract", "macro_sql": "{% macro pivot_json_extract(string, list_of_properties) %}\n\n{%- for property in list_of_properties -%}\n{%- if property is mapping -%}\nreplace( {{ fivetran_utils.json_extract(string, property.name) }}, '\"', '') as {{ property.alias if property.alias else property.name | replace(' ', '_') | replace('.', '_') | lower }}\n\n{%- else -%}\nreplace( {{ fivetran_utils.json_extract(string, property) }}, '\"', '') as {{ property | replace(' ', '_') | lower }}\n\n{%- endif -%}\n{%- if not loop.last -%},{%- endif %}\n{% endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.json_extract"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.581631, 
"supported_languages": null}, "macro.fivetran_utils.persist_pass_through_columns": {"name": "persist_pass_through_columns", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/persist_pass_through_columns.sql", "original_file_path": "macros/persist_pass_through_columns.sql", "unique_id": "macro.fivetran_utils.persist_pass_through_columns", "macro_sql": "{% macro persist_pass_through_columns(pass_through_variable, identifier=none, transform='') %}\n\n{% if var(pass_through_variable, none) %}\n {% for field in var(pass_through_variable) %}\n , {{ transform ~ '(' ~ (identifier ~ '.' if identifier else '') ~ (field.alias if field.alias else field.name) ~ ')' }} as {{ field.alias if field.alias else field.name }}\n {% endfor %}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.582036, "supported_languages": null}, "macro.fivetran_utils.json_parse": {"name": "json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.json_parse", "macro_sql": "{% macro json_parse(string, string_path) -%}\n\n{{ adapter.dispatch('json_parse', 'fivetran_utils') (string, string_path) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.bigquery__json_parse"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5829198, "supported_languages": null}, "macro.fivetran_utils.default__json_parse": {"name": "default__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.default__json_parse", "macro_sql": "{% macro default__json_parse(string, string_path) %}\n\n json_extract_path_text({{string}}, {%- for s in string_path -%}'{{ s }}'{%- if not loop.last -%},{%- endif -%}{%- endfor -%} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.583064, "supported_languages": null}, "macro.fivetran_utils.redshift__json_parse": {"name": "redshift__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.redshift__json_parse", "macro_sql": "{% macro redshift__json_parse(string, string_path) %}\n\n json_extract_path_text({{string}}, {%- for s in string_path -%}'{{ s }}'{%- if not loop.last -%},{%- endif -%}{%- endfor -%} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.583213, "supported_languages": null}, "macro.fivetran_utils.bigquery__json_parse": {"name": "bigquery__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.bigquery__json_parse", "macro_sql": "{% macro bigquery__json_parse(string, string_path) %}\n\n \n json_extract_scalar({{string}}, '$.{%- for s in string_path -%}{{ s }}{%- if not loop.last -%}.{%- endif -%}{%- endfor -%} ')\n\n{% endmacro %}", 
"depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.583353, "supported_languages": null}, "macro.fivetran_utils.postgres__json_parse": {"name": "postgres__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.postgres__json_parse", "macro_sql": "{% macro postgres__json_parse(string, string_path) %}\n\n {{string}}::json #>> '{ {%- for s in string_path -%}{{ s }}{%- if not loop.last -%},{%- endif -%}{%- endfor -%} }'\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5834932, "supported_languages": null}, "macro.fivetran_utils.snowflake__json_parse": {"name": "snowflake__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.snowflake__json_parse", "macro_sql": "{% macro snowflake__json_parse(string, string_path) %}\n\n parse_json( {{string}} ) {%- for s in string_path -%}{% if s is number %}[{{ s }}]{% else %}['{{ s }}']{% endif %}{%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.583651, "supported_languages": null}, "macro.fivetran_utils.spark__json_parse": {"name": "spark__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.spark__json_parse", "macro_sql": "{% macro spark__json_parse(string, string_path) %}\n\n {{string}} : {%- for s in string_path -%}{% if s is number %}[{{ s }}]{% else %}['{{ s }}']{% endif %}{%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.583806, "supported_languages": null}, "macro.fivetran_utils.sqlserver__json_parse": {"name": "sqlserver__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.sqlserver__json_parse", "macro_sql": "{% macro sqlserver__json_parse(string, string_path) %}\n\n json_value({{string}}, '$.{%- for s in string_path -%}{{ s }}{%- if not loop.last -%}.{%- endif -%}{%- endfor -%} ')\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.583947, "supported_languages": null}, "macro.fivetran_utils.max_bool": {"name": "max_bool", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/max_bool.sql", "original_file_path": "macros/max_bool.sql", "unique_id": "macro.fivetran_utils.max_bool", "macro_sql": "{% macro max_bool(boolean_field) -%}\n\n{{ adapter.dispatch('max_bool', 'fivetran_utils') (boolean_field) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.bigquery__max_bool"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 
1715700423.584148, "supported_languages": null}, "macro.fivetran_utils.default__max_bool": {"name": "default__max_bool", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/max_bool.sql", "original_file_path": "macros/max_bool.sql", "unique_id": "macro.fivetran_utils.default__max_bool", "macro_sql": "{% macro default__max_bool(boolean_field) %}\n\n bool_or( {{ boolean_field }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.584212, "supported_languages": null}, "macro.fivetran_utils.snowflake__max_bool": {"name": "snowflake__max_bool", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/max_bool.sql", "original_file_path": "macros/max_bool.sql", "unique_id": "macro.fivetran_utils.snowflake__max_bool", "macro_sql": "{% macro snowflake__max_bool(boolean_field) %}\n\n max( {{ boolean_field }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.584272, "supported_languages": null}, "macro.fivetran_utils.bigquery__max_bool": {"name": "bigquery__max_bool", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/max_bool.sql", "original_file_path": "macros/max_bool.sql", "unique_id": "macro.fivetran_utils.bigquery__max_bool", "macro_sql": "{% macro bigquery__max_bool(boolean_field) %}\n\n max( {{ boolean_field }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.584328, "supported_languages": null}, "macro.fivetran_utils.calculated_fields": {"name": "calculated_fields", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/calculated_fields.sql", "original_file_path": "macros/calculated_fields.sql", "unique_id": "macro.fivetran_utils.calculated_fields", "macro_sql": "{% macro calculated_fields(variable) -%}\n\n{% if var(variable, none) %}\n {% for field in var(variable) %}\n , {{ field.transform_sql }} as {{ field.name }} \n {% endfor %}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.584547, "supported_languages": null}, "macro.fivetran_utils.drop_schemas_automation": {"name": "drop_schemas_automation", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/drop_schemas_automation.sql", "original_file_path": "macros/drop_schemas_automation.sql", "unique_id": "macro.fivetran_utils.drop_schemas_automation", "macro_sql": "{% macro drop_schemas_automation(drop_target_schema=true) %}\n {{ return(adapter.dispatch('drop_schemas_automation', 'fivetran_utils')(drop_target_schema)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__drop_schemas_automation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5849862, "supported_languages": null}, "macro.fivetran_utils.default__drop_schemas_automation": {"name": "default__drop_schemas_automation", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/drop_schemas_automation.sql", "original_file_path": "macros/drop_schemas_automation.sql", "unique_id": 
"macro.fivetran_utils.default__drop_schemas_automation", "macro_sql": "{% macro default__drop_schemas_automation(drop_target_schema=true) %}\n\n{% set fetch_list_sql %}\n {% if target.type not in ('databricks', 'spark') %}\n select schema_name\n from \n {{ wrap_in_quotes(target.database) }}.INFORMATION_SCHEMA.SCHEMATA\n where lower(schema_name) like '{{ target.schema | lower }}{%- if not drop_target_schema -%}_{%- endif -%}%'\n {% else %}\n SHOW SCHEMAS LIKE '{{ target.schema }}{%- if not drop_target_schema -%}_{%- endif -%}*'\n {% endif %}\n{% endset %}\n\n{% set results = run_query(fetch_list_sql) %}\n\n{% if execute %}\n {% set results_list = results.columns[0].values() %}\n{% else %}\n {% set results_list = [] %}\n{% endif %}\n\n{% for schema_to_drop in results_list %}\n {% do adapter.drop_schema(api.Relation.create(database=target.database, schema=schema_to_drop)) %}\n {{ print('Schema ' ~ schema_to_drop ~ ' successfully dropped from the ' ~ target.database ~ ' database.\\n')}}\n{% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.wrap_in_quotes", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.585613, "supported_languages": null}, "macro.fivetran_utils.seed_data_helper": {"name": "seed_data_helper", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/seed_data_helper.sql", "original_file_path": "macros/seed_data_helper.sql", "unique_id": "macro.fivetran_utils.seed_data_helper", "macro_sql": "{% macro seed_data_helper(seed_name, warehouses) %}\n\n{% if target.type in warehouses %}\n {% for w in warehouses %}\n {% if target.type == w %}\n {{ return(ref(seed_name ~ \"_\" ~ w ~ \"\")) }}\n {% endif %}\n {% endfor %}\n{% else %}\n{{ return(ref(seed_name)) }}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5859761, "supported_languages": null}, "macro.fivetran_utils.fill_pass_through_columns": {"name": "fill_pass_through_columns", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/fill_pass_through_columns.sql", "original_file_path": "macros/fill_pass_through_columns.sql", "unique_id": "macro.fivetran_utils.fill_pass_through_columns", "macro_sql": "{% macro fill_pass_through_columns(pass_through_variable) %}\n\n{% if var(pass_through_variable) %}\n {% for field in var(pass_through_variable) %}\n {% if field is mapping %}\n {% if field.transform_sql %}\n , {{ field.transform_sql }} as {{ field.alias if field.alias else field.name }}\n {% else %}\n , {{ field.alias if field.alias else field.name }}\n {% endif %}\n {% else %}\n , {{ field }}\n {% endif %}\n {% endfor %}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.586455, "supported_languages": null}, "macro.fivetran_utils.string_agg": {"name": "string_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/string_agg.sql", "original_file_path": "macros/string_agg.sql", "unique_id": "macro.fivetran_utils.string_agg", "macro_sql": "{% macro string_agg(field_to_agg, delimiter) -%}\n\n{{ adapter.dispatch('string_agg', 'fivetran_utils') (field_to_agg, delimiter) }}\n\n{%- endmacro %}", "depends_on": {"macros": 
["macro.fivetran_utils.default__string_agg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5867531, "supported_languages": null}, "macro.fivetran_utils.default__string_agg": {"name": "default__string_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/string_agg.sql", "original_file_path": "macros/string_agg.sql", "unique_id": "macro.fivetran_utils.default__string_agg", "macro_sql": "{% macro default__string_agg(field_to_agg, delimiter) %}\n string_agg({{ field_to_agg }}, {{ delimiter }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5868318, "supported_languages": null}, "macro.fivetran_utils.snowflake__string_agg": {"name": "snowflake__string_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/string_agg.sql", "original_file_path": "macros/string_agg.sql", "unique_id": "macro.fivetran_utils.snowflake__string_agg", "macro_sql": "{% macro snowflake__string_agg(field_to_agg, delimiter) %}\n listagg({{ field_to_agg }}, {{ delimiter }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.586905, "supported_languages": null}, "macro.fivetran_utils.redshift__string_agg": {"name": "redshift__string_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/string_agg.sql", "original_file_path": "macros/string_agg.sql", "unique_id": "macro.fivetran_utils.redshift__string_agg", "macro_sql": "{% macro redshift__string_agg(field_to_agg, delimiter) %}\n listagg({{ field_to_agg }}, {{ delimiter }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.58698, "supported_languages": null}, "macro.fivetran_utils.spark__string_agg": {"name": "spark__string_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/string_agg.sql", "original_file_path": "macros/string_agg.sql", "unique_id": "macro.fivetran_utils.spark__string_agg", "macro_sql": "{% macro spark__string_agg(field_to_agg, delimiter) %}\n -- collect set will remove duplicates\n replace(replace(replace(cast( collect_set({{ field_to_agg }}) as string), '[', ''), ']', ''), ', ', {{ delimiter }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.587061, "supported_languages": null}, "macro.fivetran_utils.timestamp_diff": {"name": "timestamp_diff", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_diff.sql", "original_file_path": "macros/timestamp_diff.sql", "unique_id": "macro.fivetran_utils.timestamp_diff", "macro_sql": "{% macro timestamp_diff(first_date, second_date, datepart) %}\n {{ adapter.dispatch('timestamp_diff', 'fivetran_utils')(first_date, second_date, datepart) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.bigquery__timestamp_diff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.589041, "supported_languages": null}, 
"macro.fivetran_utils.default__timestamp_diff": {"name": "default__timestamp_diff", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_diff.sql", "original_file_path": "macros/timestamp_diff.sql", "unique_id": "macro.fivetran_utils.default__timestamp_diff", "macro_sql": "{% macro default__timestamp_diff(first_date, second_date, datepart) %}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.589144, "supported_languages": null}, "macro.fivetran_utils.redshift__timestamp_diff": {"name": "redshift__timestamp_diff", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_diff.sql", "original_file_path": "macros/timestamp_diff.sql", "unique_id": "macro.fivetran_utils.redshift__timestamp_diff", "macro_sql": "{% macro redshift__timestamp_diff(first_date, second_date, datepart) %}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.589244, "supported_languages": null}, "macro.fivetran_utils.bigquery__timestamp_diff": {"name": "bigquery__timestamp_diff", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_diff.sql", "original_file_path": "macros/timestamp_diff.sql", "unique_id": "macro.fivetran_utils.bigquery__timestamp_diff", "macro_sql": "{% macro bigquery__timestamp_diff(first_date, second_date, datepart) %}\n\n timestamp_diff(\n {{second_date}},\n {{first_date}},\n {{datepart}}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.589392, "supported_languages": null}, "macro.fivetran_utils.postgres__timestamp_diff": {"name": "postgres__timestamp_diff", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_diff.sql", "original_file_path": "macros/timestamp_diff.sql", "unique_id": "macro.fivetran_utils.postgres__timestamp_diff", "macro_sql": "{% macro postgres__timestamp_diff(first_date, second_date, datepart) %}\n\n {% if datepart == 'year' %}\n (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date))\n {% elif datepart == 'quarter' %}\n ({{ dbt.datediff(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date))\n {% elif datepart == 'month' %}\n ({{ dbt.datediff(first_date, second_date, 'year') }} * 12 + date_part('month', ({{second_date}})::date) - date_part('month', ({{first_date}})::date))\n {% elif datepart == 'day' %}\n (({{second_date}})::date - ({{first_date}})::date)\n {% elif datepart == 'week' %}\n ({{ dbt.datediff(first_date, second_date, 'day') }} / 7 + case\n when date_part('dow', ({{first_date}})::timestamp) <= date_part('dow', ({{second_date}})::timestamp) then\n case when {{first_date}} <= {{second_date}} then 0 else -1 end\n else\n case when {{first_date}} <= {{second_date}} then 1 else 0 end\n end)\n {% elif datepart == 'hour' %}\n ({{ dbt.datediff(first_date, second_date, 'day') }} * 24 + date_part('hour', ({{second_date}})::timestamp) - date_part('hour', ({{first_date}})::timestamp))\n {% 
elif datepart == 'minute' %}\n ({{ dbt.datediff(first_date, second_date, 'hour') }} * 60 + date_part('minute', ({{second_date}})::timestamp) - date_part('minute', ({{first_date}})::timestamp))\n {% elif datepart == 'second' %}\n ({{ dbt.datediff(first_date, second_date, 'minute') }} * 60 + floor(date_part('second', ({{second_date}})::timestamp)) - floor(date_part('second', ({{first_date}})::timestamp)))\n {% elif datepart == 'millisecond' %}\n ({{ dbt.datediff(first_date, second_date, 'minute') }} * 60000 + floor(date_part('millisecond', ({{second_date}})::timestamp)) - floor(date_part('millisecond', ({{first_date}})::timestamp)))\n {% elif datepart == 'microsecond' %}\n ({{ dbt.datediff(first_date, second_date, 'minute') }} * 60000000 + floor(date_part('microsecond', ({{second_date}})::timestamp)) - floor(date_part('microsecond', ({{first_date}})::timestamp)))\n {% else %}\n {{ exceptions.raise_compiler_error(\"Unsupported datepart for macro datediff in postgres: {!r}\".format(datepart)) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.590476, "supported_languages": null}, "macro.fivetran_utils.try_cast": {"name": "try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.try_cast", "macro_sql": "{% macro try_cast(field, type) %}\n {{ adapter.dispatch('try_cast', 'fivetran_utils') (field, type) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.bigquery__try_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.591097, "supported_languages": null}, "macro.fivetran_utils.default__try_cast": {"name": "default__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.default__try_cast", "macro_sql": "{% macro default__try_cast(field, type) %}\n {# most databases don't support this function yet\n so we just need to use cast #}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.591184, "supported_languages": null}, "macro.fivetran_utils.redshift__try_cast": {"name": "redshift__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.redshift__try_cast", "macro_sql": "{% macro redshift__try_cast(field, type) %}\n{%- if type == 'numeric' -%}\n\n case\n when trim({{field}}) ~ '^(0|[1-9][0-9]*)$' then trim({{field}})\n else null\n end::{{type}}\n\n{% else %}\n {{ exceptions.raise_compiler_error(\n \"non-numeric datatypes are not currently supported\") }}\n\n{% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5913641, "supported_languages": null}, "macro.fivetran_utils.postgres__try_cast": {"name": "postgres__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": 
"macros/try_cast.sql", "unique_id": "macro.fivetran_utils.postgres__try_cast", "macro_sql": "{% macro postgres__try_cast(field, type) %}\n{%- if type == 'numeric' -%}\n\n case\n when replace(cast({{field}} as varchar),cast(' ' as varchar),cast('' as varchar)) ~ '^(0|[1-9][0-9]*)$' \n then replace(cast({{field}} as varchar),cast(' ' as varchar),cast('' as varchar))\n else null\n end::{{type}}\n\n{% else %}\n {{ exceptions.raise_compiler_error(\n \"non-numeric datatypes are not currently supported\") }}\n\n{% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.591549, "supported_languages": null}, "macro.fivetran_utils.snowflake__try_cast": {"name": "snowflake__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.snowflake__try_cast", "macro_sql": "{% macro snowflake__try_cast(field, type) %}\n try_cast(cast({{field}} as varchar) as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.591632, "supported_languages": null}, "macro.fivetran_utils.bigquery__try_cast": {"name": "bigquery__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.bigquery__try_cast", "macro_sql": "{% macro bigquery__try_cast(field, type) %}\n safe_cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.591706, "supported_languages": null}, "macro.fivetran_utils.spark__try_cast": {"name": "spark__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.spark__try_cast", "macro_sql": "{% macro spark__try_cast(field, type) %}\n try_cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5917819, "supported_languages": null}, "macro.fivetran_utils.sqlserver__try_cast": {"name": "sqlserver__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.sqlserver__try_cast", "macro_sql": "{% macro sqlserver__try_cast(field, type) %}\n try_cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.591871, "supported_languages": null}, "macro.fivetran_utils.source_relation": {"name": "source_relation", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/source_relation.sql", "original_file_path": "macros/source_relation.sql", "unique_id": "macro.fivetran_utils.source_relation", "macro_sql": "{% macro source_relation(union_schema_variable='union_schemas', union_database_variable='union_databases') -%}\n\n{{ adapter.dispatch('source_relation', 'fivetran_utils') 
(union_schema_variable, union_database_variable) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__source_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.592174, "supported_languages": null}, "macro.fivetran_utils.default__source_relation": {"name": "default__source_relation", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/source_relation.sql", "original_file_path": "macros/source_relation.sql", "unique_id": "macro.fivetran_utils.default__source_relation", "macro_sql": "{% macro default__source_relation(union_schema_variable, union_database_variable) %}\n\n{% if var(union_schema_variable, none) %}\n, case\n {% for schema in var(union_schema_variable) %}\n when lower(replace(replace(_dbt_source_relation,'\"',''),'`','')) like '%.{{ schema|lower }}.%' then '{{ schema|lower }}'\n {% endfor %}\n end as source_relation\n{% elif var(union_database_variable, none) %}\n, case\n {% for database in var(union_database_variable) %}\n when lower(replace(replace(_dbt_source_relation,'\"',''),'`','')) like '%{{ database|lower }}.%' then '{{ database|lower }}'\n {% endfor %}\n end as source_relation\n{% else %}\n, cast('' as {{ dbt.type_string() }}) as source_relation\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.592509, "supported_languages": null}, "macro.fivetran_utils.first_value": {"name": "first_value", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/first_value.sql", "original_file_path": "macros/first_value.sql", "unique_id": "macro.fivetran_utils.first_value", "macro_sql": "{% macro first_value(first_value_field, partition_field, order_by_field, order=\"asc\") -%}\n\n{{ adapter.dispatch('first_value', 'fivetran_utils') (first_value_field, partition_field, order_by_field, order) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__first_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5928152, "supported_languages": null}, "macro.fivetran_utils.default__first_value": {"name": "default__first_value", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/first_value.sql", "original_file_path": "macros/first_value.sql", "unique_id": "macro.fivetran_utils.default__first_value", "macro_sql": "{% macro default__first_value(first_value_field, partition_field, order_by_field, order=\"asc\") %}\n\n first_value( {{ first_value_field }} ignore nulls ) over (partition by {{ partition_field }} order by {{ order_by_field }} {{ order }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.592939, "supported_languages": null}, "macro.fivetran_utils.redshift__first_value": {"name": "redshift__first_value", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/first_value.sql", "original_file_path": "macros/first_value.sql", "unique_id": "macro.fivetran_utils.redshift__first_value", "macro_sql": "{% macro redshift__first_value(first_value_field, partition_field, order_by_field, order=\"asc\") %}\n\n first_value( {{ first_value_field }} ignore nulls ) over 
(partition by {{ partition_field }} order by {{ order_by_field }} {{ order }} , {{ partition_field }} rows unbounded preceding )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.593076, "supported_languages": null}, "macro.fivetran_utils.add_dbt_source_relation": {"name": "add_dbt_source_relation", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/add_dbt_source_relation.sql", "original_file_path": "macros/add_dbt_source_relation.sql", "unique_id": "macro.fivetran_utils.add_dbt_source_relation", "macro_sql": "{% macro add_dbt_source_relation() %}\n\n{% if var('union_schemas', none) or var('union_databases', none) %}\n, _dbt_source_relation\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5932438, "supported_languages": null}, "macro.fivetran_utils.add_pass_through_columns": {"name": "add_pass_through_columns", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/add_pass_through_columns.sql", "original_file_path": "macros/add_pass_through_columns.sql", "unique_id": "macro.fivetran_utils.add_pass_through_columns", "macro_sql": "{% macro add_pass_through_columns(base_columns, pass_through_var) %}\n\n {% if pass_through_var %}\n\n {% for column in pass_through_var %}\n\n {% if column is mapping %}\n\n {% if column.alias %}\n\n {% do base_columns.append({ \"name\": column.name, \"alias\": column.alias, \"datatype\": column.datatype if column.datatype else dbt.type_string()}) %}\n\n {% else %}\n\n {% do base_columns.append({ \"name\": column.name, \"datatype\": column.datatype if column.datatype else dbt.type_string()}) %}\n \n {% endif %}\n\n {% else %}\n\n {% do base_columns.append({ \"name\": column, \"datatype\": dbt.type_string()}) %}\n\n {% endif %}\n\n {% endfor %}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.593972, "supported_languages": null}, "macro.fivetran_utils.union_relations": {"name": "union_relations", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/union_relations.sql", "original_file_path": "macros/union_relations.sql", "unique_id": "macro.fivetran_utils.union_relations", "macro_sql": "{%- macro union_relations(relations, aliases=none, column_override=none, include=[], exclude=[], source_column_name=none) -%}\n\n {%- if exclude and include -%}\n {{ exceptions.raise_compiler_error(\"Both an exclude and include list were provided to the `union` macro. Only one is allowed\") }}\n {%- endif -%}\n\n {#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. 
-#}\n {%- if not execute %}\n {{ return('') }}\n {% endif -%}\n\n {%- set column_override = column_override if column_override is not none else {} -%}\n {%- set source_column_name = source_column_name if source_column_name is not none else '_dbt_source_relation' -%}\n\n {%- set relation_columns = {} -%}\n {%- set column_superset = {} -%}\n\n {%- for relation in relations -%}\n\n {%- do relation_columns.update({relation: []}) -%}\n\n {%- do dbt_utils._is_relation(relation, 'union_relations') -%}\n {%- set cols = adapter.get_columns_in_relation(relation) -%}\n {%- for col in cols -%}\n\n {#- If an exclude list was provided and the column is in the list, do nothing -#}\n {%- if exclude and col.column in exclude -%}\n\n {#- If an include list was provided and the column is not in the list, do nothing -#}\n {%- elif include and col.column not in include -%}\n\n {#- Otherwise add the column to the column superset -#}\n {%- else -%}\n\n {#- update the list of columns in this relation -#}\n {%- do relation_columns[relation].append(col.column) -%}\n\n {%- if col.column in column_superset -%}\n\n {%- set stored = column_superset[col.column] -%}\n {%- if col.is_string() and stored.is_string() and col.string_size() > stored.string_size() -%}\n\n {%- do column_superset.update({col.column: col}) -%}\n\n {%- endif %}\n\n {%- else -%}\n\n {%- do column_superset.update({col.column: col}) -%}\n\n {%- endif -%}\n\n {%- endif -%}\n\n {%- endfor -%}\n {%- endfor -%}\n\n {%- set ordered_column_names = column_superset.keys() -%}\n\n {%- for relation in relations %}\n\n (\n select\n\n cast({{ dbt.string_literal(relation) }} as {{ dbt.type_string() }}) as {{ source_column_name }},\n {% for col_name in ordered_column_names -%}\n\n {%- set col = column_superset[col_name] %}\n {%- set col_type = column_override.get(col.column, col.data_type) %}\n {%- set col_name = adapter.quote(col_name) if col_name in relation_columns[relation] else 'null' %}\n cast({{ col_name }} as {{ col_type }}) as {{ col.quoted }} {% if not loop.last %},{% endif -%}\n\n {%- endfor %}\n\n from {{ aliases[loop.index0] if aliases else relation }}\n )\n\n {% if not loop.last -%}\n union all\n {% endif -%}\n\n {%- endfor -%}\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt.string_literal", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5964339, "supported_languages": null}, "macro.fivetran_utils.union_tables": {"name": "union_tables", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/union_relations.sql", "original_file_path": "macros/union_relations.sql", "unique_id": "macro.fivetran_utils.union_tables", "macro_sql": "{%- macro union_tables(tables, column_override=none, include=[], exclude=[], source_column_name='_dbt_source_table') -%}\n\n {%- do exceptions.warn(\"Warning: the `union_tables` macro is no longer supported and will be deprecated in a future release of dbt-utils. 
Use the `union_relations` macro instead\") -%}\n\n {{ return(dbt_utils.union_relations(tables, column_override, include, exclude, source_column_name)) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_utils.union_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.596731, "supported_languages": null}, "macro.fivetran_utils.snowflake_seed_data": {"name": "snowflake_seed_data", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/snowflake_seed_data.sql", "original_file_path": "macros/snowflake_seed_data.sql", "unique_id": "macro.fivetran_utils.snowflake_seed_data", "macro_sql": "{% macro snowflake_seed_data(seed_name) %}\n\n{% if target.type == 'snowflake' %}\n{{ return(ref(seed_name ~ '_snowflake')) }}\n{% else %}\n{{ return(ref(seed_name)) }}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.596982, "supported_languages": null}, "macro.fivetran_utils.fill_staging_columns": {"name": "fill_staging_columns", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/fill_staging_columns.sql", "original_file_path": "macros/fill_staging_columns.sql", "unique_id": "macro.fivetran_utils.fill_staging_columns", "macro_sql": "{% macro fill_staging_columns(source_columns, staging_columns) -%}\n\n{%- set source_column_names = source_columns|map(attribute='name')|map('lower')|list -%}\n\n{%- for column in staging_columns %}\n {% if column.name|lower in source_column_names -%}\n {{ fivetran_utils.quote_column(column) }} as \n {%- if 'alias' in column %} {{ column.alias }} {% else %} {{ fivetran_utils.quote_column(column) }} {%- endif -%}\n {%- else -%}\n cast(null as {{ column.datatype }})\n {%- if 'alias' in column %} as {{ column.alias }} {% else %} as {{ fivetran_utils.quote_column(column) }} {% endif -%}\n {%- endif -%}\n {%- if not loop.last -%} , {% endif -%}\n{% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.quote_column"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.598068, "supported_languages": null}, "macro.fivetran_utils.quote_column": {"name": "quote_column", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/fill_staging_columns.sql", "original_file_path": "macros/fill_staging_columns.sql", "unique_id": "macro.fivetran_utils.quote_column", "macro_sql": "{% macro quote_column(column) %}\n {% if 'quote' in column %}\n {% if column.quote %}\n {% if target.type in ('bigquery', 'spark', 'databricks') %}\n `{{ column.name }}`\n {% elif target.type == 'snowflake' %}\n \"{{ column.name | upper }}\"\n {% else %}\n \"{{ column.name }}\"\n {% endif %}\n {% else %}\n {{ column.name }}\n {% endif %}\n {% else %}\n {{ column.name }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.598476, "supported_languages": null}, "macro.fivetran_utils.json_extract": {"name": "json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_extract.sql", "original_file_path": "macros/json_extract.sql", "unique_id": "macro.fivetran_utils.json_extract", "macro_sql": "{% macro json_extract(string, 
string_path) -%}\n\n{{ adapter.dispatch('json_extract', 'fivetran_utils') (string, string_path) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.bigquery__json_extract"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.598892, "supported_languages": null}, "macro.fivetran_utils.default__json_extract": {"name": "default__json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_extract.sql", "original_file_path": "macros/json_extract.sql", "unique_id": "macro.fivetran_utils.default__json_extract", "macro_sql": "{% macro default__json_extract(string, string_path) %}\n\n json_extract_path_text({{string}}, {{ \"'\" ~ string_path ~ \"'\" }} )\n \n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5989969, "supported_languages": null}, "macro.fivetran_utils.snowflake__json_extract": {"name": "snowflake__json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_extract.sql", "original_file_path": "macros/json_extract.sql", "unique_id": "macro.fivetran_utils.snowflake__json_extract", "macro_sql": "{% macro snowflake__json_extract(string, string_path) %}\n\n json_extract_path_text(try_parse_json( {{string}} ), {{ \"'\" ~ string_path ~ \"'\" }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5990999, "supported_languages": null}, "macro.fivetran_utils.redshift__json_extract": {"name": "redshift__json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_extract.sql", "original_file_path": "macros/json_extract.sql", "unique_id": "macro.fivetran_utils.redshift__json_extract", "macro_sql": "{% macro redshift__json_extract(string, string_path) %}\n\n case when is_valid_json( {{string}} ) then json_extract_path_text({{string}}, {{ \"'\" ~ string_path ~ \"'\" }} ) else null end\n \n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.599203, "supported_languages": null}, "macro.fivetran_utils.bigquery__json_extract": {"name": "bigquery__json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_extract.sql", "original_file_path": "macros/json_extract.sql", "unique_id": "macro.fivetran_utils.bigquery__json_extract", "macro_sql": "{% macro bigquery__json_extract(string, string_path) %}\n\n json_extract_scalar({{string}}, {{ \"'$.\" ~ string_path ~ \"'\" }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5992908, "supported_languages": null}, "macro.fivetran_utils.postgres__json_extract": {"name": "postgres__json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_extract.sql", "original_file_path": "macros/json_extract.sql", "unique_id": "macro.fivetran_utils.postgres__json_extract", "macro_sql": "{% macro postgres__json_extract(string, string_path) %}\n\n {{string}}::json->>{{\"'\" ~ string_path ~ \"'\" }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", 
"meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.599377, "supported_languages": null}, "macro.fivetran_utils.collect_freshness": {"name": "collect_freshness", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/collect_freshness.sql", "original_file_path": "macros/collect_freshness.sql", "unique_id": "macro.fivetran_utils.collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness')(source, loaded_at_field, filter))}}\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.5999, "supported_languages": null}, "macro.fivetran_utils.default__collect_freshness": {"name": "default__collect_freshness", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/collect_freshness.sql", "original_file_path": "macros/collect_freshness.sql", "unique_id": "macro.fivetran_utils.default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n\n {%- set enabled_array = [] -%}\n {% for node in graph.sources.values() %}\n {% if node.identifier == source.identifier %}\n {% if (node.meta['is_enabled'] | default(true)) %}\n {%- do enabled_array.append(1) -%}\n {% endif %}\n {% endif %}\n {% endfor %}\n {% set is_enabled = (enabled_array != []) %}\n\n select\n {% if is_enabled %}\n max({{ loaded_at_field }})\n {% else %} \n {{ current_timestamp() }} {% endif %} as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n\n {% if is_enabled %}\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endif %}\n\n {% endcall %}\n\n {% if dbt_version.split('.') | map('int') | list >= [1, 5, 0] %}\n {{ return(load_result('collect_freshness')) }}\n {% else %}\n {{ return(load_result('collect_freshness').table) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.600589, "supported_languages": null}, "macro.fivetran_utils.timestamp_add": {"name": "timestamp_add", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_add.sql", "original_file_path": "macros/timestamp_add.sql", "unique_id": "macro.fivetran_utils.timestamp_add", "macro_sql": "{% macro timestamp_add(datepart, interval, from_timestamp) -%}\n\n{{ adapter.dispatch('timestamp_add', 'fivetran_utils') (datepart, interval, from_timestamp) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.bigquery__timestamp_add"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.6010282, "supported_languages": null}, "macro.fivetran_utils.default__timestamp_add": {"name": "default__timestamp_add", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_add.sql", "original_file_path": "macros/timestamp_add.sql", "unique_id": "macro.fivetran_utils.default__timestamp_add", "macro_sql": "{% macro default__timestamp_add(datepart, interval, from_timestamp) %}\n\n timestampadd(\n {{ datepart 
}},\n {{ interval }},\n {{ from_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.6011221, "supported_languages": null}, "macro.fivetran_utils.bigquery__timestamp_add": {"name": "bigquery__timestamp_add", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_add.sql", "original_file_path": "macros/timestamp_add.sql", "unique_id": "macro.fivetran_utils.bigquery__timestamp_add", "macro_sql": "{% macro bigquery__timestamp_add(datepart, interval, from_timestamp) %}\n\n timestamp_add({{ from_timestamp }}, interval {{ interval }} {{ datepart }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.6012118, "supported_languages": null}, "macro.fivetran_utils.redshift__timestamp_add": {"name": "redshift__timestamp_add", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_add.sql", "original_file_path": "macros/timestamp_add.sql", "unique_id": "macro.fivetran_utils.redshift__timestamp_add", "macro_sql": "{% macro redshift__timestamp_add(datepart, interval, from_timestamp) %}\n\n dateadd(\n {{ datepart }},\n {{ interval }},\n {{ from_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.601307, "supported_languages": null}, "macro.fivetran_utils.postgres__timestamp_add": {"name": "postgres__timestamp_add", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_add.sql", "original_file_path": "macros/timestamp_add.sql", "unique_id": "macro.fivetran_utils.postgres__timestamp_add", "macro_sql": "{% macro postgres__timestamp_add(datepart, interval, from_timestamp) %}\n\n {{ from_timestamp }} + ((interval '1 {{ datepart }}') * ({{ interval }}))\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.601401, "supported_languages": null}, "macro.fivetran_utils.spark__timestamp_add": {"name": "spark__timestamp_add", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_add.sql", "original_file_path": "macros/timestamp_add.sql", "unique_id": "macro.fivetran_utils.spark__timestamp_add", "macro_sql": "{% macro spark__timestamp_add(datepart, interval, from_timestamp) %}\n\n {{ dbt.dateadd(datepart, interval, from_timestamp) }}\n \n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.601513, "supported_languages": null}, "macro.fivetran_utils.ceiling": {"name": "ceiling", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/ceiling.sql", "original_file_path": "macros/ceiling.sql", "unique_id": "macro.fivetran_utils.ceiling", "macro_sql": "{% macro ceiling(num) -%}\n\n{{ adapter.dispatch('ceiling', 'fivetran_utils') (num) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__ceiling"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.601676, 
"supported_languages": null}, "macro.fivetran_utils.default__ceiling": {"name": "default__ceiling", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/ceiling.sql", "original_file_path": "macros/ceiling.sql", "unique_id": "macro.fivetran_utils.default__ceiling", "macro_sql": "{% macro default__ceiling(num) %}\n ceiling({{ num }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.601741, "supported_languages": null}, "macro.fivetran_utils.snowflake__ceiling": {"name": "snowflake__ceiling", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/ceiling.sql", "original_file_path": "macros/ceiling.sql", "unique_id": "macro.fivetran_utils.snowflake__ceiling", "macro_sql": "{% macro snowflake__ceiling(num) %}\n ceil({{ num }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.601807, "supported_languages": null}, "macro.fivetran_utils.remove_prefix_from_columns": {"name": "remove_prefix_from_columns", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/remove_prefix_from_columns.sql", "original_file_path": "macros/remove_prefix_from_columns.sql", "unique_id": "macro.fivetran_utils.remove_prefix_from_columns", "macro_sql": "{% macro remove_prefix_from_columns(columns, prefix='', exclude=[]) %}\n\n {%- for col in columns if col.name not in exclude -%}\n {%- if col.name[:prefix|length]|lower == prefix -%}\n {{ col.name }} as {{ col.name[prefix|length:] }}\n {%- else -%}\n {{ col.name }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {% endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.6022131, "supported_languages": null}, "macro.fivetran_utils.fivetran_date_spine": {"name": "fivetran_date_spine", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/fivetran_date_spine.sql", "original_file_path": "macros/fivetran_date_spine.sql", "unique_id": "macro.fivetran_utils.fivetran_date_spine", "macro_sql": "{% macro fivetran_date_spine(datepart, start_date, end_date) -%}\n\n{{ return(adapter.dispatch('fivetran_date_spine', 'fivetran_utils') (datepart, start_date, end_date)) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__fivetran_date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.6030688, "supported_languages": null}, "macro.fivetran_utils.default__fivetran_date_spine": {"name": "default__fivetran_date_spine", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/fivetran_date_spine.sql", "original_file_path": "macros/fivetran_date_spine.sql", "unique_id": "macro.fivetran_utils.default__fivetran_date_spine", "macro_sql": "{% macro default__fivetran_date_spine(datepart, start_date, end_date) %}\n\n {{ dbt_utils.date_spine(datepart, start_date, end_date) }}\n \n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.603174, "supported_languages": null}, 
"macro.fivetran_utils.sqlserver__fivetran_date_spine": {"name": "sqlserver__fivetran_date_spine", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/fivetran_date_spine.sql", "original_file_path": "macros/fivetran_date_spine.sql", "unique_id": "macro.fivetran_utils.sqlserver__fivetran_date_spine", "macro_sql": "{% macro sqlserver__fivetran_date_spine(datepart, start_date, end_date) -%}\n\n {% set date_spine_query %}\n with\n\n l0 as (\n\n select c\n from (select 1 union all select 1) as d(c)\n\n ),\n l1 as (\n\n select\n 1 as c\n from l0 as a\n cross join l0 as b\n\n ),\n\n l2 as (\n\n select 1 as c\n from l1 as a\n cross join l1 as b\n ),\n\n l3 as (\n\n select 1 as c\n from l2 as a\n cross join l2 as b\n ),\n\n l4 as (\n\n select 1 as c\n from l3 as a\n cross join l3 as b\n ),\n\n l5 as (\n\n select 1 as c\n from l4 as a\n cross join l4 as b\n ),\n\n nums as (\n\n select row_number() over (order by (select null)) as rownum\n from l5\n ),\n\n rawdata as (\n\n select top ({{dbt.datediff(start_date, end_date, datepart)}}) rownum -1 as n\n from nums\n order by rownum\n ),\n\n all_periods as (\n\n select (\n {{\n dbt.dateadd(\n datepart,\n 'n',\n start_date\n )\n }}\n ) as date_{{datepart}}\n from rawdata\n ),\n\n filtered as (\n\n select *\n from all_periods\n where date_{{datepart}} <= {{ end_date }}\n\n )\n\n select * from filtered\n order by 1\n\n {% endset %}\n\n {% set results = run_query(date_spine_query) %}\n\n {% if execute %}\n\n {% set results_list = results.columns[0].values() %}\n \n {% else %}\n\n {% set results_list = [] %}\n\n {% endif %}\n\n {%- for date_field in results_list %}\n select cast('{{ date_field }}' as date) as date_{{datepart}} {{ 'union all ' if not loop.last else '' }}\n {% endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt.dateadd", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.603724, "supported_languages": null}, "macro.fivetran_utils.union_data": {"name": "union_data", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/union_data.sql", "original_file_path": "macros/union_data.sql", "unique_id": "macro.fivetran_utils.union_data", "macro_sql": "{%- macro union_data(table_identifier, database_variable, schema_variable, default_database, default_schema, default_variable, union_schema_variable='union_schemas', union_database_variable='union_databases') -%}\n\n{{ adapter.dispatch('union_data', 'fivetran_utils') (\n table_identifier, \n database_variable, \n schema_variable, \n default_database, \n default_schema, \n default_variable,\n union_schema_variable,\n union_database_variable\n ) }}\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.fivetran_utils.default__union_data"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.6061, "supported_languages": null}, "macro.fivetran_utils.default__union_data": {"name": "default__union_data", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/union_data.sql", "original_file_path": "macros/union_data.sql", "unique_id": "macro.fivetran_utils.default__union_data", "macro_sql": "{%- macro default__union_data(\n table_identifier, \n database_variable, \n schema_variable, \n default_database, \n default_schema, \n default_variable,\n union_schema_variable,\n union_database_variable\n ) -%}\n\n{%- if 
var(union_schema_variable, none) -%}\n\n {%- set relations = [] -%}\n \n {%- if var(union_schema_variable) is string -%}\n {%- set trimmed = var(union_schema_variable)|trim('[')|trim(']') -%}\n {%- set schemas = trimmed.split(',')|map('trim',\" \")|map('trim','\"')|map('trim',\"'\") -%}\n {%- else -%}\n {%- set schemas = var(union_schema_variable) -%}\n {%- endif -%}\n\n {%- for schema in var(union_schema_variable) -%}\n {%- set relation=adapter.get_relation(\n database=source(schema, table_identifier).database if var('has_defined_sources', false) else var(database_variable, default_database),\n schema=source(schema, table_identifier).schema if var('has_defined_sources', false) else schema,\n identifier=source(schema, table_identifier).identifier if var('has_defined_sources', false) else table_identifier\n ) -%}\n \n {%- set relation_exists=relation is not none -%}\n\n {%- if relation_exists -%}\n {%- do relations.append(relation) -%}\n {%- endif -%}\n\n {%- endfor -%}\n \n {%- if relations != [] -%}\n {{ dbt_utils.union_relations(relations) }}\n {%- else -%}\n {% if execute and not var('fivetran__remove_empty_table_warnings', false) -%}\n {{ exceptions.warn(\"\\n\\nPlease be aware: The \" ~ table_identifier|upper ~ \" table was not found in your \" ~ default_schema|upper ~ \" schema(s). The Fivetran dbt package will create a completely empty \" ~ table_identifier|upper ~ \" staging model as to not break downstream transformations. To turn off these warnings, set the `fivetran__remove_empty_table_warnings` variable to TRUE (see https://github.com/fivetran/dbt_fivetran_utils/tree/releases/v0.4.latest#union_data-source for details).\\n\") }}\n {% endif -%}\n select \n cast(null as {{ dbt.type_string() }}) as _dbt_source_relation\n limit 0\n {%- endif -%}\n\n{%- elif var(union_database_variable, none) -%}\n\n {%- set relations = [] -%}\n\n {%- for database in var(union_database_variable) -%}\n {%- set relation=adapter.get_relation(\n database=source(schema, table_identifier).database if var('has_defined_sources', false) else database,\n schema=source(schema, table_identifier).schema if var('has_defined_sources', false) else var(schema_variable, default_schema),\n identifier=source(schema, table_identifier).identifier if var('has_defined_sources', false) else table_identifier\n ) -%}\n\n {%- set relation_exists=relation is not none -%}\n\n {%- if relation_exists -%}\n {%- do relations.append(relation) -%}\n {%- endif -%}\n\n {%- endfor -%}\n\n {%- if relations != [] -%}\n {{ dbt_utils.union_relations(relations) }}\n {%- else -%}\n {% if execute and not var('fivetran__remove_empty_table_warnings', false) -%}\n {{ exceptions.warn(\"\\n\\nPlease be aware: The \" ~ table_identifier|upper ~ \" table was not found in your \" ~ default_schema|upper ~ \" schema(s). The Fivetran dbt package will create a completely empty \" ~ table_identifier|upper ~ \" staging model as to not break downstream transformations. 
To turn off these warnings, set the `fivetran__remove_empty_table_warnings` variable to TRUE (see https://github.com/fivetran/dbt_fivetran_utils/tree/releases/v0.4.latest#union_data-source for details).\\n\") }}\n {% endif -%}\n select \n cast(null as {{ dbt.type_string() }}) as _dbt_source_relation\n limit 0\n {%- endif -%}\n\n{%- else -%}\n {% set exception_schemas = {\"linkedin_company_pages\": \"linkedin_pages\", \"instagram_business_pages\": \"instagram_business\"} %}\n {% set relation = namespace(value=\"\") %}\n {% if default_schema in exception_schemas.keys() %}\n {% for corrected_schema_name in exception_schemas.items() %} \n {% if default_schema in corrected_schema_name %}\n {# In order for this macro to effectively work within upstream integration tests (mainly used by the Fivetran dbt package maintainers), this identifier variable selection is required to use the macro with different identifier names. #}\n {% set identifier_var = corrected_schema_name[1] + \"_\" + table_identifier + \"_identifier\" %}\n {%- set relation.value=adapter.get_relation(\n database=source(corrected_schema_name[1], table_identifier).database,\n schema=source(corrected_schema_name[1], table_identifier).schema,\n identifier=var(identifier_var, table_identifier)\n ) -%}\n {% endif %}\n {% endfor %}\n {% else %}\n {# In order for this macro to effectively work within upstream integration tests (mainly used by the Fivetran dbt package maintainers), this identifier variable selection is required to use the macro with different identifier names. #}\n {% set identifier_var = default_schema + \"_\" + table_identifier + \"_identifier\" %}\n {# Unfortunately the Twitter Organic identifiers were misspelled. As such, we will need to account for this in the model. This will be adjusted in the Twitter Organic package, but to ensure backwards compatibility, this needs to be included. #}\n {% if var(identifier_var, none) is none %} \n {% set identifier_var = default_schema + \"_\" + table_identifier + \"_identifer\" %}\n {% endif %}\n {%- set relation.value=adapter.get_relation(\n database=source(default_schema, table_identifier).database,\n schema=source(default_schema, table_identifier).schema,\n identifier=var(identifier_var, table_identifier)\n ) -%}\n {% endif %}\n{%- set table_exists=relation.value is not none -%}\n\n{%- if table_exists -%}\n select * \n from {{ relation.value }}\n{%- else -%}\n {% if execute and not var('fivetran__remove_empty_table_warnings', false) -%}\n {{ exceptions.warn(\"\\n\\nPlease be aware: The \" ~ table_identifier|upper ~ \" table was not found in your \" ~ default_schema|upper ~ \" schema(s). The Fivetran dbt package will create a completely empty \" ~ table_identifier|upper ~ \" staging model as to not break downstream transformations. 
To turn off these warnings, set the `fivetran__remove_empty_table_warnings` variable to TRUE (see https://github.com/fivetran/dbt_fivetran_utils/tree/releases/v0.4.latest#union_data-source for details).\\n\") }}\n {% endif -%}\n select \n cast(null as {{ dbt.type_string() }}) as _dbt_source_relation\n limit 0\n{%- endif -%}\n{%- endif -%}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_utils.union_relations", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.608819, "supported_languages": null}, "macro.fivetran_utils.dummy_coalesce_value": {"name": "dummy_coalesce_value", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/dummy_coalesce_value.sql", "original_file_path": "macros/dummy_coalesce_value.sql", "unique_id": "macro.fivetran_utils.dummy_coalesce_value", "macro_sql": "{% macro dummy_coalesce_value(column) %}\n\n{% set coalesce_value = {\n 'STRING': \"'DUMMY_STRING'\",\n 'BOOLEAN': 'null',\n 'INT': 999999999,\n 'FLOAT': 999999999.99,\n 'TIMESTAMP': 'cast(\"2099-12-31\" as timestamp)',\n 'DATE': 'cast(\"2099-12-31\" as date)',\n} %}\n\n{% if column.is_float() %}\n{{ return(coalesce_value['FLOAT']) }}\n\n{% elif column.is_numeric() %}\n{{ return(coalesce_value['INT']) }}\n\n{% elif column.is_string() %}\n{{ return(coalesce_value['STRING']) }}\n\n{% elif column.data_type|lower == 'boolean' %}\n{{ return(coalesce_value['BOOLEAN']) }}\n\n{% elif 'timestamp' in column.data_type|lower %}\n{{ return(coalesce_value['TIMESTAMP']) }}\n\n{% elif 'date' in column.data_type|lower %}\n{{ return(coalesce_value['DATE']) }}\n\n{% elif 'int' in column.data_type|lower %}\n{{ return(coalesce_value['INT']) }}\n\n{% endif %}\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.6097322, "supported_languages": null}, "macro.fivetran_utils.extract_url_parameter": {"name": "extract_url_parameter", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/extract_url_parameter.sql", "original_file_path": "macros/extract_url_parameter.sql", "unique_id": "macro.fivetran_utils.extract_url_parameter", "macro_sql": "{% macro extract_url_parameter(field, url_parameter) -%}\n\n{{ adapter.dispatch('extract_url_parameter', 'fivetran_utils') (field, url_parameter) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__extract_url_parameter"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.609958, "supported_languages": null}, "macro.fivetran_utils.default__extract_url_parameter": {"name": "default__extract_url_parameter", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/extract_url_parameter.sql", "original_file_path": "macros/extract_url_parameter.sql", "unique_id": "macro.fivetran_utils.default__extract_url_parameter", "macro_sql": "{% macro default__extract_url_parameter(field, url_parameter) -%}\n\n{{ dbt_utils.get_url_parameter(field, url_parameter) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.get_url_parameter"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.610055, "supported_languages": null}, "macro.fivetran_utils.spark__extract_url_parameter": {"name": 
"spark__extract_url_parameter", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/extract_url_parameter.sql", "original_file_path": "macros/extract_url_parameter.sql", "unique_id": "macro.fivetran_utils.spark__extract_url_parameter", "macro_sql": "{% macro spark__extract_url_parameter(field, url_parameter) -%}\n\n{%- set formatted_url_parameter = \"'\" + url_parameter + \"=([^&]+)'\" -%}\nnullif(regexp_extract({{ field }}, {{ formatted_url_parameter }}, 1), '')\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.6101701, "supported_languages": null}, "macro.fivetran_utils.wrap_in_quotes": {"name": "wrap_in_quotes", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/wrap_in_quotes.sql", "original_file_path": "macros/wrap_in_quotes.sql", "unique_id": "macro.fivetran_utils.wrap_in_quotes", "macro_sql": "{%- macro wrap_in_quotes(object_to_quote) -%}\n\n{{ return(adapter.dispatch('wrap_in_quotes', 'fivetran_utils')(object_to_quote)) }}\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.fivetran_utils.default__wrap_in_quotes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.6103702, "supported_languages": null}, "macro.fivetran_utils.default__wrap_in_quotes": {"name": "default__wrap_in_quotes", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/wrap_in_quotes.sql", "original_file_path": "macros/wrap_in_quotes.sql", "unique_id": "macro.fivetran_utils.default__wrap_in_quotes", "macro_sql": "{%- macro default__wrap_in_quotes(object_to_quote) -%}\n{# bigquery, spark, databricks #}\n `{{ object_to_quote }}`\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.6104362, "supported_languages": null}, "macro.fivetran_utils.snowflake__wrap_in_quotes": {"name": "snowflake__wrap_in_quotes", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/wrap_in_quotes.sql", "original_file_path": "macros/wrap_in_quotes.sql", "unique_id": "macro.fivetran_utils.snowflake__wrap_in_quotes", "macro_sql": "{%- macro snowflake__wrap_in_quotes(object_to_quote) -%}\n \"{{ object_to_quote | upper }}\"\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.610501, "supported_languages": null}, "macro.fivetran_utils.redshift__wrap_in_quotes": {"name": "redshift__wrap_in_quotes", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/wrap_in_quotes.sql", "original_file_path": "macros/wrap_in_quotes.sql", "unique_id": "macro.fivetran_utils.redshift__wrap_in_quotes", "macro_sql": "{%- macro redshift__wrap_in_quotes(object_to_quote) -%}\n \"{{ object_to_quote }}\"\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.6105611, "supported_languages": null}, "macro.fivetran_utils.postgres__wrap_in_quotes": {"name": "postgres__wrap_in_quotes", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/wrap_in_quotes.sql", "original_file_path": 
"macros/wrap_in_quotes.sql", "unique_id": "macro.fivetran_utils.postgres__wrap_in_quotes", "macro_sql": "{%- macro postgres__wrap_in_quotes(object_to_quote) -%}\n \"{{ object_to_quote }}\"\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.610616, "supported_languages": null}, "macro.fivetran_utils.array_agg": {"name": "array_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/array_agg.sql", "original_file_path": "macros/array_agg.sql", "unique_id": "macro.fivetran_utils.array_agg", "macro_sql": "{% macro array_agg(field_to_agg) -%}\n\n{{ adapter.dispatch('array_agg', 'fivetran_utils') (field_to_agg) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__array_agg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.61077, "supported_languages": null}, "macro.fivetran_utils.default__array_agg": {"name": "default__array_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/array_agg.sql", "original_file_path": "macros/array_agg.sql", "unique_id": "macro.fivetran_utils.default__array_agg", "macro_sql": "{% macro default__array_agg(field_to_agg) %}\n array_agg({{ field_to_agg }})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.610829, "supported_languages": null}, "macro.fivetran_utils.redshift__array_agg": {"name": "redshift__array_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/array_agg.sql", "original_file_path": "macros/array_agg.sql", "unique_id": "macro.fivetran_utils.redshift__array_agg", "macro_sql": "{% macro redshift__array_agg(field_to_agg) %}\n listagg({{ field_to_agg }}, ',')\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.610889, "supported_languages": null}, "macro.fivetran_utils.empty_variable_warning": {"name": "empty_variable_warning", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/empty_variable_warning.sql", "original_file_path": "macros/empty_variable_warning.sql", "unique_id": "macro.fivetran_utils.empty_variable_warning", "macro_sql": "{% macro empty_variable_warning(variable, downstream_model) %}\n\n{% if not var(variable) %}\n{{ log(\n \"\"\"\n Warning: You have passed an empty list to the \"\"\" ~ variable ~ \"\"\".\n As a result, you won't see the history of any columns in the \"\"\" ~ downstream_model ~ \"\"\" model.\n \"\"\",\n info=True\n) }}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.611128, "supported_languages": null}, "macro.fivetran_utils.enabled_vars_one_true": {"name": "enabled_vars_one_true", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/enabled_vars_one_true.sql", "original_file_path": "macros/enabled_vars_one_true.sql", "unique_id": "macro.fivetran_utils.enabled_vars_one_true", "macro_sql": "{% macro enabled_vars_one_true(vars) %}\n\n{% for v in vars %}\n \n {% if var(v, False) == True %}\n {{ return(True) }}\n {% endif %}\n\n{% endfor 
%}\n\n{{ return(False) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.611354, "supported_languages": null}, "macro.zendesk_source.get_domain_name_columns": {"name": "get_domain_name_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_domain_name_columns.sql", "original_file_path": "macros/get_domain_name_columns.sql", "unique_id": "macro.zendesk_source.get_domain_name_columns", "macro_sql": "{% macro get_domain_name_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"domain_name\", \"datatype\": dbt.type_string()},\n {\"name\": \"index\", \"datatype\": dbt.type_int()},\n {\"name\": \"organization_id\", \"datatype\": dbt.type_int()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.611695, "supported_languages": null}, "macro.zendesk_source.get_user_tag_columns": {"name": "get_user_tag_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_user_tag_columns.sql", "original_file_path": "macros/get_user_tag_columns.sql", "unique_id": "macro.zendesk_source.get_user_tag_columns", "macro_sql": "{% macro get_user_tag_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"user_id\", \"datatype\": dbt.type_int()}\n] %}\n\n{% if target.type == 'redshift' %}\n {{ columns.append( {\"name\": \"tag\", \"datatype\": dbt.type_string(), \"quote\": True } ) }}\n\n{% elif target.type == 'snowflake' %}\n {{ columns.append( {\"name\": \"TAG\", \"datatype\": dbt.type_string(), \"quote\": True } ) }}\n\n{% else %}\n {{ columns.append( {\"name\": \"tag\", \"datatype\": dbt.type_string()} ) }}\n\n{% endif %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.612414, "supported_languages": null}, "macro.zendesk_source.get_ticket_form_history_columns": {"name": "get_ticket_form_history_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_ticket_form_history_columns.sql", "original_file_path": "macros/get_ticket_form_history_columns.sql", "unique_id": "macro.zendesk_source.get_ticket_form_history_columns", "macro_sql": "{% macro get_ticket_form_history_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_deleted\", \"datatype\": \"boolean\"},\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"active\", \"datatype\": \"boolean\"},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"display_name\", \"datatype\": dbt.type_string()},\n {\"name\": \"end_user_visible\", \"datatype\": \"boolean\"},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"updated_at\", \"datatype\": dbt.type_timestamp()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string", 
"macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.613023, "supported_languages": null}, "macro.zendesk_source.get_schedule_columns": {"name": "get_schedule_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_schedule_columns.sql", "original_file_path": "macros/get_schedule_columns.sql", "unique_id": "macro.zendesk_source.get_schedule_columns", "macro_sql": "{% macro get_schedule_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_deleted\", \"datatype\": \"boolean\"},\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"end_time\", \"datatype\": dbt.type_int()},\n {\"name\": \"end_time_utc\", \"datatype\": dbt.type_int()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"start_time\", \"datatype\": dbt.type_int()},\n {\"name\": \"start_time_utc\", \"datatype\": dbt.type_int()},\n {\"name\": \"time_zone\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.613675, "supported_languages": null}, "macro.zendesk_source.get_daylight_time_columns": {"name": "get_daylight_time_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_daylight_time_columns.sql", "original_file_path": "macros/get_daylight_time_columns.sql", "unique_id": "macro.zendesk_source.get_daylight_time_columns", "macro_sql": "{% macro get_daylight_time_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"daylight_end_utc\", \"datatype\": \"datetime\"},\n {\"name\": \"daylight_offset\", \"datatype\": dbt.type_int()},\n {\"name\": \"daylight_start_utc\", \"datatype\": \"datetime\"},\n {\"name\": \"time_zone\", \"datatype\": dbt.type_string()},\n {\"name\": \"year\", \"datatype\": dbt.type_int()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.614126, "supported_languages": null}, "macro.zendesk_source.get_time_zone_columns": {"name": "get_time_zone_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_time_zone_columns.sql", "original_file_path": "macros/get_time_zone_columns.sql", "unique_id": "macro.zendesk_source.get_time_zone_columns", "macro_sql": "{% macro get_time_zone_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"standard_offset\", \"datatype\": dbt.type_string()},\n {\"name\": \"time_zone\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.614423, "supported_languages": null}, "macro.zendesk_source.get_ticket_tag_columns": 
{"name": "get_ticket_tag_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_ticket_tag_columns.sql", "original_file_path": "macros/get_ticket_tag_columns.sql", "unique_id": "macro.zendesk_source.get_ticket_tag_columns", "macro_sql": "{% macro get_ticket_tag_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"ticket_id\", \"datatype\": dbt.type_int()}\n] %}\n\n{% if target.type == 'redshift' %}\n {{ columns.append( {\"name\": \"tag\", \"datatype\": dbt.type_string(), \"quote\": True } ) }}\n\n{% elif target.type == 'snowflake' %}\n {{ columns.append( {\"name\": \"TAG\", \"datatype\": dbt.type_string(), \"quote\": True } ) }}\n\n{% else %}\n {{ columns.append( {\"name\": \"tag\", \"datatype\": dbt.type_string()} ) }}\n\n{% endif %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.615048, "supported_languages": null}, "macro.zendesk_source.get_organization_tag_columns": {"name": "get_organization_tag_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_organization_tag_columns.sql", "original_file_path": "macros/get_organization_tag_columns.sql", "unique_id": "macro.zendesk_source.get_organization_tag_columns", "macro_sql": "{% macro get_organization_tag_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"organization_id\", \"datatype\": dbt.type_int()}\n] %}\n\n{% if target.type == 'redshift' %}\n {{ columns.append( {\"name\": \"tag\", \"datatype\": dbt.type_string(), \"quote\": True } ) }}\n\n{% elif target.type == 'snowflake' %}\n {{ columns.append( {\"name\": \"TAG\", \"datatype\": dbt.type_string(), \"quote\": True } ) }}\n\n{% else %}\n {{ columns.append( {\"name\": \"tag\", \"datatype\": dbt.type_string()} ) }}\n\n{% endif %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.615654, "supported_languages": null}, "macro.zendesk_source.get_schedule_holiday_columns": {"name": "get_schedule_holiday_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_schedule_holiday_columns.sql", "original_file_path": "macros/get_schedule_holiday_columns.sql", "unique_id": "macro.zendesk_source.get_schedule_holiday_columns", "macro_sql": "{% macro get_schedule_holiday_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_deleted\", \"datatype\": \"boolean\"},\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"end_date\", \"datatype\": dbt.type_string()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"schedule_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"start_date\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], 
"created_at": 1715700423.616174, "supported_languages": null}, "macro.zendesk_source.get_group_columns": {"name": "get_group_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_group_columns.sql", "original_file_path": "macros/get_group_columns.sql", "unique_id": "macro.zendesk_source.get_group_columns", "macro_sql": "{% macro get_group_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_deleted\", \"datatype\": \"boolean\"},\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"updated_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"url\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.616757, "supported_languages": null}, "macro.zendesk_source.get_user_columns": {"name": "get_user_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_user_columns.sql", "original_file_path": "macros/get_user_columns.sql", "unique_id": "macro.zendesk_source.get_user_columns", "macro_sql": "{% macro get_user_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"active\", \"datatype\": \"boolean\"},\n {\"name\": \"alias\", \"datatype\": dbt.type_string()},\n {\"name\": \"authenticity_token\", \"datatype\": dbt.type_int()},\n {\"name\": \"chat_only\", \"datatype\": \"boolean\"},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"details\", \"datatype\": dbt.type_int()},\n {\"name\": \"email\", \"datatype\": dbt.type_string()},\n {\"name\": \"external_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"last_login_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"locale\", \"datatype\": dbt.type_string()},\n {\"name\": \"locale_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"moderator\", \"datatype\": \"boolean\"},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"notes\", \"datatype\": dbt.type_int()},\n {\"name\": \"only_private_comments\", \"datatype\": \"boolean\"},\n {\"name\": \"organization_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"phone\", \"datatype\": dbt.type_string()},\n {\"name\": \"remote_photo_url\", \"datatype\": dbt.type_int()},\n {\"name\": \"restricted_agent\", \"datatype\": \"boolean\"},\n {\"name\": \"role\", \"datatype\": dbt.type_string()},\n {\"name\": \"shared\", \"datatype\": \"boolean\"},\n {\"name\": \"shared_agent\", \"datatype\": \"boolean\"},\n {\"name\": \"signature\", \"datatype\": dbt.type_int()},\n {\"name\": \"suspended\", \"datatype\": \"boolean\"},\n {\"name\": \"ticket_restriction\", \"datatype\": dbt.type_string()},\n {\"name\": \"time_zone\", \"datatype\": dbt.type_string()},\n {\"name\": \"two_factor_auth_enabled\", \"datatype\": \"boolean\"},\n {\"name\": \"updated_at\", \"datatype\": dbt.type_string()},\n {\"name\": \"url\", \"datatype\": dbt.type_string()},\n {\"name\": \"verified\", \"datatype\": \"boolean\"}\n] %}\n\n{{ fivetran_utils.add_pass_through_columns(columns, var('zendesk__user_passthrough_columns')) 
}}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string", "macro.dbt.type_int", "macro.fivetran_utils.add_pass_through_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.618902, "supported_languages": null}, "macro.zendesk_source.get_ticket_columns": {"name": "get_ticket_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_ticket_columns.sql", "original_file_path": "macros/get_ticket_columns.sql", "unique_id": "macro.zendesk_source.get_ticket_columns", "macro_sql": "{% macro get_ticket_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"allow_channelback\", \"datatype\": \"boolean\"},\n {\"name\": \"assignee_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"brand_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"description\", \"datatype\": dbt.type_string()},\n {\"name\": \"due_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"external_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"forum_topic_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"group_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"has_incidents\", \"datatype\": \"boolean\"},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"is_public\", \"datatype\": \"boolean\"},\n {\"name\": \"merged_ticket_ids\", \"datatype\": dbt.type_string()},\n {\"name\": \"organization_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"priority\", \"datatype\": dbt.type_string()},\n {\"name\": \"problem_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"recipient\", \"datatype\": dbt.type_int()},\n {\"name\": \"requester_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"status\", \"datatype\": dbt.type_string()},\n {\"name\": \"subject\", \"datatype\": dbt.type_string()},\n {\"name\": \"submitter_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"system_ccs\", \"datatype\": dbt.type_int()},\n {\"name\": \"system_client\", \"datatype\": dbt.type_string()},\n {\"name\": \"system_ip_address\", \"datatype\": dbt.type_string()},\n {\"name\": \"system_json_email_identifier\", \"datatype\": dbt.type_int()},\n {\"name\": \"system_latitude\", \"datatype\": dbt.type_float()},\n {\"name\": \"system_location\", \"datatype\": dbt.type_string()},\n {\"name\": \"system_longitude\", \"datatype\": dbt.type_float()},\n {\"name\": \"system_machine_generated\", \"datatype\": dbt.type_int()},\n {\"name\": \"system_message_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"system_raw_email_identifier\", \"datatype\": dbt.type_int()},\n {\"name\": \"ticket_form_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"type\", \"datatype\": dbt.type_string()},\n {\"name\": \"updated_at\", \"datatype\": dbt.type_string()},\n {\"name\": \"url\", \"datatype\": dbt.type_string()},\n {\"name\": \"via_channel\", \"datatype\": dbt.type_string()},\n {\"name\": \"via_source_from_address\", \"datatype\": dbt.type_int()},\n {\"name\": \"via_source_from_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"via_source_from_title\", \"datatype\": dbt.type_int()},\n {\"name\": \"via_source_rel\", \"datatype\": dbt.type_int()},\n {\"name\": \"via_source_to_address\", \"datatype\": dbt.type_int()},\n {\"name\": \"via_source_to_name\", \"datatype\": dbt.type_int()}\n] %}\n\n{{ 
fivetran_utils.add_pass_through_columns(columns, var('zendesk__ticket_passthrough_columns')) }}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string", "macro.dbt.type_float", "macro.fivetran_utils.add_pass_through_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.622056, "supported_languages": null}, "macro.zendesk_source.get_ticket_field_history_columns": {"name": "get_ticket_field_history_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_ticket_field_history_columns.sql", "original_file_path": "macros/get_ticket_field_history_columns.sql", "unique_id": "macro.zendesk_source.get_ticket_field_history_columns", "macro_sql": "{% macro get_ticket_field_history_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"field_name\", \"datatype\": dbt.type_string()},\n {\"name\": \"ticket_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"updated\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"user_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"value\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.622562, "supported_languages": null}, "macro.zendesk_source.get_ticket_schedule_columns": {"name": "get_ticket_schedule_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_ticket_schedule_columns.sql", "original_file_path": "macros/get_ticket_schedule_columns.sql", "unique_id": "macro.zendesk_source.get_ticket_schedule_columns", "macro_sql": "{% macro get_ticket_schedule_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"schedule_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"ticket_id\", \"datatype\": dbt.type_int()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.6229131, "supported_languages": null}, "macro.zendesk_source.get_organization_columns": {"name": "get_organization_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_organization_columns.sql", "original_file_path": "macros/get_organization_columns.sql", "unique_id": "macro.zendesk_source.get_organization_columns", "macro_sql": "{% macro get_organization_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"details\", \"datatype\": dbt.type_int()},\n {\"name\": \"external_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"group_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"notes\", \"datatype\": dbt.type_int()},\n {\"name\": \"shared_comments\", \"datatype\": \"boolean\"},\n {\"name\": 
\"shared_tickets\", \"datatype\": \"boolean\"},\n {\"name\": \"updated_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"url\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ fivetran_utils.add_pass_through_columns(columns, var('zendesk__organization_passthrough_columns')) }}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string", "macro.fivetran_utils.add_pass_through_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.623808, "supported_languages": null}, "macro.zendesk_source.get_ticket_comment_columns": {"name": "get_ticket_comment_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_ticket_comment_columns.sql", "original_file_path": "macros/get_ticket_comment_columns.sql", "unique_id": "macro.zendesk_source.get_ticket_comment_columns", "macro_sql": "{% macro get_ticket_comment_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_string()},\n {\"name\": \"body\", \"datatype\": dbt.type_string()},\n {\"name\": \"call_duration\", \"datatype\": dbt.type_int()},\n {\"name\": \"call_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"created\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"facebook_comment\", \"datatype\": \"boolean\"},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"location\", \"datatype\": dbt.type_int()},\n {\"name\": \"public\", \"datatype\": \"boolean\"},\n {\"name\": \"recording_url\", \"datatype\": dbt.type_int()},\n {\"name\": \"started_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"ticket_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"transcription_status\", \"datatype\": dbt.type_int()},\n {\"name\": \"transcription_text\", \"datatype\": dbt.type_int()},\n {\"name\": \"trusted\", \"datatype\": dbt.type_int()},\n {\"name\": \"tweet\", \"datatype\": \"boolean\"},\n {\"name\": \"user_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"voice_comment\", \"datatype\": \"boolean\"},\n {\"name\": \"voice_comment_transcription_visible\", \"datatype\": dbt.type_int()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_string", "macro.dbt.type_int", "macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.6250482, "supported_languages": null}, "macro.zendesk_source.get_brand_columns": {"name": "get_brand_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_brand_columns.sql", "original_file_path": "macros/get_brand_columns.sql", "unique_id": "macro.zendesk_source.get_brand_columns", "macro_sql": "{% macro get_brand_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_deleted\", \"datatype\": \"boolean\"},\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"active\", \"datatype\": \"boolean\"},\n {\"name\": \"brand_url\", \"datatype\": dbt.type_string()},\n {\"name\": \"has_help_center\", \"datatype\": \"boolean\"},\n {\"name\": \"help_center_state\", \"datatype\": dbt.type_string()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"logo_content_type\", \"datatype\": dbt.type_string()},\n {\"name\": \"logo_content_url\", \"datatype\": dbt.type_string()},\n {\"name\": \"logo_deleted\", \"datatype\": \"boolean\"},\n {\"name\": 
\"logo_file_name\", \"datatype\": dbt.type_string()},\n {\"name\": \"logo_height\", \"datatype\": dbt.type_int()},\n {\"name\": \"logo_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"logo_inline\", \"datatype\": \"boolean\"},\n {\"name\": \"logo_mapped_content_url\", \"datatype\": dbt.type_string()},\n {\"name\": \"logo_size\", \"datatype\": dbt.type_int()},\n {\"name\": \"logo_url\", \"datatype\": dbt.type_string()},\n {\"name\": \"logo_width\", \"datatype\": dbt.type_int()},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"subdomain\", \"datatype\": dbt.type_string()},\n {\"name\": \"url\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1715700423.626424, "supported_languages": null}}, "docs": {"doc.dbt.__overview__": {"name": "__overview__", "resource_type": "doc", "package_name": "dbt", "path": "overview.md", "original_file_path": "docs/overview.md", "unique_id": "doc.dbt.__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the\nmodels in the graph. 
For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/introduction)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion"}}, "exposures": {}, "metrics": {}, "groups": {}, "selectors": {}, "disabled": {"test.zendesk_integration_tests.consistency_ticket_metrics": [{"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_dbt_test__audit", "name": "consistency_ticket_metrics", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "consistency/consistency_ticket_metrics.sql", "original_file_path": "tests/consistency/consistency_ticket_metrics.sql", "unique_id": "test.zendesk_integration_tests.consistency_ticket_metrics", "fqn": ["zendesk_integration_tests", "consistency", "consistency_ticket_metrics"], "alias": "consistency_ticket_metrics", "checksum": {"name": "sha256", "checksum": "e630be25d326f99cdad0ebc1d29e71dcd7514aa3e56c999e56d1ed15bc6c10e0"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1715700423.87579, "config_call_dict": {"tags": ["fivetran_validations"], "enabled": false}, "relation_name": null, "raw_code": "{{ config(\n tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\nwith prod as (\n select\n ticket_id,\n first_reply_time_business_minutes, \n first_reply_time_calendar_minutes\n from {{ target.schema }}_zendesk_prod.zendesk__ticket_metrics\n),\n\ndev as (\n select\n ticket_id,\n first_reply_time_business_minutes, \n first_reply_time_calendar_minutes\n from {{ target.schema }}_zendesk_dev.zendesk__ticket_metrics\n),\n\nfinal as (\n select \n prod.ticket_id,\n prod.first_reply_time_business_minutes as prod_first_reply_time_business_minutes,\n dev.first_reply_time_business_minutes as dev_first_reply_time_business_minutes,\n prod.first_reply_time_calendar_minutes as prod_first_reply_time_calendar_minutes,\n dev.first_reply_time_calendar_minutes as dev_first_reply_time_calendar_minutes\n from prod\n full outer join dev \n on dev.ticket_id = prod.ticket_id\n)\n\nselect *\nfrom final\nwhere (abs(prod_first_reply_time_business_minutes - dev_first_reply_time_business_minutes) >= 5\n or abs(prod_first_reply_time_calendar_minutes - dev_first_reply_time_calendar_minutes) >= 5)\n {{ \"and ticket_id not in \" ~ var('fivetran_consistency_ticket_metrics_exclusion_tickets',[]) ~ \"\" if var('fivetran_consistency_ticket_metrics_exclusion_tickets',[]) }}", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, 
"checksum": null}}], "test.zendesk_integration_tests.consistency_sla_policy_count": [{"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_dbt_test__audit", "name": "consistency_sla_policy_count", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "consistency/consistency_sla_policy_count.sql", "original_file_path": "tests/consistency/consistency_sla_policy_count.sql", "unique_id": "test.zendesk_integration_tests.consistency_sla_policy_count", "fqn": ["zendesk_integration_tests", "consistency", "consistency_sla_policy_count"], "alias": "consistency_sla_policy_count", "checksum": {"name": "sha256", "checksum": "077cf7371f60ca66cfcecde8010f56edbb33a12caab75dc80c44ec375bdb41f1"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1715700423.8822951, "config_call_dict": {"tags": ["fivetran_validations"], "enabled": false}, "relation_name": null, "raw_code": "{{ config(\n tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\nwith prod as (\n select\n 1 as join_key,\n count(*) as total_slas\n from {{ target.schema }}_zendesk_prod.zendesk__sla_policies\n group by 1\n),\n\ndev as (\n select\n 1 as join_key,\n count(*) as total_slas\n from {{ target.schema }}_zendesk_dev.zendesk__sla_policies\n group by 1\n),\n\nfinal as (\n select \n prod.join_key,\n prod.total_slas as prod_sla_total,\n dev.total_slas as dev_sla_total\n from prod\n full outer join dev \n on dev.join_key = prod.join_key\n)\n\nselect *\nfrom final\nwhere prod_sla_total != dev_sla_total", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.zendesk_integration_tests.consistency_sla_policies": [{"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_dbt_test__audit", "name": "consistency_sla_policies", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "consistency/consistency_sla_policies.sql", "original_file_path": "tests/consistency/consistency_sla_policies.sql", "unique_id": "test.zendesk_integration_tests.consistency_sla_policies", "fqn": ["zendesk_integration_tests", "consistency", "consistency_sla_policies"], "alias": "consistency_sla_policies", "checksum": {"name": "sha256", "checksum": "23d8dea3ba888005d225ac9c8f5a8882c1369d8a3039c4c63c8602d0a049ee37"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, 
"deferred": false, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1715700423.885547, "config_call_dict": {"tags": ["fivetran_validations"], "enabled": false}, "relation_name": null, "raw_code": "{{ config(\n tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\nwith prod as (\n select\n ticket_id,\n metric, \n sla_applied_at,\n sla_elapsed_time,\n is_sla_breach\n from {{ target.schema }}_zendesk_prod.zendesk__sla_policies\n),\n\ndev as (\n select\n ticket_id,\n metric, \n sla_applied_at,\n sla_elapsed_time,\n is_sla_breach\n from {{ target.schema }}_zendesk_dev.zendesk__sla_policies\n),\n\nfinal as (\n select \n prod.ticket_id,\n prod.metric,\n prod.sla_applied_at,\n prod.sla_elapsed_time as prod_sla_elapsed_time,\n dev.sla_elapsed_time as dev_sla_elapsed_time,\n prod.is_sla_breach as prod_is_sla_breach,\n dev.is_sla_breach as dev_is_sla_breach\n from prod\n full outer join dev \n on dev.ticket_id = prod.ticket_id\n and dev.metric = prod.metric\n and dev.sla_applied_at = prod.sla_applied_at\n)\n\nselect *\nfrom final\nwhere (abs(prod_sla_elapsed_time - dev_sla_elapsed_time) >= 5\n or prod_is_sla_breach != dev_is_sla_breach)\n {{ \"and prod.ticket_id not in \" ~ var('fivetran_consistency_sla_policies_exclusion_tickets',[]) ~ \"\" if var('fivetran_consistency_sla_policies_exclusion_tickets',[]) }}", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.zendesk_integration_tests.sla_first_reply_time_match": [{"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_dbt_test__audit", "name": "sla_first_reply_time_match", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "integrity/sla_first_reply_time_match.sql", "original_file_path": "tests/integrity/sla_first_reply_time_match.sql", "unique_id": "test.zendesk_integration_tests.sla_first_reply_time_match", "fqn": ["zendesk_integration_tests", "integrity", "sla_first_reply_time_match"], "alias": "sla_first_reply_time_match", "checksum": {"name": "sha256", "checksum": "a94e41e1bdbc5f4cb6268590d22f37692a708dd7471344b09e2d29a4edf4ccea"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1715700423.888155, "config_call_dict": {"tags": ["fivetran_validations"], "enabled": false}, "relation_name": null, "raw_code": "{{ config(\n tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\nwith ticket_metrics as (\n select\n ticket_id,\n first_reply_time_business_minutes\n from {{ ref('zendesk__ticket_metrics') }}\n),\n\nsla_policies as (\n select\n ticket_id,\n sla_elapsed_time\n from {{ ref('zendesk__sla_policies') }}\n where metric = 'first_reply_time'\n and in_business_hours\n),\n\nmatch_check as (\n select \n ticket_metrics.ticket_id,\n 
ticket_metrics.first_reply_time_business_minutes,\n sla_policies.sla_elapsed_time\n from ticket_metrics\n full outer join sla_policies \n on ticket_metrics.ticket_id = sla_policies.ticket_id\n)\n\nselect *\nfrom match_check\nwhere abs(round(first_reply_time_business_minutes,0) - round(sla_elapsed_time,0)) >= 2\n {{ \"and ticket_id not in \" ~ var('fivetran_integrity_sla_first_reply_time_exclusion_tickets',[]) ~ \"\" if var('fivetran_integrity_sla_first_reply_time_exclusion_tickets',[]) }}", "language": "sql", "refs": [{"name": "zendesk__ticket_metrics", "package": null, "version": null}, {"name": "zendesk__sla_policies", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.zendesk_integration_tests.sla_count_match": [{"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50_dbt_test__audit", "name": "sla_count_match", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "integrity/sla_count_match.sql", "original_file_path": "tests/integrity/sla_count_match.sql", "unique_id": "test.zendesk_integration_tests.sla_count_match", "fqn": ["zendesk_integration_tests", "integrity", "sla_count_match"], "alias": "sla_count_match", "checksum": {"name": "sha256", "checksum": "b1f23baf0d04729d4855197e4e5f6e76bf72502c3739371ebee1a6d626a6d8b8"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1715700423.89025, "config_call_dict": {"tags": ["fivetran_validations"], "enabled": false}, "relation_name": null, "raw_code": "{{ config(\n tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\n-- The necessary source and source_filter adjustments used below originate from the int_zendesk__sla_policy_applied model\nwith source as (\n select\n *,\n case when field_name = 'first_reply_time' then row_number() over (partition by ticket_id, field_name order by valid_starting_at desc) else 1 end as latest_sla\n from {{ ref('stg_zendesk__ticket_field_history') }}\n),\n\nsource_filter as (\n select\n ticket_id,\n count(*) as source_row_count\n from source\n where field_name in ('next_reply_time', 'first_reply_time', 'agent_work_time', 'requester_wait_time')\n and value is not null\n and latest_sla = 1\n group by 1\n),\n\nsla_policies as (\n select\n ticket_id,\n count(*) as end_model_row_count\n from {{ ref('zendesk__sla_policies') }}\n group by 1\n),\n\nmatch_check as (\n select \n sla_policies.ticket_id,\n end_model_row_count,\n source_row_count\n from sla_policies\n full outer join source_filter\n on source_filter.ticket_id = sla_policies.ticket_id\n)\n\nselect *\nfrom match_check\nwhere end_model_row_count != source_row_count\n{{ \"and ticket_id not in \" ~ var('fivetran_integrity_sla_count_match_tickets',[]) ~ \"\" if var('fivetran_integrity_sla_count_match_tickets',[]) }}", "language": "sql", "refs": 
[{"name": "stg_zendesk__ticket_field_history", "package": null, "version": null}, {"name": "zendesk__sla_policies", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "seed.zendesk_integration_tests.organization_tag_data_snowflake": [{"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "organization_tag_data_snowflake", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "organization_tag_data_snowflake.csv", "original_file_path": "seeds/organization_tag_data_snowflake.csv", "unique_id": "seed.zendesk_integration_tests.organization_tag_data_snowflake", "fqn": ["zendesk_integration_tests", "organization_tag_data_snowflake"], "alias": "organization_tag_data", "checksum": {"name": "sha256", "checksum": "d9219b78d44b8b4620100b064a3af350fb5fa2046bdb0c376a09bade7a99f6f7"}, "config": {"enabled": false, "alias": "organization_tag_data", "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"_fivetran_synced": "timestamp"}, "alias": "organization_tag_data", "enabled": "{{ true if target.type == 'snowflake' else false }}"}, "created_at": 1715700423.9209971, "config_call_dict": {}, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`organization_tag_data`", "raw_code": "", "root_path": "/Users/renee/Documents/dbt/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}, "defer_relation": null}], "seed.zendesk_integration_tests.user_data_snowflake": [{"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "user_data_snowflake", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "user_data_snowflake.csv", "original_file_path": "seeds/user_data_snowflake.csv", "unique_id": "seed.zendesk_integration_tests.user_data_snowflake", "fqn": ["zendesk_integration_tests", "user_data_snowflake"], "alias": "user_data", "checksum": {"name": "sha256", "checksum": "1d7712839e43bb49c4fb8a2bba60a98e8c3ea558c91a3d4fb4f4db6e1425f178"}, "config": {"enabled": false, "alias": "user_data", "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "int64", "external_id": "int64", "locale_id": "int64", "organization_id": "int64", "created_at": "timestamp", "last_login_at": "timestamp", "updated_at": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, 
"contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "external_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "locale_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "organization_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp", "last_login_at": "timestamp", "updated_at": "timestamp"}, "alias": "user_data", "enabled": "{{ true if target.type == 'snowflake' else false }}"}, "created_at": 1715700423.926336, "config_call_dict": {}, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`user_data`", "raw_code": "", "root_path": "/Users/renee/Documents/dbt/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}, "defer_relation": null}], "seed.zendesk_integration_tests.user_tag_data_snowflake": [{"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "user_tag_data_snowflake", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "user_tag_data_snowflake.csv", "original_file_path": "seeds/user_tag_data_snowflake.csv", "unique_id": "seed.zendesk_integration_tests.user_tag_data_snowflake", "fqn": ["zendesk_integration_tests", "user_tag_data_snowflake"], "alias": "user_tag_data", "checksum": {"name": "sha256", "checksum": "7c2274e05f81c1f9906a6a4a217c4493bf003a151402391069f49c64cf9ec5fb"}, "config": {"enabled": false, "alias": "user_tag_data", "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"_fivetran_synced": "timestamp"}, "alias": "user_tag_data", "enabled": "{{ true if target.type == 'snowflake' else false }}"}, "created_at": 1715700423.9278378, "config_call_dict": {}, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`user_tag_data`", "raw_code": "", "root_path": "/Users/renee/Documents/dbt/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}, "defer_relation": null}], "seed.zendesk_integration_tests.brand_data_postgres": [{"database": "dbt-package-testing", "schema": "zendesk_integration_tests_50", "name": "brand_data_postgres", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "brand_data_postgres.csv", "original_file_path": "seeds/brand_data_postgres.csv", "unique_id": "seed.zendesk_integration_tests.brand_data_postgres", "fqn": ["zendesk_integration_tests", "brand_data_postgres"], "alias": "brand_data", "checksum": {"name": 
"sha256", "checksum": "aa338ab31e4a221da8a0ed5040ec921a4d39a7377ae37a7e79b49e1402e490f5"}, "config": {"enabled": false, "alias": "brand_data", "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "int64"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "_fivetran_synced": "timestamp"}, "alias": "brand_data", "enabled": "{{ true if target.type == 'postgres' else false }}"}, "created_at": 1715700423.930339, "config_call_dict": {}, "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50`.`brand_data`", "raw_code": "", "root_path": "/Users/renee/Documents/dbt/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}, "defer_relation": null}]}, "parent_map": {"seed.zendesk_integration_tests.organization_tag_data": [], "seed.zendesk_integration_tests.ticket_comment_data": [], "seed.zendesk_integration_tests.brand_data": [], "seed.zendesk_integration_tests.schedule_holiday_data": [], "seed.zendesk_integration_tests.domain_name_data": [], "seed.zendesk_integration_tests.ticket_field_history_data": [], "seed.zendesk_integration_tests.ticket_data": [], "seed.zendesk_integration_tests.time_zone_data": [], "seed.zendesk_integration_tests.ticket_schedule_data": [], "seed.zendesk_integration_tests.daylight_time_data": [], "seed.zendesk_integration_tests.user_data": [], "seed.zendesk_integration_tests.schedule_data": [], "seed.zendesk_integration_tests.ticket_tag_data": [], "seed.zendesk_integration_tests.organization_data": [], "seed.zendesk_integration_tests.ticket_form_history_data": [], "seed.zendesk_integration_tests.group_data": [], "seed.zendesk_integration_tests.user_tag_data": [], "model.zendesk.zendesk__ticket_enriched": ["model.zendesk.int_zendesk__assignee_updates", "model.zendesk.int_zendesk__latest_ticket_form", "model.zendesk.int_zendesk__organization_aggregates", "model.zendesk.int_zendesk__requester_updates", "model.zendesk.int_zendesk__ticket_aggregates", "model.zendesk.int_zendesk__ticket_historical_satisfaction", "model.zendesk.int_zendesk__user_aggregates", "model.zendesk_source.stg_zendesk__group"], "model.zendesk.zendesk__ticket_metrics": ["model.zendesk.int_zendesk__comment_metrics", "model.zendesk.int_zendesk__ticket_first_reply_time_business", "model.zendesk.int_zendesk__ticket_first_resolution_time_business", "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "model.zendesk.int_zendesk__ticket_reply_times_calendar", "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_work_time_business", "model.zendesk.int_zendesk__ticket_work_time_calendar", "model.zendesk.zendesk__ticket_enriched"], "model.zendesk.zendesk__ticket_summary": ["model.zendesk.zendesk__ticket_metrics", "model.zendesk_source.stg_zendesk__user"], 
"model.zendesk.zendesk__ticket_field_history": ["model.zendesk.int_zendesk__field_calendar_spine", "model.zendesk.int_zendesk__field_history_scd"], "model.zendesk.zendesk__sla_policies": ["model.zendesk.int_zendesk__agent_work_time_business_hours", "model.zendesk.int_zendesk__agent_work_time_calendar_hours", "model.zendesk.int_zendesk__reply_time_combined", "model.zendesk.int_zendesk__requester_wait_time_business_hours", "model.zendesk.int_zendesk__requester_wait_time_calendar_hours"], "model.zendesk.zendesk__ticket_backlog": ["model.zendesk.zendesk__ticket_field_history", "model.zendesk_source.stg_zendesk__brand", "model.zendesk_source.stg_zendesk__group", "model.zendesk_source.stg_zendesk__organization", "model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__user"], "model.zendesk.int_zendesk__sla_policy_applied": ["model.zendesk.int_zendesk__ticket_aggregates", "model.zendesk.int_zendesk__updates"], "model.zendesk.int_zendesk__agent_work_time_business_hours": ["model.zendesk.int_zendesk__agent_work_time_filtered_statuses", "model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk.int_zendesk__agent_work_time_calendar_hours": ["model.zendesk.int_zendesk__agent_work_time_filtered_statuses"], "model.zendesk.int_zendesk__agent_work_time_filtered_statuses": ["model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__ticket_historical_status"], "model.zendesk.int_zendesk__reply_time_business_hours": ["model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__updates", "model.zendesk.int_zendesk__user_aggregates", "model.zendesk_source.stg_zendesk__schedule"], "model.zendesk.int_zendesk__reply_time_calendar_hours": ["model.zendesk.int_zendesk__sla_policy_applied"], "model.zendesk.int_zendesk__reply_time_combined": ["model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__reply_time_calendar_hours", "model.zendesk.int_zendesk__updates", "model.zendesk.int_zendesk__user_aggregates"], "model.zendesk.int_zendesk__requester_wait_time_calendar_hours": ["model.zendesk.int_zendesk__requester_wait_time_filtered_statuses"], "model.zendesk.int_zendesk__requester_wait_time_business_hours": ["model.zendesk.int_zendesk__requester_wait_time_filtered_statuses", "model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses": ["model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__ticket_historical_status"], "model.zendesk.int_zendesk__ticket_reply_times": ["model.zendesk.int_zendesk__comments_enriched"], "model.zendesk.int_zendesk__ticket_reply_times_calendar": ["model.zendesk.int_zendesk__ticket_reply_times", "model.zendesk_source.stg_zendesk__ticket"], "model.zendesk.int_zendesk__comments_enriched": ["model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__user"], "model.zendesk.int_zendesk__ticket_first_reply_time_business": ["model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_reply_times", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk.int_zendesk__field_history_enriched": ["model.zendesk.int_zendesk__updater_information", "model.zendesk_source.stg_zendesk__ticket_field_history"], "model.zendesk.int_zendesk__field_history_pivot": ["model.zendesk.int_zendesk__field_history_enriched", 
"source.zendesk_source.zendesk.ticket_field_history"], "model.zendesk.int_zendesk__updater_information": ["model.zendesk.int_zendesk__organization_aggregates", "model.zendesk.int_zendesk__user_aggregates"], "model.zendesk.int_zendesk__field_history_scd": ["model.zendesk.int_zendesk__field_history_pivot"], "model.zendesk.int_zendesk__field_calendar_spine": ["model.zendesk.int_zendesk__calendar_spine", "model.zendesk_source.stg_zendesk__ticket"], "model.zendesk.int_zendesk__ticket_work_time_calendar": ["model.zendesk.int_zendesk__ticket_historical_status"], "model.zendesk.int_zendesk__ticket_work_time_business": ["model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_historical_status", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk.int_zendesk__calendar_spine": ["source.zendesk_source.zendesk.ticket"], "model.zendesk.int_zendesk__ticket_resolution_times_calendar": ["model.zendesk.int_zendesk__ticket_historical_assignee", "model.zendesk.int_zendesk__ticket_historical_group", "model.zendesk.int_zendesk__ticket_historical_status", "model.zendesk_source.stg_zendesk__ticket"], "model.zendesk.int_zendesk__ticket_first_resolution_time_business": ["model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk.int_zendesk__ticket_full_resolution_time_business": ["model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk.int_zendesk__updates": ["model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__ticket_comment", "model.zendesk_source.stg_zendesk__ticket_field_history"], "model.zendesk.int_zendesk__ticket_historical_assignee": ["model.zendesk.int_zendesk__updates"], "model.zendesk.int_zendesk__ticket_historical_status": ["model.zendesk.int_zendesk__updates"], "model.zendesk.int_zendesk__user_aggregates": ["model.zendesk_source.stg_zendesk__user", "model.zendesk_source.stg_zendesk__user_tag"], "model.zendesk.int_zendesk__schedule_spine": ["model.zendesk.int_zendesk__calendar_spine", "model.zendesk_source.stg_zendesk__daylight_time", "model.zendesk_source.stg_zendesk__schedule", "model.zendesk_source.stg_zendesk__schedule_holiday", "model.zendesk_source.stg_zendesk__time_zone"], "model.zendesk.int_zendesk__ticket_schedules": ["model.zendesk_source.stg_zendesk__schedule", "model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__ticket_schedule"], "model.zendesk.int_zendesk__assignee_updates": ["model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__ticket"], "model.zendesk.int_zendesk__comment_metrics": ["model.zendesk.int_zendesk__comments_enriched"], "model.zendesk.int_zendesk__ticket_historical_group": ["model.zendesk.int_zendesk__updates"], "model.zendesk.int_zendesk__requester_updates": ["model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__ticket"], "model.zendesk.int_zendesk__ticket_historical_satisfaction": ["model.zendesk.int_zendesk__updates"], "model.zendesk.int_zendesk__latest_ticket_form": ["model.zendesk_source.stg_zendesk__ticket_form_history"], "model.zendesk.int_zendesk__ticket_aggregates": ["model.zendesk_source.stg_zendesk__brand", "model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__ticket_tag"], "model.zendesk.int_zendesk__organization_aggregates": ["model.zendesk_source.stg_zendesk__domain_name", 
"model.zendesk_source.stg_zendesk__organization", "model.zendesk_source.stg_zendesk__organization_tag"], "operation.zendesk.zendesk-on-run-start-0": [], "model.zendesk_source.stg_zendesk__user_tag": ["model.zendesk_source.stg_zendesk__user_tag_tmp"], "model.zendesk_source.stg_zendesk__ticket_tag": ["model.zendesk_source.stg_zendesk__ticket_tag_tmp"], "model.zendesk_source.stg_zendesk__ticket_field_history": ["model.zendesk_source.stg_zendesk__ticket_field_history_tmp"], "model.zendesk_source.stg_zendesk__schedule_holiday": ["model.zendesk_source.stg_zendesk__schedule_holiday_tmp"], "model.zendesk_source.stg_zendesk__daylight_time": ["model.zendesk_source.stg_zendesk__daylight_time_tmp"], "model.zendesk_source.stg_zendesk__organization": ["model.zendesk_source.stg_zendesk__organization_tmp"], "model.zendesk_source.stg_zendesk__time_zone": ["model.zendesk_source.stg_zendesk__time_zone_tmp"], "model.zendesk_source.stg_zendesk__group": ["model.zendesk_source.stg_zendesk__group_tmp"], "model.zendesk_source.stg_zendesk__ticket_comment": ["model.zendesk_source.stg_zendesk__ticket_comment_tmp"], "model.zendesk_source.stg_zendesk__ticket_schedule": ["model.zendesk_source.stg_zendesk__ticket_schedule_tmp"], "model.zendesk_source.stg_zendesk__schedule": ["model.zendesk_source.stg_zendesk__schedule_tmp"], "model.zendesk_source.stg_zendesk__user": ["model.zendesk_source.stg_zendesk__user_tmp"], "model.zendesk_source.stg_zendesk__brand": ["model.zendesk_source.stg_zendesk__brand_tmp"], "model.zendesk_source.stg_zendesk__ticket_form_history": ["model.zendesk_source.stg_zendesk__ticket_form_history_tmp"], "model.zendesk_source.stg_zendesk__domain_name": ["model.zendesk_source.stg_zendesk__domain_name_tmp"], "model.zendesk_source.stg_zendesk__organization_tag": ["model.zendesk_source.stg_zendesk__organization_tag_tmp"], "model.zendesk_source.stg_zendesk__ticket": ["model.zendesk_source.stg_zendesk__ticket_tmp"], "model.zendesk_source.stg_zendesk__daylight_time_tmp": ["source.zendesk_source.zendesk.daylight_time"], "model.zendesk_source.stg_zendesk__user_tmp": ["source.zendesk_source.zendesk.user"], "model.zendesk_source.stg_zendesk__group_tmp": ["source.zendesk_source.zendesk.group"], "model.zendesk_source.stg_zendesk__ticket_tmp": ["source.zendesk_source.zendesk.ticket"], "model.zendesk_source.stg_zendesk__brand_tmp": ["source.zendesk_source.zendesk.brand"], "model.zendesk_source.stg_zendesk__ticket_tag_tmp": ["source.zendesk_source.zendesk.ticket_tag"], "model.zendesk_source.stg_zendesk__schedule_holiday_tmp": ["source.zendesk_source.zendesk.schedule_holiday"], "model.zendesk_source.stg_zendesk__user_tag_tmp": ["source.zendesk_source.zendesk.user_tag"], "model.zendesk_source.stg_zendesk__ticket_field_history_tmp": ["source.zendesk_source.zendesk.ticket_field_history"], "model.zendesk_source.stg_zendesk__ticket_form_history_tmp": ["source.zendesk_source.zendesk.ticket_form_history"], "model.zendesk_source.stg_zendesk__ticket_comment_tmp": ["source.zendesk_source.zendesk.ticket_comment"], "model.zendesk_source.stg_zendesk__organization_tag_tmp": ["source.zendesk_source.zendesk.organization_tag"], "model.zendesk_source.stg_zendesk__schedule_tmp": ["source.zendesk_source.zendesk.schedule"], "model.zendesk_source.stg_zendesk__organization_tmp": ["source.zendesk_source.zendesk.organization"], "model.zendesk_source.stg_zendesk__ticket_schedule_tmp": ["source.zendesk_source.zendesk.ticket_schedule"], "model.zendesk_source.stg_zendesk__domain_name_tmp": ["source.zendesk_source.zendesk.domain_name"], 
"model.zendesk_source.stg_zendesk__time_zone_tmp": ["source.zendesk_source.zendesk.time_zone"], "test.zendesk.unique_zendesk__ticket_enriched_ticket_id.7c3c6ca9ef": ["model.zendesk.zendesk__ticket_enriched"], "test.zendesk.not_null_zendesk__ticket_enriched_ticket_id.e3efc5bf0a": ["model.zendesk.zendesk__ticket_enriched"], "test.zendesk.unique_zendesk__sla_policies_sla_event_id.5daff4d2bd": ["model.zendesk.zendesk__sla_policies"], "test.zendesk.unique_zendesk__ticket_metrics_ticket_id.f3dc8eba5c": ["model.zendesk.zendesk__ticket_metrics"], "test.zendesk.not_null_zendesk__ticket_metrics_ticket_id.3466b76bbd": ["model.zendesk.zendesk__ticket_metrics"], "test.zendesk_source.unique_stg_zendesk__ticket_ticket_id.4be7124521": ["model.zendesk_source.stg_zendesk__ticket"], "test.zendesk_source.not_null_stg_zendesk__ticket_ticket_id.a8229e6981": ["model.zendesk_source.stg_zendesk__ticket"], "test.zendesk_source.unique_stg_zendesk__brand_brand_id.fdf8e23c9e": ["model.zendesk_source.stg_zendesk__brand"], "test.zendesk_source.not_null_stg_zendesk__brand_brand_id.a2419e1741": ["model.zendesk_source.stg_zendesk__brand"], "test.zendesk_source.not_null_stg_zendesk__domain_name_organization_id.a2b5ff8fd3": ["model.zendesk_source.stg_zendesk__domain_name"], "test.zendesk_source.unique_stg_zendesk__group_group_id.f0658dabcd": ["model.zendesk_source.stg_zendesk__group"], "test.zendesk_source.not_null_stg_zendesk__group_group_id.7659ed83ec": ["model.zendesk_source.stg_zendesk__group"], "test.zendesk_source.unique_stg_zendesk__organization_organization_id.152be1ab31": ["model.zendesk_source.stg_zendesk__organization"], "test.zendesk_source.not_null_stg_zendesk__organization_organization_id.de7b98c06a": ["model.zendesk_source.stg_zendesk__organization"], "test.zendesk_source.unique_stg_zendesk__ticket_comment_ticket_comment_id.ba353330cd": ["model.zendesk_source.stg_zendesk__ticket_comment"], "test.zendesk_source.not_null_stg_zendesk__ticket_comment_ticket_comment_id.b821f4a606": ["model.zendesk_source.stg_zendesk__ticket_comment"], "test.zendesk_source.unique_stg_zendesk__user_user_id.3d3e346b11": ["model.zendesk_source.stg_zendesk__user"], "test.zendesk_source.not_null_stg_zendesk__user_user_id.102d572926": ["model.zendesk_source.stg_zendesk__user"], "test.zendesk_source.not_null_stg_zendesk__ticket_form_history_ticket_form_id.1afe781a17": ["model.zendesk_source.stg_zendesk__ticket_form_history"], "test.zendesk_source.dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year.88227aef3d": ["model.zendesk_source.stg_zendesk__daylight_time"], "test.zendesk_source.unique_stg_zendesk__time_zone_time_zone.67995adbaf": ["model.zendesk_source.stg_zendesk__time_zone"], "test.zendesk_source.not_null_stg_zendesk__time_zone_time_zone.b25b3452b1": ["model.zendesk_source.stg_zendesk__time_zone"], "test.zendesk_source.unique_stg_zendesk__schedule_holiday_holiday_id.0341d5635a": ["model.zendesk_source.stg_zendesk__schedule_holiday"], "test.zendesk_source.not_null_stg_zendesk__schedule_holiday_holiday_id.52eb08f782": ["model.zendesk_source.stg_zendesk__schedule_holiday"], "source.zendesk_source.zendesk.ticket": [], "source.zendesk_source.zendesk.brand": [], "source.zendesk_source.zendesk.domain_name": [], "source.zendesk_source.zendesk.group": [], "source.zendesk_source.zendesk.organization_tag": [], "source.zendesk_source.zendesk.organization": [], "source.zendesk_source.zendesk.ticket_comment": [], "source.zendesk_source.zendesk.user_tag": [], "source.zendesk_source.zendesk.user": [], 
"source.zendesk_source.zendesk.schedule": [], "source.zendesk_source.zendesk.ticket_schedule": [], "source.zendesk_source.zendesk.ticket_form_history": [], "source.zendesk_source.zendesk.ticket_tag": [], "source.zendesk_source.zendesk.ticket_field_history": [], "source.zendesk_source.zendesk.daylight_time": [], "source.zendesk_source.zendesk.time_zone": [], "source.zendesk_source.zendesk.schedule_holiday": []}, "child_map": {"seed.zendesk_integration_tests.organization_tag_data": [], "seed.zendesk_integration_tests.ticket_comment_data": [], "seed.zendesk_integration_tests.brand_data": [], "seed.zendesk_integration_tests.schedule_holiday_data": [], "seed.zendesk_integration_tests.domain_name_data": [], "seed.zendesk_integration_tests.ticket_field_history_data": [], "seed.zendesk_integration_tests.ticket_data": [], "seed.zendesk_integration_tests.time_zone_data": [], "seed.zendesk_integration_tests.ticket_schedule_data": [], "seed.zendesk_integration_tests.daylight_time_data": [], "seed.zendesk_integration_tests.user_data": [], "seed.zendesk_integration_tests.schedule_data": [], "seed.zendesk_integration_tests.ticket_tag_data": [], "seed.zendesk_integration_tests.organization_data": [], "seed.zendesk_integration_tests.ticket_form_history_data": [], "seed.zendesk_integration_tests.group_data": [], "seed.zendesk_integration_tests.user_tag_data": [], "model.zendesk.zendesk__ticket_enriched": ["model.zendesk.zendesk__ticket_metrics", "test.zendesk.not_null_zendesk__ticket_enriched_ticket_id.e3efc5bf0a", "test.zendesk.unique_zendesk__ticket_enriched_ticket_id.7c3c6ca9ef"], "model.zendesk.zendesk__ticket_metrics": ["model.zendesk.zendesk__ticket_summary", "test.zendesk.not_null_zendesk__ticket_metrics_ticket_id.3466b76bbd", "test.zendesk.unique_zendesk__ticket_metrics_ticket_id.f3dc8eba5c"], "model.zendesk.zendesk__ticket_summary": [], "model.zendesk.zendesk__ticket_field_history": ["model.zendesk.zendesk__ticket_backlog"], "model.zendesk.zendesk__sla_policies": ["test.zendesk.unique_zendesk__sla_policies_sla_event_id.5daff4d2bd"], "model.zendesk.zendesk__ticket_backlog": [], "model.zendesk.int_zendesk__sla_policy_applied": ["model.zendesk.int_zendesk__agent_work_time_filtered_statuses", "model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__reply_time_calendar_hours", "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses"], "model.zendesk.int_zendesk__agent_work_time_business_hours": ["model.zendesk.zendesk__sla_policies"], "model.zendesk.int_zendesk__agent_work_time_calendar_hours": ["model.zendesk.zendesk__sla_policies"], "model.zendesk.int_zendesk__agent_work_time_filtered_statuses": ["model.zendesk.int_zendesk__agent_work_time_business_hours", "model.zendesk.int_zendesk__agent_work_time_calendar_hours"], "model.zendesk.int_zendesk__reply_time_business_hours": ["model.zendesk.int_zendesk__reply_time_combined"], "model.zendesk.int_zendesk__reply_time_calendar_hours": ["model.zendesk.int_zendesk__reply_time_combined"], "model.zendesk.int_zendesk__reply_time_combined": ["model.zendesk.zendesk__sla_policies"], "model.zendesk.int_zendesk__requester_wait_time_calendar_hours": ["model.zendesk.zendesk__sla_policies"], "model.zendesk.int_zendesk__requester_wait_time_business_hours": ["model.zendesk.zendesk__sla_policies"], "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses": ["model.zendesk.int_zendesk__requester_wait_time_business_hours", "model.zendesk.int_zendesk__requester_wait_time_calendar_hours"], 
"model.zendesk.int_zendesk__ticket_reply_times": ["model.zendesk.int_zendesk__ticket_first_reply_time_business", "model.zendesk.int_zendesk__ticket_reply_times_calendar"], "model.zendesk.int_zendesk__ticket_reply_times_calendar": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__comments_enriched": ["model.zendesk.int_zendesk__comment_metrics", "model.zendesk.int_zendesk__ticket_reply_times"], "model.zendesk.int_zendesk__ticket_first_reply_time_business": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__field_history_enriched": ["model.zendesk.int_zendesk__field_history_pivot"], "model.zendesk.int_zendesk__field_history_pivot": ["model.zendesk.int_zendesk__field_history_scd"], "model.zendesk.int_zendesk__updater_information": ["model.zendesk.int_zendesk__field_history_enriched"], "model.zendesk.int_zendesk__field_history_scd": ["model.zendesk.zendesk__ticket_field_history"], "model.zendesk.int_zendesk__field_calendar_spine": ["model.zendesk.zendesk__ticket_field_history"], "model.zendesk.int_zendesk__ticket_work_time_calendar": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__ticket_work_time_business": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__calendar_spine": ["model.zendesk.int_zendesk__field_calendar_spine", "model.zendesk.int_zendesk__schedule_spine"], "model.zendesk.int_zendesk__ticket_resolution_times_calendar": ["model.zendesk.int_zendesk__ticket_first_resolution_time_business", "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__ticket_first_resolution_time_business": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__ticket_full_resolution_time_business": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__updates": ["model.zendesk.int_zendesk__assignee_updates", "model.zendesk.int_zendesk__comments_enriched", "model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__reply_time_combined", "model.zendesk.int_zendesk__requester_updates", "model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__ticket_historical_assignee", "model.zendesk.int_zendesk__ticket_historical_group", "model.zendesk.int_zendesk__ticket_historical_satisfaction", "model.zendesk.int_zendesk__ticket_historical_status"], "model.zendesk.int_zendesk__ticket_historical_assignee": ["model.zendesk.int_zendesk__ticket_resolution_times_calendar"], "model.zendesk.int_zendesk__ticket_historical_status": ["model.zendesk.int_zendesk__agent_work_time_filtered_statuses", "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses", "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_work_time_business", "model.zendesk.int_zendesk__ticket_work_time_calendar"], "model.zendesk.int_zendesk__user_aggregates": ["model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__reply_time_combined", "model.zendesk.int_zendesk__updater_information", "model.zendesk.zendesk__ticket_enriched"], "model.zendesk.int_zendesk__schedule_spine": ["model.zendesk.int_zendesk__agent_work_time_business_hours", "model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__requester_wait_time_business_hours", "model.zendesk.int_zendesk__ticket_first_reply_time_business", "model.zendesk.int_zendesk__ticket_first_resolution_time_business", 
"model.zendesk.int_zendesk__ticket_full_resolution_time_business", "model.zendesk.int_zendesk__ticket_work_time_business"], "model.zendesk.int_zendesk__ticket_schedules": ["model.zendesk.int_zendesk__agent_work_time_business_hours", "model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__requester_wait_time_business_hours", "model.zendesk.int_zendesk__ticket_first_reply_time_business", "model.zendesk.int_zendesk__ticket_first_resolution_time_business", "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "model.zendesk.int_zendesk__ticket_work_time_business"], "model.zendesk.int_zendesk__assignee_updates": ["model.zendesk.zendesk__ticket_enriched"], "model.zendesk.int_zendesk__comment_metrics": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__ticket_historical_group": ["model.zendesk.int_zendesk__ticket_resolution_times_calendar"], "model.zendesk.int_zendesk__requester_updates": ["model.zendesk.zendesk__ticket_enriched"], "model.zendesk.int_zendesk__ticket_historical_satisfaction": ["model.zendesk.zendesk__ticket_enriched"], "model.zendesk.int_zendesk__latest_ticket_form": ["model.zendesk.zendesk__ticket_enriched"], "model.zendesk.int_zendesk__ticket_aggregates": ["model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.zendesk__ticket_enriched"], "model.zendesk.int_zendesk__organization_aggregates": ["model.zendesk.int_zendesk__updater_information", "model.zendesk.zendesk__ticket_enriched"], "operation.zendesk.zendesk-on-run-start-0": [], "model.zendesk_source.stg_zendesk__user_tag": ["model.zendesk.int_zendesk__user_aggregates"], "model.zendesk_source.stg_zendesk__ticket_tag": ["model.zendesk.int_zendesk__ticket_aggregates"], "model.zendesk_source.stg_zendesk__ticket_field_history": ["model.zendesk.int_zendesk__field_history_enriched", "model.zendesk.int_zendesk__updates"], "model.zendesk_source.stg_zendesk__schedule_holiday": ["model.zendesk.int_zendesk__schedule_spine", "test.zendesk_source.not_null_stg_zendesk__schedule_holiday_holiday_id.52eb08f782", "test.zendesk_source.unique_stg_zendesk__schedule_holiday_holiday_id.0341d5635a"], "model.zendesk_source.stg_zendesk__daylight_time": ["model.zendesk.int_zendesk__schedule_spine", "test.zendesk_source.dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year.88227aef3d"], "model.zendesk_source.stg_zendesk__organization": ["model.zendesk.int_zendesk__organization_aggregates", "model.zendesk.zendesk__ticket_backlog", "test.zendesk_source.not_null_stg_zendesk__organization_organization_id.de7b98c06a", "test.zendesk_source.unique_stg_zendesk__organization_organization_id.152be1ab31"], "model.zendesk_source.stg_zendesk__time_zone": ["model.zendesk.int_zendesk__schedule_spine", "test.zendesk_source.not_null_stg_zendesk__time_zone_time_zone.b25b3452b1", "test.zendesk_source.unique_stg_zendesk__time_zone_time_zone.67995adbaf"], "model.zendesk_source.stg_zendesk__group": ["model.zendesk.zendesk__ticket_backlog", "model.zendesk.zendesk__ticket_enriched", "test.zendesk_source.not_null_stg_zendesk__group_group_id.7659ed83ec", "test.zendesk_source.unique_stg_zendesk__group_group_id.f0658dabcd"], "model.zendesk_source.stg_zendesk__ticket_comment": ["model.zendesk.int_zendesk__updates", "test.zendesk_source.not_null_stg_zendesk__ticket_comment_ticket_comment_id.b821f4a606", "test.zendesk_source.unique_stg_zendesk__ticket_comment_ticket_comment_id.ba353330cd"], "model.zendesk_source.stg_zendesk__ticket_schedule": 
["model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk_source.stg_zendesk__schedule": ["model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk_source.stg_zendesk__user": ["model.zendesk.int_zendesk__comments_enriched", "model.zendesk.int_zendesk__user_aggregates", "model.zendesk.zendesk__ticket_backlog", "model.zendesk.zendesk__ticket_summary", "test.zendesk_source.not_null_stg_zendesk__user_user_id.102d572926", "test.zendesk_source.unique_stg_zendesk__user_user_id.3d3e346b11"], "model.zendesk_source.stg_zendesk__brand": ["model.zendesk.int_zendesk__ticket_aggregates", "model.zendesk.zendesk__ticket_backlog", "test.zendesk_source.not_null_stg_zendesk__brand_brand_id.a2419e1741", "test.zendesk_source.unique_stg_zendesk__brand_brand_id.fdf8e23c9e"], "model.zendesk_source.stg_zendesk__ticket_form_history": ["model.zendesk.int_zendesk__latest_ticket_form", "test.zendesk_source.not_null_stg_zendesk__ticket_form_history_ticket_form_id.1afe781a17"], "model.zendesk_source.stg_zendesk__domain_name": ["model.zendesk.int_zendesk__organization_aggregates", "test.zendesk_source.not_null_stg_zendesk__domain_name_organization_id.a2b5ff8fd3"], "model.zendesk_source.stg_zendesk__organization_tag": ["model.zendesk.int_zendesk__organization_aggregates"], "model.zendesk_source.stg_zendesk__ticket": ["model.zendesk.int_zendesk__assignee_updates", "model.zendesk.int_zendesk__field_calendar_spine", "model.zendesk.int_zendesk__requester_updates", "model.zendesk.int_zendesk__ticket_aggregates", "model.zendesk.int_zendesk__ticket_reply_times_calendar", "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__updates", "model.zendesk.zendesk__ticket_backlog", "test.zendesk_source.not_null_stg_zendesk__ticket_ticket_id.a8229e6981", "test.zendesk_source.unique_stg_zendesk__ticket_ticket_id.4be7124521"], "model.zendesk_source.stg_zendesk__daylight_time_tmp": ["model.zendesk_source.stg_zendesk__daylight_time"], "model.zendesk_source.stg_zendesk__user_tmp": ["model.zendesk_source.stg_zendesk__user"], "model.zendesk_source.stg_zendesk__group_tmp": ["model.zendesk_source.stg_zendesk__group"], "model.zendesk_source.stg_zendesk__ticket_tmp": ["model.zendesk_source.stg_zendesk__ticket"], "model.zendesk_source.stg_zendesk__brand_tmp": ["model.zendesk_source.stg_zendesk__brand"], "model.zendesk_source.stg_zendesk__ticket_tag_tmp": ["model.zendesk_source.stg_zendesk__ticket_tag"], "model.zendesk_source.stg_zendesk__schedule_holiday_tmp": ["model.zendesk_source.stg_zendesk__schedule_holiday"], "model.zendesk_source.stg_zendesk__user_tag_tmp": ["model.zendesk_source.stg_zendesk__user_tag"], "model.zendesk_source.stg_zendesk__ticket_field_history_tmp": ["model.zendesk_source.stg_zendesk__ticket_field_history"], "model.zendesk_source.stg_zendesk__ticket_form_history_tmp": ["model.zendesk_source.stg_zendesk__ticket_form_history"], "model.zendesk_source.stg_zendesk__ticket_comment_tmp": ["model.zendesk_source.stg_zendesk__ticket_comment"], "model.zendesk_source.stg_zendesk__organization_tag_tmp": ["model.zendesk_source.stg_zendesk__organization_tag"], "model.zendesk_source.stg_zendesk__schedule_tmp": ["model.zendesk_source.stg_zendesk__schedule"], "model.zendesk_source.stg_zendesk__organization_tmp": ["model.zendesk_source.stg_zendesk__organization"], "model.zendesk_source.stg_zendesk__ticket_schedule_tmp": 
["model.zendesk_source.stg_zendesk__ticket_schedule"], "model.zendesk_source.stg_zendesk__domain_name_tmp": ["model.zendesk_source.stg_zendesk__domain_name"], "model.zendesk_source.stg_zendesk__time_zone_tmp": ["model.zendesk_source.stg_zendesk__time_zone"], "test.zendesk.unique_zendesk__ticket_enriched_ticket_id.7c3c6ca9ef": [], "test.zendesk.not_null_zendesk__ticket_enriched_ticket_id.e3efc5bf0a": [], "test.zendesk.unique_zendesk__sla_policies_sla_event_id.5daff4d2bd": [], "test.zendesk.unique_zendesk__ticket_metrics_ticket_id.f3dc8eba5c": [], "test.zendesk.not_null_zendesk__ticket_metrics_ticket_id.3466b76bbd": [], "test.zendesk_source.unique_stg_zendesk__ticket_ticket_id.4be7124521": [], "test.zendesk_source.not_null_stg_zendesk__ticket_ticket_id.a8229e6981": [], "test.zendesk_source.unique_stg_zendesk__brand_brand_id.fdf8e23c9e": [], "test.zendesk_source.not_null_stg_zendesk__brand_brand_id.a2419e1741": [], "test.zendesk_source.not_null_stg_zendesk__domain_name_organization_id.a2b5ff8fd3": [], "test.zendesk_source.unique_stg_zendesk__group_group_id.f0658dabcd": [], "test.zendesk_source.not_null_stg_zendesk__group_group_id.7659ed83ec": [], "test.zendesk_source.unique_stg_zendesk__organization_organization_id.152be1ab31": [], "test.zendesk_source.not_null_stg_zendesk__organization_organization_id.de7b98c06a": [], "test.zendesk_source.unique_stg_zendesk__ticket_comment_ticket_comment_id.ba353330cd": [], "test.zendesk_source.not_null_stg_zendesk__ticket_comment_ticket_comment_id.b821f4a606": [], "test.zendesk_source.unique_stg_zendesk__user_user_id.3d3e346b11": [], "test.zendesk_source.not_null_stg_zendesk__user_user_id.102d572926": [], "test.zendesk_source.not_null_stg_zendesk__ticket_form_history_ticket_form_id.1afe781a17": [], "test.zendesk_source.dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year.88227aef3d": [], "test.zendesk_source.unique_stg_zendesk__time_zone_time_zone.67995adbaf": [], "test.zendesk_source.not_null_stg_zendesk__time_zone_time_zone.b25b3452b1": [], "test.zendesk_source.unique_stg_zendesk__schedule_holiday_holiday_id.0341d5635a": [], "test.zendesk_source.not_null_stg_zendesk__schedule_holiday_holiday_id.52eb08f782": [], "source.zendesk_source.zendesk.ticket": ["model.zendesk.int_zendesk__calendar_spine", "model.zendesk_source.stg_zendesk__ticket_tmp"], "source.zendesk_source.zendesk.brand": ["model.zendesk_source.stg_zendesk__brand_tmp"], "source.zendesk_source.zendesk.domain_name": ["model.zendesk_source.stg_zendesk__domain_name_tmp"], "source.zendesk_source.zendesk.group": ["model.zendesk_source.stg_zendesk__group_tmp"], "source.zendesk_source.zendesk.organization_tag": ["model.zendesk_source.stg_zendesk__organization_tag_tmp"], "source.zendesk_source.zendesk.organization": ["model.zendesk_source.stg_zendesk__organization_tmp"], "source.zendesk_source.zendesk.ticket_comment": ["model.zendesk_source.stg_zendesk__ticket_comment_tmp"], "source.zendesk_source.zendesk.user_tag": ["model.zendesk_source.stg_zendesk__user_tag_tmp"], "source.zendesk_source.zendesk.user": ["model.zendesk_source.stg_zendesk__user_tmp"], "source.zendesk_source.zendesk.schedule": ["model.zendesk_source.stg_zendesk__schedule_tmp"], "source.zendesk_source.zendesk.ticket_schedule": ["model.zendesk_source.stg_zendesk__ticket_schedule_tmp"], "source.zendesk_source.zendesk.ticket_form_history": ["model.zendesk_source.stg_zendesk__ticket_form_history_tmp"], "source.zendesk_source.zendesk.ticket_tag": ["model.zendesk_source.stg_zendesk__ticket_tag_tmp"], 
"source.zendesk_source.zendesk.ticket_field_history": ["model.zendesk.int_zendesk__field_history_pivot", "model.zendesk_source.stg_zendesk__ticket_field_history_tmp"], "source.zendesk_source.zendesk.daylight_time": ["model.zendesk_source.stg_zendesk__daylight_time_tmp"], "source.zendesk_source.zendesk.time_zone": ["model.zendesk_source.stg_zendesk__time_zone_tmp"], "source.zendesk_source.zendesk.schedule_holiday": ["model.zendesk_source.stg_zendesk__schedule_holiday_tmp"]}, "group_map": {}, "saved_queries": {}, "semantic_models": {}} \ No newline at end of file +{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/manifest/v11.json", "dbt_version": "1.7.11", "generated_at": "2024-08-26T20:48:46.656587Z", "invocation_id": "57bbaa30-28cc-4e7b-b76f-fe2920291c4e", "env": {}, "project_name": "zendesk_integration_tests", "project_id": "b8a12ac1bacdf035438fc7646299ce11", "user_id": "8929baf0-9bc1-477e-9a57-eb8b0db4da62", "send_anonymous_usage_stats": true, "adapter_type": "postgres"}, "nodes": {"seed.zendesk_integration_tests.organization_tag_data": {"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "organization_tag_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "organization_tag_data.csv", "original_file_path": "seeds/organization_tag_data.csv", "unique_id": "seed.zendesk_integration_tests.organization_tag_data", "fqn": ["zendesk_integration_tests", "organization_tag_data"], "alias": "organization_tag_data", "checksum": {"name": "sha256", "checksum": "adebcb3827e908ab449435adc556aadf587cfad4103cab2c840d3d9fddc16e20"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "organization_id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"organization_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "_fivetran_synced": "timestamp"}, "enabled": "{{ true if target.type != 'snowflake' else false }}"}, "created_at": 1724705297.3664188, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"organization_tag_data\"", "raw_code": "", "root_path": "/Users/joseph.markiewicz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.ticket_comment_data": {"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "ticket_comment_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "ticket_comment_data.csv", "original_file_path": "seeds/ticket_comment_data.csv", "unique_id": "seed.zendesk_integration_tests.ticket_comment_data", "fqn": ["zendesk_integration_tests", "ticket_comment_data"], "alias": "ticket_comment_data", "checksum": {"name": "sha256", "checksum": "033e18229b848b4809699f04f39605771faf437e583a1aefe1af5625f0ac7de5"}, "config": 
{"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "user_id": "bigint", "created": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "user_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created": "timestamp"}}, "created_at": 1724705297.367607, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"ticket_comment_data\"", "raw_code": "", "root_path": "/Users/joseph.markiewicz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.schedule_holiday_data": {"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "schedule_holiday_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "schedule_holiday_data.csv", "original_file_path": "seeds/schedule_holiday_data.csv", "unique_id": "seed.zendesk_integration_tests.schedule_holiday_data", "fqn": ["zendesk_integration_tests", "schedule_holiday_data"], "alias": "schedule_holiday_data", "checksum": {"name": "sha256", "checksum": "f907dea5e2dc21649bf4eae0392add96a884f19f900dc0f2d568141038ba5d28"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "schedule_id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "schedule_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}"}}, "created_at": 1724705297.370084, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"schedule_holiday_data\"", "raw_code": "", "root_path": "/Users/joseph.markiewicz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.domain_name_data": {"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "domain_name_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "domain_name_data.csv", "original_file_path": "seeds/domain_name_data.csv", 
"unique_id": "seed.zendesk_integration_tests.domain_name_data", "fqn": ["zendesk_integration_tests", "domain_name_data"], "alias": "domain_name_data", "checksum": {"name": "sha256", "checksum": "3bf711417f9269957353aa9e1ddd28ada8bd74e03128a4b8c94e694a560a09cf"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "organization_id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"organization_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "_fivetran_synced": "timestamp"}}, "created_at": 1724705297.3728058, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"domain_name_data\"", "raw_code": "", "root_path": "/Users/joseph.markiewicz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.ticket_field_history_data": {"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "ticket_field_history_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "ticket_field_history_data.csv", "original_file_path": "seeds/ticket_field_history_data.csv", "unique_id": "seed.zendesk_integration_tests.ticket_field_history_data", "fqn": ["zendesk_integration_tests", "ticket_field_history_data"], "alias": "ticket_field_history_data", "checksum": {"name": "sha256", "checksum": "47c9244103b9a8dc25c5ce75693b8389df92258dde23dae71a09f021cf1b7ab7"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "ticket_id": "bigint", "user_id": "bigint", "updated": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"ticket_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "user_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "updated": "timestamp"}}, "created_at": 1724705297.3751621, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"ticket_field_history_data\"", "raw_code": "", "root_path": "/Users/joseph.markiewicz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, 
"seed.zendesk_integration_tests.ticket_data": {"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "ticket_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "ticket_data.csv", "original_file_path": "seeds/ticket_data.csv", "unique_id": "seed.zendesk_integration_tests.ticket_data", "fqn": ["zendesk_integration_tests", "ticket_data"], "alias": "ticket_data", "checksum": {"name": "sha256", "checksum": "effe2837ec0ff3ec59fddc7fce0a5f4a6ff0a69daef5ae904244dcbf34425dae"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "created_at": "timestamp", "due_at": "timestamp", "updated_at": "timestamp", "assignee_id": "bigint", "brand_id": "bigint", "external_id": "bigint", "forum_topic_id": "bigint", "group_id": "bigint", "organization_id": "bigint", "problem_id": "bigint", "requester_id": "bigint", "submitter_id": "bigint", "ticket_form_id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp", "due_at": "timestamp", "updated_at": "timestamp", "assignee_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "brand_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "external_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "forum_topic_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "group_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "organization_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "problem_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "requester_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "submitter_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "ticket_form_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}"}}, "created_at": 1724705297.376292, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"ticket_data\"", "raw_code": "", "root_path": "/Users/joseph.markiewicz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.brand_data_postgres": {"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "brand_data_postgres", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "brand_data_postgres.csv", "original_file_path": "seeds/brand_data_postgres.csv", "unique_id": "seed.zendesk_integration_tests.brand_data_postgres", "fqn": ["zendesk_integration_tests", "brand_data_postgres"], "alias": "brand_data", "checksum": {"name": "sha256", "checksum": "aa338ab31e4a221da8a0ed5040ec921a4d39a7377ae37a7e79b49e1402e490f5"}, "config": {"enabled": true, "alias": "brand_data", "schema": null, "database": null, 
"tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "_fivetran_synced": "timestamp"}, "alias": "brand_data", "enabled": "{{ true if target.type == 'postgres' else false }}"}, "created_at": 1724705297.377401, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"brand_data\"", "raw_code": "", "root_path": "/Users/joseph.markiewicz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.time_zone_data": {"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "time_zone_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "time_zone_data.csv", "original_file_path": "seeds/time_zone_data.csv", "unique_id": "seed.zendesk_integration_tests.time_zone_data", "fqn": ["zendesk_integration_tests", "time_zone_data"], "alias": "time_zone_data", "checksum": {"name": "sha256", "checksum": "b02df4f14e54c7deb0b15c40b35196968de4374ceb1cc5ad95986620a506adb2"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"_fivetran_synced": "timestamp"}}, "created_at": 1724705297.3784509, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"time_zone_data\"", "raw_code": "", "root_path": "/Users/joseph.markiewicz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.ticket_schedule_data": {"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "ticket_schedule_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "ticket_schedule_data.csv", "original_file_path": "seeds/ticket_schedule_data.csv", "unique_id": "seed.zendesk_integration_tests.ticket_schedule_data", "fqn": ["zendesk_integration_tests", "ticket_schedule_data"], "alias": "ticket_schedule_data", "checksum": {"name": "sha256", "checksum": "dc4892d18f3730242f5319bb24498d77a4c32a666b6b4d5c0eec0d4dafd7224b"}, "config": 
{"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "ticket_id": "bigint", "schedule_id": "bigint", "created_at": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"ticket_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "schedule_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp"}}, "created_at": 1724705297.379661, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"ticket_schedule_data\"", "raw_code": "", "root_path": "/Users/joseph.markiewicz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.daylight_time_data": {"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "daylight_time_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "daylight_time_data.csv", "original_file_path": "seeds/daylight_time_data.csv", "unique_id": "seed.zendesk_integration_tests.daylight_time_data", "fqn": ["zendesk_integration_tests", "daylight_time_data"], "alias": "daylight_time_data", "checksum": {"name": "sha256", "checksum": "17642d90548c6367ab328762a47066a905e3ba2da8831cd86ef37ac659a38fc9"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"_fivetran_synced": "timestamp"}}, "created_at": 1724705297.3806899, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"daylight_time_data\"", "raw_code": "", "root_path": "/Users/joseph.markiewicz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.user_data": {"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "user_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "user_data.csv", "original_file_path": "seeds/user_data.csv", "unique_id": "seed.zendesk_integration_tests.user_data", "fqn": ["zendesk_integration_tests", "user_data"], "alias": "user_data", "checksum": {"name": "sha256", "checksum": 
"9f600c24b84ed0183e88c5aaa4e7e02bd2228115bebc85217f04c97bd5b6dbc9"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "external_id": "bigint", "locale_id": "bigint", "organization_id": "bigint", "created_at": "timestamp", "last_login_at": "timestamp", "updated_at": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "external_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "locale_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "organization_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp", "last_login_at": "timestamp", "updated_at": "timestamp"}, "enabled": "{{ true if target.type != 'snowflake' else false }}"}, "created_at": 1724705297.381877, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"user_data\"", "raw_code": "", "root_path": "/Users/joseph.markiewicz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.schedule_data": {"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "schedule_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "schedule_data.csv", "original_file_path": "seeds/schedule_data.csv", "unique_id": "seed.zendesk_integration_tests.schedule_data", "fqn": ["zendesk_integration_tests", "schedule_data"], "alias": "schedule_data", "checksum": {"name": "sha256", "checksum": "e2596e44df02b53d13b850f9742084141b7b75755baae603c8d3db6b8354107a"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "end_time": "bigint", "start_time": "bigint", "end_time_utc": "bigint", "start_time_utc": "bigint", "created_at": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "end_time": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "start_time": "{{ 'int64' if target.type == 'bigquery' else 'bigint' 
}}", "end_time_utc": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "start_time_utc": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp"}}, "created_at": 1724705297.383397, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"schedule_data\"", "raw_code": "", "root_path": "/Users/joseph.markiewicz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.ticket_tag_data": {"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "ticket_tag_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "ticket_tag_data.csv", "original_file_path": "seeds/ticket_tag_data.csv", "unique_id": "seed.zendesk_integration_tests.ticket_tag_data", "fqn": ["zendesk_integration_tests", "ticket_tag_data"], "alias": "ticket_tag_data", "checksum": {"name": "sha256", "checksum": "020b25c3247e21387702778ce0af4e5a5b8b3aee62daaa05f48c643489b57ea0"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"_fivetran_synced": "timestamp"}}, "created_at": 1724705297.384598, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"ticket_tag_data\"", "raw_code": "", "root_path": "/Users/joseph.markiewicz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.organization_data": {"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "organization_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "organization_data.csv", "original_file_path": "seeds/organization_data.csv", "unique_id": "seed.zendesk_integration_tests.organization_data", "fqn": ["zendesk_integration_tests", "organization_data"], "alias": "organization_data", "checksum": {"name": "sha256", "checksum": "b3e00faed1ea214f73182b110c5f55653a5b43f2bc082dcb87f6c63dea5303c3"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "created_at": "timestamp", "updated_at": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, 
"unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp", "updated_at": "timestamp"}}, "created_at": 1724705297.385947, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"organization_data\"", "raw_code": "", "root_path": "/Users/joseph.markiewicz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.ticket_form_history_data": {"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "ticket_form_history_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "ticket_form_history_data.csv", "original_file_path": "seeds/ticket_form_history_data.csv", "unique_id": "seed.zendesk_integration_tests.ticket_form_history_data", "fqn": ["zendesk_integration_tests", "ticket_form_history_data"], "alias": "ticket_form_history_data", "checksum": {"name": "sha256", "checksum": "a5b4edef05a0baa9acac87db3eea1ac0ba55865809db778ff458e20b7352c665"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "_fivetran_synced": "timestamp"}}, "created_at": 1724705297.3870552, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"ticket_form_history_data\"", "raw_code": "", "root_path": "/Users/joseph.markiewicz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.group_data": {"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "group_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "group_data.csv", "original_file_path": "seeds/group_data.csv", "unique_id": "seed.zendesk_integration_tests.group_data", "fqn": ["zendesk_integration_tests", "group_data"], "alias": "group_data", "checksum": {"name": "sha256", "checksum": "ded51f1b267e9785ca862ca30656faa2485b5814d834ea35de6892702c3dbd1a"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "created_at": "timestamp", "updated_at": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], 
"description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp", "updated_at": "timestamp"}}, "created_at": 1724705297.3882082, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"group_data\"", "raw_code": "", "root_path": "/Users/joseph.markiewicz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "seed.zendesk_integration_tests.user_tag_data": {"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "user_tag_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "user_tag_data.csv", "original_file_path": "seeds/user_tag_data.csv", "unique_id": "seed.zendesk_integration_tests.user_tag_data", "fqn": ["zendesk_integration_tests", "user_tag_data"], "alias": "user_tag_data", "checksum": {"name": "sha256", "checksum": "fde0d85263495e783fd6fb342940a4dcd67c39581d55bfc9b28935d24367a096"}, "config": {"enabled": true, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "user_id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"user_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "_fivetran_synced": "timestamp"}, "enabled": "{{ true if target.type != 'snowflake' else false }}"}, "created_at": 1724705297.3893719, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"user_tag_data\"", "raw_code": "", "root_path": "/Users/joseph.markiewicz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}}, "model.zendesk.zendesk__ticket_enriched": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "zendesk__ticket_enriched", "resource_type": "model", "package_name": "zendesk", "path": "zendesk__ticket_enriched.sql", "original_file_path": "models/zendesk__ticket_enriched.sql", "unique_id": "model.zendesk.zendesk__ticket_enriched", "fqn": ["zendesk", "zendesk__ticket_enriched"], "alias": "zendesk__ticket_enriched", "checksum": {"name": "sha256", "checksum": "8d5ccce79dd53bd307569a9a086b4205cfebbd616bb74b594766e524a281c244"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": 
false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Each record represents a Zendesk ticket, enriched with data about it's tags, assignees, requester, submitter, organization and group.", "columns": {"ticket_id": {"name": "ticket_id", "description": "Automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "url": {"name": "url", "description": "The API url of this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_id": {"name": "assignee_id", "description": "The agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_role": {"name": "assignee_role", "description": "The role of the agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_email": {"name": "assignee_email", "description": "The email of the agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_name": {"name": "assignee_name", "description": "The name of the agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "brand_id": {"name": "brand_id", "description": "Enterprise only. The id of the brand this ticket is associated with", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "When this record was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "type": {"name": "type", "description": "The type of this ticket, possible values are problem, incident, question or task", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "subject": {"name": "subject", "description": "The value of the subject field for this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "description": {"name": "description", "description": "Read-only first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "priority": {"name": "priority", "description": "The urgency with which the ticket should be addressed, possible values are urgent, high, normal and low", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "The state of the ticket, possible values are new, open, pending, hold, solved and closed", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "recipient": {"name": "recipient", "description": "The original recipient e-mail address of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_id": {"name": "requester_id", "description": "The user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_role": {"name": "requester_role", "description": "The role of the user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_email": {"name": "requester_email", "description": "The email of the user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_name": {"name": "requester_name", "description": "The name of the user who requested this ticket", "meta": {}, "data_type": null, 
"constraints": [], "quote": null, "tags": []}, "submitter_id": {"name": "submitter_id", "description": "The user who submitted the ticket. The submitter always becomes the author of the first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_role": {"name": "submitter_role", "description": "The role of the user who submitted the ticket.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_email": {"name": "submitter_email", "description": "The email of the user who submitted the ticket.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_name": {"name": "submitter_name", "description": "The name of the user who submitted the ticket.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_id": {"name": "organization_id", "description": "The organization of the requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_name": {"name": "organization_name", "description": "The name of the organization of the requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "external_id": {"name": "external_id", "description": "The external id of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_id": {"name": "group_id", "description": "The group this ticket is assigned to", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_name": {"name": "group_name", "description": "The name of the group this ticket is assigned to", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "due_at": {"name": "due_at", "description": "If this is a ticket of type \"task\" it has a due date. Due date format uses ISO 8601 format.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_form_id": {"name": "ticket_form_id", "description": "Enterprise only. 
The id of the ticket form to render for the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_public": {"name": "is_public", "description": "Is true if any comments are public, false otherwise", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "When this record was last updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_channel": {"name": "created_channel", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_from_id": {"name": "source_from_id", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_from_title": {"name": "source_from_title", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_rel": {"name": "source_rel", "description": "The rel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_to_address": {"name": "source_to_address", "description": "The address of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_to_name": {"name": "source_to_name", "description": "The name of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_tags": {"name": "ticket_tags", "description": "A list of all tags assigned to this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "problem_id": {"name": "problem_id", "description": "The reference to the problem if the ticket is listed as a problem", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_incident": {"name": "is_incident", "description": "Boolean indicating whether the ticket is listed as an incident", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_brand_name": {"name": "ticket_brand_name", "description": "The brand name associated with the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_form_name": {"name": "ticket_form_name", "description": "The form name of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_rating": {"name": "ticket_satisfaction_rating", "description": "The ticket satisfaction rating", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_comment": {"name": "ticket_satisfaction_comment", "description": "The ticket satisfaction comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_reason": {"name": "ticket_satisfaction_reason", "description": "The ticket satisfaction reason", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_organization_domain_names": {"name": "ticket_organization_domain_names", "description": "The domain names of the organization associated with the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_domain_names": {"name": "requester_organization_domain_names", "description": "The ticket requester's organization domain names", "meta": {}, "data_type": null, "constraints": [],
"quote": null, "tags": []}, "requester_external_id": {"name": "requester_external_id", "description": "The ticket requester external id", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_created_at": {"name": "requester_created_at", "description": "The date the ticket requester was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_updated_at": {"name": "requester_updated_at", "description": "The date the ticket requester was last updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_requester_active": {"name": "is_requester_active", "description": "Boolean indicating whether the requester is currently active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_locale": {"name": "requester_locale", "description": "The locale of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_time_zone": {"name": "requester_time_zone", "description": "The timezone of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_ticket_update_count": {"name": "requester_ticket_update_count", "description": "The number of times the requester has updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_ticket_last_update_at": {"name": "requester_ticket_last_update_at", "description": "The last date the requester updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_last_login_at": {"name": "requester_last_login_at", "description": "The last login of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_id": {"name": "requester_organization_id", "description": "The organization id of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_name": {"name": "requester_organization_name", "description": "The organization name of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_tags": {"name": "requester_organization_tags", "description": "The organization tags of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_external_id": {"name": "requester_organization_external_id", "description": "The organization external id of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_created_at": {"name": "requester_organization_created_at", "description": "The date the ticket requesters organization was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_updated_at": {"name": "requester_organization_updated_at", "description": "The date the ticket requesters organization was last updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_external_id": {"name": "submitter_external_id", "description": "The ticket submitter external id", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_agent_submitted": {"name": "is_agent_submitted", "description": "Boolean indicating if the submitter has an agent role", "meta": {}, "data_type": null, "constraints": [], "quote": 
null, "tags": []}, "is_submitter_active": {"name": "is_submitter_active", "description": "Boolean indicating if the ticket submitter is active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_locale": {"name": "submitter_locale", "description": "The locale of the ticket submitter", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_time_zone": {"name": "submitter_time_zone", "description": "The time zone of the ticket submitter", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_external_id": {"name": "assignee_external_id", "description": "The external id of the ticket assignee", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_assignee_active": {"name": "is_assignee_active", "description": "Boolean indicating if the ticket assignee is active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_locale": {"name": "assignee_locale", "description": "The locale of the ticket assignee", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_time_zone": {"name": "assignee_time_zone", "description": "The time zone of the ticket assignee", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_ticket_update_count": {"name": "assignee_ticket_update_count", "description": "The number of times the ticket assignee has updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_ticket_last_update_at": {"name": "assignee_ticket_last_update_at", "description": "The last date the ticket assignee updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_last_login_at": {"name": "assignee_last_login_at", "description": "The date the ticket assignee last logged in", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_tag": {"name": "requester_tag", "description": "The tags associated with the ticket requester.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_tag": {"name": "submitter_tag", "description": "The tags associated with the ticket submitter.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_tag": {"name": "assignee_tag", "description": "The tags associated with the ticket assignee.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_total_satisfaction_scores": {"name": "ticket_total_satisfaction_scores", "description": "The total number of satisfaction scores the ticket received.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_first_satisfaction_score": {"name": "ticket_first_satisfaction_score", "description": "The first satisfaction score the ticket received.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_score": {"name": "ticket_satisfaction_score", "description": "The latest satisfaction score the ticket received.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_good_to_bad_satisfaction_score": {"name": "is_good_to_bad_satisfaction_score", "description": "Boolean indicating if the ticket had a satisfaction score went from good to bad.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_bad_to_good_satisfaction_score": {"name": 
"is_bad_to_good_satisfaction_score", "description": "Boolean indicating if the ticket had a satisfaction score went from bad to good.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1724705298.1570098, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"zendesk__ticket_enriched\"", "raw_code": "-- this model enriches the ticket table with ticket-related dimensions. This table will not include any metrics.\n-- for metrics, see ticket_metrics!\n\nwith ticket as (\n\n select *\n from {{ ref('int_zendesk__ticket_aggregates') }}\n\n--If you use using_ticket_form_history this will be included, if not it will be ignored.\n{% if var('using_ticket_form_history', True) %}\n), latest_ticket_form as (\n\n select *\n from {{ ref('int_zendesk__latest_ticket_form') }}\n{% endif %}\n\n), latest_satisfaction_ratings as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_satisfaction') }}\n\n), users as (\n\n select *\n from {{ ref('int_zendesk__user_aggregates') }}\n\n), requester_updates as (\n\n select *\n from {{ ref('int_zendesk__requester_updates') }}\n\n), assignee_updates as (\n\n select *\n from {{ ref('int_zendesk__assignee_updates') }}\n\n), ticket_group as (\n \n select *\n from {{ ref('stg_zendesk__group') }}\n\n), organization as (\n\n select *\n from {{ ref('int_zendesk__organization_aggregates') }}\n\n), joined as (\n\n select \n\n ticket.*,\n\n --If you use using_ticket_form_history this will be included, if not it will be ignored.\n {% if var('using_ticket_form_history', True) %}\n latest_ticket_form.name as ticket_form_name,\n {% endif %}\n\n latest_satisfaction_ratings.count_satisfaction_scores as ticket_total_satisfaction_scores,\n latest_satisfaction_ratings.first_satisfaction_score as ticket_first_satisfaction_score,\n latest_satisfaction_ratings.latest_satisfaction_score as ticket_satisfaction_score,\n latest_satisfaction_ratings.latest_satisfaction_comment as ticket_satisfaction_comment,\n latest_satisfaction_ratings.latest_satisfaction_reason as ticket_satisfaction_reason,\n latest_satisfaction_ratings.is_good_to_bad_satisfaction_score,\n latest_satisfaction_ratings.is_bad_to_good_satisfaction_score,\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n {% if var('using_domain_names', True) %}\n organization.domain_names as ticket_organization_domain_names,\n requester_org.domain_names as requester_organization_domain_names,\n {% endif %}\n\n requester.external_id as requester_external_id,\n requester.created_at as requester_created_at,\n requester.updated_at as requester_updated_at,\n requester.role as requester_role,\n requester.email as requester_email,\n requester.name as requester_name,\n requester.is_active as is_requester_active,\n requester.locale as requester_locale,\n requester.time_zone as requester_time_zone,\n coalesce(requester_updates.total_updates, 0) as requester_ticket_update_count,\n requester_updates.last_updated as requester_ticket_last_update_at,\n requester.last_login_at as requester_last_login_at,\n requester.organization_id as requester_organization_id,\n requester_org.name as requester_organization_name,\n\n --If you use organization tags this will be included, if not it will be 
ignored.\n {% if var('using_organization_tags', True) %}\n requester_org.organization_tags as requester_organization_tags,\n {% endif %}\n requester_org.external_id as requester_organization_external_id,\n requester_org.created_at as requester_organization_created_at,\n requester_org.updated_at as requester_organization_updated_at,\n submitter.external_id as submitter_external_id,\n submitter.role as submitter_role,\n case when submitter.role in ('agent','admin') \n then true \n else false\n end as is_agent_submitted,\n submitter.email as submitter_email,\n submitter.name as submitter_name,\n submitter.is_active as is_submitter_active,\n submitter.locale as submitter_locale,\n submitter.time_zone as submitter_time_zone,\n assignee.external_id as assignee_external_id,\n assignee.role as assignee_role,\n assignee.email as assignee_email,\n assignee.name as assignee_name,\n assignee.is_active as is_assignee_active,\n assignee.locale as assignee_locale,\n assignee.time_zone as assignee_time_zone,\n coalesce(assignee_updates.total_updates, 0) as assignee_ticket_update_count,\n assignee_updates.last_updated as assignee_ticket_last_update_at,\n assignee.last_login_at as assignee_last_login_at,\n ticket_group.name as group_name,\n organization.name as organization_name\n\n --If you use using_user_tags this will be included, if not it will be ignored.\n {% if var('using_user_tags', True) %}\n ,requester.user_tags as requester_tag,\n submitter.user_tags as submitter_tag,\n assignee.user_tags as assignee_tag\n {% endif %}\n\n \n from ticket\n\n --Requester Joins\n join users as requester\n on requester.user_id = ticket.requester_id\n\n left join organization as requester_org\n on requester_org.organization_id = requester.organization_id\n\n left join requester_updates\n on requester_updates.ticket_id = ticket.ticket_id\n and requester_updates.requester_id = ticket.requester_id\n \n --Submitter Joins\n join users as submitter\n on submitter.user_id = ticket.submitter_id\n \n --Assignee Joins\n left join users as assignee\n on assignee.user_id = ticket.assignee_id\n\n left join assignee_updates\n on assignee_updates.ticket_id = ticket.ticket_id\n and assignee_updates.assignee_id = ticket.assignee_id\n\n --Ticket, Org, and Brand Joins\n left join ticket_group\n on ticket_group.group_id = ticket.group_id\n\n --If you use using_ticket_form_history this will be included, if not it will be ignored.\n {% if var('using_ticket_form_history', True) %}\n left join latest_ticket_form\n on latest_ticket_form.ticket_form_id = ticket.ticket_form_id\n {% endif %}\n\n left join organization\n on organization.organization_id = ticket.organization_id\n\n left join latest_satisfaction_ratings\n on latest_satisfaction_ratings.ticket_id = ticket.ticket_id\n)\n\nselect *\nfrom joined", "language": "sql", "refs": [{"name": "int_zendesk__ticket_aggregates", "package": null, "version": null}, {"name": "int_zendesk__latest_ticket_form", "package": null, "version": null}, {"name": "int_zendesk__ticket_historical_satisfaction", "package": null, "version": null}, {"name": "int_zendesk__user_aggregates", "package": null, "version": null}, {"name": "int_zendesk__requester_updates", "package": null, "version": null}, {"name": "int_zendesk__assignee_updates", "package": null, "version": null}, {"name": "stg_zendesk__group", "package": null, "version": null}, {"name": "int_zendesk__organization_aggregates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": 
["model.zendesk.int_zendesk__ticket_aggregates", "model.zendesk.int_zendesk__latest_ticket_form", "model.zendesk.int_zendesk__ticket_historical_satisfaction", "model.zendesk.int_zendesk__user_aggregates", "model.zendesk.int_zendesk__requester_updates", "model.zendesk.int_zendesk__assignee_updates", "model.zendesk_source.stg_zendesk__group", "model.zendesk.int_zendesk__organization_aggregates"]}, "compiled_path": "target/compiled/zendesk/models/zendesk__ticket_enriched.sql", "compiled": true, "compiled_code": "-- this model enriches the ticket table with ticket-related dimensions. This table will not include any metrics.\n-- for metrics, see ticket_metrics!\n\nwith ticket as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_aggregates\"\n\n--If you use using_ticket_form_history this will be included, if not it will be ignored.\n\n), latest_ticket_form as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__latest_ticket_form\"\n\n\n), latest_satisfaction_ratings as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_satisfaction\"\n\n), users as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), requester_updates as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__requester_updates\"\n\n), assignee_updates as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__assignee_updates\"\n\n), ticket_group as (\n \n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__group\"\n\n), organization as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__organization_aggregates\"\n\n), joined as (\n\n select \n\n ticket.*,\n\n --If you use using_ticket_form_history this will be included, if not it will be ignored.\n \n latest_ticket_form.name as ticket_form_name,\n \n\n latest_satisfaction_ratings.count_satisfaction_scores as ticket_total_satisfaction_scores,\n latest_satisfaction_ratings.first_satisfaction_score as ticket_first_satisfaction_score,\n latest_satisfaction_ratings.latest_satisfaction_score as ticket_satisfaction_score,\n latest_satisfaction_ratings.latest_satisfaction_comment as ticket_satisfaction_comment,\n latest_satisfaction_ratings.latest_satisfaction_reason as ticket_satisfaction_reason,\n latest_satisfaction_ratings.is_good_to_bad_satisfaction_score,\n latest_satisfaction_ratings.is_bad_to_good_satisfaction_score,\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n organization.domain_names as ticket_organization_domain_names,\n requester_org.domain_names as requester_organization_domain_names,\n \n\n requester.external_id as requester_external_id,\n requester.created_at as requester_created_at,\n requester.updated_at as requester_updated_at,\n requester.role as requester_role,\n requester.email as requester_email,\n requester.name as requester_name,\n requester.is_active as is_requester_active,\n requester.locale as requester_locale,\n requester.time_zone as requester_time_zone,\n coalesce(requester_updates.total_updates, 0) as requester_ticket_update_count,\n requester_updates.last_updated as requester_ticket_last_update_at,\n requester.last_login_at as requester_last_login_at,\n requester.organization_id as requester_organization_id,\n requester_org.name 
as requester_organization_name,\n\n --If you use organization tags this will be included, if not it will be ignored.\n \n requester_org.organization_tags as requester_organization_tags,\n \n requester_org.external_id as requester_organization_external_id,\n requester_org.created_at as requester_organization_created_at,\n requester_org.updated_at as requester_organization_updated_at,\n submitter.external_id as submitter_external_id,\n submitter.role as submitter_role,\n case when submitter.role in ('agent','admin') \n then true \n else false\n end as is_agent_submitted,\n submitter.email as submitter_email,\n submitter.name as submitter_name,\n submitter.is_active as is_submitter_active,\n submitter.locale as submitter_locale,\n submitter.time_zone as submitter_time_zone,\n assignee.external_id as assignee_external_id,\n assignee.role as assignee_role,\n assignee.email as assignee_email,\n assignee.name as assignee_name,\n assignee.is_active as is_assignee_active,\n assignee.locale as assignee_locale,\n assignee.time_zone as assignee_time_zone,\n coalesce(assignee_updates.total_updates, 0) as assignee_ticket_update_count,\n assignee_updates.last_updated as assignee_ticket_last_update_at,\n assignee.last_login_at as assignee_last_login_at,\n ticket_group.name as group_name,\n organization.name as organization_name\n\n --If you use using_user_tags this will be included, if not it will be ignored.\n \n ,requester.user_tags as requester_tag,\n submitter.user_tags as submitter_tag,\n assignee.user_tags as assignee_tag\n \n\n \n from ticket\n\n --Requester Joins\n join users as requester\n on requester.user_id = ticket.requester_id\n\n left join organization as requester_org\n on requester_org.organization_id = requester.organization_id\n\n left join requester_updates\n on requester_updates.ticket_id = ticket.ticket_id\n and requester_updates.requester_id = ticket.requester_id\n \n --Submitter Joins\n join users as submitter\n on submitter.user_id = ticket.submitter_id\n \n --Assignee Joins\n left join users as assignee\n on assignee.user_id = ticket.assignee_id\n\n left join assignee_updates\n on assignee_updates.ticket_id = ticket.ticket_id\n and assignee_updates.assignee_id = ticket.assignee_id\n\n --Ticket, Org, and Brand Joins\n left join ticket_group\n on ticket_group.group_id = ticket.group_id\n\n --If you use using_ticket_form_history this will be included, if not it will be ignored.\n \n left join latest_ticket_form\n on latest_ticket_form.ticket_form_id = ticket.ticket_form_id\n \n\n left join organization\n on organization.organization_id = ticket.organization_id\n\n left join latest_satisfaction_ratings\n on latest_satisfaction_ratings.ticket_id = ticket.ticket_id\n)\n\nselect *\nfrom joined", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.zendesk__ticket_metrics": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "zendesk__ticket_metrics", "resource_type": "model", "package_name": "zendesk", "path": "zendesk__ticket_metrics.sql", "original_file_path": "models/zendesk__ticket_metrics.sql", "unique_id": "model.zendesk.zendesk__ticket_metrics", "fqn": ["zendesk", "zendesk__ticket_metrics"], "alias": "zendesk__ticket_metrics", "checksum": {"name": "sha256", "checksum": "0beb1421df42ed71b84e3cfec7f56029ec6bad71570ab9b4cb4ab712fc753ca6"}, "config": {"enabled": 
true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Each record represents a Zendesk Support ticket, enriched with metrics about reply times, resolution times and work times. Calendar and business hours are supported", "columns": {"first_reply_time_calendar_minutes": {"name": "first_reply_time_calendar_minutes", "description": "The number of calendar minutes between when the ticket was created and when the first public agent response occurred", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_reply_time_business_minutes": {"name": "first_reply_time_business_minutes", "description": "The number of business minutes between when the ticket was created and when the first public agent response occurred", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "total_reply_time_calendar_minutes": {"name": "total_reply_time_calendar_minutes", "description": "The combined calendar time between all end-user comments and the next public agent response", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_solved_at": {"name": "first_solved_at", "description": "The time the ticket was first in 'solved' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_solved_at": {"name": "last_solved_at", "description": "The time the ticket was last in 'solved' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_resolution_calendar_minutes": {"name": "first_resolution_calendar_minutes", "description": "The number of calendar minutes between the ticket created time and the time the ticket was first set to solved status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "final_resolution_calendar_minutes": {"name": "final_resolution_calendar_minutes", "description": "The number of calendar minutes between the ticket created time and the time the ticket was last set to solved status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_one_touch_resolution": {"name": "is_one_touch_resolution", "description": "A boolean field indicating that the ticket has one public agent response and is in solved status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_resolution_business_minutes": {"name": "first_resolution_business_minutes", "description": "The number of business minutes between the ticket created time and the time the ticket was first set to solved status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "full_resolution_business_minutes": {"name": "full_resolution_business_minutes", "description": "The number of business minutes between the ticket created time and the time the ticket was last set to solved status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "agent_wait_time_in_business_minutes": {"name": "agent_wait_time_in_business_minutes", "description": "The combined number of business minutes the ticket was 
in 'pending' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_wait_time_in_business_minutes": {"name": "requester_wait_time_in_business_minutes", "description": "The combined number of business minutes the ticket was in 'new', 'open', or 'hold' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "solve_time_in_business_minutes": {"name": "solve_time_in_business_minutes", "description": "The combined number of business minutes the ticket was in 'new', 'open', 'hold', or 'pending' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "agent_work_time_in_business_minutes": {"name": "agent_work_time_in_business_minutes", "description": "The combined number of business minutes the ticket was in 'new' or 'open' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "on_hold_time_in_business_minutes": {"name": "on_hold_time_in_business_minutes", "description": "The combined number of business minutes the ticket was on 'hold' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "new_status_duration_in_business_minutes": {"name": "new_status_duration_in_business_minutes", "description": "The combined number of business minutes the ticket was in 'new' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "open_status_duration_in_business_minutes": {"name": "open_status_duration_in_business_minutes", "description": "The combined number of business minutes the ticket was in 'open' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "agent_wait_time_in_calendar_minutes": {"name": "agent_wait_time_in_calendar_minutes", "description": "The combined number of calendar minutes the ticket was in 'pending' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_wait_time_in_calendar_minutes": {"name": "requester_wait_time_in_calendar_minutes", "description": "The combined number of calendar minutes the ticket was in 'new', 'open', or 'hold' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "solve_time_in_calendar_minutes": {"name": "solve_time_in_calendar_minutes", "description": "The combined number of calendar minutes the ticket was in 'new', 'open', 'hold', or 'pending' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "agent_work_time_in_calendar_minutes": {"name": "agent_work_time_in_calendar_minutes", "description": "The combined number of calendar minutes the ticket was in 'new' or 'open' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "on_hold_time_in_calendar_minutes": {"name": "on_hold_time_in_calendar_minutes", "description": "The combined number of calendar minutes the ticket was on 'hold' status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_id": {"name": "ticket_id", "description": "Automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "url": {"name": "url", "description": "The API url of this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_id": {"name": "assignee_id", "description": "The agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_role": {"name": 
"assignee_role", "description": "The role of the agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_email": {"name": "assignee_email", "description": "The email of the agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_name": {"name": "assignee_name", "description": "The name of the agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "brand_id": {"name": "brand_id", "description": "Enterprise only. The id of the brand this ticket is associated with", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "When this record was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "type": {"name": "type", "description": "The type of this ticket, possible values are problem, incident, question or task", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "subject": {"name": "subject", "description": "The value of the subject field for this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "description": {"name": "description", "description": "Read-only first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "priority": {"name": "priority", "description": "The urgency with which the ticket should be addressed, possible values are urgent, high, normal and low", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "The state of the ticket, possible values are new, open, pending, hold, solved and closed", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "recipient": {"name": "recipient", "description": "The original recipient e-mail address of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_id": {"name": "requester_id", "description": "The user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_role": {"name": "requester_role", "description": "The role of the user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_email": {"name": "requester_email", "description": "The email of the user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_name": {"name": "requester_name", "description": "The name of the user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_id": {"name": "submitter_id", "description": "The user who submitted the ticket. 
The submitter always becomes the author of the first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_role": {"name": "submitter_role", "description": "The role of the user who submitted the ticket.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_email": {"name": "submitter_email", "description": "The email of the user who submitted the ticket.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_name": {"name": "submitter_name", "description": "The name of the user who submitted the ticket.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_id": {"name": "organization_id", "description": "The organization of the requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_name": {"name": "organization_name", "description": "The name of the organization of the requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "external_id": {"name": "external_id", "description": "The external id of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_id": {"name": "group_id", "description": "The group this ticket is assigned to", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_name": {"name": "group_name", "description": "The name of the group this ticket is assigned to", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "due_at": {"name": "due_at", "description": "If this is a ticket of type \"task\" it has a due date. Due date format uses ISO 8601 format.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_form_id": {"name": "ticket_form_id", "description": "Enterprise only. 
The id of the ticket form to render for the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_public": {"name": "is_public", "description": "Is true if any comments are public, false otherwise", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "When this record was last updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_channel": {"name": "created_channel", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_from_id": {"name": "source_from_id", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_from_title": {"name": "source_from_title", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_rel": {"name": "source_rel", "description": "The rel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_to_address": {"name": "source_to_address", "description": "The address of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_to_name": {"name": "source_to_name", "description": "The name of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_tags": {"name": "ticket_tags", "description": "A list of all tags assigned to this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "problem_id": {"name": "problem_id", "description": "The reference to the problem if the ticket is listed as a problem", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_incident": {"name": "is_incident", "description": "Boolean indicating whether the ticket is listed as an incident", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_brand_name": {"name": "ticket_brand_name", "description": "The brand name associated with the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_form_name": {"name": "ticket_form_name", "description": "The form name of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_rating": {"name": "ticket_satisfaction_rating", "description": "The ticket satisfaction rating", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_comment": {"name": "ticket_satisfaction_comment", "description": "The ticket satisfaction comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_reason": {"name": "ticket_satisfaction_reason", "description": "The ticket satisfaction reason", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_organization_domain_names": {"name": "ticket_organization_domain_names", "description": "The domain names of the organization associated with the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_domain_names": {"name": "requester_organization_domain_names", "description": "The ticket requester's organization domain names", "meta": {}, "data_type": null, "constraints": [],
"quote": null, "tags": []}, "requester_external_id": {"name": "requester_external_id", "description": "The ticket requester external id", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_created_at": {"name": "requester_created_at", "description": "The date the ticket requester was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_updated_at": {"name": "requester_updated_at", "description": "The date the ticket requester was last updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_requester_active": {"name": "is_requester_active", "description": "Boolean indicating whether the requester is currently active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_locale": {"name": "requester_locale", "description": "The locale of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_time_zone": {"name": "requester_time_zone", "description": "The timezone of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_ticket_update_count": {"name": "requester_ticket_update_count", "description": "The number of times the requester has updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_ticket_last_update_at": {"name": "requester_ticket_last_update_at", "description": "The last date the requester updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_last_login_at": {"name": "requester_last_login_at", "description": "The last login of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_id": {"name": "requester_organization_id", "description": "The organization id of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_name": {"name": "requester_organization_name", "description": "The organization name of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_tags": {"name": "requester_organization_tags", "description": "The organization tags of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_external_id": {"name": "requester_organization_external_id", "description": "The organization external id of the ticket requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_created_at": {"name": "requester_organization_created_at", "description": "The date the ticket requesters organization was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_organization_updated_at": {"name": "requester_organization_updated_at", "description": "The date the ticket requesters organization was last updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_external_id": {"name": "submitter_external_id", "description": "The ticket submitter external id", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_agent_submitted": {"name": "is_agent_submitted", "description": "Boolean indicating if the submitter has an agent role", "meta": {}, "data_type": null, "constraints": [], "quote": 
null, "tags": []}, "is_submitter_active": {"name": "is_submitter_active", "description": "Boolean indicating if the ticket submitter is active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_locale": {"name": "submitter_locale", "description": "The locale of the ticket submitter", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_time_zone": {"name": "submitter_time_zone", "description": "The time zone of the ticket submitter", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_external_id": {"name": "assignee_external_id", "description": "The external id of the ticket assignee", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_assignee_active": {"name": "is_assignee_active", "description": "Boolean indicating if the ticket assignee is active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_locale": {"name": "assignee_locale", "description": "The locale of the ticket assignee", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_time_zone": {"name": "assignee_time_zone", "description": "The time zone of the ticket assignee", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_ticket_update_count": {"name": "assignee_ticket_update_count", "description": "The number of times the ticket assignee has updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_ticket_last_update_at": {"name": "assignee_ticket_last_update_at", "description": "The last date the ticket assignee updated the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_last_login_at": {"name": "assignee_last_login_at", "description": "The date the ticket assignee last logged in", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_tag": {"name": "requester_tag", "description": "The tags associated with the ticket requester.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_tag": {"name": "submitter_tag", "description": "The tags associated with the ticket submitter.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_tag": {"name": "assignee_tag", "description": "The tags associated with the ticket assignee.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_total_satisfaction_scores": {"name": "ticket_total_satisfaction_scores", "description": "The total number of satisfaction scores the ticket received.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_first_satisfaction_score": {"name": "ticket_first_satisfaction_score", "description": "The first satisfaction score the ticket received.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_satisfaction_score": {"name": "ticket_satisfaction_score", "description": "The latest satisfaction score the ticket received.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_good_to_bad_satisfaction_score": {"name": "is_good_to_bad_satisfaction_score", "description": "Boolean indicating if the ticket had a satisfaction score went from good to bad.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_bad_to_good_satisfaction_score": {"name": 
"is_bad_to_good_satisfaction_score", "description": "Boolean indicating if the ticket had a satisfaction score went from bad to good.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_agent_comments": {"name": "count_agent_comments", "description": "Count of agent comments within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_public_agent_comments": {"name": "count_public_agent_comments", "description": "Count of public agent comments within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_end_user_comments": {"name": "count_end_user_comments", "description": "Count of end user comments within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_internal_comments": {"name": "count_internal_comments", "description": "Count of internal comments within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_public_comments": {"name": "count_public_comments", "description": "Count of public comments within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "total_comments": {"name": "total_comments", "description": "Total count of all comments within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_ticket_handoffs": {"name": "count_ticket_handoffs", "description": "", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": [], "dev_snowflake": "Count of distinct internal users who have touched/commented on the ticket."}, "unique_assignee_count": {"name": "unique_assignee_count", "description": "The count of unique assignees that were assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_stations_count": {"name": "assignee_stations_count", "description": "The total number of assignees that were assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_stations_count": {"name": "group_stations_count", "description": "The total count of group stations within the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_assignee_id": {"name": "first_assignee_id", "description": "Assignee id of the first agent assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_assignee_id": {"name": "last_assignee_id", "description": "Assignee id of the last agent assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_agent_assignment_date": {"name": "first_agent_assignment_date", "description": "The date the first agent was assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_agent_assignment_date": {"name": "last_agent_assignment_date", "description": "The date the last agent was assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "first_assignment_to_resolution_calendar_minutes": {"name": "first_assignment_to_resolution_calendar_minutes", "description": "The time in calendar minutes between the first assignment and resolution of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_assignment_to_resolution_calendar_minutes": {"name": "last_assignment_to_resolution_calendar_minutes", 
"description": "The time in calendar minutes between the last assignment and resolution of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_resolutions": {"name": "count_resolutions", "description": "The count of ticket resolutions", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "count_reopens": {"name": "count_reopens", "description": "The count of ticket reopen events", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "new_status_duration_in_calendar_minutes": {"name": "new_status_duration_in_calendar_minutes", "description": "The duration in calendar minutes the ticket was in the \"new\" status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "open_status_duration_in_calendar_minutes": {"name": "open_status_duration_in_calendar_minutes", "description": "The duration in calendar minutes the ticket was in the \"open\" status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "total_agent_replies": {"name": "total_agent_replies", "description": "The total number of agent replies within the ticket, excluding comments where an agent created the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_last_login_age_minutes": {"name": "requester_last_login_age_minutes", "description": "The time in minutes since the ticket requester was last logged in", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_last_login_age_minutes": {"name": "assignee_last_login_age_minutes", "description": "The time in minutes since the ticket assignee was last logged in", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "unsolved_ticket_age_minutes": {"name": "unsolved_ticket_age_minutes", "description": "The time in minutes the ticket has been in an unsolved state", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "unsolved_ticket_age_since_update_minutes": {"name": "unsolved_ticket_age_since_update_minutes", "description": "The time in minutes the ticket has been unsolved since the last update", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_two_touch_resolution": {"name": "is_two_touch_resolution", "description": "Boolean indicating if the ticket was resolved in two public comments", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_multi_touch_resolution": {"name": "is_multi_touch_resolution", "description": "Boolean indicating if the ticket was resolved in two or more public comments", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_last_comment_date": {"name": "ticket_last_comment_date", "description": "The time the last comment was applied to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_unassigned_duration_calendar_minutes": {"name": "ticket_unassigned_duration_calendar_minutes", "description": "The time in minutes the ticket was in an unassigned state", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_status_assignment_date": {"name": "last_status_assignment_date", "description": "The time the status was last changed on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": 
"zendesk://models/zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1724705298.170036, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"zendesk__ticket_metrics\"", "raw_code": "with ticket_enriched as (\n\n select *\n from {{ ref('zendesk__ticket_enriched') }}\n\n), ticket_resolution_times_calendar as (\n\n select *\n from {{ ref('int_zendesk__ticket_resolution_times_calendar') }}\n\n), ticket_reply_times_calendar as (\n\n select *\n from {{ ref('int_zendesk__ticket_reply_times_calendar') }}\n\n), ticket_comments as (\n\n select *\n from {{ ref('int_zendesk__comment_metrics') }}\n\n), ticket_work_time_calendar as (\n\n select *\n from {{ ref('int_zendesk__ticket_work_time_calendar') }}\n\n-- business hour CTEs\n{% if var('using_schedules', True) %}\n\n), ticket_first_resolution_time_business as (\n\n select *\n from {{ ref('int_zendesk__ticket_first_resolution_time_business') }}\n\n), ticket_full_resolution_time_business as (\n\n select *\n from {{ ref('int_zendesk__ticket_full_resolution_time_business') }}\n\n), ticket_work_time_business as (\n\n select *\n from {{ ref('int_zendesk__ticket_work_time_business') }}\n\n), ticket_first_reply_time_business as (\n\n select *\n from {{ ref('int_zendesk__ticket_first_reply_time_business') }}\n\n{% endif %}\n-- end business hour CTEs\n\n), calendar_hour_metrics as (\n\nselect\n ticket_enriched.*,\n case when coalesce(ticket_comments.count_public_agent_comments, 0) = 0\n then null\n else ticket_reply_times_calendar.first_reply_time_calendar_minutes\n end as first_reply_time_calendar_minutes,\n case when coalesce(ticket_comments.count_public_agent_comments, 0) = 0\n then null\n else ticket_reply_times_calendar.total_reply_time_calendar_minutes\n end as total_reply_time_calendar_minutes,\n coalesce(ticket_comments.count_agent_comments, 0) as count_agent_comments,\n coalesce(ticket_comments.count_public_agent_comments, 0) as count_public_agent_comments,\n coalesce(ticket_comments.count_end_user_comments, 0) as count_end_user_comments,\n coalesce(ticket_comments.count_public_comments, 0) as count_public_comments,\n coalesce(ticket_comments.count_internal_comments, 0) as count_internal_comments,\n coalesce(ticket_comments.total_comments, 0) as total_comments,\n coalesce(ticket_comments.count_ticket_handoffs, 0) as count_ticket_handoffs, -- the number of distinct internal users who commented on the ticket\n ticket_comments.last_comment_added_at as ticket_last_comment_date,\n ticket_resolution_times_calendar.unique_assignee_count,\n ticket_resolution_times_calendar.assignee_stations_count,\n ticket_resolution_times_calendar.group_stations_count,\n ticket_resolution_times_calendar.first_assignee_id,\n ticket_resolution_times_calendar.last_assignee_id,\n ticket_resolution_times_calendar.first_agent_assignment_date,\n ticket_resolution_times_calendar.last_agent_assignment_date,\n ticket_resolution_times_calendar.first_solved_at,\n ticket_resolution_times_calendar.last_solved_at,\n case when ticket_enriched.status in ('solved', 'closed')\n then ticket_resolution_times_calendar.first_assignment_to_resolution_calendar_minutes\n else null\n end as first_assignment_to_resolution_calendar_minutes,\n case when ticket_enriched.status in ('solved', 'closed')\n then ticket_resolution_times_calendar.last_assignment_to_resolution_calendar_minutes\n else null\n end as last_assignment_to_resolution_calendar_minutes,\n 
ticket_resolution_times_calendar.ticket_unassigned_duration_calendar_minutes,\n ticket_resolution_times_calendar.first_resolution_calendar_minutes,\n ticket_resolution_times_calendar.final_resolution_calendar_minutes,\n ticket_resolution_times_calendar.total_resolutions as count_resolutions,\n ticket_resolution_times_calendar.count_reopens,\n ticket_work_time_calendar.ticket_deleted_count,\n ticket_work_time_calendar.total_ticket_recoveries,\n ticket_work_time_calendar.last_status_assignment_date,\n ticket_work_time_calendar.new_status_duration_in_calendar_minutes,\n ticket_work_time_calendar.open_status_duration_in_calendar_minutes,\n ticket_work_time_calendar.agent_wait_time_in_calendar_minutes,\n ticket_work_time_calendar.requester_wait_time_in_calendar_minutes,\n ticket_work_time_calendar.solve_time_in_calendar_minutes,\n ticket_work_time_calendar.agent_work_time_in_calendar_minutes,\n ticket_work_time_calendar.on_hold_time_in_calendar_minutes,\n coalesce(ticket_comments.count_agent_replies, 0) as total_agent_replies,\n \n case when ticket_enriched.is_requester_active = true and ticket_enriched.requester_last_login_at is not null\n then ({{ dbt.datediff(\"ticket_enriched.requester_last_login_at\", dbt.current_timestamp_backcompat(), 'second') }} /60)\n end as requester_last_login_age_minutes,\n case when ticket_enriched.is_assignee_active = true and ticket_enriched.assignee_last_login_at is not null\n then ({{ dbt.datediff(\"ticket_enriched.assignee_last_login_at\", dbt.current_timestamp_backcompat(), 'second') }} /60)\n end as assignee_last_login_age_minutes,\n case when lower(ticket_enriched.status) not in ('solved','closed')\n then ({{ dbt.datediff(\"ticket_enriched.created_at\", dbt.current_timestamp_backcompat(), 'second') }} /60)\n end as unsolved_ticket_age_minutes,\n case when lower(ticket_enriched.status) not in ('solved','closed')\n then ({{ dbt.datediff(\"ticket_enriched.updated_at\", dbt.current_timestamp_backcompat(), 'second') }} /60)\n end as unsolved_ticket_age_since_update_minutes,\n case when lower(ticket_enriched.status) in ('solved','closed') and ticket_comments.is_one_touch_resolution \n then true\n else false\n end as is_one_touch_resolution,\n case when lower(ticket_enriched.status) in ('solved','closed') and ticket_comments.is_two_touch_resolution \n then true\n else false \n end as is_two_touch_resolution,\n case when lower(ticket_enriched.status) in ('solved','closed') and not ticket_comments.is_one_touch_resolution\n and not ticket_comments.is_two_touch_resolution \n then true\n else false \n end as is_multi_touch_resolution\n\n\nfrom ticket_enriched\n\nleft join ticket_reply_times_calendar\n using (ticket_id)\n\nleft join ticket_resolution_times_calendar\n using (ticket_id)\n\nleft join ticket_work_time_calendar\n using (ticket_id)\n\nleft join ticket_comments\n using(ticket_id)\n\n{% if var('using_schedules', True) %}\n\n), business_hour_metrics as (\n\n select \n ticket_enriched.ticket_id,\n ticket_first_resolution_time_business.first_resolution_business_minutes,\n ticket_full_resolution_time_business.full_resolution_business_minutes,\n ticket_first_reply_time_business.first_reply_time_business_minutes,\n ticket_work_time_business.agent_wait_time_in_business_minutes,\n ticket_work_time_business.requester_wait_time_in_business_minutes,\n ticket_work_time_business.solve_time_in_business_minutes,\n ticket_work_time_business.agent_work_time_in_business_minutes,\n ticket_work_time_business.on_hold_time_in_business_minutes,\n 
ticket_work_time_business.new_status_duration_in_business_minutes,\n ticket_work_time_business.open_status_duration_in_business_minutes\n\n from ticket_enriched\n\n left join ticket_first_resolution_time_business\n using (ticket_id)\n\n left join ticket_full_resolution_time_business\n using (ticket_id)\n \n left join ticket_first_reply_time_business\n using (ticket_id) \n \n left join ticket_work_time_business\n using (ticket_id)\n\n)\n\nselect\n calendar_hour_metrics.*,\n case when calendar_hour_metrics.status in ('solved', 'closed')\n then coalesce(business_hour_metrics.first_resolution_business_minutes,0)\n else null\n end as first_resolution_business_minutes,\n case when calendar_hour_metrics.status in ('solved', 'closed')\n then coalesce(business_hour_metrics.full_resolution_business_minutes,0)\n else null\n end as full_resolution_business_minutes,\n case when coalesce(calendar_hour_metrics.count_public_agent_comments, 0) = 0\n then null\n else coalesce(business_hour_metrics.first_reply_time_business_minutes,0)\n end as first_reply_time_business_minutes,\n coalesce(business_hour_metrics.agent_wait_time_in_business_minutes,0) as agent_wait_time_in_business_minutes,\n coalesce(business_hour_metrics.requester_wait_time_in_business_minutes,0) as requester_wait_time_in_business_minutes,\n coalesce(business_hour_metrics.solve_time_in_business_minutes,0) as solve_time_in_business_minutes,\n coalesce(business_hour_metrics.agent_work_time_in_business_minutes,0) as agent_work_time_in_business_minutes,\n coalesce(business_hour_metrics.on_hold_time_in_business_minutes,0) as on_hold_time_in_business_minutes,\n coalesce(business_hour_metrics.new_status_duration_in_business_minutes,0) as new_status_duration_in_business_minutes,\n coalesce(business_hour_metrics.open_status_duration_in_business_minutes,0) as open_status_duration_in_business_minutes\n\nfrom calendar_hour_metrics\n\nleft join business_hour_metrics \n using (ticket_id)\n\n{% else %}\n\n) \n\nselect *\nfrom calendar_hour_metrics\n\n{% endif %}", "language": "sql", "refs": [{"name": "zendesk__ticket_enriched", "package": null, "version": null}, {"name": "int_zendesk__ticket_resolution_times_calendar", "package": null, "version": null}, {"name": "int_zendesk__ticket_reply_times_calendar", "package": null, "version": null}, {"name": "int_zendesk__comment_metrics", "package": null, "version": null}, {"name": "int_zendesk__ticket_work_time_calendar", "package": null, "version": null}, {"name": "int_zendesk__ticket_first_resolution_time_business", "package": null, "version": null}, {"name": "int_zendesk__ticket_full_resolution_time_business", "package": null, "version": null}, {"name": "int_zendesk__ticket_work_time_business", "package": null, "version": null}, {"name": "int_zendesk__ticket_first_reply_time_business", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.dbt.datediff"], "nodes": ["model.zendesk.zendesk__ticket_enriched", "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_reply_times_calendar", "model.zendesk.int_zendesk__comment_metrics", "model.zendesk.int_zendesk__ticket_work_time_calendar", "model.zendesk.int_zendesk__ticket_first_resolution_time_business", "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "model.zendesk.int_zendesk__ticket_work_time_business", "model.zendesk.int_zendesk__ticket_first_reply_time_business"]}, "compiled_path": 
"target/compiled/zendesk/models/zendesk__ticket_metrics.sql", "compiled": true, "compiled_code": "with __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), ticket as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as first_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join 
ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n), __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n), __dbt__cte__int_zendesk__ticket_reply_times as (\nwith ticket_public_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commentor was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want to end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > 
end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n (\n (\n (\n (\n ((agent_responded_at)::date - (end_user_comment_created_at)::date)\n * 24 + date_part('hour', (agent_responded_at)::timestamp) - date_part('hour', (end_user_comment_created_at)::timestamp))\n * 60 + date_part('minute', (agent_responded_at)::timestamp) - date_part('minute', (end_user_comment_created_at)::timestamp))\n * 60 + floor(date_part('second', (agent_responded_at)::timestamp)) - floor(date_part('second', (end_user_comment_created_at)::timestamp)))\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2\n), __dbt__cte__int_zendesk__ticket_reply_times_calendar as (\nwith ticket as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_reply_times as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times\n\n)\n\nselect\n\n ticket.ticket_id,\n sum(case when is_first_comment then reply_time_calendar_minutes\n else null end) as first_reply_time_calendar_minutes,\n sum(reply_time_calendar_minutes) as total_reply_time_calendar_minutes --total combined time the customer waits for internal response\n \nfrom ticket\nleft join ticket_reply_times\n using (ticket_id)\n\ngroup by 1\n), __dbt__cte__int_zendesk__ticket_work_time_calendar as (\nwith ticket_historical_status as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n\n), calendar_minutes as (\n \n select \n ticket_id,\n status,\n case when status in ('pending') then status_duration_calendar_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold') then status_duration_calendar_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold', 'pending') then status_duration_calendar_minutes \n else 0 end as solve_time_in_minutes, \n case when status in ('new', 'open') then status_duration_calendar_minutes\n else 0 end as agent_work_time_in_minutes,\n case when status in ('hold') then status_duration_calendar_minutes\n else 0 end as on_hold_time_in_minutes,\n case when status = 'new' then status_duration_calendar_minutes\n else 0 end as new_status_duration_minutes,\n case when status = 'open' then status_duration_calendar_minutes\n else 0 end as open_status_duration_minutes,\n case when status = 'deleted' then 1\n else 0 end as ticket_deleted,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_status_assignment_date,\n case when lag(status) over (partition by ticket_id order by valid_starting_at) = 'deleted' and status != 'deleted'\n then 1\n else 0\n end as ticket_recoveries\n\n from ticket_historical_status\n\n)\n\nselect \n ticket_id,\n last_status_assignment_date,\n sum(ticket_deleted) as ticket_deleted_count,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_calendar_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_calendar_minutes,\n sum(solve_time_in_minutes) as solve_time_in_calendar_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_calendar_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_calendar_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_calendar_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_calendar_minutes,\n sum(ticket_recoveries) as total_ticket_recoveries\nfrom calendar_minutes\ngroup by 1, 2\n), 
__dbt__cte__int_zendesk__ticket_first_resolution_time_business as (\n\n\nwith ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_first_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.first_solved_at) as first_solved_at,\n\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n 
), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_resolution_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_first_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_first_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_first_resolution_time\n\n), intercepted_periods as (\n\n select ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as timestamp) > cast(schedule.valid_from as timestamp)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as timestamp) < cast(schedule.valid_until as timestamp)\n\n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as first_resolution_business_minutes\n from intercepted_periods\n group by 1\n), __dbt__cte__int_zendesk__ticket_full_resolution_time_business as (\n\n\nwith ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_full_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n 
ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.last_solved_at) as last_solved_at,\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n 
\n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_full_resolution_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_full_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_full_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_full_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as timestamp) > cast(schedule.valid_from as timestamp)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as timestamp) < cast(schedule.valid_until as timestamp)\n \n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as full_resolution_business_minutes\n from intercepted_periods\n group by 1\n), __dbt__cte__int_zendesk__ticket_work_time_business as (\n\n\nwith ticket_historical_status as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_status_crossed_with_schedule as (\n \n select\n ticket_historical_status.ticket_id,\n ticket_historical_status.status as ticket_status,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as status_schedule_start,\n least(valid_ending_at, schedule_invalidated_at) as status_schedule_end,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n ticket_historical_status.valid_starting_at as status_valid_starting_at,\n ticket_historical_status.valid_ending_at as status_valid_ending_at\n\n from 
ticket_historical_status\n left join ticket_schedules\n on ticket_historical_status.ticket_id = ticket_schedules.ticket_id\n -- making sure there is indeed real overlap\n where \n (\n (\n (\n ((least(valid_ending_at, schedule_invalidated_at))::date - (greatest(valid_starting_at, schedule_created_at))::date)\n * 24 + date_part('hour', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('hour', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + date_part('minute', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('minute', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + floor(date_part('second', (least(valid_ending_at, schedule_invalidated_at))::timestamp)) - floor(date_part('second', (greatest(valid_starting_at, schedule_created_at))::timestamp)))\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n ticket_status,\n schedule_id,\n status_schedule_start,\n status_schedule_end,\n status_valid_starting_at,\n status_valid_ending_at,\n (\n (\n (\n (\n ((cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n (\n (\n (\n (\n ((ticket_status_crossed_with_schedule.status_schedule_end)::date - (ticket_status_crossed_with_schedule.status_schedule_start)::date)\n * 24 + date_part('hour', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp) - date_part('hour', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp))\n * 60 + date_part('minute', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp) - date_part('minute', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp))\n * 60 + floor(date_part('second', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp)) - floor(date_part('second', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp)))\n /60\n ) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as 
start_week_date\n\n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as integer) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_periods as (\n\n select\n\n weeks_cross_ticket_full_solved_time.*,\n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods as (\n \n select \n weekly_periods.ticket_id,\n weekly_periods.week_number,\n weekly_periods.schedule_id,\n weekly_periods.ticket_status,\n weekly_periods.ticket_week_start_time,\n weekly_periods.ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(weekly_periods.ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on \n ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as timestamp) > cast(schedule.valid_from as timestamp)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as timestamp) < cast(schedule.valid_until as timestamp)\n \n), business_minutes as (\n \n select \n ticket_id,\n ticket_status,\n case when ticket_status in ('pending') then scheduled_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold') then scheduled_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold', 'pending') then 
scheduled_minutes\n else 0 end as solve_time_in_minutes,\n case when ticket_status in ('new', 'open') then scheduled_minutes\n else 0 end as agent_work_time_in_minutes,\n case when ticket_status in ('hold') then scheduled_minutes\n else 0 end as on_hold_time_in_minutes,\n case when ticket_status = 'new' then scheduled_minutes\n else 0 end as new_status_duration_minutes,\n case when ticket_status = 'open' then scheduled_minutes\n else 0 end as open_status_duration_minutes\n from intercepted_periods\n\n)\n \n select \n ticket_id,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_business_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_business_minutes,\n sum(solve_time_in_minutes) as solve_time_in_business_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_business_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_business_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_business_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_business_minutes\n from business_minutes\n group by 1\n), __dbt__cte__int_zendesk__ticket_first_reply_time_business as (\n\n\nwith ticket_reply_times as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times\n\n), ticket_schedules as (\n\n select \n *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), first_reply_time as (\n\n select\n ticket_id,\n end_user_comment_created_at,\n agent_responded_at\n\n from ticket_reply_times\n where is_first_comment\n\n), ticket_first_reply_time as (\n\n select \n first_reply_time.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(first_reply_time.agent_responded_at) as agent_responded_at,\n\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::date - 
(ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from first_reply_time\n join ticket_schedules on first_reply_time.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_reply as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_reply_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_first_reply_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_first_reply.*, \n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n from weeks_cross_ticket_first_reply\n\n), intercepted_periods as (\n\n select ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding 
the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n)\n\n select ticket_id,\n sum(scheduled_minutes) as first_reply_time_business_minutes\n from intercepted_periods\n group by 1\n), ticket_enriched as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"zendesk__ticket_enriched\"\n\n), ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_reply_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times_calendar\n\n), ticket_comments as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__comment_metrics\"\n\n), ticket_work_time_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_work_time_calendar\n\n-- business hour CTEs\n\n\n), ticket_first_resolution_time_business as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_first_resolution_time_business\n\n), ticket_full_resolution_time_business as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_full_resolution_time_business\n\n), ticket_work_time_business as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_work_time_business\n\n), ticket_first_reply_time_business as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_first_reply_time_business\n\n\n-- end business hour CTEs\n\n), calendar_hour_metrics as (\n\nselect\n ticket_enriched.*,\n case when coalesce(ticket_comments.count_public_agent_comments, 0) = 0\n then null\n else ticket_reply_times_calendar.first_reply_time_calendar_minutes\n end as first_reply_time_calendar_minutes,\n case when coalesce(ticket_comments.count_public_agent_comments, 0) = 0\n then null\n else ticket_reply_times_calendar.total_reply_time_calendar_minutes\n end as total_reply_time_calendar_minutes,\n coalesce(ticket_comments.count_agent_comments, 0) as count_agent_comments,\n coalesce(ticket_comments.count_public_agent_comments, 0) as count_public_agent_comments,\n coalesce(ticket_comments.count_end_user_comments, 0) as count_end_user_comments,\n coalesce(ticket_comments.count_public_comments, 0) as count_public_comments,\n coalesce(ticket_comments.count_internal_comments, 0) as count_internal_comments,\n coalesce(ticket_comments.total_comments, 0) as total_comments,\n coalesce(ticket_comments.count_ticket_handoffs, 0) as count_ticket_handoffs, -- the number of distinct internal users who commented on the ticket\n ticket_comments.last_comment_added_at as ticket_last_comment_date,\n ticket_resolution_times_calendar.unique_assignee_count,\n ticket_resolution_times_calendar.assignee_stations_count,\n ticket_resolution_times_calendar.group_stations_count,\n ticket_resolution_times_calendar.first_assignee_id,\n ticket_resolution_times_calendar.last_assignee_id,\n ticket_resolution_times_calendar.first_agent_assignment_date,\n ticket_resolution_times_calendar.last_agent_assignment_date,\n ticket_resolution_times_calendar.first_solved_at,\n ticket_resolution_times_calendar.last_solved_at,\n case when ticket_enriched.status in ('solved', 'closed')\n then ticket_resolution_times_calendar.first_assignment_to_resolution_calendar_minutes\n else null\n end as 
first_assignment_to_resolution_calendar_minutes,\n case when ticket_enriched.status in ('solved', 'closed')\n then ticket_resolution_times_calendar.last_assignment_to_resolution_calendar_minutes\n else null\n end as last_assignment_to_resolution_calendar_minutes,\n ticket_resolution_times_calendar.ticket_unassigned_duration_calendar_minutes,\n ticket_resolution_times_calendar.first_resolution_calendar_minutes,\n ticket_resolution_times_calendar.final_resolution_calendar_minutes,\n ticket_resolution_times_calendar.total_resolutions as count_resolutions,\n ticket_resolution_times_calendar.count_reopens,\n ticket_work_time_calendar.ticket_deleted_count,\n ticket_work_time_calendar.total_ticket_recoveries,\n ticket_work_time_calendar.last_status_assignment_date,\n ticket_work_time_calendar.new_status_duration_in_calendar_minutes,\n ticket_work_time_calendar.open_status_duration_in_calendar_minutes,\n ticket_work_time_calendar.agent_wait_time_in_calendar_minutes,\n ticket_work_time_calendar.requester_wait_time_in_calendar_minutes,\n ticket_work_time_calendar.solve_time_in_calendar_minutes,\n ticket_work_time_calendar.agent_work_time_in_calendar_minutes,\n ticket_work_time_calendar.on_hold_time_in_calendar_minutes,\n coalesce(ticket_comments.count_agent_replies, 0) as total_agent_replies,\n \n case when ticket_enriched.is_requester_active = true and ticket_enriched.requester_last_login_at is not null\n then (\n (\n (\n (\n ((\n current_timestamp::timestamp\n)::date - (ticket_enriched.requester_last_login_at)::date)\n * 24 + date_part('hour', (\n current_timestamp::timestamp\n)::timestamp) - date_part('hour', (ticket_enriched.requester_last_login_at)::timestamp))\n * 60 + date_part('minute', (\n current_timestamp::timestamp\n)::timestamp) - date_part('minute', (ticket_enriched.requester_last_login_at)::timestamp))\n * 60 + floor(date_part('second', (\n current_timestamp::timestamp\n)::timestamp)) - floor(date_part('second', (ticket_enriched.requester_last_login_at)::timestamp)))\n /60)\n end as requester_last_login_age_minutes,\n case when ticket_enriched.is_assignee_active = true and ticket_enriched.assignee_last_login_at is not null\n then (\n (\n (\n (\n ((\n current_timestamp::timestamp\n)::date - (ticket_enriched.assignee_last_login_at)::date)\n * 24 + date_part('hour', (\n current_timestamp::timestamp\n)::timestamp) - date_part('hour', (ticket_enriched.assignee_last_login_at)::timestamp))\n * 60 + date_part('minute', (\n current_timestamp::timestamp\n)::timestamp) - date_part('minute', (ticket_enriched.assignee_last_login_at)::timestamp))\n * 60 + floor(date_part('second', (\n current_timestamp::timestamp\n)::timestamp)) - floor(date_part('second', (ticket_enriched.assignee_last_login_at)::timestamp)))\n /60)\n end as assignee_last_login_age_minutes,\n case when lower(ticket_enriched.status) not in ('solved','closed')\n then (\n (\n (\n (\n ((\n current_timestamp::timestamp\n)::date - (ticket_enriched.created_at)::date)\n * 24 + date_part('hour', (\n current_timestamp::timestamp\n)::timestamp) - date_part('hour', (ticket_enriched.created_at)::timestamp))\n * 60 + date_part('minute', (\n current_timestamp::timestamp\n)::timestamp) - date_part('minute', (ticket_enriched.created_at)::timestamp))\n * 60 + floor(date_part('second', (\n current_timestamp::timestamp\n)::timestamp)) - floor(date_part('second', (ticket_enriched.created_at)::timestamp)))\n /60)\n end as unsolved_ticket_age_minutes,\n case when lower(ticket_enriched.status) not in ('solved','closed')\n then (\n (\n (\n (\n ((\n 
current_timestamp::timestamp\n)::date - (ticket_enriched.updated_at)::date)\n * 24 + date_part('hour', (\n current_timestamp::timestamp\n)::timestamp) - date_part('hour', (ticket_enriched.updated_at)::timestamp))\n * 60 + date_part('minute', (\n current_timestamp::timestamp\n)::timestamp) - date_part('minute', (ticket_enriched.updated_at)::timestamp))\n * 60 + floor(date_part('second', (\n current_timestamp::timestamp\n)::timestamp)) - floor(date_part('second', (ticket_enriched.updated_at)::timestamp)))\n /60)\n end as unsolved_ticket_age_since_update_minutes,\n case when lower(ticket_enriched.status) in ('solved','closed') and ticket_comments.is_one_touch_resolution \n then true\n else false\n end as is_one_touch_resolution,\n case when lower(ticket_enriched.status) in ('solved','closed') and ticket_comments.is_two_touch_resolution \n then true\n else false \n end as is_two_touch_resolution,\n case when lower(ticket_enriched.status) in ('solved','closed') and not ticket_comments.is_one_touch_resolution\n and not ticket_comments.is_two_touch_resolution \n then true\n else false \n end as is_multi_touch_resolution\n\n\nfrom ticket_enriched\n\nleft join ticket_reply_times_calendar\n using (ticket_id)\n\nleft join ticket_resolution_times_calendar\n using (ticket_id)\n\nleft join ticket_work_time_calendar\n using (ticket_id)\n\nleft join ticket_comments\n using(ticket_id)\n\n\n\n), business_hour_metrics as (\n\n select \n ticket_enriched.ticket_id,\n ticket_first_resolution_time_business.first_resolution_business_minutes,\n ticket_full_resolution_time_business.full_resolution_business_minutes,\n ticket_first_reply_time_business.first_reply_time_business_minutes,\n ticket_work_time_business.agent_wait_time_in_business_minutes,\n ticket_work_time_business.requester_wait_time_in_business_minutes,\n ticket_work_time_business.solve_time_in_business_minutes,\n ticket_work_time_business.agent_work_time_in_business_minutes,\n ticket_work_time_business.on_hold_time_in_business_minutes,\n ticket_work_time_business.new_status_duration_in_business_minutes,\n ticket_work_time_business.open_status_duration_in_business_minutes\n\n from ticket_enriched\n\n left join ticket_first_resolution_time_business\n using (ticket_id)\n\n left join ticket_full_resolution_time_business\n using (ticket_id)\n \n left join ticket_first_reply_time_business\n using (ticket_id) \n \n left join ticket_work_time_business\n using (ticket_id)\n\n)\n\nselect\n calendar_hour_metrics.*,\n case when calendar_hour_metrics.status in ('solved', 'closed')\n then coalesce(business_hour_metrics.first_resolution_business_minutes,0)\n else null\n end as first_resolution_business_minutes,\n case when calendar_hour_metrics.status in ('solved', 'closed')\n then coalesce(business_hour_metrics.full_resolution_business_minutes,0)\n else null\n end as full_resolution_business_minutes,\n case when coalesce(calendar_hour_metrics.count_public_agent_comments, 0) = 0\n then null\n else coalesce(business_hour_metrics.first_reply_time_business_minutes,0)\n end as first_reply_time_business_minutes,\n coalesce(business_hour_metrics.agent_wait_time_in_business_minutes,0) as agent_wait_time_in_business_minutes,\n coalesce(business_hour_metrics.requester_wait_time_in_business_minutes,0) as requester_wait_time_in_business_minutes,\n coalesce(business_hour_metrics.solve_time_in_business_minutes,0) as solve_time_in_business_minutes,\n coalesce(business_hour_metrics.agent_work_time_in_business_minutes,0) as agent_work_time_in_business_minutes,\n 
coalesce(business_hour_metrics.on_hold_time_in_business_minutes,0) as on_hold_time_in_business_minutes,\n coalesce(business_hour_metrics.new_status_duration_in_business_minutes,0) as new_status_duration_in_business_minutes,\n coalesce(business_hour_metrics.open_status_duration_in_business_minutes,0) as open_status_duration_in_business_minutes\n\nfrom calendar_hour_metrics\n\nleft join business_hour_metrics \n using (ticket_id)\n\n", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "sql": " __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), ticket as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as 
first_resolution_calendar_minutes,\n    \n  (\n    (\n      ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n     * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n     * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n    as final_resolution_calendar_minutes\n\n  from ticket\n\n  left join ticket_historical_assignee\n    using(ticket_id)\n\n  left join ticket_historical_group\n    using(ticket_id)\n\n  left join solved_times\n    using(ticket_id)\n)"}, {"id": "model.zendesk.int_zendesk__comments_enriched", "sql": " __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n    select *\n    from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__updates\"\n    where field_name = 'comment'\n\n), users as (\n\n    select *\n    from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n    select \n\n        ticket_comment.*,\n        case when commenter.role = 'end-user' then 'external_comment'\n            when commenter.role in ('agent','admin') then 'internal_comment'\n            else 'unknown' end as commenter_role\n    \n    from ticket_comment\n    \n    join users as commenter\n        on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n    /*\n    In int_zendesk__ticket_reply_times we will only be focusing on reply times between public comments.\n    The below union explicitly identifies the previous commenter roles of public and non-public comments.\n    */\n    select\n        *,\n        coalesce(\n            lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n            , 'first_comment') \n            as previous_commenter_role\n    from joined\n    where is_public\n\n    union all\n\n    select\n        *,\n        'non_public_comment' as previous_commenter_role\n    from joined\n    where not is_public\n)\n\nselect \n    *,\n    first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n    sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n)"}, {"id": "model.zendesk.int_zendesk__ticket_reply_times", "sql": " __dbt__cte__int_zendesk__ticket_reply_times as (\nwith ticket_public_comments as (\n\n    select *\n    from __dbt__cte__int_zendesk__comments_enriched\n    where is_public\n\n), end_user_comments as (\n  \n  select \n    ticket_id,\n    valid_starting_at as end_user_comment_created_at,\n    ticket_created_date,\n    commenter_role,\n    previous_internal_comment_count,\n    previous_commenter_role = 'first_comment' as is_first_comment\n  from ticket_public_comments \n  where (commenter_role = 'external_comment'\n    and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n    or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as (  \n\n  select\n    end_user_comments.ticket_id,\n    -- If the commenter was internal, a first comment, and had previous non-public internal comments, then we want the ticket created date to be the end user comment created date\n    -- Otherwise we will use the end user comment created date\n    case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n    
end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n (\n (\n (\n (\n ((agent_responded_at)::date - (end_user_comment_created_at)::date)\n * 24 + date_part('hour', (agent_responded_at)::timestamp) - date_part('hour', (end_user_comment_created_at)::timestamp))\n * 60 + date_part('minute', (agent_responded_at)::timestamp) - date_part('minute', (end_user_comment_created_at)::timestamp))\n * 60 + floor(date_part('second', (agent_responded_at)::timestamp)) - floor(date_part('second', (end_user_comment_created_at)::timestamp)))\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2\n)"}, {"id": "model.zendesk.int_zendesk__ticket_reply_times_calendar", "sql": " __dbt__cte__int_zendesk__ticket_reply_times_calendar as (\nwith ticket as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_reply_times as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times\n\n)\n\nselect\n\n ticket.ticket_id,\n sum(case when is_first_comment then reply_time_calendar_minutes\n else null end) as first_reply_time_calendar_minutes,\n sum(reply_time_calendar_minutes) as total_reply_time_calendar_minutes --total combined time the customer waits for internal response\n \nfrom ticket\nleft join ticket_reply_times\n using (ticket_id)\n\ngroup by 1\n)"}, {"id": "model.zendesk.int_zendesk__ticket_work_time_calendar", "sql": " __dbt__cte__int_zendesk__ticket_work_time_calendar as (\nwith ticket_historical_status as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n\n), calendar_minutes as (\n \n select \n ticket_id,\n status,\n case when status in ('pending') then status_duration_calendar_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold') then status_duration_calendar_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold', 'pending') then status_duration_calendar_minutes \n else 0 end as solve_time_in_minutes, \n case when status in ('new', 'open') then status_duration_calendar_minutes\n else 0 end as agent_work_time_in_minutes,\n case when status in ('hold') then status_duration_calendar_minutes\n else 0 end as on_hold_time_in_minutes,\n case when status = 'new' then status_duration_calendar_minutes\n else 0 end as new_status_duration_minutes,\n case when status = 'open' then status_duration_calendar_minutes\n else 0 end as open_status_duration_minutes,\n case when status = 'deleted' then 1\n else 0 end as ticket_deleted,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_status_assignment_date,\n case when lag(status) over (partition by ticket_id order by valid_starting_at) = 'deleted' and status != 'deleted'\n then 1\n else 0\n end as ticket_recoveries\n\n from ticket_historical_status\n\n)\n\nselect \n 
ticket_id,\n  last_status_assignment_date,\n  sum(ticket_deleted) as ticket_deleted_count,\n  sum(agent_wait_time_in_minutes) as agent_wait_time_in_calendar_minutes,\n  sum(requester_wait_time_in_minutes) as requester_wait_time_in_calendar_minutes,\n  sum(solve_time_in_minutes) as solve_time_in_calendar_minutes,\n  sum(agent_work_time_in_minutes) as agent_work_time_in_calendar_minutes,\n  sum(on_hold_time_in_minutes) as on_hold_time_in_calendar_minutes,\n  sum(new_status_duration_minutes) as new_status_duration_in_calendar_minutes,\n  sum(open_status_duration_minutes) as open_status_duration_in_calendar_minutes,\n  sum(ticket_recoveries) as total_ticket_recoveries\nfrom calendar_minutes\ngroup by 1, 2\n)"}, {"id": "model.zendesk.int_zendesk__ticket_first_resolution_time_business", "sql": " __dbt__cte__int_zendesk__ticket_first_resolution_time_business as (\n\n\nwith ticket_resolution_times_calendar as (\n\n    select *\n    from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n    select *\n    from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n    select *\n    from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_first_resolution_time as (\n\n  select \n    ticket_resolution_times_calendar.ticket_id,\n    ticket_schedules.schedule_created_at,\n    ticket_schedules.schedule_invalidated_at,\n    ticket_schedules.schedule_id,\n\n    -- bringing this in to determine which schedule (Daylight Savings vs Standard time) to use\n    min(ticket_resolution_times_calendar.first_solved_at) as first_solved_at,\n\n    (\n    (\n    (\n    (\n        ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n    date_trunc('week', \n\n    ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n    as date)as timestamp))::date)\n     * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n    date_trunc('week', \n\n    ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n    as date)as timestamp))::timestamp))\n     * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n    date_trunc('week', \n\n    ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n    as date)as timestamp))::timestamp))\n     * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n    date_trunc('week', \n\n    ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n    as date)as timestamp))::timestamp)))\n    /60\n    ) as start_time_in_minutes_from_week,\n    greatest(0,\n  (\n    \n    (\n    (\n    (\n        ((least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::date - (ticket_schedules.schedule_created_at)::date)\n     * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n     * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp) - 
date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_resolution_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_first_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_first_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_first_resolution_time\n\n), intercepted_periods as (\n\n select ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as timestamp) > cast(schedule.valid_from as timestamp)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as timestamp) < 
cast(schedule.valid_until as timestamp)\n\n)\n\n  select \n    ticket_id,\n    sum(scheduled_minutes) as first_resolution_business_minutes\n  from intercepted_periods\n  group by 1\n)"}, {"id": "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "sql": " __dbt__cte__int_zendesk__ticket_full_resolution_time_business as (\n\n\nwith ticket_resolution_times_calendar as (\n\n    select *\n    from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n    select *\n    from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n    select *\n    from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_full_resolution_time as (\n\n  select \n    ticket_resolution_times_calendar.ticket_id,\n    ticket_schedules.schedule_created_at,\n    ticket_schedules.schedule_invalidated_at,\n    ticket_schedules.schedule_id,\n\n    -- bringing this in to determine which schedule (Daylight Savings vs Standard time) to use\n    min(ticket_resolution_times_calendar.last_solved_at) as last_solved_at,\n    (\n    (\n    (\n    (\n        ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n    date_trunc('week', \n\n    ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n    as date)as timestamp))::date)\n     * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n    date_trunc('week', \n\n    ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n    as date)as timestamp))::timestamp))\n     * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n    date_trunc('week', \n\n    ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n    as date)as timestamp))::timestamp))\n     * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n    date_trunc('week', \n\n    ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n    as date)as timestamp))::timestamp)))\n    /60\n    ) as start_time_in_minutes_from_week,\n    greatest(0,\n  (\n    \n    (\n    (\n    (\n        ((least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::date - (ticket_schedules.schedule_created_at)::date)\n     * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n     * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n     * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n    /60\n    )) as raw_delta_in_minutes,\n    -- Sunday as week start date\ncast(\n\n    date_trunc('week', \n\n    ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n    as date) as start_week_date\n        \n  from 
ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_full_resolution_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_full_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_full_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_full_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as timestamp) > cast(schedule.valid_from as timestamp)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as timestamp) < cast(schedule.valid_until as timestamp)\n \n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as full_resolution_business_minutes\n from intercepted_periods\n group by 1\n)"}, {"id": "model.zendesk.int_zendesk__ticket_work_time_business", "sql": " __dbt__cte__int_zendesk__ticket_work_time_business as (\n\n\nwith ticket_historical_status as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n\n), ticket_schedules as (\n\n select *\n from 
\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_status_crossed_with_schedule as (\n \n select\n ticket_historical_status.ticket_id,\n ticket_historical_status.status as ticket_status,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as status_schedule_start,\n least(valid_ending_at, schedule_invalidated_at) as status_schedule_end,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n ticket_historical_status.valid_starting_at as status_valid_starting_at,\n ticket_historical_status.valid_ending_at as status_valid_ending_at\n\n from ticket_historical_status\n left join ticket_schedules\n on ticket_historical_status.ticket_id = ticket_schedules.ticket_id\n -- making sure there is indeed real overlap\n where \n (\n (\n (\n ((least(valid_ending_at, schedule_invalidated_at))::date - (greatest(valid_starting_at, schedule_created_at))::date)\n * 24 + date_part('hour', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('hour', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + date_part('minute', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('minute', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + floor(date_part('second', (least(valid_ending_at, schedule_invalidated_at))::timestamp)) - floor(date_part('second', (greatest(valid_starting_at, schedule_created_at))::timestamp)))\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n ticket_status,\n schedule_id,\n status_schedule_start,\n status_schedule_end,\n status_valid_starting_at,\n status_valid_ending_at,\n (\n (\n (\n (\n ((cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n (\n (\n (\n (\n 
((ticket_status_crossed_with_schedule.status_schedule_end)::date - (ticket_status_crossed_with_schedule.status_schedule_start)::date)\n * 24 + date_part('hour', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp) - date_part('hour', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp))\n * 60 + date_part('minute', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp) - date_part('minute', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp))\n * 60 + floor(date_part('second', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp)) - floor(date_part('second', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp)))\n /60\n ) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n\n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as integer) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_periods as (\n\n select\n\n weeks_cross_ticket_full_solved_time.*,\n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods as (\n \n select \n weekly_periods.ticket_id,\n weekly_periods.week_number,\n weekly_periods.schedule_id,\n weekly_periods.ticket_status,\n weekly_periods.ticket_week_start_time,\n weekly_periods.ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(weekly_periods.ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on \n ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or 
Standard Time version of the schedule\n      -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n      and cast( \n\n    start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as timestamp) > cast(schedule.valid_from as timestamp)\n      and cast( \n\n    start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as timestamp) < cast(schedule.valid_until as timestamp)\n    \n), business_minutes as (\n  \n    select \n      ticket_id,\n      ticket_status,\n      case when ticket_status in ('pending') then scheduled_minutes\n          else 0 end as agent_wait_time_in_minutes,\n      case when ticket_status in ('new', 'open', 'hold') then scheduled_minutes\n          else 0 end as requester_wait_time_in_minutes,\n      case when ticket_status in ('new', 'open', 'hold', 'pending') then scheduled_minutes\n          else 0 end as solve_time_in_minutes,\n      case when ticket_status in ('new', 'open') then scheduled_minutes\n          else 0 end as agent_work_time_in_minutes,\n      case when ticket_status in ('hold') then scheduled_minutes\n          else 0 end as on_hold_time_in_minutes,\n      case when ticket_status = 'new' then scheduled_minutes\n          else 0 end as new_status_duration_minutes,\n      case when ticket_status = 'open' then scheduled_minutes\n          else 0 end as open_status_duration_minutes\n    from intercepted_periods\n\n)\n  \n    select \n      ticket_id,\n      sum(agent_wait_time_in_minutes) as agent_wait_time_in_business_minutes,\n      sum(requester_wait_time_in_minutes) as requester_wait_time_in_business_minutes,\n      sum(solve_time_in_minutes) as solve_time_in_business_minutes,\n      sum(agent_work_time_in_minutes) as agent_work_time_in_business_minutes,\n      sum(on_hold_time_in_minutes) as on_hold_time_in_business_minutes,\n      sum(new_status_duration_minutes) as new_status_duration_in_business_minutes,\n      sum(open_status_duration_minutes) as open_status_duration_in_business_minutes\n    from business_minutes\n    group by 1\n)"}, {"id": "model.zendesk.int_zendesk__ticket_first_reply_time_business", "sql": " __dbt__cte__int_zendesk__ticket_first_reply_time_business as (\n\n\nwith ticket_reply_times as (\n\n    select *\n    from __dbt__cte__int_zendesk__ticket_reply_times\n\n), ticket_schedules as (\n\n    select \n      *\n    from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n    select *\n    from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), first_reply_time as (\n\n    select\n      ticket_id,\n      end_user_comment_created_at,\n      agent_responded_at\n\n    from ticket_reply_times\n    where is_first_comment\n\n), ticket_first_reply_time as (\n\n  select \n    first_reply_time.ticket_id,\n    ticket_schedules.schedule_created_at,\n    ticket_schedules.schedule_invalidated_at,\n    ticket_schedules.schedule_id,\n\n    -- bringing this in to determine which schedule (Daylight Savings vs Standard time) to use\n    min(first_reply_time.agent_responded_at) as agent_responded_at,\n\n    (\n    (\n    (\n    (\n        ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n    date_trunc('week', \n\n    ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n    as date)as timestamp))::date)\n     * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n    date_trunc('week', 
\n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from first_reply_time\n join ticket_schedules on first_reply_time.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_reply as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_reply_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_first_reply_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_first_reply.*, \n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as 
ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n from weeks_cross_ticket_first_reply\n\n), intercepted_periods as (\n\n select ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n)\n\n select ticket_id,\n sum(scheduled_minutes) as first_reply_time_business_minutes\n from intercepted_periods\n group by 1\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.zendesk__ticket_summary": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "zendesk__ticket_summary", "resource_type": "model", "package_name": "zendesk", "path": "zendesk__ticket_summary.sql", "original_file_path": "models/zendesk__ticket_summary.sql", "unique_id": "model.zendesk.zendesk__ticket_summary", "fqn": ["zendesk", "zendesk__ticket_summary"], "alias": "zendesk__ticket_summary", "checksum": {"name": "sha256", "checksum": "085f6c784b70f6ca6f38a8f3d4defb1debb06049d0bb6fe1b778ad7638d08f2e"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "A single record table containing Zendesk ticket and user summary metrics. 
These metrics are updated for the current day the model is run.", "columns": {"user_count": {"name": "user_count", "description": "Total count of users created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "active_agent_count": {"name": "active_agent_count", "description": "Total count of agents", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "deleted_user_count": {"name": "deleted_user_count", "description": "Total deleted user count", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "end_user_count": {"name": "end_user_count", "description": "Total end user count", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "suspended_user_count": {"name": "suspended_user_count", "description": "Total count of users in a suspended state", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "new_ticket_count": {"name": "new_ticket_count", "description": "Total count of tickets in the \"new\" status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "on_hold_ticket_count": {"name": "on_hold_ticket_count", "description": "Total count of tickets in the \"hold\" status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "open_ticket_count": {"name": "open_ticket_count", "description": "Total count of tickets in the \"open\" status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "pending_ticket_count": {"name": "pending_ticket_count", "description": "Total count of tickets in the \"pending\" status", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "solved_ticket_count": {"name": "solved_ticket_count", "description": "Total count of solved tickets", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "problem_ticket_count": {"name": "problem_ticket_count", "description": "Total count of tickets labeled as problems", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "reassigned_ticket_count": {"name": "reassigned_ticket_count", "description": "Total count of tickets that have been reassigned", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "reopened_ticket_count": {"name": "reopened_ticket_count", "description": "Total count of tickets that have been reopened", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "surveyed_satisfaction_ticket_count": {"name": "surveyed_satisfaction_ticket_count", "description": "Total count of tickets that have been surveyed for a satisfaction response", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "unassigned_unsolved_ticket_count": {"name": "unassigned_unsolved_ticket_count", "description": "Total count of tickets that are unassigned and unsolved", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "unreplied_ticket_count": {"name": "unreplied_ticket_count", "description": "Total count of tickets that have not had a reply", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "unreplied_unsolved_ticket_count": {"name": "unreplied_unsolved_ticket_count", "description": "Total count of tickets that have not had a reply and are unsolved", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "unsolved_ticket_count": {"name": "unsolved_ticket_count", "description": "Total count of unsolved 
tickets", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assigned_ticket_count": {"name": "assigned_ticket_count", "description": "Total count of assigned tickets", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "deleted_ticket_count": {"name": "deleted_ticket_count", "description": "Total count of deleted tickets", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "recovered_ticket_count": {"name": "recovered_ticket_count", "description": "Total count of tickets that were deleted then reopened", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1724705298.173162, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"zendesk__ticket_summary\"", "raw_code": "with ticket_metrics as (\n select *\n from {{ ref('zendesk__ticket_metrics') }}\n\n), user_table as (\n select *\n from {{ ref('stg_zendesk__user') }}\n\n), user_sum as (\n select\n cast(1 as {{ dbt.type_int() }}) as summary_helper,\n sum(case when is_active = true\n then 1\n else 0\n end) as user_count,\n sum(case when lower(role) != 'end-user' and is_active = true\n then 1\n else 0\n end) as active_agent_count,\n sum(case when is_active = false\n then 1\n else 0\n end) as deleted_user_count,\n sum(case when lower(role) = 'end-user' and is_active = true\n then 1\n else 0\n end) as end_user_count,\n sum(case when is_suspended = true\n then 1\n else 0\n end) as suspended_user_count\n from user_table\n\n group by 1\n\n), ticket_metric_sum as (\n select \n cast(1 as {{ dbt.type_int() }}) as summary_helper,\n sum(case when lower(status) = 'new'\n then 1\n else 0\n end) as new_ticket_count,\n sum(case when lower(status) = 'hold'\n then 1\n else 0\n end) as on_hold_ticket_count,\n sum(case when lower(status) = 'open'\n then 1\n else 0\n end) as open_ticket_count,\n sum(case when lower(status) = 'pending'\n then 1\n else 0\n end) as pending_ticket_count,\n sum(case when lower(type) = 'problem'\n then 1\n else 0\n end) as problem_ticket_count,\n sum(case when first_assignee_id != last_assignee_id\n then 1\n else 0\n end) as reassigned_ticket_count,\n sum(case when count_reopens > 0\n then 1\n else 0\n end) as reopened_ticket_count,\n\n sum(case when lower(ticket_satisfaction_score) in ('offered', 'good', 'bad')\n then 1\n else 0\n end) as surveyed_satisfaction_ticket_count,\n\n sum(case when assignee_id is null and lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unassigned_unsolved_ticket_count,\n sum(case when total_agent_replies < 0\n then 1\n else 0\n end) as unreplied_ticket_count,\n sum(case when total_agent_replies < 0 and lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unreplied_unsolved_ticket_count,\n sum(case when lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unsolved_ticket_count,\n sum(case when lower(status) in ('solved', 'closed')\n then 1\n else 0\n end) as solved_ticket_count,\n sum(case when lower(status) in ('deleted')\n then 1\n else 0\n end) as deleted_ticket_count,\n sum(case when total_ticket_recoveries > 0\n then 1\n else 0\n end) as recovered_ticket_count,\n sum(case when assignee_stations_count > 0\n then 1\n else 0\n end) as 
assigned_ticket_count,\n count(count_internal_comments) as total_internal_comments,\n count(count_public_comments) as total_public_comments,\n count(total_comments)\n from ticket_metrics\n \n group by 1\n\n\n), final as (\n select\n user_sum.user_count,\n user_sum.active_agent_count,\n user_sum.deleted_user_count,\n user_sum.end_user_count,\n user_sum.suspended_user_count,\n ticket_metric_sum.new_ticket_count,\n ticket_metric_sum.on_hold_ticket_count,\n ticket_metric_sum.open_ticket_count,\n ticket_metric_sum.pending_ticket_count,\n ticket_metric_sum.solved_ticket_count,\n ticket_metric_sum.problem_ticket_count,\n ticket_metric_sum.assigned_ticket_count,\n ticket_metric_sum.reassigned_ticket_count,\n ticket_metric_sum.reopened_ticket_count,\n ticket_metric_sum.surveyed_satisfaction_ticket_count,\n ticket_metric_sum.unassigned_unsolved_ticket_count,\n ticket_metric_sum.unreplied_ticket_count,\n ticket_metric_sum.unreplied_unsolved_ticket_count,\n ticket_metric_sum.unsolved_ticket_count,\n ticket_metric_sum.recovered_ticket_count,\n ticket_metric_sum.deleted_ticket_count\n from user_sum\n\n left join ticket_metric_sum\n using(summary_helper)\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "zendesk__ticket_metrics", "package": null, "version": null}, {"name": "stg_zendesk__user", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_int"], "nodes": ["model.zendesk.zendesk__ticket_metrics", "model.zendesk_source.stg_zendesk__user"]}, "compiled_path": "target/compiled/zendesk/models/zendesk__ticket_summary.sql", "compiled": true, "compiled_code": "with ticket_metrics as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"zendesk__ticket_metrics\"\n\n), user_table as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user\"\n\n), user_sum as (\n select\n cast(1 as integer) as summary_helper,\n sum(case when is_active = true\n then 1\n else 0\n end) as user_count,\n sum(case when lower(role) != 'end-user' and is_active = true\n then 1\n else 0\n end) as active_agent_count,\n sum(case when is_active = false\n then 1\n else 0\n end) as deleted_user_count,\n sum(case when lower(role) = 'end-user' and is_active = true\n then 1\n else 0\n end) as end_user_count,\n sum(case when is_suspended = true\n then 1\n else 0\n end) as suspended_user_count\n from user_table\n\n group by 1\n\n), ticket_metric_sum as (\n select \n cast(1 as integer) as summary_helper,\n sum(case when lower(status) = 'new'\n then 1\n else 0\n end) as new_ticket_count,\n sum(case when lower(status) = 'hold'\n then 1\n else 0\n end) as on_hold_ticket_count,\n sum(case when lower(status) = 'open'\n then 1\n else 0\n end) as open_ticket_count,\n sum(case when lower(status) = 'pending'\n then 1\n else 0\n end) as pending_ticket_count,\n sum(case when lower(type) = 'problem'\n then 1\n else 0\n end) as problem_ticket_count,\n sum(case when first_assignee_id != last_assignee_id\n then 1\n else 0\n end) as reassigned_ticket_count,\n sum(case when count_reopens > 0\n then 1\n else 0\n end) as reopened_ticket_count,\n\n sum(case when lower(ticket_satisfaction_score) in ('offered', 'good', 'bad')\n then 1\n else 0\n end) as surveyed_satisfaction_ticket_count,\n\n sum(case when assignee_id is null and lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unassigned_unsolved_ticket_count,\n sum(case when total_agent_replies < 0\n then 1\n else 0\n end) as unreplied_ticket_count,\n sum(case 
when total_agent_replies < 0 and lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unreplied_unsolved_ticket_count,\n sum(case when lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unsolved_ticket_count,\n sum(case when lower(status) in ('solved', 'closed')\n then 1\n else 0\n end) as solved_ticket_count,\n sum(case when lower(status) in ('deleted')\n then 1\n else 0\n end) as deleted_ticket_count,\n sum(case when total_ticket_recoveries > 0\n then 1\n else 0\n end) as recovered_ticket_count,\n sum(case when assignee_stations_count > 0\n then 1\n else 0\n end) as assigned_ticket_count,\n count(count_internal_comments) as total_internal_comments,\n count(count_public_comments) as total_public_comments,\n count(total_comments)\n from ticket_metrics\n \n group by 1\n\n\n), final as (\n select\n user_sum.user_count,\n user_sum.active_agent_count,\n user_sum.deleted_user_count,\n user_sum.end_user_count,\n user_sum.suspended_user_count,\n ticket_metric_sum.new_ticket_count,\n ticket_metric_sum.on_hold_ticket_count,\n ticket_metric_sum.open_ticket_count,\n ticket_metric_sum.pending_ticket_count,\n ticket_metric_sum.solved_ticket_count,\n ticket_metric_sum.problem_ticket_count,\n ticket_metric_sum.assigned_ticket_count,\n ticket_metric_sum.reassigned_ticket_count,\n ticket_metric_sum.reopened_ticket_count,\n ticket_metric_sum.surveyed_satisfaction_ticket_count,\n ticket_metric_sum.unassigned_unsolved_ticket_count,\n ticket_metric_sum.unreplied_ticket_count,\n ticket_metric_sum.unreplied_unsolved_ticket_count,\n ticket_metric_sum.unsolved_ticket_count,\n ticket_metric_sum.recovered_ticket_count,\n ticket_metric_sum.deleted_ticket_count\n from user_sum\n\n left join ticket_metric_sum\n using(summary_helper)\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.zendesk__ticket_field_history": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "zendesk__ticket_field_history", "resource_type": "model", "package_name": "zendesk", "path": "zendesk__ticket_field_history.sql", "original_file_path": "models/zendesk__ticket_field_history.sql", "unique_id": "model.zendesk.zendesk__ticket_field_history", "fqn": ["zendesk", "zendesk__ticket_field_history"], "alias": "zendesk__ticket_field_history", "checksum": {"name": "sha256", "checksum": "2fea56dd7631d630021a96594da99a1b65affd7ec6d7a5a913ef3fc0b7759949"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "incremental", "incremental_strategy": "delete+insert", "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "ticket_day_id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected", "partition_by": {"field": "date_day", "data_type": "date", "granularity": "month"}, "file_format": "delta"}, "tags": [], "description": "A daily historical view of the ticket field values defined in the `ticket_field_history_columns` variable and the corresponding updater fields defined in the `ticket_field_history_updater_columns` variable.\n", "columns": {"date_day": 
{"name": "date_day", "description": "The date of the day associated with the field values.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_id": {"name": "ticket_id", "description": "A ticket's unique identifier, it is automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_day_id": {"name": "ticket_day_id", "description": "The unique key of the table, a surrogate key of date_day and ticket_id.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_id": {"name": "assignee_id", "description": "The assignee id assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "The status of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "priority": {"name": "priority", "description": "The tickets priority ranking", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "incremental", "partition_by": {"field": "date_day", "data_type": "date", "granularity": "month"}, "unique_key": "ticket_day_id", "incremental_strategy": "delete+insert", "file_format": "delta"}, "created_at": 1724705298.1600409, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"zendesk__ticket_field_history\"", "raw_code": "{{ \n config(\n materialized='incremental',\n partition_by = {'field': 'date_day', 'data_type': 'date', 'granularity': 'month' } if target.type not in ['spark', 'databricks'] else ['date_day'],\n unique_key='ticket_day_id',\n incremental_strategy = 'merge' if target.type not in ('snowflake', 'postgres', 'redshift') else 'delete+insert',\n file_format='delta'\n ) \n}}\n\n{%- set change_data_columns = adapter.get_columns_in_relation(ref('int_zendesk__field_history_scd')) -%}\n\nwith change_data as (\n\n select *\n from {{ ref('int_zendesk__field_history_scd') }}\n \n {% if is_incremental() %}\n where valid_from >= (select max(date_day) from {{ this }})\n\n-- If no issue fields have been updated since the last incremental run, the pivoted_daily_history CTE will return no record/rows.\n-- When this is the case, we need to grab the most recent day's records from the previously built table so that we can persist \n-- those values into the future.\n\n), most_recent_data as ( \n\n select \n *\n from {{ this }}\n where date_day = (select max(date_day) from {{ this }} )\n\n{% endif %}\n\n), calendar as (\n\n select *\n from {{ ref('int_zendesk__field_calendar_spine') }}\n where date_day <= current_date\n {% if is_incremental() %}\n and date_day >= (select max(date_day) from {{ this }})\n {% endif %}\n\n), joined as (\n\n select \n calendar.date_day,\n calendar.ticket_id\n {% if is_incremental() %} \n {% for col in change_data_columns if col.name|lower not in ['ticket_id','valid_from','valid_to','ticket_day_id'] %} \n , coalesce(change_data.{{ col.name }}, most_recent_data.{{ col.name }}) as {{ col.name }}\n {% endfor %}\n \n {% else %}\n {% for col in change_data_columns if col.name|lower not in ['ticket_id','valid_from','valid_to','ticket_day_id'] %} \n , {{ col.name }}\n {% endfor %}\n {% endif %}\n\n from calendar\n left join change_data\n on 
calendar.ticket_id = change_data.ticket_id\n and calendar.date_day = change_data.valid_from\n \n {% if is_incremental() %}\n left join most_recent_data\n on calendar.ticket_id = most_recent_data.ticket_id\n and calendar.date_day = most_recent_data.date_day\n {% endif %}\n\n), set_values as (\n\n select\n date_day,\n ticket_id\n\n {% for col in change_data_columns if col.name|lower not in ['ticket_id','valid_from','valid_to','ticket_day_id'] %}\n , {{ col.name }}\n -- create a batch/partition once a new value is provided\n , sum( case when {{ col.name }} is null then 0 else 1 end) over ( partition by ticket_id\n order by date_day rows unbounded preceding) as {{ col.name }}_field_partition\n\n {% endfor %}\n\n from joined\n),\n\nfill_values as (\n\n select \n date_day,\n ticket_id\n\n {% for col in change_data_columns if col.name|lower not in ['ticket_id','valid_from','valid_to','ticket_day_id'] %}\n -- grab the value that started this batch/partition\n , first_value( {{ col.name }} ) over (\n partition by ticket_id, {{ col.name }}_field_partition \n order by date_day asc rows between unbounded preceding and current row) as {{ col.name }}\n {% endfor %}\n\n from set_values\n\n), fix_null_values as (\n\n select \n date_day,\n ticket_id\n {% for col in change_data_columns if col.name|lower not in ['ticket_id','valid_from','valid_to','ticket_day_id'] %} \n\n -- we de-nulled the true null values earlier in order to differentiate them from nulls that just needed to be backfilled\n , case when cast( {{ col.name }} as {{ dbt.type_string() }} ) = 'is_null' then null else {{ col.name }} end as {{ col.name }}\n {% endfor %}\n\n from fill_values\n\n), surrogate_key as (\n\n select\n {{ dbt_utils.generate_surrogate_key(['date_day','ticket_id']) }} as ticket_day_id,\n *\n\n from fix_null_values\n)\n\nselect *\nfrom surrogate_key", "language": "sql", "refs": [{"name": "int_zendesk__field_history_scd", "package": null, "version": null}, {"name": "int_zendesk__field_history_scd", "package": null, "version": null}, {"name": "int_zendesk__field_calendar_spine", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.is_incremental", "macro.dbt_utils.generate_surrogate_key", "macro.dbt.type_string"], "nodes": ["model.zendesk.int_zendesk__field_history_scd", "model.zendesk.int_zendesk__field_calendar_spine"]}, "compiled_path": "target/compiled/zendesk/models/zendesk__ticket_field_history.sql", "compiled": true, "compiled_code": "with change_data as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__field_history_scd\"\n \n \n where valid_from >= (select max(date_day) from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"zendesk__ticket_field_history\")\n\n-- If no issue fields have been updated since the last incremental run, the pivoted_daily_history CTE will return no record/rows.\n-- When this is the case, we need to grab the most recent day's records from the previously built table so that we can persist \n-- those values into the future.\n\n), most_recent_data as ( \n\n select \n *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"zendesk__ticket_field_history\"\n where date_day = (select max(date_day) from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"zendesk__ticket_field_history\" )\n\n\n\n), calendar as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__field_calendar_spine\"\n where date_day <= current_date\n \n and date_day >= 
(select max(date_day) from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"zendesk__ticket_field_history\")\n \n\n), joined as (\n\n select \n calendar.date_day,\n calendar.ticket_id\n \n \n , coalesce(change_data.status, most_recent_data.status) as status\n \n , coalesce(change_data.assignee_id, most_recent_data.assignee_id) as assignee_id\n \n , coalesce(change_data.priority, most_recent_data.priority) as priority\n \n \n \n\n from calendar\n left join change_data\n on calendar.ticket_id = change_data.ticket_id\n and calendar.date_day = change_data.valid_from\n \n \n left join most_recent_data\n on calendar.ticket_id = most_recent_data.ticket_id\n and calendar.date_day = most_recent_data.date_day\n \n\n), set_values as (\n\n select\n date_day,\n ticket_id\n\n \n , status\n -- create a batch/partition once a new value is provided\n , sum( case when status is null then 0 else 1 end) over ( partition by ticket_id\n order by date_day rows unbounded preceding) as status_field_partition\n\n \n , assignee_id\n -- create a batch/partition once a new value is provided\n , sum( case when assignee_id is null then 0 else 1 end) over ( partition by ticket_id\n order by date_day rows unbounded preceding) as assignee_id_field_partition\n\n \n , priority\n -- create a batch/partition once a new value is provided\n , sum( case when priority is null then 0 else 1 end) over ( partition by ticket_id\n order by date_day rows unbounded preceding) as priority_field_partition\n\n \n\n from joined\n),\n\nfill_values as (\n\n select \n date_day,\n ticket_id\n\n \n -- grab the value that started this batch/partition\n , first_value( status ) over (\n partition by ticket_id, status_field_partition \n order by date_day asc rows between unbounded preceding and current row) as status\n \n -- grab the value that started this batch/partition\n , first_value( assignee_id ) over (\n partition by ticket_id, assignee_id_field_partition \n order by date_day asc rows between unbounded preceding and current row) as assignee_id\n \n -- grab the value that started this batch/partition\n , first_value( priority ) over (\n partition by ticket_id, priority_field_partition \n order by date_day asc rows between unbounded preceding and current row) as priority\n \n\n from set_values\n\n), fix_null_values as (\n\n select \n date_day,\n ticket_id\n \n\n -- we de-nulled the true null values earlier in order to differentiate them from nulls that just needed to be backfilled\n , case when cast( status as TEXT ) = 'is_null' then null else status end as status\n \n\n -- we de-nulled the true null values earlier in order to differentiate them from nulls that just needed to be backfilled\n , case when cast( assignee_id as TEXT ) = 'is_null' then null else assignee_id end as assignee_id\n \n\n -- we de-nulled the true null values earlier in order to differentiate them from nulls that just needed to be backfilled\n , case when cast( priority as TEXT ) = 'is_null' then null else priority end as priority\n \n\n from fill_values\n\n), surrogate_key as (\n\n select\n md5(cast(coalesce(cast(date_day as TEXT), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(ticket_id as TEXT), '_dbt_utils_surrogate_key_null_') as TEXT)) as ticket_day_id,\n *\n\n from fix_null_values\n)\n\nselect *\nfrom surrogate_key", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, 
"model.zendesk.zendesk__sla_policies": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "zendesk__sla_policies", "resource_type": "model", "package_name": "zendesk", "path": "zendesk__sla_policies.sql", "original_file_path": "models/zendesk__sla_policies.sql", "unique_id": "model.zendesk.zendesk__sla_policies", "fqn": ["zendesk", "zendesk__sla_policies"], "alias": "zendesk__sla_policies", "checksum": {"name": "sha256", "checksum": "450c1289895dff2dce94dbed7926eeaa895ffa8c6a25524f558d9dcd5e7075fa"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Each record represents an SLA policy event and additional sla breach and achievement metrics. Calendar and business hour SLA breaches for `first_reply_time`, `next_reply_time`, `requester_wait_time`, and `agent_work_time` are supported. If there is a SLA you would like supported that is not included, please create a feature request.", "columns": {"sla_event_id": {"name": "sla_event_id", "description": "A surrogate key generated from the combination of ticket_id, metric, and sla_applied_at fields", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_id": {"name": "ticket_id", "description": "A ticket's unique identifier, it is automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "sla_policy_name": {"name": "sla_policy_name", "description": "The name of the SLA policy associated with the SLA metric", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "metric": {"name": "metric", "description": "The SLA metric, either agent_work_time, requester_wait_time, first_reply_time or next_reply_time", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "sla_applied_at": {"name": "sla_applied_at", "description": "When the SLA target was triggered. 
This is the starting time", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "target": {"name": "target", "description": "The SLA target, in minutes", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "in_business_hours": {"name": "in_business_hours", "description": "Boolean field indicating if the SLA target is in business hours (true) or calendar hours (false)", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "sla_breach_at": {"name": "sla_breach_at", "description": "The time or expected time of the SLA breach or achieve event.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "sla_elapsed_time": {"name": "sla_elapsed_time", "description": "The total elapsed time to achieve the SLA metric whether breached or achieved", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_active_sla": {"name": "is_active_sla", "description": "Boolean field indicating that the SLA event is currently active and not breached (true) or past (false)", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_sla_breach": {"name": "is_sla_breach", "description": "Boolean field indicating if the SLA has been breached (true) or was achieved (false)", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1724705298.159421, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"zendesk__sla_policies\"", "raw_code": "--final step where we union together all of the reply time, agent work time, and requester wait time sla's\n\nwith reply_time_sla as (\n\n select * \n from {{ ref('int_zendesk__reply_time_combined') }}\n\n), agent_work_calendar_sla as (\n\n select *\n from {{ ref('int_zendesk__agent_work_time_calendar_hours') }}\n\n), requester_wait_calendar_sla as (\n\n select *\n from {{ ref('int_zendesk__requester_wait_time_calendar_hours') }}\n\n{% if var('using_schedules', True) %}\n\n), agent_work_business_sla as (\n\n select *\n from {{ ref('int_zendesk__agent_work_time_business_hours') }}\n\n), requester_wait_business_sla as (\n select *\n from {{ ref('int_zendesk__requester_wait_time_business_hours') }}\n\n{% endif %}\n\n), all_slas_unioned as (\n select\n ticket_id,\n sla_policy_name,\n metric,\n sla_applied_at,\n target,\n in_business_hours,\n sla_update_at as sla_breach_at,\n sla_elapsed_time,\n is_sla_breached\n from reply_time_sla\n\nunion all\n\n select\n ticket_id,\n sla_policy_name,\n 'agent_work_time' as metric,\n sla_applied_at,\n target,\n false as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_calendar_minutes) as sla_elapsed_time,\n {{ fivetran_utils.max_bool(\"is_breached_during_schedule\") }}\n from agent_work_calendar_sla\n\n group by 1, 2, 3, 4, 5, 6\n\nunion all\n\n select\n ticket_id,\n sla_policy_name,\n 'requester_wait_time' as metric,\n sla_applied_at,\n target,\n false as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_calendar_minutes) as sla_elapsed_time,\n {{ fivetran_utils.max_bool(\"is_breached_during_schedule\") }}\n from requester_wait_calendar_sla\n\n group by 1, 2, 3, 4, 5, 6\n\n\n{% if var('using_schedules', True) %}\n\nunion all \n\n 
select \n ticket_id,\n sla_policy_name,\n 'agent_work_time' as metric,\n sla_applied_at,\n target,\n true as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_scheduled_minutes) as sla_elapsed_time,\n {{ fivetran_utils.max_bool(\"is_breached_during_schedule\") }}\n from agent_work_business_sla\n \n group by 1, 2, 3, 4, 5, 6\n\nunion all \n\n select \n ticket_id,\n sla_policy_name,\n 'requester_wait_time' as metric,\n sla_applied_at,\n target,\n true as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_scheduled_minutes) as sla_elapsed_time,\n {{ fivetran_utils.max_bool(\"is_breached_during_schedule\") }}\n \n from requester_wait_business_sla\n \n group by 1, 2, 3, 4, 5, 6\n\n{% endif %}\n\n)\n\nselect \n {{ dbt_utils.generate_surrogate_key(['ticket_id', 'metric', 'sla_applied_at']) }} as sla_event_id,\n ticket_id,\n sla_policy_name,\n metric,\n sla_applied_at,\n target,\n in_business_hours,\n sla_breach_at,\n case when sla_elapsed_time is null\n then ({{ dbt.datediff(\"sla_applied_at\", dbt.current_timestamp_backcompat(), 'second') }} / 60) --This will create an entry for active sla's\n else sla_elapsed_time\n end as sla_elapsed_time,\n sla_breach_at > current_timestamp as is_active_sla,\n case when (sla_breach_at > {{ dbt.current_timestamp_backcompat() }})\n then null\n else is_sla_breached\n end as is_sla_breach\nfrom all_slas_unioned", "language": "sql", "refs": [{"name": "int_zendesk__reply_time_combined", "package": null, "version": null}, {"name": "int_zendesk__agent_work_time_calendar_hours", "package": null, "version": null}, {"name": "int_zendesk__requester_wait_time_calendar_hours", "package": null, "version": null}, {"name": "int_zendesk__agent_work_time_business_hours", "package": null, "version": null}, {"name": "int_zendesk__requester_wait_time_business_hours", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.max_bool", "macro.dbt_utils.generate_surrogate_key", "macro.dbt.current_timestamp_backcompat", "macro.dbt.datediff"], "nodes": ["model.zendesk.int_zendesk__reply_time_combined", "model.zendesk.int_zendesk__agent_work_time_calendar_hours", "model.zendesk.int_zendesk__requester_wait_time_calendar_hours", "model.zendesk.int_zendesk__agent_work_time_business_hours", "model.zendesk.int_zendesk__requester_wait_time_business_hours"]}, "compiled_path": "target/compiled/zendesk/models/zendesk__sla_policies.sql", "compiled": true, "compiled_code": "--final step where we union together all of the reply time, agent work time, and requester wait time sla's\n\nwith reply_time_sla as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__reply_time_combined\"\n\n), agent_work_calendar_sla as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__agent_work_time_calendar_hours\"\n\n), requester_wait_calendar_sla as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__requester_wait_time_calendar_hours\"\n\n\n\n), agent_work_business_sla as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__agent_work_time_business_hours\"\n\n), requester_wait_business_sla as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__requester_wait_time_business_hours\"\n\n\n\n), all_slas_unioned as (\n select\n ticket_id,\n sla_policy_name,\n metric,\n sla_applied_at,\n target,\n 
in_business_hours,\n sla_update_at as sla_breach_at,\n sla_elapsed_time,\n is_sla_breached\n from reply_time_sla\n\nunion all\n\n select\n ticket_id,\n sla_policy_name,\n 'agent_work_time' as metric,\n sla_applied_at,\n target,\n false as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_calendar_minutes) as sla_elapsed_time,\n \n\n bool_or( is_breached_during_schedule )\n\n\n from agent_work_calendar_sla\n\n group by 1, 2, 3, 4, 5, 6\n\nunion all\n\n select\n ticket_id,\n sla_policy_name,\n 'requester_wait_time' as metric,\n sla_applied_at,\n target,\n false as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_calendar_minutes) as sla_elapsed_time,\n \n\n bool_or( is_breached_during_schedule )\n\n\n from requester_wait_calendar_sla\n\n group by 1, 2, 3, 4, 5, 6\n\n\n\n\nunion all \n\n select \n ticket_id,\n sla_policy_name,\n 'agent_work_time' as metric,\n sla_applied_at,\n target,\n true as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_scheduled_minutes) as sla_elapsed_time,\n \n\n bool_or( is_breached_during_schedule )\n\n\n from agent_work_business_sla\n \n group by 1, 2, 3, 4, 5, 6\n\nunion all \n\n select \n ticket_id,\n sla_policy_name,\n 'requester_wait_time' as metric,\n sla_applied_at,\n target,\n true as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_scheduled_minutes) as sla_elapsed_time,\n \n\n bool_or( is_breached_during_schedule )\n\n\n \n from requester_wait_business_sla\n \n group by 1, 2, 3, 4, 5, 6\n\n\n\n)\n\nselect \n md5(cast(coalesce(cast(ticket_id as TEXT), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(metric as TEXT), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(sla_applied_at as TEXT), '_dbt_utils_surrogate_key_null_') as TEXT)) as sla_event_id,\n ticket_id,\n sla_policy_name,\n metric,\n sla_applied_at,\n target,\n in_business_hours,\n sla_breach_at,\n case when sla_elapsed_time is null\n then (\n (\n (\n (\n ((\n current_timestamp::timestamp\n)::date - (sla_applied_at)::date)\n * 24 + date_part('hour', (\n current_timestamp::timestamp\n)::timestamp) - date_part('hour', (sla_applied_at)::timestamp))\n * 60 + date_part('minute', (\n current_timestamp::timestamp\n)::timestamp) - date_part('minute', (sla_applied_at)::timestamp))\n * 60 + floor(date_part('second', (\n current_timestamp::timestamp\n)::timestamp)) - floor(date_part('second', (sla_applied_at)::timestamp)))\n / 60) --This will create an entry for active sla's\n else sla_elapsed_time\n end as sla_elapsed_time,\n sla_breach_at > current_timestamp as is_active_sla,\n case when (sla_breach_at > \n current_timestamp::timestamp\n)\n then null\n else is_sla_breached\n end as is_sla_breach\nfrom all_slas_unioned", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.zendesk__ticket_backlog": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "zendesk__ticket_backlog", "resource_type": "model", "package_name": "zendesk", "path": "zendesk__ticket_backlog.sql", "original_file_path": "models/zendesk__ticket_backlog.sql", "unique_id": "model.zendesk.zendesk__ticket_backlog", "fqn": ["zendesk", "zendesk__ticket_backlog"], "alias": "zendesk__ticket_backlog", "checksum": {"name": "sha256", "checksum": 
"546f8460ab16ce0f4671b1ae5742bfdb0f97bc4184c9da30cd21de81400922f7"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "A daily historical view of the ticket field values defined in the `ticket_field_history_columns` variable for all backlog tickets. Backlog tickets being defined as any ticket not a 'closed', 'deleted', or 'solved' status.\n", "columns": {"date_day": {"name": "date_day", "description": "The date of the day associated with the field values", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_id": {"name": "ticket_id", "description": "A ticket's unique identifier, it is automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "The status of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_channel": {"name": "created_channel", "description": "The channel where the ticket was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_name": {"name": "assignee_name", "description": "The assignee name assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "priority": {"name": "priority", "description": "The tickets priority ranking", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk://models/zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1724705298.173481, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"zendesk__ticket_backlog\"", "raw_code": "--This model will only run if 'status' is included within the `ticket_field_history_columns` variable.\n{{ config(enabled = 'status' in var('ticket_field_history_columns')) }}\n\nwith ticket_field_history as (\n select *\n from {{ ref('zendesk__ticket_field_history') }}\n\n), tickets as (\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), group_names as (\n select *\n from {{ ref('stg_zendesk__group') }}\n\n), users as (\n select *\n from {{ ref('stg_zendesk__user') }}\n\n), brands as (\n select *\n from {{ ref('stg_zendesk__brand') }}\n\n--The below model is excluded if the user does not include ticket_form_id in the variable as a low percentage of accounts use ticket forms.\n{% if 'ticket_form_id' in var('ticket_field_history_columns') %}\n), ticket_forms as (\n select *\n from {{ ref('int_zendesk__latest_ticket_form') }}\n{% endif %}\n\n), organizations as (\n select *\n from {{ ref('stg_zendesk__organization') }}\n\n), backlog as (\n select\n ticket_field_history.date_day\n ,ticket_field_history.ticket_id\n ,ticket_field_history.status\n ,tickets.created_channel\n {% for col in var('ticket_field_history_columns') if col != 'status' %} --Looking at all history 
fields the users passed through in their dbt_project.yml file\n {% if col in ['assignee_id'] %} --Standard ID field where the name can easily be joined from stg model.\n ,assignee.name as assignee_name\n\n {% elif col in ['requester_id'] %} --Standard ID field where the name can easily be joined from stg model.\n ,requester.name as requester_name\n\n {% elif col in ['ticket_form_id'] %} --Standard ID field where the name can easily be joined from stg model.\n ,ticket_forms.name as ticket_form_name\n\n {% elif col in ['organization_id'] %} --Standard ID field where the name can easily be joined from stg model.\n ,organizations.name as organization_name\n\n {% elif col in ['brand_id'] %} --Standard ID field where the name can easily be joined from stg model.\n ,brands.name as brand_name\n\n {% elif col in ['group_id'] %} --Standard ID field where the name can easily be joined from stg model.\n ,group_names.name as group_name\n\n {% elif col in ['locale_id'] %} --Standard ID field where the name can easily be joined from stg model.\n ,assignee.locale as local_name\n\n {% else %} --All other fields are not ID's and can simply be included in the query.\n ,ticket_field_history.{{ col }}\n {% endif %}\n {% endfor %}\n\n from ticket_field_history\n\n left join tickets\n on tickets.ticket_id = ticket_field_history.ticket_id\n\n {% if 'ticket_form_id' in var('ticket_field_history_columns') %} --Join not needed if field is not located in variable, otherwise it is included.\n left join ticket_forms\n on ticket_forms.ticket_form_id = cast(ticket_field_history.ticket_form_id as {{ dbt.type_bigint() }})\n {% endif %}\n\n {% if 'group_id' in var('ticket_field_history_columns') %}--Join not needed if field is not located in variable, otherwise it is included.\n left join group_names\n on group_names.group_id = cast(ticket_field_history.group_id as {{ dbt.type_bigint() }})\n {% endif %}\n\n {% if 'assignee_id' in var('ticket_field_history_columns') or 'requester_id' in var('ticket_field_history_columns') or 'locale_id' in var('ticket_field_history_columns')%} --Join not needed if fields is not located in variable, otherwise it is included.\n left join users as assignee\n on assignee.user_id = cast(ticket_field_history.assignee_id as {{ dbt.type_bigint() }})\n {% endif %}\n\n {% if 'requester_id' in var('ticket_field_history_columns') %} --Join not needed if field is not located in variable, otherwise it is included.\n left join users as requester\n on requester.user_id = cast(ticket_field_history.requester_id as {{ dbt.type_bigint() }})\n {% endif %}\n\n {% if 'brand_id' in var('ticket_field_history_columns') %} --Join not needed if field is not located in variable, otherwise it is included.\n left join brands\n on brands.brand_id = cast(ticket_field_history.brand_id as {{ dbt.type_bigint() }})\n {% endif %}\n\n {% if 'organization_id' in var('ticket_field_history_columns') %} --Join not needed if field is not located in variable, otherwise it is included.\n left join organizations\n on organizations.organization_id = cast(ticket_field_history.organization_id as {{ dbt.type_bigint() }})\n {% endif %}\n\n where ticket_field_history.status not in ('closed', 'solved', 'deleted')\n)\n\nselect *\nfrom backlog", "language": "sql", "refs": [{"name": "zendesk__ticket_field_history", "package": null, "version": null}, {"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "stg_zendesk__group", "package": null, "version": null}, {"name": "stg_zendesk__user", "package": null, "version": null}, 
{"name": "stg_zendesk__brand", "package": null, "version": null}, {"name": "stg_zendesk__organization", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_bigint"], "nodes": ["model.zendesk.zendesk__ticket_field_history", "model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__group", "model.zendesk_source.stg_zendesk__user", "model.zendesk_source.stg_zendesk__brand", "model.zendesk_source.stg_zendesk__organization"]}, "compiled_path": "target/compiled/zendesk/models/zendesk__ticket_backlog.sql", "compiled": true, "compiled_code": "--This model will only run if 'status' is included within the `ticket_field_history_columns` variable.\n\n\nwith ticket_field_history as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"zendesk__ticket_field_history\"\n\n), tickets as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), group_names as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__group\"\n\n), users as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user\"\n\n), brands as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__brand\"\n\n--The below model is excluded if the user does not include ticket_form_id in the variable as a low percentage of accounts use ticket forms.\n\n\n), organizations as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__organization\"\n\n), backlog as (\n select\n ticket_field_history.date_day\n ,ticket_field_history.ticket_id\n ,ticket_field_history.status\n ,tickets.created_channel\n --Looking at all history fields the users passed through in their dbt_project.yml file\n --Standard ID field where the name can easily be joined from stg model.\n ,assignee.name as assignee_name\n\n \n --Looking at all history fields the users passed through in their dbt_project.yml file\n --All other fields are not ID's and can simply be included in the query.\n ,ticket_field_history.priority\n \n \n\n from ticket_field_history\n\n left join tickets\n on tickets.ticket_id = ticket_field_history.ticket_id\n\n \n\n \n\n --Join not needed if fields is not located in variable, otherwise it is included.\n left join users as assignee\n on assignee.user_id = cast(ticket_field_history.assignee_id as bigint)\n \n\n \n\n \n\n \n\n where ticket_field_history.status not in ('closed', 'solved', 'deleted')\n)\n\nselect *\nfrom backlog", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__sla_policy_applied": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__sla_policy_applied", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/int_zendesk__sla_policy_applied.sql", "original_file_path": "models/sla_policy/int_zendesk__sla_policy_applied.sql", "unique_id": "model.zendesk.int_zendesk__sla_policy_applied", "fqn": ["zendesk", "sla_policy", "int_zendesk__sla_policy_applied"], "alias": "int_zendesk__sla_policy_applied", "checksum": {"name": "sha256", "checksum": "5879f6ab082c64d3650de0c8a5b3ec5ee85e25eb99646451eab7e9d6499c4d19"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", 
"database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1724705297.5389829, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__sla_policy_applied\"", "raw_code": "-- step 1, figure out when sla was applied to tickets\n\n-- more on SLA policies here: https://support.zendesk.com/hc/en-us/articles/204770038-Defining-and-using-SLA-policies-Professional-and-Enterprise-\n-- SLA policies are calculated for next_reply_time, first_reply_time, agent_work_time, requester_wait_time. If you're company uses other SLA metrics, and would like this\n-- package to support those, please reach out to the Fivetran team on Slack.\n\nwith ticket_field_history as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n\n), sla_policy_name as (\n\n select \n *\n from {{ ref('int_zendesk__updates') }}\n where field_name = ('sla_policy')\n\n), ticket as (\n\n select *\n from {{ ref('int_zendesk__ticket_aggregates') }}\n\n), sla_policy_applied as (\n\n select\n ticket_field_history.ticket_id,\n ticket.created_at as ticket_created_at,\n ticket_field_history.valid_starting_at,\n ticket.status as ticket_current_status,\n ticket_field_history.field_name as metric,\n case when ticket_field_history.field_name = 'first_reply_time' then row_number() over (partition by ticket_field_history.ticket_id, ticket_field_history.field_name order by ticket_field_history.valid_starting_at desc) else 1 end as latest_sla,\n case when ticket_field_history.field_name = 'first_reply_time' then ticket.created_at else ticket_field_history.valid_starting_at end as sla_applied_at,\n cast({{ fivetran_utils.json_parse('ticket_field_history.value', ['minutes']) }} as {{ dbt.type_int() }} ) as target,\n {{ fivetran_utils.json_parse('ticket_field_history.value', ['in_business_hours']) }} = 'true' as in_business_hours\n from ticket_field_history\n join ticket\n on ticket.ticket_id = ticket_field_history.ticket_id\n where ticket_field_history.value is not null\n and ticket_field_history.field_name in ('next_reply_time', 'first_reply_time', 'agent_work_time', 'requester_wait_time')\n\n), final as (\n select\n sla_policy_applied.*,\n sla_policy_name.value as sla_policy_name\n from sla_policy_applied\n left join sla_policy_name\n on sla_policy_name.ticket_id = sla_policy_applied.ticket_id\n and sla_policy_applied.valid_starting_at >= sla_policy_name.valid_starting_at\n and sla_policy_applied.valid_starting_at < coalesce(sla_policy_name.valid_ending_at, {{ dbt.current_timestamp_backcompat() }}) \n where sla_policy_applied.latest_sla = 1\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "int_zendesk__ticket_aggregates", "package": null, "version": null}], "sources": [], "metrics": 
[], "depends_on": {"macros": ["macro.fivetran_utils.json_parse", "macro.dbt.type_int", "macro.dbt.current_timestamp_backcompat"], "nodes": ["model.zendesk.int_zendesk__updates", "model.zendesk.int_zendesk__ticket_aggregates"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/int_zendesk__sla_policy_applied.sql", "compiled": true, "compiled_code": "-- step 1, figure out when sla was applied to tickets\n\n-- more on SLA policies here: https://support.zendesk.com/hc/en-us/articles/204770038-Defining-and-using-SLA-policies-Professional-and-Enterprise-\n-- SLA policies are calculated for next_reply_time, first_reply_time, agent_work_time, requester_wait_time. If you're company uses other SLA metrics, and would like this\n-- package to support those, please reach out to the Fivetran team on Slack.\n\nwith ticket_field_history as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__updates\"\n\n), sla_policy_name as (\n\n select \n *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = ('sla_policy')\n\n), ticket as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_aggregates\"\n\n), sla_policy_applied as (\n\n select\n ticket_field_history.ticket_id,\n ticket.created_at as ticket_created_at,\n ticket_field_history.valid_starting_at,\n ticket.status as ticket_current_status,\n ticket_field_history.field_name as metric,\n case when ticket_field_history.field_name = 'first_reply_time' then row_number() over (partition by ticket_field_history.ticket_id, ticket_field_history.field_name order by ticket_field_history.valid_starting_at desc) else 1 end as latest_sla,\n case when ticket_field_history.field_name = 'first_reply_time' then ticket.created_at else ticket_field_history.valid_starting_at end as sla_applied_at,\n cast(\n\n ticket_field_history.value::json #>> '{minutes}'\n\n as integer ) as target,\n \n\n ticket_field_history.value::json #>> '{in_business_hours}'\n\n = 'true' as in_business_hours\n from ticket_field_history\n join ticket\n on ticket.ticket_id = ticket_field_history.ticket_id\n where ticket_field_history.value is not null\n and ticket_field_history.field_name in ('next_reply_time', 'first_reply_time', 'agent_work_time', 'requester_wait_time')\n\n), final as (\n select\n sla_policy_applied.*,\n sla_policy_name.value as sla_policy_name\n from sla_policy_applied\n left join sla_policy_name\n on sla_policy_name.ticket_id = sla_policy_applied.ticket_id\n and sla_policy_applied.valid_starting_at >= sla_policy_name.valid_starting_at\n and sla_policy_applied.valid_starting_at < coalesce(sla_policy_name.valid_ending_at, \n current_timestamp::timestamp\n) \n where sla_policy_applied.latest_sla = 1\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__agent_work_time_business_hours": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__agent_work_time_business_hours", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/agent_work_time/int_zendesk__agent_work_time_business_hours.sql", "original_file_path": "models/sla_policy/agent_work_time/int_zendesk__agent_work_time_business_hours.sql", "unique_id": 
"model.zendesk.int_zendesk__agent_work_time_business_hours", "fqn": ["zendesk", "sla_policy", "agent_work_time", "int_zendesk__agent_work_time_business_hours"], "alias": "int_zendesk__agent_work_time_business_hours", "checksum": {"name": "sha256", "checksum": "bf3885a1aad6f4f87b63d6c0f1131a6b6149407f7a0f2f7447172884f788cc50"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1724705297.544658, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__agent_work_time_business_hours\"", "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\n-- AGENT WORK TIME\n-- This is complicated, as SLAs minutes are only counted while the ticket is in 'new' or 'open' status.\n\n-- Additionally, for business hours, only 'new' or 'open' status hours are counted if they are also during business hours\nwith agent_work_time_filtered_statuses as (\n\n select *\n from {{ ref('int_zendesk__agent_work_time_filtered_statuses') }}\n where in_business_hours\n\n), schedule as (\n\n select * \n from {{ ref('int_zendesk__schedule_spine') }}\n\n), ticket_schedules as (\n\n select * \n from {{ ref('int_zendesk__ticket_schedules') }}\n \n-- cross schedules with work time\n), ticket_status_crossed_with_schedule as (\n \n select\n agent_work_time_filtered_statuses.ticket_id,\n agent_work_time_filtered_statuses.sla_applied_at,\n agent_work_time_filtered_statuses.target, \n agent_work_time_filtered_statuses.sla_policy_name, \n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as valid_starting_at,\n least(valid_ending_at, schedule_invalidated_at) as valid_ending_at,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n valid_starting_at as status_valid_starting_at,\n valid_ending_at as status_valid_ending_at\n\n from agent_work_time_filtered_statuses\n left join ticket_schedules\n on agent_work_time_filtered_statuses.ticket_id = ticket_schedules.ticket_id\n where {{ dbt.datediff(\n 'greatest(valid_starting_at, schedule_created_at)', \n 'least(valid_ending_at, schedule_invalidated_at)', \n 'second') }} > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n sla_applied_at,\n target, \n sla_policy_name, \n schedule_id,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('ticket_status_crossed_with_schedule.valid_starting_at','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n \"cast(ticket_status_crossed_with_schedule.valid_starting_at as \" ~ 
dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as valid_starting_at_in_minutes_from_week,\n ({{ dbt.datediff(\n 'ticket_status_crossed_with_schedule.valid_starting_at', \n 'ticket_status_crossed_with_schedule.valid_ending_at',\n 'second') }} /60\n ) as raw_delta_in_minutes,\n {{ dbt_date.week_start('ticket_status_crossed_with_schedule.valid_starting_at','UTC') }} as start_week_date\n \n from ticket_status_crossed_with_schedule\n {{ dbt_utils.group_by(n=10) }}\n\n), weeks as (\n\n {{ dbt_utils.generate_series(208) }}\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((valid_starting_at_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_period_agent_work_time as (\n\n select \n\n ticket_id,\n sla_applied_at,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n target,\n sla_policy_name,\n valid_starting_at_in_minutes_from_week,\n raw_delta_in_minutes,\n week_number,\n schedule_id,\n start_week_date,\n cast(greatest(0, valid_starting_at_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time_minute,\n cast(least(valid_starting_at_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time_minute\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods_agent as (\n \n select \n weekly_period_agent_work_time.ticket_id,\n weekly_period_agent_work_time.sla_applied_at,\n weekly_period_agent_work_time.target,\n weekly_period_agent_work_time.sla_policy_name,\n weekly_period_agent_work_time.valid_starting_at,\n weekly_period_agent_work_time.valid_ending_at,\n weekly_period_agent_work_time.week_number,\n weekly_period_agent_work_time.ticket_week_start_time_minute,\n weekly_period_agent_work_time.ticket_week_end_time_minute,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time_minute, schedule.end_time_utc) - greatest(weekly_period_agent_work_time.ticket_week_start_time_minute, schedule.start_time_utc) as scheduled_minutes\n from weekly_period_agent_work_time\n join schedule on ticket_week_start_time_minute <= schedule.end_time_utc \n and ticket_week_end_time_minute >= schedule.start_time_utc\n and weekly_period_agent_work_time.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time_minute', from_date_or_timestamp='start_week_date') }} as {{ dbt.type_timestamp() }}) > cast(schedule.valid_from as {{ dbt.type_timestamp() }})\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time_minute', from_date_or_timestamp='start_week_date') }} as {{ dbt.type_timestamp() }}) < cast(schedule.valid_until as {{ dbt.type_timestamp() }})\n\n), intercepted_periods_with_running_total as (\n \n select \n *,\n 
sum(scheduled_minutes) over \n (partition by ticket_id, sla_applied_at \n order by valid_starting_at, week_number, schedule_end_time\n rows between unbounded preceding and current row)\n as running_total_scheduled_minutes\n\n from intercepted_periods_agent\n\n\n), intercepted_periods_agent_with_breach_flag as (\n select \n intercepted_periods_with_running_total.*,\n target - running_total_scheduled_minutes as remaining_target_minutes,\n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) as lag_check,\n case when (target - running_total_scheduled_minutes) = 0 then true\n when (target - running_total_scheduled_minutes) < 0 \n and \n (lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) > 0 \n or \n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) is null) \n then true else false end as is_breached_during_schedule\n \n from intercepted_periods_with_running_total\n\n), intercepted_periods_agent_filtered as (\n\n select\n *,\n (remaining_target_minutes + scheduled_minutes) as breach_minutes,\n greatest(ticket_week_start_time_minute, schedule_start_time) + (remaining_target_minutes + scheduled_minutes) as breach_minutes_from_week\n from intercepted_periods_agent_with_breach_flag\n \n), agent_work_business_breach as (\n \n select \n *,\n {{ fivetran_utils.timestamp_add(\n \"minute\",\n \"cast(((7*24*60) * week_number) + breach_minutes_from_week as \" ~ dbt.type_int() ~ \" )\",\n \"\" ~ dbt.date_trunc('week', 'valid_starting_at') ~ \"\",\n ) }} as sla_breach_at\n from intercepted_periods_agent_filtered\n\n)\n\nselect * \nfrom agent_work_business_breach", "language": "sql", "refs": [{"name": "int_zendesk__agent_work_time_filtered_statuses", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}, {"name": "int_zendesk__ticket_schedules", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt_utils.group_by", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd", "macro.dbt.date_trunc", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__agent_work_time_filtered_statuses", "model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_schedules"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/agent_work_time/int_zendesk__agent_work_time_business_hours.sql", "compiled": true, "compiled_code": "\n\n-- AGENT WORK TIME\n-- This is complicated, as SLAs minutes are only counted while the ticket is in 'new' or 'open' status.\n\n-- Additionally, for business hours, only 'new' or 'open' status hours are counted if they are also during business hours\nwith agent_work_time_filtered_statuses as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__agent_work_time_filtered_statuses\"\n where in_business_hours\n\n), schedule as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_schedules as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n \n-- cross schedules with work time\n), 
ticket_status_crossed_with_schedule as (\n \n select\n agent_work_time_filtered_statuses.ticket_id,\n agent_work_time_filtered_statuses.sla_applied_at,\n agent_work_time_filtered_statuses.target, \n agent_work_time_filtered_statuses.sla_policy_name, \n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as valid_starting_at,\n least(valid_ending_at, schedule_invalidated_at) as valid_ending_at,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n valid_starting_at as status_valid_starting_at,\n valid_ending_at as status_valid_ending_at\n\n from agent_work_time_filtered_statuses\n left join ticket_schedules\n on agent_work_time_filtered_statuses.ticket_id = ticket_schedules.ticket_id\n where \n (\n (\n (\n ((least(valid_ending_at, schedule_invalidated_at))::date - (greatest(valid_starting_at, schedule_created_at))::date)\n * 24 + date_part('hour', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('hour', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + date_part('minute', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('minute', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + floor(date_part('second', (least(valid_ending_at, schedule_invalidated_at))::timestamp)) - floor(date_part('second', (greatest(valid_starting_at, schedule_created_at))::timestamp)))\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n sla_applied_at,\n target, \n sla_policy_name, \n schedule_id,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n (\n (\n (\n (\n ((cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as valid_starting_at_in_minutes_from_week,\n (\n (\n (\n (\n ((ticket_status_crossed_with_schedule.valid_ending_at)::date - (ticket_status_crossed_with_schedule.valid_starting_at)::date)\n * 24 + date_part('hour', 
(ticket_status_crossed_with_schedule.valid_ending_at)::timestamp) - date_part('hour', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp))\n * 60 + date_part('minute', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp) - date_part('minute', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp))\n * 60 + floor(date_part('second', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp)) - floor(date_part('second', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp)))\n /60\n ) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7,8,9,10\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as integer) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((valid_starting_at_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_period_agent_work_time as (\n\n select \n\n ticket_id,\n sla_applied_at,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n target,\n sla_policy_name,\n valid_starting_at_in_minutes_from_week,\n raw_delta_in_minutes,\n week_number,\n schedule_id,\n start_week_date,\n cast(greatest(0, valid_starting_at_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time_minute,\n cast(least(valid_starting_at_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time_minute\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods_agent as (\n \n select \n weekly_period_agent_work_time.ticket_id,\n weekly_period_agent_work_time.sla_applied_at,\n weekly_period_agent_work_time.target,\n weekly_period_agent_work_time.sla_policy_name,\n weekly_period_agent_work_time.valid_starting_at,\n weekly_period_agent_work_time.valid_ending_at,\n weekly_period_agent_work_time.week_number,\n weekly_period_agent_work_time.ticket_week_start_time_minute,\n weekly_period_agent_work_time.ticket_week_end_time_minute,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time_minute, schedule.end_time_utc) - greatest(weekly_period_agent_work_time.ticket_week_start_time_minute, schedule.start_time_utc) as 
scheduled_minutes\n from weekly_period_agent_work_time\n join schedule on ticket_week_start_time_minute <= schedule.end_time_utc \n and ticket_week_end_time_minute >= schedule.start_time_utc\n and weekly_period_agent_work_time.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time_minute))\n\n as timestamp) > cast(schedule.valid_from as timestamp)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time_minute))\n\n as timestamp) < cast(schedule.valid_until as timestamp)\n\n), intercepted_periods_with_running_total as (\n \n select \n *,\n sum(scheduled_minutes) over \n (partition by ticket_id, sla_applied_at \n order by valid_starting_at, week_number, schedule_end_time\n rows between unbounded preceding and current row)\n as running_total_scheduled_minutes\n\n from intercepted_periods_agent\n\n\n), intercepted_periods_agent_with_breach_flag as (\n select \n intercepted_periods_with_running_total.*,\n target - running_total_scheduled_minutes as remaining_target_minutes,\n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) as lag_check,\n case when (target - running_total_scheduled_minutes) = 0 then true\n when (target - running_total_scheduled_minutes) < 0 \n and \n (lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) > 0 \n or \n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) is null) \n then true else false end as is_breached_during_schedule\n \n from intercepted_periods_with_running_total\n\n), intercepted_periods_agent_filtered as (\n\n select\n *,\n (remaining_target_minutes + scheduled_minutes) as breach_minutes,\n greatest(ticket_week_start_time_minute, schedule_start_time) + (remaining_target_minutes + scheduled_minutes) as breach_minutes_from_week\n from intercepted_periods_agent_with_breach_flag\n \n), agent_work_business_breach as (\n \n select \n *,\n \n\n date_trunc('week', valid_starting_at) + ((interval '1 minute') * (cast(((7*24*60) * week_number) + breach_minutes_from_week as integer )))\n\n as sla_breach_at\n from intercepted_periods_agent_filtered\n\n)\n\nselect * \nfrom agent_work_business_breach", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__agent_work_time_calendar_hours": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__agent_work_time_calendar_hours", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/agent_work_time/int_zendesk__agent_work_time_calendar_hours.sql", "original_file_path": "models/sla_policy/agent_work_time/int_zendesk__agent_work_time_calendar_hours.sql", "unique_id": "model.zendesk.int_zendesk__agent_work_time_calendar_hours", "fqn": ["zendesk", 
"sla_policy", "agent_work_time", "int_zendesk__agent_work_time_calendar_hours"], "alias": "int_zendesk__agent_work_time_calendar_hours", "checksum": {"name": "sha256", "checksum": "f25752139fd2e10c5d666783a5abbf36e9d81b6a4e0012f6e42d816e8d20aa81"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1724705297.565464, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__agent_work_time_calendar_hours\"", "raw_code": "-- Calculate breach time for agent work time, calendar hours\nwith agent_work_time_filtered_statuses as (\n\n select *\n from {{ ref('int_zendesk__agent_work_time_filtered_statuses') }}\n where not in_business_hours\n\n), agent_work_time_calendar_minutes as (\n\n select \n *,\n {{ dbt.datediff(\n 'valid_starting_at',\n 'valid_ending_at',\n 'minute' )}} as calendar_minutes,\n sum({{ dbt.datediff(\n 'valid_starting_at', \n 'valid_ending_at', \n 'minute') }} ) \n over (partition by ticket_id, sla_applied_at order by valid_starting_at rows between unbounded preceding and current row) as running_total_calendar_minutes\n from agent_work_time_filtered_statuses\n\n), agent_work_time_calendar_minutes_flagged as (\n\nselect \n agent_work_time_calendar_minutes.*,\n target - running_total_calendar_minutes as remaining_target_minutes,\n case when (target - running_total_calendar_minutes) < 0 \n and \n (lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) >= 0 \n or \n lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) is null) \n then true else false end as is_breached_during_schedule\n \nfrom agent_work_time_calendar_minutes\n\n), final as (\n select\n *,\n (remaining_target_minutes + calendar_minutes) as breach_minutes,\n {{ fivetran_utils.timestamp_add(\n 'minute',\n '(remaining_target_minutes + calendar_minutes)',\n 'valid_starting_at', \n ) }} as sla_breach_at\n from agent_work_time_calendar_minutes_flagged\n\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__agent_work_time_filtered_statuses", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__agent_work_time_filtered_statuses"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/agent_work_time/int_zendesk__agent_work_time_calendar_hours.sql", "compiled": true, "compiled_code": "-- Calculate breach time for agent work time, calendar hours\nwith agent_work_time_filtered_statuses as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__agent_work_time_filtered_statuses\"\n where not 
in_business_hours\n\n), agent_work_time_calendar_minutes as (\n\n select \n *,\n \n (\n (\n ((valid_ending_at)::date - (valid_starting_at)::date)\n * 24 + date_part('hour', (valid_ending_at)::timestamp) - date_part('hour', (valid_starting_at)::timestamp))\n * 60 + date_part('minute', (valid_ending_at)::timestamp) - date_part('minute', (valid_starting_at)::timestamp))\n as calendar_minutes,\n sum(\n (\n (\n ((valid_ending_at)::date - (valid_starting_at)::date)\n * 24 + date_part('hour', (valid_ending_at)::timestamp) - date_part('hour', (valid_starting_at)::timestamp))\n * 60 + date_part('minute', (valid_ending_at)::timestamp) - date_part('minute', (valid_starting_at)::timestamp))\n ) \n over (partition by ticket_id, sla_applied_at order by valid_starting_at rows between unbounded preceding and current row) as running_total_calendar_minutes\n from agent_work_time_filtered_statuses\n\n), agent_work_time_calendar_minutes_flagged as (\n\nselect \n agent_work_time_calendar_minutes.*,\n target - running_total_calendar_minutes as remaining_target_minutes,\n case when (target - running_total_calendar_minutes) < 0 \n and \n (lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) >= 0 \n or \n lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) is null) \n then true else false end as is_breached_during_schedule\n \nfrom agent_work_time_calendar_minutes\n\n), final as (\n select\n *,\n (remaining_target_minutes + calendar_minutes) as breach_minutes,\n \n\n valid_starting_at + ((interval '1 minute') * ((remaining_target_minutes + calendar_minutes)))\n\n as sla_breach_at\n from agent_work_time_calendar_minutes_flagged\n\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__agent_work_time_filtered_statuses": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__agent_work_time_filtered_statuses", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/agent_work_time/int_zendesk__agent_work_time_filtered_statuses.sql", "original_file_path": "models/sla_policy/agent_work_time/int_zendesk__agent_work_time_filtered_statuses.sql", "unique_id": "model.zendesk.int_zendesk__agent_work_time_filtered_statuses", "fqn": ["zendesk", "sla_policy", "agent_work_time", "int_zendesk__agent_work_time_filtered_statuses"], "alias": "int_zendesk__agent_work_time_filtered_statuses", "checksum": {"name": "sha256", "checksum": "fbb6aeccc9d5c6ec4e48160a9f5fdf94c7be4e3639d19a3e55e64ecbedccaa62"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": 
{"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1724705297.570407, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__agent_work_time_filtered_statuses\"", "raw_code": "with agent_work_time_sla as (\n\n select *\n from {{ ref('int_zendesk__sla_policy_applied') }}\n where metric = 'agent_work_time'\n\n), ticket_historical_status as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_status') }}\n \n--This captures the statuses of the ticket while the agent work time sla was active for the ticket.\n), agent_work_time_filtered_statuses as (\n\n select \n ticket_historical_status.ticket_id,\n greatest(ticket_historical_status.valid_starting_at, agent_work_time_sla.sla_applied_at) as valid_starting_at,\n coalesce(\n ticket_historical_status.valid_ending_at, \n {{ fivetran_utils.timestamp_add('day', 30, \"\" ~ dbt.current_timestamp_backcompat() ~ \"\") }} ) as valid_ending_at, --assumes current status continues into the future. This is necessary to predict future SLA breaches (not just past).\n ticket_historical_status.status as ticket_status,\n agent_work_time_sla.sla_applied_at,\n agent_work_time_sla.target, \n agent_work_time_sla.sla_policy_name,\n agent_work_time_sla.ticket_created_at,\n agent_work_time_sla.in_business_hours\n from ticket_historical_status\n join agent_work_time_sla\n on ticket_historical_status.ticket_id = agent_work_time_sla.ticket_id\n where ticket_historical_status.status in ('new', 'open') -- these are the only statuses that count as \"agent work time\"\n and sla_applied_at < valid_ending_at\n\n)\nselect *\nfrom agent_work_time_filtered_statuses", "language": "sql", "refs": [{"name": "int_zendesk__sla_policy_applied", "package": null, "version": null}, {"name": "int_zendesk__ticket_historical_status", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__ticket_historical_status"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/agent_work_time/int_zendesk__agent_work_time_filtered_statuses.sql", "compiled": true, "compiled_code": "with agent_work_time_sla as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__sla_policy_applied\"\n where metric = 'agent_work_time'\n\n), ticket_historical_status as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n \n--This captures the statuses of the ticket while the agent work time sla was active for the ticket.\n), agent_work_time_filtered_statuses as (\n\n select \n ticket_historical_status.ticket_id,\n greatest(ticket_historical_status.valid_starting_at, agent_work_time_sla.sla_applied_at) as valid_starting_at,\n coalesce(\n ticket_historical_status.valid_ending_at, \n \n\n \n current_timestamp::timestamp\n + ((interval '1 day') * (30))\n\n ) as valid_ending_at, --assumes current status continues into the future. 
This is necessary to predict future SLA breaches (not just past).\n ticket_historical_status.status as ticket_status,\n agent_work_time_sla.sla_applied_at,\n agent_work_time_sla.target, \n agent_work_time_sla.sla_policy_name,\n agent_work_time_sla.ticket_created_at,\n agent_work_time_sla.in_business_hours\n from ticket_historical_status\n join agent_work_time_sla\n on ticket_historical_status.ticket_id = agent_work_time_sla.ticket_id\n where ticket_historical_status.status in ('new', 'open') -- these are the only statuses that count as \"agent work time\"\n and sla_applied_at < valid_ending_at\n\n)\nselect *\nfrom agent_work_time_filtered_statuses", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__reply_time_business_hours": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__reply_time_business_hours", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/reply_time/int_zendesk__reply_time_business_hours.sql", "original_file_path": "models/sla_policy/reply_time/int_zendesk__reply_time_business_hours.sql", "unique_id": "model.zendesk.int_zendesk__reply_time_business_hours", "fqn": ["zendesk", "sla_policy", "reply_time", "int_zendesk__reply_time_business_hours"], "alias": "int_zendesk__reply_time_business_hours", "checksum": {"name": "sha256", "checksum": "9ff6bb4774c2854a7d21ac27ac2690db52bc80920ae8d4e88680631557a9b590"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1724705297.574323, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__reply_time_business_hours\"", "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\n-- step 3, determine when an SLA will breach for SLAs that are in business hours\n\nwith ticket_schedules as (\n\n select *\n from {{ ref('int_zendesk__ticket_schedules') }}\n\n), schedule as (\n\n select *\n from {{ ref('int_zendesk__schedule_spine') }}\n\n), sla_policy_applied as (\n\n select *\n from {{ ref('int_zendesk__sla_policy_applied') }}\n\n), users as (\n\n select *\n from {{ ref('int_zendesk__user_aggregates') }}\n\n), ticket_updates as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n\n), ticket_solved_times as (\n select\n ticket_id,\n valid_starting_at as solved_at\n from ticket_updates\n where field_name = 'status'\n and value in ('solved','closed')\n\n), reply_time as (\n select \n ticket_comment.ticket_id,\n ticket_comment.valid_starting_at as reply_at,\n commenter.role\n from ticket_updates as ticket_comment\n join users as commenter\n on 
commenter.user_id = ticket_comment.user_id\n where field_name = 'comment' \n and ticket_comment.is_public\n and commenter.role in ('agent','admin')\n\n), schedule_business_hours as (\n\n select \n schedule_id,\n sum(end_time - start_time) as total_schedule_weekly_business_minutes\n -- referring to stg_zendesk__schedule instead of int_zendesk__schedule_spine just to calculate total minutes\n from {{ ref('stg_zendesk__schedule') }}\n group by 1\n\n), ticket_sla_applied_with_schedules as (\n\n select \n sla_policy_applied.*,\n ticket_schedules.schedule_id,\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('sla_policy_applied.sla_applied_at','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n \"cast(sla_policy_applied.sla_applied_at as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as start_time_in_minutes_from_week,\n schedule_business_hours.total_schedule_weekly_business_minutes,\n {{ dbt_date.week_start('sla_policy_applied.sla_applied_at','UTC') }} as start_week_date\n\n from sla_policy_applied\n left join ticket_schedules on sla_policy_applied.ticket_id = ticket_schedules.ticket_id\n and {{ fivetran_utils.timestamp_add('second', -1, 'ticket_schedules.schedule_created_at') }} <= sla_policy_applied.sla_applied_at\n and {{ fivetran_utils.timestamp_add('second', -1, 'ticket_schedules.schedule_invalidated_at') }} > sla_policy_applied.sla_applied_at\n left join schedule_business_hours \n on ticket_schedules.schedule_id = schedule_business_hours.schedule_id\n where sla_policy_applied.in_business_hours\n and metric in ('next_reply_time', 'first_reply_time')\n\n), first_reply_solve_times as (\n select\n ticket_sla_applied_with_schedules.ticket_id,\n ticket_sla_applied_with_schedules.ticket_created_at,\n ticket_sla_applied_with_schedules.valid_starting_at,\n ticket_sla_applied_with_schedules.ticket_current_status,\n ticket_sla_applied_with_schedules.metric,\n ticket_sla_applied_with_schedules.latest_sla,\n ticket_sla_applied_with_schedules.sla_applied_at,\n ticket_sla_applied_with_schedules.target,\n ticket_sla_applied_with_schedules.in_business_hours,\n ticket_sla_applied_with_schedules.sla_policy_name,\n ticket_sla_applied_with_schedules.schedule_id,\n ticket_sla_applied_with_schedules.start_time_in_minutes_from_week,\n ticket_sla_applied_with_schedules.total_schedule_weekly_business_minutes,\n ticket_sla_applied_with_schedules.start_week_date,\n min(reply_time.reply_at) as first_reply_time,\n min(ticket_solved_times.solved_at) as first_solved_time\n from ticket_sla_applied_with_schedules\n left join reply_time\n on reply_time.ticket_id = ticket_sla_applied_with_schedules.ticket_id\n and reply_time.reply_at > ticket_sla_applied_with_schedules.sla_applied_at\n left join ticket_solved_times\n on ticket_sla_applied_with_schedules.ticket_id = ticket_solved_times.ticket_id\n and ticket_solved_times.solved_at > ticket_sla_applied_with_schedules.sla_applied_at\n {{ dbt_utils.group_by(n=14) }}\n\n), week_index_calc as (\n select \n *,\n {{ dbt.datediff(\"sla_applied_at\", \"least(coalesce(first_reply_time, \" ~ dbt.current_timestamp() ~ \"), coalesce(first_solved_time, \" ~ dbt.current_timestamp() ~ \"))\", \"week\") }} + 1 as week_index\n from first_reply_solve_times\n\n), weeks as (\n\n {{ dbt_utils.generate_series(52) }}\n\n), weeks_cross_ticket_sla_applied as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select\n week_index_calc.*,\n cast(weeks.generated_number - 1 as {{ dbt.type_int() }}) 
as week_number\n\n from week_index_calc\n cross join weeks\n where week_index >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_sla_applied.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time,\n cast((7*24*60) as {{ dbt.type_int() }}) as ticket_week_end_time\n from weeks_cross_ticket_sla_applied\n\n), intercepted_periods as (\n\n select \n weekly_periods.*,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n (schedule.end_time_utc - greatest(ticket_week_start_time,schedule.start_time_utc)) as lapsed_business_minutes,\n sum(schedule.end_time_utc - greatest(ticket_week_start_time,schedule.start_time_utc)) over \n (partition by ticket_id, metric, sla_applied_at \n order by week_number, schedule.start_time_utc\n rows between unbounded preceding and current row) as sum_lapsed_business_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast ({{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time', from_date_or_timestamp='start_week_date') }} as date) > cast(schedule.valid_from as date)\n and cast ({{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time', from_date_or_timestamp='start_week_date') }} as date) < cast(schedule.valid_until as date)\n\n), intercepted_periods_with_breach_flag as (\n \n select \n *,\n target - sum_lapsed_business_minutes as remaining_minutes,\n case when (target - sum_lapsed_business_minutes) < 0 \n and \n (lag(target - sum_lapsed_business_minutes) over\n (partition by ticket_id, metric, sla_applied_at order by week_number, schedule_start_time) >= 0 \n or \n lag(target - sum_lapsed_business_minutes) over\n (partition by ticket_id, metric, sla_applied_at order by week_number, schedule_start_time) is null) \n then true else false end as is_breached_during_schedule -- this flags the scheduled period on which the breach took place\n from intercepted_periods\n\n), intercepted_periods_with_breach_flag_calculated as (\n\n select\n *,\n schedule_end_time + remaining_minutes as breached_at_minutes,\n {{ dbt.date_trunc('week', 'sla_applied_at') }} as starting_point,\n {{ fivetran_utils.timestamp_add(\n \"minute\",\n \"cast(((7*24*60) * week_number) + (schedule_end_time + remaining_minutes) as \" ~ dbt.type_int() ~ \" )\",\n \"cast(\" ~ dbt_date.week_start('sla_applied_at','UTC') ~ \" as \" ~ dbt.type_timestamp() ~ \")\" ) }} as sla_breach_at,\n {{ fivetran_utils.timestamp_add(\n \"minute\",\n \"cast(((7*24*60) * week_number) + (schedule_start_time) as \" ~ dbt.type_int() ~ \" )\",\n \"cast(\" ~ dbt_date.week_start('sla_applied_at','UTC') ~ \" as \" ~ dbt.type_timestamp() ~ \")\" ) }} as sla_schedule_start_at,\n {{ fivetran_utils.timestamp_add(\n \"minute\",\n \"cast(((7*24*60) * week_number) + (schedule_end_time) as \" ~ dbt.type_int() ~ \" )\",\n \"cast(\" ~ dbt_date.week_start('sla_applied_at','UTC') ~ \" as \" ~ dbt.type_timestamp() ~ \")\" ) }} as sla_schedule_end_at,\n {{ 
dbt_date.week_end(\"sla_applied_at\", tz=\"America/UTC\") }} as week_end_date\n from intercepted_periods_with_breach_flag\n\n), reply_time_business_hours_sla as (\n\n select\n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n greatest(sla_applied_at,sla_schedule_start_at) as sla_schedule_start_at,\n sla_schedule_end_at,\n target,\n sum_lapsed_business_minutes,\n in_business_hours,\n sla_breach_at,\n is_breached_during_schedule,\n total_schedule_weekly_business_minutes,\n max(case when is_breached_during_schedule then sla_breach_at else null end) over (partition by ticket_id, metric, sla_applied_at, target) as sla_breach_exact_time,\n week_number\n from intercepted_periods_with_breach_flag_calculated\n\n) \n\nselect * \nfrom reply_time_business_hours_sla", "language": "sql", "refs": [{"name": "int_zendesk__ticket_schedules", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}, {"name": "int_zendesk__sla_policy_applied", "package": null, "version": null}, {"name": "int_zendesk__user_aggregates", "package": null, "version": null}, {"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "stg_zendesk__schedule", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt.datediff", "macro.fivetran_utils.timestamp_add", "macro.dbt_utils.group_by", "macro.dbt.current_timestamp", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd", "macro.dbt.date_trunc", "macro.dbt_date.week_end"], "nodes": ["model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__user_aggregates", "model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__schedule"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/reply_time/int_zendesk__reply_time_business_hours.sql", "compiled": true, "compiled_code": "\n\n-- step 3, determine when an SLA will breach for SLAs that are in business hours\n\nwith ticket_schedules as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), sla_policy_applied as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__sla_policy_applied\"\n\n), users as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), ticket_updates as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__updates\"\n\n), ticket_solved_times as (\n select\n ticket_id,\n valid_starting_at as solved_at\n from ticket_updates\n where field_name = 'status'\n and value in ('solved','closed')\n\n), reply_time as (\n select \n ticket_comment.ticket_id,\n ticket_comment.valid_starting_at as reply_at,\n commenter.role\n from ticket_updates as ticket_comment\n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n where field_name = 'comment' \n and ticket_comment.is_public\n and commenter.role in ('agent','admin')\n\n), schedule_business_hours as (\n\n select \n schedule_id,\n sum(end_time - start_time) as total_schedule_weekly_business_minutes\n -- referring to stg_zendesk__schedule instead of int_zendesk__schedule_spine 
just to calculate total minutes\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__schedule\"\n group by 1\n\n), ticket_sla_applied_with_schedules as (\n\n select \n sla_policy_applied.*,\n ticket_schedules.schedule_id,\n (\n (\n (\n (\n ((cast(sla_policy_applied.sla_applied_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_policy_applied.sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(sla_policy_applied.sla_applied_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_policy_applied.sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(sla_policy_applied.sla_applied_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_policy_applied.sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(sla_policy_applied.sla_applied_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_policy_applied.sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n schedule_business_hours.total_schedule_weekly_business_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_policy_applied.sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n\n from sla_policy_applied\n left join ticket_schedules on sla_policy_applied.ticket_id = ticket_schedules.ticket_id\n and \n\n ticket_schedules.schedule_created_at + ((interval '1 second') * (-1))\n\n <= sla_policy_applied.sla_applied_at\n and \n\n ticket_schedules.schedule_invalidated_at + ((interval '1 second') * (-1))\n\n > sla_policy_applied.sla_applied_at\n left join schedule_business_hours \n on ticket_schedules.schedule_id = schedule_business_hours.schedule_id\n where sla_policy_applied.in_business_hours\n and metric in ('next_reply_time', 'first_reply_time')\n\n), first_reply_solve_times as (\n select\n ticket_sla_applied_with_schedules.ticket_id,\n ticket_sla_applied_with_schedules.ticket_created_at,\n ticket_sla_applied_with_schedules.valid_starting_at,\n ticket_sla_applied_with_schedules.ticket_current_status,\n ticket_sla_applied_with_schedules.metric,\n ticket_sla_applied_with_schedules.latest_sla,\n ticket_sla_applied_with_schedules.sla_applied_at,\n ticket_sla_applied_with_schedules.target,\n ticket_sla_applied_with_schedules.in_business_hours,\n ticket_sla_applied_with_schedules.sla_policy_name,\n ticket_sla_applied_with_schedules.schedule_id,\n ticket_sla_applied_with_schedules.start_time_in_minutes_from_week,\n ticket_sla_applied_with_schedules.total_schedule_weekly_business_minutes,\n ticket_sla_applied_with_schedules.start_week_date,\n min(reply_time.reply_at) as first_reply_time,\n min(ticket_solved_times.solved_at) as first_solved_time\n from ticket_sla_applied_with_schedules\n left join reply_time\n on reply_time.ticket_id = ticket_sla_applied_with_schedules.ticket_id\n and reply_time.reply_at > ticket_sla_applied_with_schedules.sla_applied_at\n left join 
ticket_solved_times\n on ticket_sla_applied_with_schedules.ticket_id = ticket_solved_times.ticket_id\n and ticket_solved_times.solved_at > ticket_sla_applied_with_schedules.sla_applied_at\n group by 1,2,3,4,5,6,7,8,9,10,11,12,13,14\n\n), week_index_calc as (\n select \n *,\n \n (\n ((least(coalesce(first_reply_time, now()), coalesce(first_solved_time, now())))::date - (sla_applied_at)::date)\n / 7 + case\n when date_part('dow', (sla_applied_at)::timestamp) <= date_part('dow', (least(coalesce(first_reply_time, now()), coalesce(first_solved_time, now())))::timestamp) then\n case when sla_applied_at <= least(coalesce(first_reply_time, now()), coalesce(first_solved_time, now())) then 0 else -1 end\n else\n case when sla_applied_at <= least(coalesce(first_reply_time, now()), coalesce(first_solved_time, now())) then 1 else 0 end\n end)\n + 1 as week_index\n from first_reply_solve_times\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_sla_applied as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select\n week_index_calc.*,\n cast(weeks.generated_number - 1 as integer) as week_number\n\n from week_index_calc\n cross join weeks\n where week_index >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_sla_applied.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n cast((7*24*60) as integer) as ticket_week_end_time\n from weeks_cross_ticket_sla_applied\n\n), intercepted_periods as (\n\n select \n weekly_periods.*,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n (schedule.end_time_utc - greatest(ticket_week_start_time,schedule.start_time_utc)) as lapsed_business_minutes,\n sum(schedule.end_time_utc - greatest(ticket_week_start_time,schedule.start_time_utc)) over \n (partition by ticket_id, metric, sla_applied_at \n order by week_number, schedule.start_time_utc\n rows between unbounded preceding and current row) as sum_lapsed_business_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast (\n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast (\n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < 
cast(schedule.valid_until as date)\n\n), intercepted_periods_with_breach_flag as (\n \n select \n *,\n target - sum_lapsed_business_minutes as remaining_minutes,\n case when (target - sum_lapsed_business_minutes) < 0 \n and \n (lag(target - sum_lapsed_business_minutes) over\n (partition by ticket_id, metric, sla_applied_at order by week_number, schedule_start_time) >= 0 \n or \n lag(target - sum_lapsed_business_minutes) over\n (partition by ticket_id, metric, sla_applied_at order by week_number, schedule_start_time) is null) \n then true else false end as is_breached_during_schedule -- this flags the scheduled period on which the breach took place\n from intercepted_periods\n\n), intercepted_periods_with_breach_flag_calculated as (\n\n select\n *,\n schedule_end_time + remaining_minutes as breached_at_minutes,\n date_trunc('week', sla_applied_at) as starting_point,\n \n\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) + ((interval '1 minute') * (cast(((7*24*60) * week_number) + (schedule_end_time + remaining_minutes) as integer )))\n\n as sla_breach_at,\n \n\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) + ((interval '1 minute') * (cast(((7*24*60) * week_number) + (schedule_start_time) as integer )))\n\n as sla_schedule_start_at,\n \n\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) + ((interval '1 minute') * (cast(((7*24*60) * week_number) + (schedule_end_time) as integer )))\n\n as sla_schedule_end_at,\n cast(\n\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) + ((interval '1 day') * (6))\n\n as date) as week_end_date\n from intercepted_periods_with_breach_flag\n\n), reply_time_business_hours_sla as (\n\n select\n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n greatest(sla_applied_at,sla_schedule_start_at) as sla_schedule_start_at,\n sla_schedule_end_at,\n target,\n sum_lapsed_business_minutes,\n in_business_hours,\n sla_breach_at,\n is_breached_during_schedule,\n total_schedule_weekly_business_minutes,\n max(case when is_breached_during_schedule then sla_breach_at else null end) over (partition by ticket_id, metric, sla_applied_at, target) as sla_breach_exact_time,\n week_number\n from intercepted_periods_with_breach_flag_calculated\n\n) \n\nselect * \nfrom reply_time_business_hours_sla", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__reply_time_calendar_hours": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__reply_time_calendar_hours", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/reply_time/int_zendesk__reply_time_calendar_hours.sql", "original_file_path": "models/sla_policy/reply_time/int_zendesk__reply_time_calendar_hours.sql", "unique_id": "model.zendesk.int_zendesk__reply_time_calendar_hours", "fqn": ["zendesk", "sla_policy", "reply_time", 
"int_zendesk__reply_time_calendar_hours"], "alias": "int_zendesk__reply_time_calendar_hours", "checksum": {"name": "sha256", "checksum": "6ec2775efbac4d405efd0b30a1ec5c593e140c3f4a1be4ff8df7fd0cd4791a2e"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1724705297.5913851, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__reply_time_calendar_hours\"", "raw_code": "--REPLY TIME SLA\n-- step 2, figure out when the sla will breach for sla's in calendar hours. The calculation is relatively straightforward.\n\nwith sla_policy_applied as (\n\n select *\n from {{ ref('int_zendesk__sla_policy_applied') }}\n\n), final as (\n select\n *,\n {{ fivetran_utils.timestamp_add(\n \"minute\",\n \"cast(target as \" ~ dbt.type_int() ~ \" )\",\n \"sla_applied_at\" ) }} as sla_breach_at\n from sla_policy_applied\n where not in_business_hours\n and metric in ('next_reply_time', 'first_reply_time')\n\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__sla_policy_applied", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_int", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__sla_policy_applied"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/reply_time/int_zendesk__reply_time_calendar_hours.sql", "compiled": true, "compiled_code": "--REPLY TIME SLA\n-- step 2, figure out when the sla will breach for sla's in calendar hours. 
The calculation is relatively straightforward.\n\nwith sla_policy_applied as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__sla_policy_applied\"\n\n), final as (\n select\n *,\n \n\n sla_applied_at + ((interval '1 minute') * (cast(target as integer )))\n\n as sla_breach_at\n from sla_policy_applied\n where not in_business_hours\n and metric in ('next_reply_time', 'first_reply_time')\n\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__reply_time_combined": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__reply_time_combined", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/reply_time/int_zendesk__reply_time_combined.sql", "original_file_path": "models/sla_policy/reply_time/int_zendesk__reply_time_combined.sql", "unique_id": "model.zendesk.int_zendesk__reply_time_combined", "fqn": ["zendesk", "sla_policy", "reply_time", "int_zendesk__reply_time_combined"], "alias": "int_zendesk__reply_time_combined", "checksum": {"name": "sha256", "checksum": "3a7a8ddea0400ea314ff4ae83b81654414788634e76af330bf27c384733ac43b"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1724705297.5950139, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__reply_time_combined\"", "raw_code": "with reply_time_calendar_hours_sla as (\n \n select *\n from {{ ref('int_zendesk__reply_time_calendar_hours') }}\n\n{% if var('using_schedules', True) %}\n\n), reply_time_business_hours_sla as (\n\n select *\n from {{ ref('int_zendesk__reply_time_business_hours') }}\n\n{% endif %}\n\n), ticket_updates as (\n select *\n from {{ ref('int_zendesk__updates') }}\n\n), users as (\n\n select *\n from {{ ref('int_zendesk__user_aggregates') }}\n\n), reply_time_breached_at as (\n\n select \n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n sla_applied_at as sla_schedule_start_at,\n cast(null as timestamp) as sla_schedule_end_at,\n cast(null as {{ dbt.type_numeric() }}) as sum_lapsed_business_minutes,\n target,\n in_business_hours,\n sla_breach_at,\n cast(null as {{ dbt.type_numeric() }}) as week_number,\n cast(null as {{ dbt.type_numeric() }}) as total_schedule_weekly_business_minutes\n from reply_time_calendar_hours_sla\n\n{% if var('using_schedules', True) %}\n\n union all\n\n select \n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n sla_schedule_start_at,\n sla_schedule_end_at,\n sum_lapsed_business_minutes,\n target,\n 
in_business_hours,\n sla_breach_exact_time as sla_breach_at,\n week_number,\n total_schedule_weekly_business_minutes\n from reply_time_business_hours_sla\n{% endif %}\n\n-- Now that we have the breach time, see when the first reply after the sla policy was applied took place.\n), ticket_solved_times as (\n select\n ticket_id,\n valid_starting_at as solved_at\n from ticket_updates\n where field_name = 'status'\n and value in ('solved','closed')\n\n), reply_time as (\n select \n ticket_comment.ticket_id,\n ticket_comment.valid_starting_at as reply_at,\n commenter.role\n from ticket_updates as ticket_comment\n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n where field_name = 'comment' \n and ticket_comment.is_public\n and commenter.role in ('agent','admin')\n\n), reply_time_breached_at_with_next_reply_timestamp as (\n\n select \n reply_time_breached_at.ticket_id,\n reply_time_breached_at.sla_policy_name,\n reply_time_breached_at.metric,\n reply_time_breached_at.ticket_created_at,\n reply_time_breached_at.sla_applied_at,\n reply_time_breached_at.sum_lapsed_business_minutes,\n reply_time_breached_at.target,\n reply_time_breached_at.in_business_hours,\n reply_time_breached_at.sla_breach_at,\n reply_time_breached_at.week_number,\n min(reply_time_breached_at.sla_schedule_start_at) as sla_schedule_start_at,\n min(reply_time_breached_at.sla_schedule_end_at) as sla_schedule_end_at,\n min(reply_at) as agent_reply_at,\n min(solved_at) as next_solved_at\n from reply_time_breached_at\n left join reply_time\n on reply_time.ticket_id = reply_time_breached_at.ticket_id\n and reply_time.reply_at > reply_time_breached_at.sla_applied_at\n left join ticket_solved_times\n on reply_time_breached_at.ticket_id = ticket_solved_times.ticket_id\n and ticket_solved_times.solved_at > reply_time_breached_at.sla_applied_at\n {{ dbt_utils.group_by(n=10) }}\n\n), lagging_time_block as (\n select\n *,\n row_number() over (partition by ticket_id, metric, sla_applied_at order by sla_schedule_start_at) as day_index,\n lead(sla_schedule_start_at) over (partition by ticket_id, sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at) as next_schedule_start,\n min(sla_breach_at) over (partition by sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at rows unbounded preceding) as first_sla_breach_at,\n\t\tcoalesce(lag(sum_lapsed_business_minutes) over (partition by sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at), 0) as sum_lapsed_business_minutes_new,\n {{ dbt.datediff(\"sla_schedule_start_at\", \"agent_reply_at\", 'second') }} / 60 as total_runtime_minutes -- total minutes from sla_schedule_start_at and agent reply time, before taking into account SLA end time\n from reply_time_breached_at_with_next_reply_timestamp\n\n), filtered_reply_times as (\n select\n *\n from lagging_time_block\n where (\n in_business_hours\n and ((\n agent_reply_at >= sla_schedule_start_at and agent_reply_at <= sla_schedule_end_at) -- ticket is replied to between a schedule window\n or (agent_reply_at < sla_schedule_start_at and sum_lapsed_business_minutes_new = 0 and sla_breach_at = first_sla_breach_at and day_index = 1) -- ticket is replied to before any schedule begins and no business minutes have been spent on it\n or (agent_reply_at is null and next_solved_at >= sla_schedule_start_at and next_solved_at < next_schedule_start) -- There are no reply times, but the ticket is closed and we should capture the closed date as the first and/or next reply time if there is not one 
preceding.\n or (next_solved_at is null and agent_reply_at is null and {{ dbt.current_timestamp() }} >= sla_schedule_start_at and ({{ dbt.current_timestamp() }} < next_schedule_start or next_schedule_start is null)) -- ticket is not replied to and therefore active. But only bring through the active SLA record that is most recent (after the last SLA schedule starts but before the next, or if there does not exist a next SLA schedule start time) \n or (agent_reply_at > sla_schedule_end_at and (agent_reply_at < next_schedule_start or next_schedule_start is null)) -- ticket is replied to outside sla schedule hours\n ) and sla_schedule_start_at <= {{ dbt.current_timestamp() }}) -- To help limit the data we do not want to bring through any schedule rows in the future.\n or not in_business_hours\n\n), reply_time_breached_at_remove_old_sla as (\n select\n *,\n {{ dbt.current_timestamp() }} as current_time_check,\n lead(sla_applied_at) over (partition by ticket_id, metric, in_business_hours order by sla_applied_at) as updated_sla_policy_starts_at,\n case when \n lead(sla_applied_at) over (partition by ticket_id, metric, in_business_hours order by sla_applied_at) --updated sla policy start at time\n < sla_breach_at then true else false end as is_stale_sla_policy,\n case when (sla_breach_at < agent_reply_at and sla_breach_at < next_solved_at)\n or (sla_breach_at < agent_reply_at and next_solved_at is null)\n or (agent_reply_at is null and sla_breach_at < next_solved_at)\n or (agent_reply_at is null and next_solved_at is null)\n then true\n else false\n end as is_sla_breached,\n sum_lapsed_business_minutes_new + total_runtime_minutes as total_new_minutes -- add total runtime to sum_lapsed_business_minutes_new (the sum_lapsed_business_minutes from prior row)\n from filtered_reply_times\n\n), reply_time_breach as ( \n select \n *,\n case when is_sla_breached\n then sla_breach_at -- If the SLA was breached then record that time as the breach \n else coalesce(agent_reply_at, next_solved_at) -- If the SLA was not breached then record either the agent_reply_at or next_solve_at as the breach event time as it was achieved.\n end as sla_update_at,\n case when total_runtime_minutes < 0 -- agent has already replied to prior to this SLA schedule\n then 0 -- so don't add new minutes to the SLA\n when total_new_minutes > sum_lapsed_business_minutes -- if total runtime, regardless of when the SLA schedule ended, is more than the total lapsed business minutes, that means the agent replied after the SLA schedule\n then sum_lapsed_business_minutes -- the elapsed time after the SLA end time should not be calculated as part of the business minutes, therefore sla_elapsed_time should only be sum_lapsed_business_minutes\n else sum_lapsed_business_minutes_new + ({{ dbt.datediff(\"sla_schedule_start_at\", \"coalesce(agent_reply_at, next_solved_at, current_time_check)\", 'second') }} / 60) -- otherwise, the sla_elapsed_time will be sum_lapsed_business_minutes_new (the prior record's sum_lapsed_business_minutes) plus the minutes between SLA schedule start and agent_reply_time. 
If the agent hasn't replied yet, then the minute counter is still running, hence the coalesce of agent_reply_time and current_time_check.\n end as sla_elapsed_time\n from reply_time_breached_at_remove_old_sla \n)\n\nselect *\nfrom reply_time_breach", "language": "sql", "refs": [{"name": "int_zendesk__reply_time_calendar_hours", "package": null, "version": null}, {"name": "int_zendesk__reply_time_business_hours", "package": null, "version": null}, {"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "int_zendesk__user_aggregates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_numeric", "macro.dbt_utils.group_by", "macro.dbt.datediff", "macro.dbt.current_timestamp"], "nodes": ["model.zendesk.int_zendesk__reply_time_calendar_hours", "model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__updates", "model.zendesk.int_zendesk__user_aggregates"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/reply_time/int_zendesk__reply_time_combined.sql", "compiled": true, "compiled_code": "with reply_time_calendar_hours_sla as (\n \n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__reply_time_calendar_hours\"\n\n\n\n), reply_time_business_hours_sla as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__reply_time_business_hours\"\n\n\n\n), ticket_updates as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__updates\"\n\n), users as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), reply_time_breached_at as (\n\n select \n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n sla_applied_at as sla_schedule_start_at,\n cast(null as timestamp) as sla_schedule_end_at,\n cast(null as numeric(28,6)) as sum_lapsed_business_minutes,\n target,\n in_business_hours,\n sla_breach_at,\n cast(null as numeric(28,6)) as week_number,\n cast(null as numeric(28,6)) as total_schedule_weekly_business_minutes\n from reply_time_calendar_hours_sla\n\n\n\n union all\n\n select \n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n sla_schedule_start_at,\n sla_schedule_end_at,\n sum_lapsed_business_minutes,\n target,\n in_business_hours,\n sla_breach_exact_time as sla_breach_at,\n week_number,\n total_schedule_weekly_business_minutes\n from reply_time_business_hours_sla\n\n\n-- Now that we have the breach time, see when the first reply after the sla policy was applied took place.\n), ticket_solved_times as (\n select\n ticket_id,\n valid_starting_at as solved_at\n from ticket_updates\n where field_name = 'status'\n and value in ('solved','closed')\n\n), reply_time as (\n select \n ticket_comment.ticket_id,\n ticket_comment.valid_starting_at as reply_at,\n commenter.role\n from ticket_updates as ticket_comment\n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n where field_name = 'comment' \n and ticket_comment.is_public\n and commenter.role in ('agent','admin')\n\n), reply_time_breached_at_with_next_reply_timestamp as (\n\n select \n reply_time_breached_at.ticket_id,\n reply_time_breached_at.sla_policy_name,\n reply_time_breached_at.metric,\n reply_time_breached_at.ticket_created_at,\n reply_time_breached_at.sla_applied_at,\n reply_time_breached_at.sum_lapsed_business_minutes,\n reply_time_breached_at.target,\n 
reply_time_breached_at.in_business_hours,\n reply_time_breached_at.sla_breach_at,\n reply_time_breached_at.week_number,\n min(reply_time_breached_at.sla_schedule_start_at) as sla_schedule_start_at,\n min(reply_time_breached_at.sla_schedule_end_at) as sla_schedule_end_at,\n min(reply_at) as agent_reply_at,\n min(solved_at) as next_solved_at\n from reply_time_breached_at\n left join reply_time\n on reply_time.ticket_id = reply_time_breached_at.ticket_id\n and reply_time.reply_at > reply_time_breached_at.sla_applied_at\n left join ticket_solved_times\n on reply_time_breached_at.ticket_id = ticket_solved_times.ticket_id\n and ticket_solved_times.solved_at > reply_time_breached_at.sla_applied_at\n group by 1,2,3,4,5,6,7,8,9,10\n\n), lagging_time_block as (\n select\n *,\n row_number() over (partition by ticket_id, metric, sla_applied_at order by sla_schedule_start_at) as day_index,\n lead(sla_schedule_start_at) over (partition by ticket_id, sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at) as next_schedule_start,\n min(sla_breach_at) over (partition by sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at rows unbounded preceding) as first_sla_breach_at,\n\t\tcoalesce(lag(sum_lapsed_business_minutes) over (partition by sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at), 0) as sum_lapsed_business_minutes_new,\n \n (\n (\n (\n ((agent_reply_at)::date - (sla_schedule_start_at)::date)\n * 24 + date_part('hour', (agent_reply_at)::timestamp) - date_part('hour', (sla_schedule_start_at)::timestamp))\n * 60 + date_part('minute', (agent_reply_at)::timestamp) - date_part('minute', (sla_schedule_start_at)::timestamp))\n * 60 + floor(date_part('second', (agent_reply_at)::timestamp)) - floor(date_part('second', (sla_schedule_start_at)::timestamp)))\n / 60 as total_runtime_minutes -- total minutes from sla_schedule_start_at and agent reply time, before taking into account SLA end time\n from reply_time_breached_at_with_next_reply_timestamp\n\n), filtered_reply_times as (\n select\n *\n from lagging_time_block\n where (\n in_business_hours\n and ((\n agent_reply_at >= sla_schedule_start_at and agent_reply_at <= sla_schedule_end_at) -- ticket is replied to between a schedule window\n or (agent_reply_at < sla_schedule_start_at and sum_lapsed_business_minutes_new = 0 and sla_breach_at = first_sla_breach_at and day_index = 1) -- ticket is replied to before any schedule begins and no business minutes have been spent on it\n or (agent_reply_at is null and next_solved_at >= sla_schedule_start_at and next_solved_at < next_schedule_start) -- There are no reply times, but the ticket is closed and we should capture the closed date as the first and/or next reply time if there is not one preceding.\n or (next_solved_at is null and agent_reply_at is null and now() >= sla_schedule_start_at and (now() < next_schedule_start or next_schedule_start is null)) -- ticket is not replied to and therefore active. 
But only bring through the active SLA record that is most recent (after the last SLA schedule starts but before the next, or if there does not exist a next SLA schedule start time) \n or (agent_reply_at > sla_schedule_end_at and (agent_reply_at < next_schedule_start or next_schedule_start is null)) -- ticket is replied to outside sla schedule hours\n ) and sla_schedule_start_at <= now()) -- To help limit the data we do not want to bring through any schedule rows in the future.\n or not in_business_hours\n\n), reply_time_breached_at_remove_old_sla as (\n select\n *,\n now() as current_time_check,\n lead(sla_applied_at) over (partition by ticket_id, metric, in_business_hours order by sla_applied_at) as updated_sla_policy_starts_at,\n case when \n lead(sla_applied_at) over (partition by ticket_id, metric, in_business_hours order by sla_applied_at) --updated sla policy start at time\n < sla_breach_at then true else false end as is_stale_sla_policy,\n case when (sla_breach_at < agent_reply_at and sla_breach_at < next_solved_at)\n or (sla_breach_at < agent_reply_at and next_solved_at is null)\n or (agent_reply_at is null and sla_breach_at < next_solved_at)\n or (agent_reply_at is null and next_solved_at is null)\n then true\n else false\n end as is_sla_breached,\n sum_lapsed_business_minutes_new + total_runtime_minutes as total_new_minutes -- add total runtime to sum_lapsed_business_minutes_new (the sum_lapsed_business_minutes from prior row)\n from filtered_reply_times\n\n), reply_time_breach as ( \n select \n *,\n case when is_sla_breached\n then sla_breach_at -- If the SLA was breached then record that time as the breach \n else coalesce(agent_reply_at, next_solved_at) -- If the SLA was not breached then record either the agent_reply_at or next_solve_at as the breach event time as it was achieved.\n end as sla_update_at,\n case when total_runtime_minutes < 0 -- agent has already replied to prior to this SLA schedule\n then 0 -- so don't add new minutes to the SLA\n when total_new_minutes > sum_lapsed_business_minutes -- if total runtime, regardless of when the SLA schedule ended, is more than the total lapsed business minutes, that means the agent replied after the SLA schedule\n then sum_lapsed_business_minutes -- the elapsed time after the SLA end time should not be calculated as part of the business minutes, therefore sla_elapsed_time should only be sum_lapsed_business_minutes\n else sum_lapsed_business_minutes_new + (\n (\n (\n (\n ((coalesce(agent_reply_at, next_solved_at, current_time_check))::date - (sla_schedule_start_at)::date)\n * 24 + date_part('hour', (coalesce(agent_reply_at, next_solved_at, current_time_check))::timestamp) - date_part('hour', (sla_schedule_start_at)::timestamp))\n * 60 + date_part('minute', (coalesce(agent_reply_at, next_solved_at, current_time_check))::timestamp) - date_part('minute', (sla_schedule_start_at)::timestamp))\n * 60 + floor(date_part('second', (coalesce(agent_reply_at, next_solved_at, current_time_check))::timestamp)) - floor(date_part('second', (sla_schedule_start_at)::timestamp)))\n / 60) -- otherwise, the sla_elapsed_time will be sum_lapsed_business_minutes_new (the prior record's sum_lapsed_business_minutes) plus the minutes between SLA schedule start and agent_reply_time. 
If the agent hasn't replied yet, then the minute counter is still running, hence the coalesce of agent_reply_time and current_time_check.\n end as sla_elapsed_time\n from reply_time_breached_at_remove_old_sla \n)\n\nselect *\nfrom reply_time_breach", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__requester_wait_time_calendar_hours": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__requester_wait_time_calendar_hours", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/requester_wait_time/int_zendesk__requester_wait_time_calendar_hours.sql", "original_file_path": "models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_calendar_hours.sql", "unique_id": "model.zendesk.int_zendesk__requester_wait_time_calendar_hours", "fqn": ["zendesk", "sla_policy", "requester_wait_time", "int_zendesk__requester_wait_time_calendar_hours"], "alias": "int_zendesk__requester_wait_time_calendar_hours", "checksum": {"name": "sha256", "checksum": "adaa86b537177e2792f3b8e48def56a520c6a442b11f3859c649f549d4b60087"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1724705297.603596, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__requester_wait_time_calendar_hours\"", "raw_code": "-- Calculate breach time for requester wait time, calendar hours\nwith requester_wait_time_filtered_statuses as (\n\n select *\n from {{ ref('int_zendesk__requester_wait_time_filtered_statuses') }}\n where not in_business_hours\n\n), requester_wait_time_calendar_minutes as (\n\n select \n *,\n {{ dbt.datediff(\n 'valid_starting_at',\n 'valid_ending_at',\n 'minute' )}} as calendar_minutes,\n sum({{ dbt.datediff(\n 'valid_starting_at', \n 'valid_ending_at', \n 'minute') }} ) \n over (partition by ticket_id, sla_applied_at order by valid_starting_at rows between unbounded preceding and current row) as running_total_calendar_minutes\n from requester_wait_time_filtered_statuses\n\n), requester_wait_time_calendar_minutes_flagged as (\n\nselect \n requester_wait_time_calendar_minutes.*,\n target - running_total_calendar_minutes as remaining_target_minutes,\n case when (target - running_total_calendar_minutes) < 0 \n and \n (lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) >= 0 \n or \n lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) is null) \n then true else false end as is_breached_during_schedule\n \nfrom 
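The `sla_elapsed_time` case above clamps negative runtimes to zero, caps the result at the minutes available inside the schedule block, and otherwise adds the minutes since the block started to the prior row's lapsed total. A standalone sketch of the same branching on a hypothetical one-row input (`sla_rows` and its values are invented, and `extract(epoch from ...)/60` stands in for the longer `date_part` arithmetic the package compiles to):

```sql
-- Sketch with invented values; epoch/60 is a simplification of the compiled date_part math.
with sla_rows(sum_lapsed_business_minutes, sum_lapsed_business_minutes_new,
              total_runtime_minutes, total_new_minutes,
              sla_schedule_start_at, agent_reply_at, next_solved_at) as (
    values (120, 90, 45, 135,
            timestamp '2024-01-08 09:00', timestamp '2024-01-08 09:45', null::timestamp)
)
select case
           when total_runtime_minutes < 0 then 0                       -- reply pre-dates this schedule block
           when total_new_minutes > sum_lapsed_business_minutes
               then sum_lapsed_business_minutes                        -- cap at minutes available in-schedule
           else sum_lapsed_business_minutes_new
                + extract(epoch from coalesce(agent_reply_at, next_solved_at, now())
                                     - sla_schedule_start_at) / 60     -- clock still running if no reply yet
       end as sla_elapsed_time
from sla_rows;
```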
requester_wait_time_calendar_minutes\n\n), final as (\n select\n *,\n (remaining_target_minutes + calendar_minutes) as breach_minutes,\n {{ fivetran_utils.timestamp_add(\n 'minute',\n '(remaining_target_minutes + calendar_minutes)',\n 'valid_starting_at', \n ) }} as sla_breach_at\n from requester_wait_time_calendar_minutes_flagged\n\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__requester_wait_time_filtered_statuses", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__requester_wait_time_filtered_statuses"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_calendar_hours.sql", "compiled": true, "compiled_code": "-- Calculate breach time for requester wait time, calendar hours\nwith requester_wait_time_filtered_statuses as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__requester_wait_time_filtered_statuses\"\n where not in_business_hours\n\n), requester_wait_time_calendar_minutes as (\n\n select \n *,\n \n (\n (\n ((valid_ending_at)::date - (valid_starting_at)::date)\n * 24 + date_part('hour', (valid_ending_at)::timestamp) - date_part('hour', (valid_starting_at)::timestamp))\n * 60 + date_part('minute', (valid_ending_at)::timestamp) - date_part('minute', (valid_starting_at)::timestamp))\n as calendar_minutes,\n sum(\n (\n (\n ((valid_ending_at)::date - (valid_starting_at)::date)\n * 24 + date_part('hour', (valid_ending_at)::timestamp) - date_part('hour', (valid_starting_at)::timestamp))\n * 60 + date_part('minute', (valid_ending_at)::timestamp) - date_part('minute', (valid_starting_at)::timestamp))\n ) \n over (partition by ticket_id, sla_applied_at order by valid_starting_at rows between unbounded preceding and current row) as running_total_calendar_minutes\n from requester_wait_time_filtered_statuses\n\n), requester_wait_time_calendar_minutes_flagged as (\n\nselect \n requester_wait_time_calendar_minutes.*,\n target - running_total_calendar_minutes as remaining_target_minutes,\n case when (target - running_total_calendar_minutes) < 0 \n and \n (lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) >= 0 \n or \n lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) is null) \n then true else false end as is_breached_during_schedule\n \nfrom requester_wait_time_calendar_minutes\n\n), final as (\n select\n *,\n (remaining_target_minutes + calendar_minutes) as breach_minutes,\n \n\n valid_starting_at + ((interval '1 minute') * ((remaining_target_minutes + calendar_minutes)))\n\n as sla_breach_at\n from requester_wait_time_calendar_minutes_flagged\n\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__requester_wait_time_business_hours": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__requester_wait_time_business_hours", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/requester_wait_time/int_zendesk__requester_wait_time_business_hours.sql", "original_file_path": 
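The calendar-hours model above follows a pattern reused throughout the package: accumulate minutes per status span with a windowed running total, flag the span where the remaining target first goes negative, and add the leftover minutes back to that span's start to get the breach timestamp. A self-contained Postgres sketch with invented data (`make_interval` replaces the `fivetran_utils.timestamp_add` macro):

```sql
-- Illustrative data; the pattern, not the package's code.
with status_spans(ticket_id, sla_applied_at, target, valid_starting_at, valid_ending_at) as (
    values
        (1, timestamp '2024-01-01 00:00', 60, timestamp '2024-01-01 00:00', timestamp '2024-01-01 00:40'),
        (1, timestamp '2024-01-01 00:00', 60, timestamp '2024-01-01 01:00', timestamp '2024-01-01 01:50')
), with_minutes as (
    select *,
           extract(epoch from valid_ending_at - valid_starting_at) / 60 as calendar_minutes,
           sum(extract(epoch from valid_ending_at - valid_starting_at) / 60)
               over (partition by ticket_id, sla_applied_at
                     order by valid_starting_at
                     rows between unbounded preceding and current row) as running_total
    from status_spans
), flagged as (
    select *,
           target - running_total as remaining_target_minutes,
           lag(target - running_total)
               over (partition by ticket_id, sla_applied_at
                     order by valid_starting_at) as prev_remaining
    from with_minutes
)
select ticket_id,
       valid_starting_at
           + make_interval(mins => (remaining_target_minutes + calendar_minutes)::int) as sla_breach_at
from flagged
where remaining_target_minutes < 0          -- target exhausted inside this span...
  and coalesce(prev_remaining, 0) >= 0;     -- ...and not already exhausted before it
```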
"models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_business_hours.sql", "unique_id": "model.zendesk.int_zendesk__requester_wait_time_business_hours", "fqn": ["zendesk", "sla_policy", "requester_wait_time", "int_zendesk__requester_wait_time_business_hours"], "alias": "int_zendesk__requester_wait_time_business_hours", "checksum": {"name": "sha256", "checksum": "101f406be8cee0e94ed9a45f338aa5618ac7a9bc030632f0cf8c33008de8394a"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1724705297.6078541, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__requester_wait_time_business_hours\"", "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\n-- REQUESTER WAIT TIME\n-- This is complicated, as SLAs minutes are only counted while the ticket is in 'new', 'open', and 'on-hold' status.\n\n-- Additionally, for business hours, only 'new', 'open', and 'on-hold' status hours are counted if they are also during business hours\nwith requester_wait_time_filtered_statuses as (\n\n select *\n from {{ ref('int_zendesk__requester_wait_time_filtered_statuses') }}\n where in_business_hours\n\n), schedule as (\n\n select * \n from {{ ref('int_zendesk__schedule_spine') }}\n\n), ticket_schedules as (\n\n select * \n from {{ ref('int_zendesk__ticket_schedules') }}\n \n-- cross schedules with work time\n), ticket_status_crossed_with_schedule as (\n \n select\n requester_wait_time_filtered_statuses.ticket_id,\n requester_wait_time_filtered_statuses.sla_applied_at,\n requester_wait_time_filtered_statuses.target,\n requester_wait_time_filtered_statuses.sla_policy_name,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as valid_starting_at,\n least(valid_ending_at, schedule_invalidated_at) as valid_ending_at,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n valid_starting_at as status_valid_starting_at,\n valid_ending_at as status_valid_ending_at\n\n from requester_wait_time_filtered_statuses\n left join ticket_schedules\n on requester_wait_time_filtered_statuses.ticket_id = ticket_schedules.ticket_id\n where {{ dbt.datediff(\n 'greatest(valid_starting_at, schedule_created_at)', \n 'least(valid_ending_at, schedule_invalidated_at)', \n 'second') }} > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n sla_applied_at,\n target,\n sla_policy_name,\n schedule_id,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n ({{ dbt.datediff(\n \"cast(\" ~ 
dbt_date.week_start('ticket_status_crossed_with_schedule.valid_starting_at','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n \"cast(ticket_status_crossed_with_schedule.valid_starting_at as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as valid_starting_at_in_minutes_from_week,\n ({{ dbt.datediff(\n 'ticket_status_crossed_with_schedule.valid_starting_at', \n 'ticket_status_crossed_with_schedule.valid_ending_at',\n 'second') }} /60\n ) as raw_delta_in_minutes,\n {{ dbt_date.week_start('ticket_status_crossed_with_schedule.valid_starting_at','UTC') }} as start_week_date\n\n from ticket_status_crossed_with_schedule\n {{ dbt_utils.group_by(n=10) }}\n\n), weeks as (\n\n {{ dbt_utils.generate_series(208) }}\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((valid_starting_at_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_period_requester_wait_time as (\n\n select \n\n ticket_id,\n sla_applied_at,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n target,\n sla_policy_name,\n valid_starting_at_in_minutes_from_week,\n raw_delta_in_minutes,\n week_number,\n schedule_id,\n start_week_date,\n cast(greatest(0, valid_starting_at_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time_minute,\n cast(least(valid_starting_at_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time_minute\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods_agent as (\n \n select \n weekly_period_requester_wait_time.ticket_id,\n weekly_period_requester_wait_time.sla_applied_at,\n weekly_period_requester_wait_time.target,\n weekly_period_requester_wait_time.sla_policy_name,\n weekly_period_requester_wait_time.valid_starting_at,\n weekly_period_requester_wait_time.valid_ending_at,\n weekly_period_requester_wait_time.week_number,\n weekly_period_requester_wait_time.ticket_week_start_time_minute,\n weekly_period_requester_wait_time.ticket_week_end_time_minute,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time_minute, schedule.end_time_utc) - greatest(weekly_period_requester_wait_time.ticket_week_start_time_minute, schedule.start_time_utc) as scheduled_minutes\n from weekly_period_requester_wait_time\n join schedule on ticket_week_start_time_minute <= schedule.end_time_utc \n and ticket_week_end_time_minute >= schedule.start_time_utc\n and weekly_period_requester_wait_time.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time_minute', from_date_or_timestamp='start_week_date') }} as {{ dbt.type_timestamp() }}) > cast(schedule.valid_from as {{ dbt.type_timestamp() }})\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + 
ticket_week_start_time_minute', from_date_or_timestamp='start_week_date') }} as {{ dbt.type_timestamp() }}) < cast(schedule.valid_until as {{ dbt.type_timestamp() }})\n \n), intercepted_periods_with_running_total as (\n \n select \n *,\n sum(scheduled_minutes) over \n (partition by ticket_id, sla_applied_at \n order by valid_starting_at, week_number, schedule_end_time\n rows between unbounded preceding and current row)\n as running_total_scheduled_minutes\n\n from intercepted_periods_agent\n\n\n), intercepted_periods_agent_with_breach_flag as (\n select \n intercepted_periods_with_running_total.*,\n target - running_total_scheduled_minutes as remaining_target_minutes,\n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) as lag_check,\n case when (target - running_total_scheduled_minutes) = 0 then true\n when (target - running_total_scheduled_minutes) < 0 \n and \n (lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) > 0 \n or \n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) is null) \n then true else false end as is_breached_during_schedule\n \n from intercepted_periods_with_running_total\n\n), intercepted_periods_agent_filtered as (\n\n select\n *,\n (remaining_target_minutes + scheduled_minutes) as breach_minutes,\n greatest(ticket_week_start_time_minute, schedule_start_time) + (remaining_target_minutes + scheduled_minutes) as breach_minutes_from_week\n from intercepted_periods_agent_with_breach_flag\n\n), requester_wait_business_breach as (\n \n select \n *,\n {{ fivetran_utils.timestamp_add(\n \"minute\",\n \"cast(((7*24*60) * week_number) + breach_minutes_from_week as \" ~ dbt.type_int() ~ \" )\",\n \"\" ~ dbt.date_trunc('week', 'valid_starting_at') ~ \"\",\n ) }} as sla_breach_at\n from intercepted_periods_agent_filtered\n\n)\n\nselect * \nfrom requester_wait_business_breach", "language": "sql", "refs": [{"name": "int_zendesk__requester_wait_time_filtered_statuses", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}, {"name": "int_zendesk__ticket_schedules", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt_utils.group_by", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd", "macro.dbt.date_trunc", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__requester_wait_time_filtered_statuses", "model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_schedules"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_business_hours.sql", "compiled": true, "compiled_code": "\n\n-- REQUESTER WAIT TIME\n-- This is complicated, as SLAs minutes are only counted while the ticket is in 'new', 'open', and 'on-hold' status.\n\n-- Additionally, for business hours, only 'new', 'open', and 'on-hold' status hours are counted if they are also during business hours\nwith requester_wait_time_filtered_statuses as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__requester_wait_time_filtered_statuses\"\n where in_business_hours\n\n), schedule 
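The business-hours variant cannot simply add minutes end to end; it first re-expresses each status span as minutes since the start of its Sunday-based week so the span can be intersected with weekly schedule windows. A sketch of that decomposition with an invented span (Postgres `generate_series` stands in for `dbt_utils.generate_series`, and `date_trunc('week', ts + interval '1 day') - interval '1 day'` mirrors the Sunday-start shift visible in the compiled code):

```sql
-- Invented span; shows the minutes-from-week-start split, not the package's code.
with spans(valid_starting_at, valid_ending_at) as (
    values (timestamp '2024-01-05 23:00', timestamp '2024-01-08 01:00')  -- Fri 23:00 -> Mon 01:00
), weeked as (
    select *,
           -- Postgres date_trunc('week') is Monday-based; shifting by a day yields Sunday
           date_trunc('week', valid_starting_at + interval '1 day') - interval '1 day' as start_week_date
    from spans
), measured as (
    select *,
           extract(epoch from valid_starting_at - start_week_date) / 60 as start_minutes_from_week,
           extract(epoch from valid_ending_at - valid_starting_at) / 60 as raw_delta_in_minutes
    from weeked
)
select generated_number - 1 as week_number,
       greatest(0, start_minutes_from_week - (generated_number - 1) * 7*24*60) as week_start_minute,
       least(start_minutes_from_week + raw_delta_in_minutes - (generated_number - 1) * 7*24*60,
             7*24*60) as week_end_minute
from measured
cross join generate_series(1, 208) as generated_number
where floor((start_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1;
```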
as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_schedules as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n \n-- cross schedules with work time\n), ticket_status_crossed_with_schedule as (\n \n select\n requester_wait_time_filtered_statuses.ticket_id,\n requester_wait_time_filtered_statuses.sla_applied_at,\n requester_wait_time_filtered_statuses.target,\n requester_wait_time_filtered_statuses.sla_policy_name,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as valid_starting_at,\n least(valid_ending_at, schedule_invalidated_at) as valid_ending_at,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n valid_starting_at as status_valid_starting_at,\n valid_ending_at as status_valid_ending_at\n\n from requester_wait_time_filtered_statuses\n left join ticket_schedules\n on requester_wait_time_filtered_statuses.ticket_id = ticket_schedules.ticket_id\n where \n (\n (\n (\n ((least(valid_ending_at, schedule_invalidated_at))::date - (greatest(valid_starting_at, schedule_created_at))::date)\n * 24 + date_part('hour', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('hour', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + date_part('minute', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('minute', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + floor(date_part('second', (least(valid_ending_at, schedule_invalidated_at))::timestamp)) - floor(date_part('second', (greatest(valid_starting_at, schedule_created_at))::timestamp)))\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n sla_applied_at,\n target,\n sla_policy_name,\n schedule_id,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n (\n (\n (\n (\n ((cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * 
(-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as valid_starting_at_in_minutes_from_week,\n (\n (\n (\n (\n ((ticket_status_crossed_with_schedule.valid_ending_at)::date - (ticket_status_crossed_with_schedule.valid_starting_at)::date)\n * 24 + date_part('hour', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp) - date_part('hour', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp))\n * 60 + date_part('minute', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp) - date_part('minute', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp))\n * 60 + floor(date_part('second', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp)) - floor(date_part('second', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp)))\n /60\n ) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n\n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7,8,9,10\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as integer) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((valid_starting_at_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_period_requester_wait_time as (\n\n select \n\n ticket_id,\n sla_applied_at,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n target,\n sla_policy_name,\n valid_starting_at_in_minutes_from_week,\n raw_delta_in_minutes,\n week_number,\n schedule_id,\n start_week_date,\n cast(greatest(0, valid_starting_at_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time_minute,\n cast(least(valid_starting_at_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time_minute\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods_agent as (\n \n select \n weekly_period_requester_wait_time.ticket_id,\n weekly_period_requester_wait_time.sla_applied_at,\n weekly_period_requester_wait_time.target,\n weekly_period_requester_wait_time.sla_policy_name,\n weekly_period_requester_wait_time.valid_starting_at,\n weekly_period_requester_wait_time.valid_ending_at,\n weekly_period_requester_wait_time.week_number,\n weekly_period_requester_wait_time.ticket_week_start_time_minute,\n 
weekly_period_requester_wait_time.ticket_week_end_time_minute,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time_minute, schedule.end_time_utc) - greatest(weekly_period_requester_wait_time.ticket_week_start_time_minute, schedule.start_time_utc) as scheduled_minutes\n from weekly_period_requester_wait_time\n join schedule on ticket_week_start_time_minute <= schedule.end_time_utc \n and ticket_week_end_time_minute >= schedule.start_time_utc\n and weekly_period_requester_wait_time.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time_minute))\n\n as timestamp) > cast(schedule.valid_from as timestamp)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time_minute))\n\n as timestamp) < cast(schedule.valid_until as timestamp)\n \n), intercepted_periods_with_running_total as (\n \n select \n *,\n sum(scheduled_minutes) over \n (partition by ticket_id, sla_applied_at \n order by valid_starting_at, week_number, schedule_end_time\n rows between unbounded preceding and current row)\n as running_total_scheduled_minutes\n\n from intercepted_periods_agent\n\n\n), intercepted_periods_agent_with_breach_flag as (\n select \n intercepted_periods_with_running_total.*,\n target - running_total_scheduled_minutes as remaining_target_minutes,\n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) as lag_check,\n case when (target - running_total_scheduled_minutes) = 0 then true\n when (target - running_total_scheduled_minutes) < 0 \n and \n (lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) > 0 \n or \n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) is null) \n then true else false end as is_breached_during_schedule\n \n from intercepted_periods_with_running_total\n\n), intercepted_periods_agent_filtered as (\n\n select\n *,\n (remaining_target_minutes + scheduled_minutes) as breach_minutes,\n greatest(ticket_week_start_time_minute, schedule_start_time) + (remaining_target_minutes + scheduled_minutes) as breach_minutes_from_week\n from intercepted_periods_agent_with_breach_flag\n\n), requester_wait_business_breach as (\n \n select \n *,\n \n\n date_trunc('week', valid_starting_at) + ((interval '1 minute') * (cast(((7*24*60) * week_number) + breach_minutes_from_week as integer )))\n\n as sla_breach_at\n from intercepted_periods_agent_filtered\n\n)\n\nselect * \nfrom requester_wait_business_breach", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": 
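The `weeks` CTE in the compiled code above shows how `dbt_utils.generate_series(208)` builds a number spine portably: cross join a two-row `{0, 1}` relation once per bit and sum the bit weights, then filter to the requested upper bound. Restated as plain SQL, functionally the same as the compiled expansion but with the `power(2, n)` factors written out as constants:

```sql
-- Eight bits cover 1..256; the filter trims the spine to 208.
with p as (
    select 0 as generated_number union all select 1
), unioned as (
    select p0.generated_number * 1
         + p1.generated_number * 2
         + p2.generated_number * 4
         + p3.generated_number * 8
         + p4.generated_number * 16
         + p5.generated_number * 32
         + p6.generated_number * 64
         + p7.generated_number * 128
         + 1 as generated_number
    from p as p0
    cross join p as p1 cross join p as p2 cross join p as p3
    cross join p as p4 cross join p as p5 cross join p as p6 cross join p as p7
)
select generated_number
from unioned
where generated_number <= 208
order by generated_number;
```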
"int_zendesk__requester_wait_time_filtered_statuses", "resource_type": "model", "package_name": "zendesk", "path": "sla_policy/requester_wait_time/int_zendesk__requester_wait_time_filtered_statuses.sql", "original_file_path": "models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_filtered_statuses.sql", "unique_id": "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses", "fqn": ["zendesk", "sla_policy", "requester_wait_time", "int_zendesk__requester_wait_time_filtered_statuses"], "alias": "int_zendesk__requester_wait_time_filtered_statuses", "checksum": {"name": "sha256", "checksum": "1ddb077adfbf13244c13cb12643a6914f5eac17c714885eac834f9e1eee88475"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1724705297.6186519, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__requester_wait_time_filtered_statuses\"", "raw_code": "with requester_wait_time_sla as (\n\n select *\n from {{ ref('int_zendesk__sla_policy_applied') }}\n where metric = 'requester_wait_time'\n\n), ticket_historical_status as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_status') }}\n \n--This captures the statuses of the ticket while the requester wait time sla was active for the ticket.\n), requester_wait_time_filtered_statuses as (\n\n select \n ticket_historical_status.ticket_id,\n greatest(ticket_historical_status.valid_starting_at, requester_wait_time_sla.sla_applied_at) as valid_starting_at,\n coalesce(\n ticket_historical_status.valid_ending_at, \n {{ fivetran_utils.timestamp_add('day', 30, \"\" ~ dbt.current_timestamp_backcompat() ~ \"\") }} ) as valid_ending_at, --assumes current status continues into the future. 
This is necessary to predict future SLA breaches (not just past).\n ticket_historical_status.status as ticket_status,\n requester_wait_time_sla.sla_applied_at,\n requester_wait_time_sla.target,\n requester_wait_time_sla.sla_policy_name,\n requester_wait_time_sla.ticket_created_at,\n requester_wait_time_sla.in_business_hours\n from ticket_historical_status\n join requester_wait_time_sla\n on ticket_historical_status.ticket_id = requester_wait_time_sla.ticket_id\n where ticket_historical_status.status in ('new', 'open', 'on-hold', 'hold') -- these are the only statuses that count as \"requester wait time\"\n and sla_applied_at < valid_ending_at\n\n)\nselect *\nfrom requester_wait_time_filtered_statuses", "language": "sql", "refs": [{"name": "int_zendesk__sla_policy_applied", "package": null, "version": null}, {"name": "int_zendesk__ticket_historical_status", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.fivetran_utils.timestamp_add"], "nodes": ["model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__ticket_historical_status"]}, "compiled_path": "target/compiled/zendesk/models/sla_policy/requester_wait_time/int_zendesk__requester_wait_time_filtered_statuses.sql", "compiled": true, "compiled_code": "with requester_wait_time_sla as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__sla_policy_applied\"\n where metric = 'requester_wait_time'\n\n), ticket_historical_status as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n \n--This captures the statuses of the ticket while the requester wait time sla was active for the ticket.\n), requester_wait_time_filtered_statuses as (\n\n select \n ticket_historical_status.ticket_id,\n greatest(ticket_historical_status.valid_starting_at, requester_wait_time_sla.sla_applied_at) as valid_starting_at,\n coalesce(\n ticket_historical_status.valid_ending_at, \n \n\n \n current_timestamp::timestamp\n + ((interval '1 day') * (30))\n\n ) as valid_ending_at, --assumes current status continues into the future. 
This is necessary to predict future SLA breaches (not just past).\n ticket_historical_status.status as ticket_status,\n requester_wait_time_sla.sla_applied_at,\n requester_wait_time_sla.target,\n requester_wait_time_sla.sla_policy_name,\n requester_wait_time_sla.ticket_created_at,\n requester_wait_time_sla.in_business_hours\n from ticket_historical_status\n join requester_wait_time_sla\n on ticket_historical_status.ticket_id = requester_wait_time_sla.ticket_id\n where ticket_historical_status.status in ('new', 'open', 'on-hold', 'hold') -- these are the only statuses that count as \"requester wait time\"\n and sla_applied_at < valid_ending_at\n\n)\nselect *\nfrom requester_wait_time_filtered_statuses", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_reply_times": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__ticket_reply_times", "resource_type": "model", "package_name": "zendesk", "path": "reply_times/int_zendesk__ticket_reply_times.sql", "original_file_path": "models/reply_times/int_zendesk__ticket_reply_times.sql", "unique_id": "model.zendesk.int_zendesk__ticket_reply_times", "fqn": ["zendesk", "reply_times", "int_zendesk__ticket_reply_times"], "alias": "int_zendesk__ticket_reply_times", "checksum": {"name": "sha256", "checksum": "6de1b30f99a9bbd078c823538ca0e87c5b57d33160f65c290ecd67765e8d4472"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1724705297.656778, "relation_name": null, "raw_code": "with ticket_public_comments as (\n\n select *\n from {{ ref('int_zendesk__comments_enriched') }}\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commentor was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want to end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else 
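A detail worth noting in the filtered-statuses model above: a status row with no `valid_ending_at` is extended 30 days past the current timestamp, which is what lets the package project breaches that have not happened yet rather than only reporting past ones. A minimal sketch with invented rows:

```sql
-- Stand-in rows; demonstrates the open-status extension only.
with ticket_status_history(ticket_id, status, valid_starting_at, valid_ending_at) as (
    values
        (1, 'open', timestamp '2024-01-01 09:00', timestamp '2024-01-02 09:00'),
        (1, 'open', timestamp '2024-01-02 09:00', null)   -- still open: no end yet
)
select ticket_id,
       status,
       valid_starting_at,
       -- assume the current status continues 30 days so future breaches can be projected
       coalesce(valid_ending_at, current_timestamp + interval '30 days') as valid_ending_at
from ticket_status_history
where status in ('new', 'open', 'on-hold', 'hold');  -- statuses that accrue requester wait time
```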
end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n ({{ dbt.datediff(\n 'end_user_comment_created_at',\n 'agent_responded_at',\n 'second') }} / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2", "language": "sql", "refs": [{"name": "int_zendesk__comments_enriched", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff"], "nodes": ["model.zendesk.int_zendesk__comments_enriched"]}, "compiled_path": "target/compiled/zendesk/models/reply_times/int_zendesk__ticket_reply_times.sql", "compiled": true, "compiled_code": "with __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n), ticket_public_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration 
internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commentor was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want to end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n (\n (\n (\n (\n ((agent_responded_at)::date - (end_user_comment_created_at)::date)\n * 24 + date_part('hour', (agent_responded_at)::timestamp) - date_part('hour', (end_user_comment_created_at)::timestamp))\n * 60 + date_part('minute', (agent_responded_at)::timestamp) - date_part('minute', (end_user_comment_created_at)::timestamp))\n * 60 + floor(date_part('second', (agent_responded_at)::timestamp)) - floor(date_part('second', (end_user_comment_created_at)::timestamp)))\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__comments_enriched", "sql": " __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": 
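The reply-time models above hinge on `previous_commenter_role`: each public comment is tagged with the role of the comment before it via `lag()`, defaulting the first comment to `'first_comment'`, and a "net new" end-user comment is one whose predecessor was not also an end user. A standalone sketch on invented comments:

```sql
-- Illustrative data; mirrors the lag() pattern from int_zendesk__comments_enriched.
with public_comments(ticket_id, valid_starting_at, commenter_role) as (
    values
        (1, timestamp '2024-01-01 10:00', 'external_comment'),
        (1, timestamp '2024-01-01 10:30', 'internal_comment'),
        (1, timestamp '2024-01-01 11:00', 'external_comment')
)
select *,
       coalesce(
           lag(commenter_role) over (partition by ticket_id
                                     order by valid_starting_at, commenter_role),
           'first_comment') as previous_commenter_role
from public_comments;
-- A row is a net-new end-user comment when commenter_role = 'external_comment'
-- and previous_commenter_role <> 'external_comment'; the reply time is the gap
-- from that row to the next internal_comment on the same ticket.
```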
null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_reply_times_calendar": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__ticket_reply_times_calendar", "resource_type": "model", "package_name": "zendesk", "path": "reply_times/int_zendesk__ticket_reply_times_calendar.sql", "original_file_path": "models/reply_times/int_zendesk__ticket_reply_times_calendar.sql", "unique_id": "model.zendesk.int_zendesk__ticket_reply_times_calendar", "fqn": ["zendesk", "reply_times", "int_zendesk__ticket_reply_times_calendar"], "alias": "int_zendesk__ticket_reply_times_calendar", "checksum": {"name": "sha256", "checksum": "6fb6a60134019d78fcfc8c135b4a7887b3ce52ec53d8db463194f7824d2c71c2"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1724705297.6603909, "relation_name": null, "raw_code": "with ticket as (\n\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), ticket_reply_times as (\n\n select *\n from {{ ref('int_zendesk__ticket_reply_times') }}\n\n)\n\nselect\n\n ticket.ticket_id,\n sum(case when is_first_comment then reply_time_calendar_minutes\n else null end) as first_reply_time_calendar_minutes,\n sum(reply_time_calendar_minutes) as total_reply_time_calendar_minutes --total combined time the customer waits for internal response\n \nfrom ticket\nleft join ticket_reply_times\n using (ticket_id)\n\ngroup by 1", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "int_zendesk__ticket_reply_times", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk_source.stg_zendesk__ticket", "model.zendesk.int_zendesk__ticket_reply_times"]}, "compiled_path": "target/compiled/zendesk/models/reply_times/int_zendesk__ticket_reply_times_calendar.sql", "compiled": true, "compiled_code": "with __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of 
public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n), __dbt__cte__int_zendesk__ticket_reply_times as (\nwith ticket_public_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commentor was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want to end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n (\n (\n (\n (\n ((agent_responded_at)::date - (end_user_comment_created_at)::date)\n * 24 + date_part('hour', (agent_responded_at)::timestamp) - date_part('hour', (end_user_comment_created_at)::timestamp))\n * 60 + date_part('minute', (agent_responded_at)::timestamp) - date_part('minute', (end_user_comment_created_at)::timestamp))\n * 60 + floor(date_part('second', (agent_responded_at)::timestamp)) - floor(date_part('second', (end_user_comment_created_at)::timestamp)))\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2\n), ticket as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_reply_times as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times\n\n)\n\nselect\n\n ticket.ticket_id,\n sum(case when is_first_comment then reply_time_calendar_minutes\n else null end) as first_reply_time_calendar_minutes,\n 
sum(reply_time_calendar_minutes) as total_reply_time_calendar_minutes --total combined time the customer waits for internal response\n \nfrom ticket\nleft join ticket_reply_times\n using (ticket_id)\n\ngroup by 1", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__comments_enriched", "sql": " __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n)"}, {"id": "model.zendesk.int_zendesk__ticket_reply_times", "sql": " __dbt__cte__int_zendesk__ticket_reply_times as (\nwith ticket_public_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commentor was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want to end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from 
end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n (\n (\n (\n (\n ((agent_responded_at)::date - (end_user_comment_created_at)::date)\n * 24 + date_part('hour', (agent_responded_at)::timestamp) - date_part('hour', (end_user_comment_created_at)::timestamp))\n * 60 + date_part('minute', (agent_responded_at)::timestamp) - date_part('minute', (end_user_comment_created_at)::timestamp))\n * 60 + floor(date_part('second', (agent_responded_at)::timestamp)) - floor(date_part('second', (end_user_comment_created_at)::timestamp)))\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__comments_enriched": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__comments_enriched", "resource_type": "model", "package_name": "zendesk", "path": "reply_times/int_zendesk__comments_enriched.sql", "original_file_path": "models/reply_times/int_zendesk__comments_enriched.sql", "unique_id": "model.zendesk.int_zendesk__comments_enriched", "fqn": ["zendesk", "reply_times", "int_zendesk__comments_enriched"], "alias": "int_zendesk__comments_enriched", "checksum": {"name": "sha256", "checksum": "970004a2aa343ae78a3f810828600c7eca8585428b52b05e4353f9debc6f1af5"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1724705297.6619892, "relation_name": null, "raw_code": "with ticket_comment as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n where field_name = 'comment'\n\n), users as (\n\n select *\n from {{ ref('stg_zendesk__user') }}\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n 
union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "stg_zendesk__user", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__user"]}, "compiled_path": "target/compiled/zendesk/models/reply_times/int_zendesk__comments_enriched.sql", "compiled": true, "compiled_code": "with ticket_comment as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_first_reply_time_business": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__ticket_first_reply_time_business", "resource_type": "model", "package_name": "zendesk", "path": "reply_times/int_zendesk__ticket_first_reply_time_business.sql", "original_file_path": "models/reply_times/int_zendesk__ticket_first_reply_time_business.sql", "unique_id": "model.zendesk.int_zendesk__ticket_first_reply_time_business", "fqn": ["zendesk", "reply_times", "int_zendesk__ticket_first_reply_time_business"], "alias": "int_zendesk__ticket_first_reply_time_business", "checksum": {"name": "sha256", "checksum": "3b0a4efc758ab6f25063ec97d60455b76873e355e0e916fc4670d5d67066430a"}, "config": {"enabled": true, "alias": 
null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral", "enabled": true}, "created_at": 1724705297.6631181, "relation_name": null, "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\nwith ticket_reply_times as (\n\n select *\n from {{ ref('int_zendesk__ticket_reply_times') }}\n\n), ticket_schedules as (\n\n select \n *\n from {{ ref('int_zendesk__ticket_schedules') }}\n\n), schedule as (\n\n select *\n from {{ ref('int_zendesk__schedule_spine') }}\n\n), first_reply_time as (\n\n select\n ticket_id,\n end_user_comment_created_at,\n agent_responded_at\n\n from ticket_reply_times\n where is_first_comment\n\n), ticket_first_reply_time as (\n\n select \n first_reply_time.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(first_reply_time.agent_responded_at) as agent_responded_at,\n\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('ticket_schedules.schedule_created_at','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n \"cast(ticket_schedules.schedule_created_at as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n {{ dbt.datediff(\n 'ticket_schedules.schedule_created_at',\n 'least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at))',\n 'second') }}/60\n )) as raw_delta_in_minutes,\n {{ dbt_date.week_start('ticket_schedules.schedule_created_at','UTC') }} as start_week_date\n \n from first_reply_time\n join ticket_schedules on first_reply_time.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n {{ dbt_utils.generate_series(208) }}\n\n), weeks_cross_ticket_first_reply as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_reply_time.*,\n cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number\n\n from ticket_first_reply_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_first_reply.*, \n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time\n from weeks_cross_ticket_first_reply\n\n), intercepted_periods as (\n\n select ticket_id,\n week_number,\n 
weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time', from_date_or_timestamp='start_week_date') }} as date) > cast(schedule.valid_from as date)\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time', from_date_or_timestamp='start_week_date') }} as date) < cast(schedule.valid_until as date)\n \n)\n\n select ticket_id,\n sum(scheduled_minutes) as first_reply_time_business_minutes\n from intercepted_periods\n group by 1", "language": "sql", "refs": [{"name": "int_zendesk__ticket_reply_times", "package": null, "version": null}, {"name": "int_zendesk__ticket_schedules", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt.datediff", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd"], "nodes": ["model.zendesk.int_zendesk__ticket_reply_times", "model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__schedule_spine"]}, "compiled_path": "target/compiled/zendesk/models/reply_times/int_zendesk__ticket_first_reply_time_business.sql", "compiled": true, "compiled_code": "\n\nwith __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) 
over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n), __dbt__cte__int_zendesk__ticket_reply_times as (\nwith ticket_public_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commentor was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want to end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n (\n (\n (\n (\n ((agent_responded_at)::date - (end_user_comment_created_at)::date)\n * 24 + date_part('hour', (agent_responded_at)::timestamp) - date_part('hour', (end_user_comment_created_at)::timestamp))\n * 60 + date_part('minute', (agent_responded_at)::timestamp) - date_part('minute', (end_user_comment_created_at)::timestamp))\n * 60 + floor(date_part('second', (agent_responded_at)::timestamp)) - floor(date_part('second', (end_user_comment_created_at)::timestamp)))\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2\n), ticket_reply_times as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times\n\n), ticket_schedules as (\n\n select \n *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), first_reply_time as (\n\n select\n ticket_id,\n end_user_comment_created_at,\n agent_responded_at\n\n from ticket_reply_times\n where is_first_comment\n\n), ticket_first_reply_time as (\n\n select \n first_reply_time.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(first_reply_time.agent_responded_at) as agent_responded_at,\n\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as 
week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from first_reply_time\n join ticket_schedules on first_reply_time.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_reply as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_reply_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_first_reply_time\n 
cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_first_reply.*, \n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n from weeks_cross_ticket_first_reply\n\n), intercepted_periods as (\n\n select ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n)\n\n select ticket_id,\n sum(scheduled_minutes) as first_reply_time_business_minutes\n from intercepted_periods\n group by 1", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__comments_enriched", "sql": " __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between 
unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n)"}, {"id": "model.zendesk.int_zendesk__ticket_reply_times", "sql": " __dbt__cte__int_zendesk__ticket_reply_times as (\nwith ticket_public_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commentor was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want to end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n (\n (\n (\n (\n ((agent_responded_at)::date - (end_user_comment_created_at)::date)\n * 24 + date_part('hour', (agent_responded_at)::timestamp) - date_part('hour', (end_user_comment_created_at)::timestamp))\n * 60 + date_part('minute', (agent_responded_at)::timestamp) - date_part('minute', (end_user_comment_created_at)::timestamp))\n * 60 + floor(date_part('second', (agent_responded_at)::timestamp)) - floor(date_part('second', (end_user_comment_created_at)::timestamp)))\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__field_history_enriched": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__field_history_enriched", "resource_type": "model", "package_name": "zendesk", "path": "ticket_history/int_zendesk__field_history_enriched.sql", "original_file_path": "models/ticket_history/int_zendesk__field_history_enriched.sql", "unique_id": "model.zendesk.int_zendesk__field_history_enriched", "fqn": ["zendesk", "ticket_history", "int_zendesk__field_history_enriched"], "alias": "int_zendesk__field_history_enriched", "checksum": {"name": "sha256", "checksum": "cdf920b1df5fee8c6a08b0e26996028d327964903e8acc4dd15498d23c00005c"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, 
"group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1724705297.670548, "relation_name": null, "raw_code": "with ticket_field_history as (\n\n select *\n from {{ ref('stg_zendesk__ticket_field_history') }}\n\n), updater_info as (\n select *\n from {{ ref('int_zendesk__updater_information') }}\n\n), final as (\n select\n ticket_field_history.*\n\n {% if var('ticket_field_history_updater_columns')%} --The below will be run if any fields are included in the variable within the dbt_project.yml.\n {% for col in var('ticket_field_history_updater_columns') %} --Iterating through the updater fields included in the variable.\n\n --The below statements are needed to populate Zendesk automated fields for when the zendesk triggers automatically change fields based on user defined triggers.\n {% if col in ['updater_is_active'] %}\n ,coalesce(updater_info.{{ col|lower }}, true) as {{ col }}\n\n {% elif col in ['updater_user_id','updater_organization_id'] %}\n ,coalesce(updater_info.{{ col|lower }}, -1) as {{ col }}\n \n {% elif col in ['updater_last_login_at'] %}\n ,coalesce(updater_info.{{ col|lower }}, current_timestamp) as {{ col }}\n \n {% else %}\n ,coalesce(updater_info.{{ col|lower }}, concat('zendesk_trigger_change_', '{{ col }}' )) as {{ col }}\n \n {% endif %}\n {% endfor %}\n {% endif %} \n\n from ticket_field_history\n\n left join updater_info\n on ticket_field_history.user_id = updater_info.updater_user_id\n)\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_field_history", "package": null, "version": null}, {"name": "int_zendesk__updater_information", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk_source.stg_zendesk__ticket_field_history", "model.zendesk.int_zendesk__updater_information"]}, "compiled_path": "target/compiled/zendesk/models/ticket_history/int_zendesk__field_history_enriched.sql", "compiled": true, "compiled_code": "with __dbt__cte__int_zendesk__updater_information as (\nwith users as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), organizations as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__organization_aggregates\"\n\n), final as (\n select\n users.user_id as updater_user_id\n ,users.name as updater_name\n ,users.role as updater_role\n ,users.email as updater_email\n ,users.external_id as updater_external_id\n ,users.locale as updater_locale\n ,users.is_active as updater_is_active\n\n --If you use user tags this will be included, if not it will be ignored.\n \n ,users.user_tags as updater_user_tags\n \n\n ,users.last_login_at as updater_last_login_at\n ,users.time_zone as updater_time_zone\n ,organizations.organization_id as updater_organization_id\n\n --If you use using_domain_names tags this will be 
included, if not it will be ignored.\n \n ,organizations.domain_names as updater_organization_domain_names\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,organizations.organization_tags as updater_organization_organization_tags\n \n from users\n\n left join organizations\n using(organization_id)\n)\n\nselect * \nfrom final\n), ticket_field_history as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_field_history\"\n\n), updater_info as (\n select *\n from __dbt__cte__int_zendesk__updater_information\n\n), final as (\n select\n ticket_field_history.*\n\n \n\n from ticket_field_history\n\n left join updater_info\n on ticket_field_history.user_id = updater_info.updater_user_id\n)\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__updater_information", "sql": " __dbt__cte__int_zendesk__updater_information as (\nwith users as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), organizations as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__organization_aggregates\"\n\n), final as (\n select\n users.user_id as updater_user_id\n ,users.name as updater_name\n ,users.role as updater_role\n ,users.email as updater_email\n ,users.external_id as updater_external_id\n ,users.locale as updater_locale\n ,users.is_active as updater_is_active\n\n --If you use user tags this will be included, if not it will be ignored.\n \n ,users.user_tags as updater_user_tags\n \n\n ,users.last_login_at as updater_last_login_at\n ,users.time_zone as updater_time_zone\n ,organizations.organization_id as updater_organization_id\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n ,organizations.domain_names as updater_organization_domain_names\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,organizations.organization_tags as updater_organization_organization_tags\n \n from users\n\n left join organizations\n using(organization_id)\n)\n\nselect * \nfrom final\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__field_history_pivot": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__field_history_pivot", "resource_type": "model", "package_name": "zendesk", "path": "ticket_history/int_zendesk__field_history_pivot.sql", "original_file_path": "models/ticket_history/int_zendesk__field_history_pivot.sql", "unique_id": "model.zendesk.int_zendesk__field_history_pivot", "fqn": ["zendesk", "ticket_history", "int_zendesk__field_history_pivot"], "alias": "int_zendesk__field_history_pivot", "checksum": {"name": "sha256", "checksum": "077bf8d76ba0523c2ebb987be0fd0746acbdae8fdbdd39fc7a03203a5d070f87"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "incremental", "incremental_strategy": "delete+insert", "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "ticket_day_id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": 
{"enforced": false, "alias_types": true}, "access": "protected", "partition_by": {"field": "date_day", "data_type": "date", "granularity": "month"}, "file_format": "delta"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "incremental", "partition_by": {"field": "date_day", "data_type": "date", "granularity": "month"}, "unique_key": "ticket_day_id", "incremental_strategy": "delete+insert", "file_format": "delta"}, "created_at": 1724705297.675331, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__field_history_pivot\"", "raw_code": "-- depends_on: {{ source('zendesk', 'ticket_field_history') }}\n\n{{ \n config(\n materialized='incremental',\n partition_by = {'field': 'date_day', 'data_type': 'date', 'granularity': 'month'} if target.type not in ['spark', 'databricks'] else ['date_day'],\n unique_key='ticket_day_id',\n incremental_strategy = 'merge' if target.type not in ('snowflake', 'postgres', 'redshift') else 'delete+insert',\n file_format='delta'\n ) \n}}\n\n{% if execute -%}\n {% set results = run_query('select distinct field_name from ' ~ source('zendesk', 'ticket_field_history') ) %}\n {% set results_list = results.columns[0].values() %}\n{% endif -%}\n\nwith field_history as (\n\n select\n ticket_id,\n field_name,\n valid_ending_at,\n valid_starting_at\n\n --Only runs if the user passes updater fields through the final ticket field history model\n {% if var('ticket_field_history_updater_columns') %}\n ,\n {{ var('ticket_field_history_updater_columns') | join (\", \")}}\n\n {% endif %}\n\n -- doing this to figure out what values are actually null and what needs to be backfilled in zendesk__ticket_field_history\n ,case when value is null then 'is_null' else value end as value\n\n from {{ ref('int_zendesk__field_history_enriched') }}\n {% if is_incremental() %}\n where cast( {{ dbt.date_trunc('day', 'valid_starting_at') }} as date) >= (select max(date_day) from {{ this }})\n {% endif %}\n\n), event_order as (\n\n select \n *,\n row_number() over (\n partition by cast(valid_starting_at as date), ticket_id, field_name\n order by valid_starting_at desc\n ) as row_num\n from field_history\n\n), filtered as (\n\n -- Find the last event that occurs on each day for each ticket\n\n select *\n from event_order\n where row_num = 1\n\n), pivots as (\n\n -- For each column that is in both the ticket_field_history_columns variable and the field_history table,\n -- pivot out the value into it's own column. 
This will feed the daily slowly changing dimension model.\n\n select \n ticket_id,\n cast({{ dbt.date_trunc('day', 'valid_starting_at') }} as date) as date_day\n\n {% for col in results_list if col in var('ticket_field_history_columns') %}\n {% set col_xf = col|lower %}\n ,min(case when lower(field_name) = '{{ col|lower }}' then filtered.value end) as {{ col_xf }}\n\n --Only runs if the user passes updater fields through the final ticket field history model\n {% if var('ticket_field_history_updater_columns') %}\n\n {% for upd in var('ticket_field_history_updater_columns') %}\n\n {% set upd_xf = (col|lower + '_' + upd ) %} --Creating the appropriate column name based on the history field + update field names.\n\n {% if upd == 'updater_is_active' and target.type in ('postgres', 'redshift') %}\n\n ,bool_or(case when lower(field_name) = '{{ col|lower }}' then filtered.{{ upd }} end) as {{ upd_xf }}\n\n {% else %}\n\n ,min(case when lower(field_name) = '{{ col|lower }}' then filtered.{{ upd }} end) as {{ upd_xf }}\n\n {% endif %}\n {% endfor %}\n {% endif %}\n {% endfor %}\n \n from filtered\n group by 1,2\n\n), surrogate_key as (\n\n select \n *,\n {{ dbt_utils.generate_surrogate_key(['ticket_id','date_day'])}} as ticket_day_id\n from pivots\n\n)\n\nselect *\nfrom surrogate_key", "language": "sql", "refs": [{"name": "int_zendesk__field_history_enriched", "package": null, "version": null}], "sources": [["zendesk", "ticket_field_history"]], "metrics": [], "depends_on": {"macros": ["macro.dbt.is_incremental", "macro.dbt.date_trunc", "macro.dbt_utils.generate_surrogate_key", "macro.dbt.run_query"], "nodes": ["source.zendesk_source.zendesk.ticket_field_history", "model.zendesk.int_zendesk__field_history_enriched"]}, "compiled_path": "target/compiled/zendesk/models/ticket_history/int_zendesk__field_history_pivot.sql", "compiled": true, "compiled_code": "-- depends_on: \"postgres\".\"zendesk_integration_tests_55\".\"ticket_field_history_data\"\n\n\n\n\n \nwith __dbt__cte__int_zendesk__updater_information as (\nwith users as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), organizations as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__organization_aggregates\"\n\n), final as (\n select\n users.user_id as updater_user_id\n ,users.name as updater_name\n ,users.role as updater_role\n ,users.email as updater_email\n ,users.external_id as updater_external_id\n ,users.locale as updater_locale\n ,users.is_active as updater_is_active\n\n --If you use user tags this will be included, if not it will be ignored.\n \n ,users.user_tags as updater_user_tags\n \n\n ,users.last_login_at as updater_last_login_at\n ,users.time_zone as updater_time_zone\n ,organizations.organization_id as updater_organization_id\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n ,organizations.domain_names as updater_organization_domain_names\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,organizations.organization_tags as updater_organization_organization_tags\n \n from users\n\n left join organizations\n using(organization_id)\n)\n\nselect * \nfrom final\n), __dbt__cte__int_zendesk__field_history_enriched as (\nwith ticket_field_history as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_field_history\"\n\n), updater_info as (\n select *\n from 
__dbt__cte__int_zendesk__updater_information\n\n), final as (\n select\n ticket_field_history.*\n\n \n\n from ticket_field_history\n\n left join updater_info\n on ticket_field_history.user_id = updater_info.updater_user_id\n)\nselect *\nfrom final\n), field_history as (\n\n select\n ticket_id,\n field_name,\n valid_ending_at,\n valid_starting_at\n\n --Only runs if the user passes updater fields through the final ticket field history model\n \n\n -- doing this to figure out what values are actually null and what needs to be backfilled in zendesk__ticket_field_history\n ,case when value is null then 'is_null' else value end as value\n\n from __dbt__cte__int_zendesk__field_history_enriched\n \n where cast( date_trunc('day', valid_starting_at) as date) >= (select max(date_day) from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__field_history_pivot\")\n \n\n), event_order as (\n\n select \n *,\n row_number() over (\n partition by cast(valid_starting_at as date), ticket_id, field_name\n order by valid_starting_at desc\n ) as row_num\n from field_history\n\n), filtered as (\n\n -- Find the last event that occurs on each day for each ticket\n\n select *\n from event_order\n where row_num = 1\n\n), pivots as (\n\n -- For each column that is in both the ticket_field_history_columns variable and the field_history table,\n -- pivot out the value into it's own column. This will feed the daily slowly changing dimension model.\n\n select \n ticket_id,\n cast(date_trunc('day', valid_starting_at) as date) as date_day\n\n \n \n ,min(case when lower(field_name) = 'status' then filtered.value end) as status\n\n --Only runs if the user passes updater fields through the final ticket field history model\n \n \n \n ,min(case when lower(field_name) = 'assignee_id' then filtered.value end) as assignee_id\n\n --Only runs if the user passes updater fields through the final ticket field history model\n \n \n \n ,min(case when lower(field_name) = 'priority' then filtered.value end) as priority\n\n --Only runs if the user passes updater fields through the final ticket field history model\n \n \n \n from filtered\n group by 1,2\n\n), surrogate_key as (\n\n select \n *,\n md5(cast(coalesce(cast(ticket_id as TEXT), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(date_day as TEXT), '_dbt_utils_surrogate_key_null_') as TEXT)) as ticket_day_id\n from pivots\n\n)\n\nselect *\nfrom surrogate_key", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__updater_information", "sql": " __dbt__cte__int_zendesk__updater_information as (\nwith users as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), organizations as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__organization_aggregates\"\n\n), final as (\n select\n users.user_id as updater_user_id\n ,users.name as updater_name\n ,users.role as updater_role\n ,users.email as updater_email\n ,users.external_id as updater_external_id\n ,users.locale as updater_locale\n ,users.is_active as updater_is_active\n\n --If you use user tags this will be included, if not it will be ignored.\n \n ,users.user_tags as updater_user_tags\n \n\n ,users.last_login_at as updater_last_login_at\n ,users.time_zone as updater_time_zone\n ,organizations.organization_id as updater_organization_id\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n ,organizations.domain_names as 
updater_organization_domain_names\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,organizations.organization_tags as updater_organization_organization_tags\n \n from users\n\n left join organizations\n using(organization_id)\n)\n\nselect * \nfrom final\n)"}, {"id": "model.zendesk.int_zendesk__field_history_enriched", "sql": " __dbt__cte__int_zendesk__field_history_enriched as (\nwith ticket_field_history as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_field_history\"\n\n), updater_info as (\n select *\n from __dbt__cte__int_zendesk__updater_information\n\n), final as (\n select\n ticket_field_history.*\n\n \n\n from ticket_field_history\n\n left join updater_info\n on ticket_field_history.user_id = updater_info.updater_user_id\n)\nselect *\nfrom final\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__updater_information": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__updater_information", "resource_type": "model", "package_name": "zendesk", "path": "ticket_history/int_zendesk__updater_information.sql", "original_file_path": "models/ticket_history/int_zendesk__updater_information.sql", "unique_id": "model.zendesk.int_zendesk__updater_information", "fqn": ["zendesk", "ticket_history", "int_zendesk__updater_information"], "alias": "int_zendesk__updater_information", "checksum": {"name": "sha256", "checksum": "62a690646cff991c0e0b6e205440a070bb44aab8d4d9286714710c52a4c6677a"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1724705297.68442, "relation_name": null, "raw_code": "with users as (\n select *\n from {{ ref('int_zendesk__user_aggregates') }}\n\n), organizations as (\n select *\n from {{ ref('int_zendesk__organization_aggregates') }}\n\n), final as (\n select\n users.user_id as updater_user_id\n ,users.name as updater_name\n ,users.role as updater_role\n ,users.email as updater_email\n ,users.external_id as updater_external_id\n ,users.locale as updater_locale\n ,users.is_active as updater_is_active\n\n --If you use user tags this will be included, if not it will be ignored.\n {% if var('using_user_tags', True) %}\n ,users.user_tags as updater_user_tags\n {% endif %}\n\n ,users.last_login_at as updater_last_login_at\n ,users.time_zone as updater_time_zone\n ,organizations.organization_id as updater_organization_id\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n {% if var('using_domain_names', True) %}\n ,organizations.domain_names as 
updater_organization_domain_names\n {% endif %}\n\n --If you use organization tags this will be included, if not it will be ignored.\n {% if var('using_organization_tags', True) %}\n ,organizations.organization_tags as updater_organization_organization_tags\n {% endif %}\n from users\n\n left join organizations\n using(organization_id)\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__user_aggregates", "package": null, "version": null}, {"name": "int_zendesk__organization_aggregates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__user_aggregates", "model.zendesk.int_zendesk__organization_aggregates"]}, "compiled_path": "target/compiled/zendesk/models/ticket_history/int_zendesk__updater_information.sql", "compiled": true, "compiled_code": "with users as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), organizations as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__organization_aggregates\"\n\n), final as (\n select\n users.user_id as updater_user_id\n ,users.name as updater_name\n ,users.role as updater_role\n ,users.email as updater_email\n ,users.external_id as updater_external_id\n ,users.locale as updater_locale\n ,users.is_active as updater_is_active\n\n --If you use user tags this will be included, if not it will be ignored.\n \n ,users.user_tags as updater_user_tags\n \n\n ,users.last_login_at as updater_last_login_at\n ,users.time_zone as updater_time_zone\n ,organizations.organization_id as updater_organization_id\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n ,organizations.domain_names as updater_organization_domain_names\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,organizations.organization_tags as updater_organization_organization_tags\n \n from users\n\n left join organizations\n using(organization_id)\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__field_history_scd": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__field_history_scd", "resource_type": "model", "package_name": "zendesk", "path": "ticket_history/int_zendesk__field_history_scd.sql", "original_file_path": "models/ticket_history/int_zendesk__field_history_scd.sql", "unique_id": "model.zendesk.int_zendesk__field_history_scd", "fqn": ["zendesk", "ticket_history", "int_zendesk__field_history_scd"], "alias": "int_zendesk__field_history_scd", "checksum": {"name": "sha256", "checksum": "a748f9163dc6edaca993c8a3f5e3cecc9d057d3b47817d403e0b0778deda2466"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", 
"columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1724705297.688123, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__field_history_scd\"", "raw_code": "-- model needs to materialize as a table to avoid erroneous null values\n{{ config( materialized='table') }} \n\n{% set ticket_columns = adapter.get_columns_in_relation(ref('int_zendesk__field_history_pivot')) %}\n\nwith change_data as (\n\n select *\n from {{ ref('int_zendesk__field_history_pivot') }}\n\n), set_values as (\n\n-- each row of the pivoted table includes field values if that field was updated on that day\n-- we need to backfill to persist values that have been previously updated and are still valid \n select \n date_day as valid_from,\n ticket_id,\n ticket_day_id\n\n {% for col in ticket_columns if col.name|lower not in ['date_day','ending_day','ticket_id','ticket_day_id'] %} \n\n ,{{ col.name }}\n ,sum(case when {{ col.name }} is null \n then 0 \n else 1 \n end) over (order by ticket_id, date_day rows unbounded preceding) as {{ col.name }}_field_partition\n {% endfor %}\n\n from change_data\n\n), fill_values as (\n select\n valid_from, \n ticket_id,\n ticket_day_id\n\n {% for col in ticket_columns if col.name|lower not in ['date_day','ending_day','ticket_id','ticket_day_id'] %} \n\n ,first_value( {{ col.name }} ) over (partition by {{ col.name }}_field_partition, ticket_id order by valid_from asc rows between unbounded preceding and current row) as {{ col.name }}\n \n {% endfor %}\n from set_values\n) \n\nselect *\nfrom fill_values", "language": "sql", "refs": [{"name": "int_zendesk__field_history_pivot", "package": null, "version": null}, {"name": "int_zendesk__field_history_pivot", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__field_history_pivot"]}, "compiled_path": "target/compiled/zendesk/models/ticket_history/int_zendesk__field_history_scd.sql", "compiled": true, "compiled_code": "-- model needs to materialize as a table to avoid erroneous null values\n \n\n\n\nwith change_data as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__field_history_pivot\"\n\n), set_values as (\n\n-- each row of the pivoted table includes field values if that field was updated on that day\n-- we need to backfill to persist values that have been previously updated and are still valid \n select \n date_day as valid_from,\n ticket_id,\n ticket_day_id\n\n \n\n ,status\n ,sum(case when status is null \n then 0 \n else 1 \n end) over (order by ticket_id, date_day rows unbounded preceding) as status_field_partition\n \n\n ,assignee_id\n ,sum(case when assignee_id is null \n then 0 \n else 1 \n end) over (order by ticket_id, date_day rows unbounded preceding) as assignee_id_field_partition\n \n\n ,priority\n ,sum(case when priority is null \n then 0 \n else 1 \n end) over (order by ticket_id, date_day rows unbounded preceding) as priority_field_partition\n \n\n from change_data\n\n), fill_values as (\n select\n valid_from, \n ticket_id,\n ticket_day_id\n\n \n\n ,first_value( status ) over (partition by status_field_partition, ticket_id order by valid_from asc rows between unbounded preceding and current row) as status\n \n \n\n ,first_value( assignee_id ) over 
(partition by assignee_id_field_partition, ticket_id order by valid_from asc rows between unbounded preceding and current row) as assignee_id\n \n \n\n ,first_value( priority ) over (partition by priority_field_partition, ticket_id order by valid_from asc rows between unbounded preceding and current row) as priority\n \n \n from set_values\n) \n\nselect *\nfrom fill_values", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__field_calendar_spine": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__field_calendar_spine", "resource_type": "model", "package_name": "zendesk", "path": "ticket_history/int_zendesk__field_calendar_spine.sql", "original_file_path": "models/ticket_history/int_zendesk__field_calendar_spine.sql", "unique_id": "model.zendesk.int_zendesk__field_calendar_spine", "fqn": ["zendesk", "ticket_history", "int_zendesk__field_calendar_spine"], "alias": "int_zendesk__field_calendar_spine", "checksum": {"name": "sha256", "checksum": "79bd1e8de549cfc22088000a4171419b554d6b0fa931a1f2deaabaed7e01e72b"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "incremental", "incremental_strategy": "delete+insert", "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": "ticket_day_id", "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected", "partition_by": {"field": "date_day", "data_type": "date", "granularity": "month"}, "file_format": "delta"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "incremental", "partition_by": {"field": "date_day", "data_type": "date", "granularity": "month"}, "unique_key": "ticket_day_id", "incremental_strategy": "delete+insert", "file_format": "delta"}, "created_at": 1724705297.6928158, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__field_calendar_spine\"", "raw_code": "{{\n config(\n materialized='incremental',\n partition_by = {'field': 'date_day', 'data_type': 'date', 'granularity': 'month'} if target.type not in ['spark', 'databricks'] else ['date_day'],\n unique_key='ticket_day_id',\n incremental_strategy = 'merge' if target.type not in ('snowflake', 'postgres', 'redshift') else 'delete+insert',\n file_format='delta'\n )\n}}\n\nwith calendar as (\n\n select *\n from {{ ref('int_zendesk__calendar_spine') }}\n {% if is_incremental() %}\n where date_day >= (select max(date_day) from {{ this }})\n {% endif %}\n\n), ticket as (\n\n select \n *,\n -- closed tickets cannot be re-opened or updated, and solved tickets are automatically closed after a pre-defined number of days configured in your Zendesk settings\n cast( {{ dbt.date_trunc('day', \"case when status != 'closed' then \" ~ dbt.current_timestamp_backcompat() ~ \" else updated_at end\") }} as date) as open_until\n from {{ var('ticket') }}\n \n), joined as (\n\n select \n 
calendar.date_day,\n ticket.ticket_id\n from calendar\n inner join ticket\n on calendar.date_day >= cast(ticket.created_at as date)\n -- use this variable to extend the ticket's history past its close date (for reporting/data viz purposes :-)\n and {{ dbt.dateadd('month', var('ticket_field_history_extension_months', 0), 'ticket.open_until') }} >= calendar.date_day\n\n), surrogate_key as (\n\n select\n *,\n {{ dbt_utils.generate_surrogate_key(['date_day','ticket_id']) }} as ticket_day_id\n from joined\n\n)\n\nselect *\nfrom surrogate_key", "language": "sql", "refs": [{"name": "int_zendesk__calendar_spine", "package": null, "version": null}, {"name": "stg_zendesk__ticket", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.is_incremental", "macro.dbt.current_timestamp_backcompat", "macro.dbt.date_trunc", "macro.dbt.dateadd", "macro.dbt_utils.generate_surrogate_key"], "nodes": ["model.zendesk.int_zendesk__calendar_spine", "model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": "target/compiled/zendesk/models/ticket_history/int_zendesk__field_calendar_spine.sql", "compiled": true, "compiled_code": "\n\nwith __dbt__cte__int_zendesk__calendar_spine as (\n-- depends_on: \"postgres\".\"zendesk_integration_tests_55\".\"ticket_data\"\n\nwith spine as (\n\n \n \n \n\n \n \n \n \n\n \n\n \n\n\n\n\n\n\n\n\nwith rawdata as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n + \n \n p8.generated_number * power(2, 8)\n + \n \n p9.generated_number * power(2, 9)\n + \n \n p10.generated_number * power(2, 10)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n cross join \n \n p as p8\n cross join \n \n p as p9\n cross join \n \n p as p10\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 1663\n order by generated_number\n\n\n\n),\n\nall_periods as (\n\n select (\n \n\n cast('2020-02-13' as date) + ((interval '1 day') * (row_number() over (order by 1) - 1))\n\n\n ) as date_day\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_day <= \n\n current_date + ((interval '1 week') * (1))\n\n\n\n)\n\nselect * from filtered\n\n\n\n), recast as (\n\n select cast(date_day as date) as date_day\n from spine\n\n)\n\nselect *\nfrom recast\n), calendar as (\n\n select *\n from __dbt__cte__int_zendesk__calendar_spine\n \n where date_day >= (select max(date_day) from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__field_calendar_spine\")\n \n\n), ticket as (\n\n select \n *,\n -- closed tickets cannot be re-opened or updated, and solved tickets are automatically closed after a pre-defined number of days configured in your Zendesk settings\n cast( date_trunc('day', case when status != 'closed' then \n current_timestamp::timestamp\n else updated_at end) as date) as open_until\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket\"\n \n), joined as (\n\n select \n 
calendar.date_day,\n ticket.ticket_id\n from calendar\n inner join ticket\n on calendar.date_day >= cast(ticket.created_at as date)\n -- use this variable to extend the ticket's history past its close date (for reporting/data viz purposes :-)\n and \n\n ticket.open_until + ((interval '1 month') * (0))\n\n >= calendar.date_day\n\n), surrogate_key as (\n\n select\n *,\n md5(cast(coalesce(cast(date_day as TEXT), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(ticket_id as TEXT), '_dbt_utils_surrogate_key_null_') as TEXT)) as ticket_day_id\n from joined\n\n)\n\nselect *\nfrom surrogate_key", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__calendar_spine", "sql": " __dbt__cte__int_zendesk__calendar_spine as (\n-- depends_on: \"postgres\".\"zendesk_integration_tests_55\".\"ticket_data\"\n\nwith spine as (\n\n \n \n \n\n \n \n \n \n\n \n\n \n\n\n\n\n\n\n\n\nwith rawdata as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n + \n \n p8.generated_number * power(2, 8)\n + \n \n p9.generated_number * power(2, 9)\n + \n \n p10.generated_number * power(2, 10)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n cross join \n \n p as p8\n cross join \n \n p as p9\n cross join \n \n p as p10\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 1663\n order by generated_number\n\n\n\n),\n\nall_periods as (\n\n select (\n \n\n cast('2020-02-13' as date) + ((interval '1 day') * (row_number() over (order by 1) - 1))\n\n\n ) as date_day\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_day <= \n\n current_date + ((interval '1 week') * (1))\n\n\n\n)\n\nselect * from filtered\n\n\n\n), recast as (\n\n select cast(date_day as date) as date_day\n from spine\n\n)\n\nselect *\nfrom recast\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_work_time_calendar": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__ticket_work_time_calendar", "resource_type": "model", "package_name": "zendesk", "path": "agent_work_time/int_zendesk__ticket_work_time_calendar.sql", "original_file_path": "models/agent_work_time/int_zendesk__ticket_work_time_calendar.sql", "unique_id": "model.zendesk.int_zendesk__ticket_work_time_calendar", "fqn": ["zendesk", "agent_work_time", "int_zendesk__ticket_work_time_calendar"], "alias": "int_zendesk__ticket_work_time_calendar", "checksum": {"name": "sha256", "checksum": "e3cda559c663cc0e6ef1defcf5d8c418bbb9c20bb60aa118fc698579b3c37814"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": 
{}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1724705297.698889, "relation_name": null, "raw_code": "with ticket_historical_status as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_status') }}\n\n), calendar_minutes as (\n \n select \n ticket_id,\n status,\n case when status in ('pending') then status_duration_calendar_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold') then status_duration_calendar_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold', 'pending') then status_duration_calendar_minutes \n else 0 end as solve_time_in_minutes, \n case when status in ('new', 'open') then status_duration_calendar_minutes\n else 0 end as agent_work_time_in_minutes,\n case when status in ('hold') then status_duration_calendar_minutes\n else 0 end as on_hold_time_in_minutes,\n case when status = 'new' then status_duration_calendar_minutes\n else 0 end as new_status_duration_minutes,\n case when status = 'open' then status_duration_calendar_minutes\n else 0 end as open_status_duration_minutes,\n case when status = 'deleted' then 1\n else 0 end as ticket_deleted,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_status_assignment_date,\n case when lag(status) over (partition by ticket_id order by valid_starting_at) = 'deleted' and status != 'deleted'\n then 1\n else 0\n end as ticket_recoveries\n\n from ticket_historical_status\n\n)\n\nselect \n ticket_id,\n last_status_assignment_date,\n sum(ticket_deleted) as ticket_deleted_count,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_calendar_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_calendar_minutes,\n sum(solve_time_in_minutes) as solve_time_in_calendar_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_calendar_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_calendar_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_calendar_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_calendar_minutes,\n sum(ticket_recoveries) as total_ticket_recoveries\nfrom calendar_minutes\ngroup by 1, 2", "language": "sql", "refs": [{"name": "int_zendesk__ticket_historical_status", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__ticket_historical_status"]}, "compiled_path": "target/compiled/zendesk/models/agent_work_time/int_zendesk__ticket_work_time_calendar.sql", "compiled": true, "compiled_code": "with ticket_historical_status as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n\n), calendar_minutes as (\n \n select \n ticket_id,\n status,\n case when status in ('pending') then status_duration_calendar_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold') then 
status_duration_calendar_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold', 'pending') then status_duration_calendar_minutes \n else 0 end as solve_time_in_minutes, \n case when status in ('new', 'open') then status_duration_calendar_minutes\n else 0 end as agent_work_time_in_minutes,\n case when status in ('hold') then status_duration_calendar_minutes\n else 0 end as on_hold_time_in_minutes,\n case when status = 'new' then status_duration_calendar_minutes\n else 0 end as new_status_duration_minutes,\n case when status = 'open' then status_duration_calendar_minutes\n else 0 end as open_status_duration_minutes,\n case when status = 'deleted' then 1\n else 0 end as ticket_deleted,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_status_assignment_date,\n case when lag(status) over (partition by ticket_id order by valid_starting_at) = 'deleted' and status != 'deleted'\n then 1\n else 0\n end as ticket_recoveries\n\n from ticket_historical_status\n\n)\n\nselect \n ticket_id,\n last_status_assignment_date,\n sum(ticket_deleted) as ticket_deleted_count,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_calendar_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_calendar_minutes,\n sum(solve_time_in_minutes) as solve_time_in_calendar_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_calendar_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_calendar_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_calendar_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_calendar_minutes,\n sum(ticket_recoveries) as total_ticket_recoveries\nfrom calendar_minutes\ngroup by 1, 2", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_work_time_business": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__ticket_work_time_business", "resource_type": "model", "package_name": "zendesk", "path": "agent_work_time/int_zendesk__ticket_work_time_business.sql", "original_file_path": "models/agent_work_time/int_zendesk__ticket_work_time_business.sql", "unique_id": "model.zendesk.int_zendesk__ticket_work_time_business", "fqn": ["zendesk", "agent_work_time", "int_zendesk__ticket_work_time_business"], "alias": "int_zendesk__ticket_work_time_business", "checksum": {"name": "sha256", "checksum": "abc04dab22fb0e16e5cdb757bab3b4edb0573dcc230d769126e24a88a3f53430"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", 
"materialized": "ephemeral", "enabled": true}, "created_at": 1724705297.7011049, "relation_name": null, "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\nwith ticket_historical_status as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_status') }}\n\n), ticket_schedules as (\n\n select *\n from {{ ref('int_zendesk__ticket_schedules') }}\n\n), schedule as (\n\n select *\n from {{ ref('int_zendesk__schedule_spine') }}\n\n), ticket_status_crossed_with_schedule as (\n \n select\n ticket_historical_status.ticket_id,\n ticket_historical_status.status as ticket_status,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as status_schedule_start,\n least(valid_ending_at, schedule_invalidated_at) as status_schedule_end,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n ticket_historical_status.valid_starting_at as status_valid_starting_at,\n ticket_historical_status.valid_ending_at as status_valid_ending_at\n\n from ticket_historical_status\n left join ticket_schedules\n on ticket_historical_status.ticket_id = ticket_schedules.ticket_id\n -- making sure there is indeed real overlap\n where {{ dbt.datediff('greatest(valid_starting_at, schedule_created_at)', 'least(valid_ending_at, schedule_invalidated_at)', 'second') }} > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n ticket_status,\n schedule_id,\n status_schedule_start,\n status_schedule_end,\n status_valid_starting_at,\n status_valid_ending_at,\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('ticket_status_crossed_with_schedule.status_schedule_start','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n \"cast(ticket_status_crossed_with_schedule.status_schedule_start as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as start_time_in_minutes_from_week,\n ({{ dbt.datediff(\n 'ticket_status_crossed_with_schedule.status_schedule_start',\n 'ticket_status_crossed_with_schedule.status_schedule_end',\n 'second') }} /60\n ) as raw_delta_in_minutes,\n {{ dbt_date.week_start('ticket_status_crossed_with_schedule.status_schedule_start','UTC') }} as start_week_date\n\n from ticket_status_crossed_with_schedule\n {{ dbt_utils.group_by(n=7) }}\n\n), weeks as (\n\n {{ dbt_utils.generate_series(208) }}\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_periods as (\n\n select\n\n weeks_cross_ticket_full_solved_time.*,\n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods as (\n \n select \n weekly_periods.ticket_id,\n 
weekly_periods.week_number,\n weekly_periods.schedule_id,\n weekly_periods.ticket_status,\n weekly_periods.ticket_week_start_time,\n weekly_periods.ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(weekly_periods.ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on \n ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time', from_date_or_timestamp='start_week_date') }} as {{ dbt.type_timestamp() }}) > cast(schedule.valid_from as {{ dbt.type_timestamp() }})\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time', from_date_or_timestamp='start_week_date') }} as {{ dbt.type_timestamp() }}) < cast(schedule.valid_until as {{ dbt.type_timestamp() }})\n \n), business_minutes as (\n \n select \n ticket_id,\n ticket_status,\n case when ticket_status in ('pending') then scheduled_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold') then scheduled_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold', 'pending') then scheduled_minutes\n else 0 end as solve_time_in_minutes,\n case when ticket_status in ('new', 'open') then scheduled_minutes\n else 0 end as agent_work_time_in_minutes,\n case when ticket_status in ('hold') then scheduled_minutes\n else 0 end as on_hold_time_in_minutes,\n case when ticket_status = 'new' then scheduled_minutes\n else 0 end as new_status_duration_minutes,\n case when ticket_status = 'open' then scheduled_minutes\n else 0 end as open_status_duration_minutes\n from intercepted_periods\n\n)\n \n select \n ticket_id,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_business_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_business_minutes,\n sum(solve_time_in_minutes) as solve_time_in_business_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_business_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_business_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_business_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_business_minutes\n from business_minutes\n group by 1", "language": "sql", "refs": [{"name": "int_zendesk__ticket_historical_status", "package": null, "version": null}, {"name": "int_zendesk__ticket_schedules", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt_utils.group_by", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd"], "nodes": ["model.zendesk.int_zendesk__ticket_historical_status", "model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__schedule_spine"]}, "compiled_path": 
"target/compiled/zendesk/models/agent_work_time/int_zendesk__ticket_work_time_business.sql", "compiled": true, "compiled_code": "\n\nwith ticket_historical_status as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_status_crossed_with_schedule as (\n \n select\n ticket_historical_status.ticket_id,\n ticket_historical_status.status as ticket_status,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as status_schedule_start,\n least(valid_ending_at, schedule_invalidated_at) as status_schedule_end,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n ticket_historical_status.valid_starting_at as status_valid_starting_at,\n ticket_historical_status.valid_ending_at as status_valid_ending_at\n\n from ticket_historical_status\n left join ticket_schedules\n on ticket_historical_status.ticket_id = ticket_schedules.ticket_id\n -- making sure there is indeed real overlap\n where \n (\n (\n (\n ((least(valid_ending_at, schedule_invalidated_at))::date - (greatest(valid_starting_at, schedule_created_at))::date)\n * 24 + date_part('hour', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('hour', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + date_part('minute', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('minute', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + floor(date_part('second', (least(valid_ending_at, schedule_invalidated_at))::timestamp)) - floor(date_part('second', (greatest(valid_starting_at, schedule_created_at))::timestamp)))\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n ticket_status,\n schedule_id,\n status_schedule_start,\n status_schedule_end,\n status_valid_starting_at,\n status_valid_ending_at,\n (\n (\n (\n (\n ((cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_status_crossed_with_schedule.status_schedule_start as 
timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n (\n (\n (\n (\n ((ticket_status_crossed_with_schedule.status_schedule_end)::date - (ticket_status_crossed_with_schedule.status_schedule_start)::date)\n * 24 + date_part('hour', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp) - date_part('hour', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp))\n * 60 + date_part('minute', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp) - date_part('minute', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp))\n * 60 + floor(date_part('second', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp)) - floor(date_part('second', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp)))\n /60\n ) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n\n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as integer) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_periods as (\n\n select\n\n weeks_cross_ticket_full_solved_time.*,\n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods as (\n \n select \n weekly_periods.ticket_id,\n weekly_periods.week_number,\n weekly_periods.schedule_id,\n weekly_periods.ticket_status,\n weekly_periods.ticket_week_start_time,\n weekly_periods.ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - 
greatest(weekly_periods.ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on \n ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as timestamp) > cast(schedule.valid_from as timestamp)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as timestamp) < cast(schedule.valid_until as timestamp)\n \n), business_minutes as (\n \n select \n ticket_id,\n ticket_status,\n case when ticket_status in ('pending') then scheduled_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold') then scheduled_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold', 'pending') then scheduled_minutes\n else 0 end as solve_time_in_minutes,\n case when ticket_status in ('new', 'open') then scheduled_minutes\n else 0 end as agent_work_time_in_minutes,\n case when ticket_status in ('hold') then scheduled_minutes\n else 0 end as on_hold_time_in_minutes,\n case when ticket_status = 'new' then scheduled_minutes\n else 0 end as new_status_duration_minutes,\n case when ticket_status = 'open' then scheduled_minutes\n else 0 end as open_status_duration_minutes\n from intercepted_periods\n\n)\n \n select \n ticket_id,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_business_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_business_minutes,\n sum(solve_time_in_minutes) as solve_time_in_business_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_business_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_business_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_business_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_business_minutes\n from business_minutes\n group by 1", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_resolution_times_calendar": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__ticket_resolution_times_calendar", "resource_type": "model", "package_name": "zendesk", "path": "resolution_times/int_zendesk__ticket_resolution_times_calendar.sql", "original_file_path": "models/resolution_times/int_zendesk__ticket_resolution_times_calendar.sql", "unique_id": "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "fqn": ["zendesk", "resolution_times", "int_zendesk__ticket_resolution_times_calendar"], "alias": "int_zendesk__ticket_resolution_times_calendar", "checksum": {"name": "sha256", "checksum": "0c3e1e19084b3e1829c18b80315e8f64aaf63e94522fc56d64652e89b02afadc"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", 
"incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1724705297.7232351, "relation_name": null, "raw_code": "with historical_solved_status as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_status') }}\n where status = 'solved'\n\n), ticket as (\n\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), ticket_historical_assignee as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_assignee') }}\n\n), ticket_historical_group as (\n\n select *\n from {{ ref('int_zendesk__ticket_historical_group') }}\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n {{ dbt.datediff(\n 'ticket_historical_assignee.first_agent_assignment_date', \n 'solved_times.last_solved_at',\n 'minute' ) }} as first_assignment_to_resolution_calendar_minutes,\n {{ dbt.datediff(\n 'ticket_historical_assignee.last_agent_assignment_date', \n 'solved_times.last_solved_at',\n 'minute' ) }} as last_assignment_to_resolution_calendar_minutes,\n {{ dbt.datediff(\n 'ticket.created_at', \n 'solved_times.first_solved_at',\n 'minute' ) }} as first_resolution_calendar_minutes,\n {{ dbt.datediff(\n 'ticket.created_at', \n 'solved_times.last_solved_at',\n 'minute') }} as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)", "language": "sql", "refs": [{"name": "int_zendesk__ticket_historical_status", "package": null, "version": null}, {"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "int_zendesk__ticket_historical_assignee", "package": null, "version": null}, {"name": "int_zendesk__ticket_historical_group", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff"], "nodes": ["model.zendesk.int_zendesk__ticket_historical_status", "model.zendesk_source.stg_zendesk__ticket", "model.zendesk.int_zendesk__ticket_historical_assignee", 
"model.zendesk.int_zendesk__ticket_historical_group"]}, "compiled_path": "target/compiled/zendesk/models/resolution_times/int_zendesk__ticket_resolution_times_calendar.sql", "compiled": true, "compiled_code": "with historical_solved_status as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), ticket as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as first_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as final_resolution_calendar_minutes\n\n from 
ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_first_resolution_time_business": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__ticket_first_resolution_time_business", "resource_type": "model", "package_name": "zendesk", "path": "resolution_times/int_zendesk__ticket_first_resolution_time_business.sql", "original_file_path": "models/resolution_times/int_zendesk__ticket_first_resolution_time_business.sql", "unique_id": "model.zendesk.int_zendesk__ticket_first_resolution_time_business", "fqn": ["zendesk", "resolution_times", "int_zendesk__ticket_first_resolution_time_business"], "alias": "int_zendesk__ticket_first_resolution_time_business", "checksum": {"name": "sha256", "checksum": "b26eaf93a2a443204c26eba5cf4dd0fcec83a4e9ec3ab6a6abdd1b58273bddbd"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral", "enabled": true}, "created_at": 1724705297.7282581, "relation_name": null, "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\nwith ticket_resolution_times_calendar as (\n\n select *\n from {{ ref('int_zendesk__ticket_resolution_times_calendar') }}\n\n), ticket_schedules as (\n\n select *\n from {{ ref('int_zendesk__ticket_schedules') }}\n\n), schedule as (\n\n select *\n from {{ ref('int_zendesk__schedule_spine') }}\n\n), ticket_first_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in to determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.first_solved_at) as first_solved_at,\n\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('ticket_schedules.schedule_created_at','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n \"cast(ticket_schedules.schedule_created_at as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n {{ dbt.datediff(\n 'ticket_schedules.schedule_created_at',\n 'least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at))',\n 'second') }}/60\n )) as raw_delta_in_minutes,\n {{ dbt_date.week_start('ticket_schedules.schedule_created_at','UTC') }} as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on 
ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n {{ dbt_utils.generate_series(208) }}\n\n), weeks_cross_ticket_first_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_resolution_time.*,\n cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number\n\n from ticket_first_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_first_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time\n \n from weeks_cross_ticket_first_resolution_time\n\n), intercepted_periods as (\n\n select ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time', from_date_or_timestamp='start_week_date') }} as {{ dbt.type_timestamp() }}) > cast(schedule.valid_from as {{ dbt.type_timestamp() }})\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time', from_date_or_timestamp='start_week_date') }} as {{ dbt.type_timestamp() }}) < cast(schedule.valid_until as {{ dbt.type_timestamp() }})\n\n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as first_resolution_business_minutes\n from intercepted_periods\n group by 1", "language": "sql", "refs": [{"name": "int_zendesk__ticket_resolution_times_calendar", "package": null, "version": null}, {"name": "int_zendesk__ticket_schedules", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt.datediff", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd"], "nodes": ["model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__schedule_spine"]}, "compiled_path": "target/compiled/zendesk/models/resolution_times/int_zendesk__ticket_first_resolution_time_business.sql", "compiled": true, "compiled_code": "\n\nwith __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from 
\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), ticket as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as first_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n), ticket_resolution_times_calendar as (\n\n select *\n from 
__dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_first_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in to determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.first_solved_at) as first_solved_at,\n\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n 
p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_resolution_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_first_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_first_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_first_resolution_time\n\n), intercepted_periods as (\n\n select ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as timestamp) > cast(schedule.valid_from as timestamp)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as timestamp) < cast(schedule.valid_until as timestamp)\n\n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as first_resolution_business_minutes\n from intercepted_periods\n group by 1", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "sql": " __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), ticket as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from 
\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as first_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_full_resolution_time_business": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__ticket_full_resolution_time_business", "resource_type": "model", "package_name": "zendesk", "path": "resolution_times/int_zendesk__ticket_full_resolution_time_business.sql", "original_file_path": 
"models/resolution_times/int_zendesk__ticket_full_resolution_time_business.sql", "unique_id": "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "fqn": ["zendesk", "resolution_times", "int_zendesk__ticket_full_resolution_time_business"], "alias": "int_zendesk__ticket_full_resolution_time_business", "checksum": {"name": "sha256", "checksum": "f9e2ed998cdaa9765b1aa3dccd7b7e8b2dc28779d555ca845d1b0115a26d8577"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral", "enabled": true}, "created_at": 1724705297.73744, "relation_name": null, "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\nwith ticket_resolution_times_calendar as (\n\n select *\n from {{ ref('int_zendesk__ticket_resolution_times_calendar') }}\n\n), ticket_schedules as (\n\n select *\n from {{ ref('int_zendesk__ticket_schedules') }}\n\n), schedule as (\n\n select *\n from {{ ref('int_zendesk__schedule_spine') }}\n\n), ticket_full_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.last_solved_at) as last_solved_at,\n ({{ dbt.datediff(\n \"cast(\" ~ dbt_date.week_start('ticket_schedules.schedule_created_at','UTC') ~ \"as \" ~ dbt.type_timestamp() ~ \")\", \n \"cast(ticket_schedules.schedule_created_at as \" ~ dbt.type_timestamp() ~ \")\",\n 'second') }} /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n {{ dbt.datediff(\n 'ticket_schedules.schedule_created_at',\n 'least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at))',\n 'second') }}/60\n )) as raw_delta_in_minutes,\n {{ dbt_date.week_start('ticket_schedules.schedule_created_at','UTC') }} as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n {{ dbt_utils.generate_series(208) }}\n\n), weeks_cross_ticket_full_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_full_resolution_time.*,\n cast(generated_number - 1 as {{ dbt.type_int() }}) as week_number\n\n from ticket_full_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_full_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as {{ dbt.type_int() }}) as 
ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as {{ dbt.type_int() }}) as ticket_week_end_time\n \n from weeks_cross_ticket_full_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_end_time', from_date_or_timestamp='start_week_date') }} as {{ dbt.type_timestamp() }}) > cast(schedule.valid_from as {{ dbt.type_timestamp() }})\n and cast( {{ dbt.dateadd(datepart='minute', interval='week_number * (7*24*60) + ticket_week_start_time', from_date_or_timestamp='start_week_date') }} as {{ dbt.type_timestamp() }}) < cast(schedule.valid_until as {{ dbt.type_timestamp() }})\n \n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as full_resolution_business_minutes\n from intercepted_periods\n group by 1", "language": "sql", "refs": [{"name": "int_zendesk__ticket_resolution_times_calendar", "package": null, "version": null}, {"name": "int_zendesk__ticket_schedules", "package": null, "version": null}, {"name": "int_zendesk__schedule_spine", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt_date.week_start", "macro.dbt.type_timestamp", "macro.dbt.datediff", "macro.dbt_utils.generate_series", "macro.dbt.type_int", "macro.dbt.dateadd"], "nodes": ["model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__schedule_spine"]}, "compiled_path": "target/compiled/zendesk/models/resolution_times/int_zendesk__ticket_full_resolution_time_business.sql", "compiled": true, "compiled_code": "\n\nwith __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), ticket as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n 
solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as first_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n), ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_full_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.last_solved_at) as last_solved_at,\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + 
((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_full_resolution_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from 
ticket_full_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_full_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_full_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as timestamp) > cast(schedule.valid_from as timestamp)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as timestamp) < cast(schedule.valid_until as timestamp)\n \n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as full_resolution_business_minutes\n from intercepted_periods\n group by 1", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "sql": " __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), ticket as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when 
solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as first_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__updates": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__updates", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__updates.sql", "original_file_path": "models/intermediate/int_zendesk__updates.sql", "unique_id": "model.zendesk.int_zendesk__updates", "fqn": ["zendesk", "intermediate", "int_zendesk__updates"], "alias": "int_zendesk__updates", "checksum": {"name": "sha256", "checksum": "3ecf6bfe15bd7a820b369379fff7dadf236c00ce2fe6c7e335c73c07ba67de0e"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": 
"zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1724705297.7457001, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__updates\"", "raw_code": "with ticket_history as (\n select *\n from {{ ref('stg_zendesk__ticket_field_history') }}\n\n), ticket_comment as (\n select *\n from {{ ref('stg_zendesk__ticket_comment') }}\n\n), tickets as (\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), updates_union as (\n select \n ticket_id,\n field_name,\n value,\n null as is_public,\n user_id,\n valid_starting_at,\n valid_ending_at\n from ticket_history\n\n union all\n\n select\n ticket_id,\n cast('comment' as {{ dbt.type_string() }}) as field_name,\n body as value,\n is_public,\n user_id,\n created_at as valid_starting_at,\n lead(created_at) over (partition by ticket_id order by created_at) as valid_ending_at\n from ticket_comment\n\n), final as (\n select\n updates_union.*,\n tickets.created_at as ticket_created_date\n from updates_union\n\n left join tickets\n on tickets.ticket_id = updates_union.ticket_id\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_field_history", "package": null, "version": null}, {"name": "stg_zendesk__ticket_comment", "package": null, "version": null}, {"name": "stg_zendesk__ticket", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_string"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_field_history", "model.zendesk_source.stg_zendesk__ticket_comment", "model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__updates.sql", "compiled": true, "compiled_code": "with ticket_history as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_field_history\"\n\n), ticket_comment as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_comment\"\n\n), tickets as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), updates_union as (\n select \n ticket_id,\n field_name,\n value,\n null as is_public,\n user_id,\n valid_starting_at,\n valid_ending_at\n from ticket_history\n\n union all\n\n select\n ticket_id,\n cast('comment' as TEXT) as field_name,\n body as value,\n is_public,\n user_id,\n created_at as valid_starting_at,\n lead(created_at) over (partition by ticket_id order by created_at) as valid_ending_at\n from ticket_comment\n\n), final as (\n select\n updates_union.*,\n tickets.created_at as ticket_created_date\n from updates_union\n\n left join tickets\n on tickets.ticket_id = updates_union.ticket_id\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_historical_assignee": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__ticket_historical_assignee", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__ticket_historical_assignee.sql", "original_file_path": "models/intermediate/int_zendesk__ticket_historical_assignee.sql", "unique_id": "model.zendesk.int_zendesk__ticket_historical_assignee", "fqn": ["zendesk", "intermediate", 
"int_zendesk__ticket_historical_assignee"], "alias": "int_zendesk__ticket_historical_assignee", "checksum": {"name": "sha256", "checksum": "7ae5d5632274b7ccf900910f272cf791e7e976e48fbd170adca647955ab5e2ae"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1724705297.7494059, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"", "raw_code": "with assignee_updates as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n where field_name = 'assignee_id'\n\n), calculate_metrics as (\n select\n ticket_id,\n field_name as assignee_id,\n value,\n ticket_created_date,\n valid_starting_at,\n lag(valid_starting_at) over (partition by ticket_id order by valid_starting_at) as previous_update,\n lag(value) over (partition by ticket_id order by valid_starting_at) as previous_assignee,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_agent_assignment_date,\n first_value(value) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_assignee_id,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_agent_assignment_date,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_assignee_id,\n count(value) over (partition by ticket_id) as assignee_stations_count\n from assignee_updates\n\n), unassigned_time as (\n select\n ticket_id,\n sum(case when assignee_id is not null and previous_assignee is null \n then {{ dbt.datediff(\"coalesce(previous_update, ticket_created_date)\", \"valid_starting_at\", 'second') }} / 60\n else 0\n end) as ticket_unassigned_duration_calendar_minutes,\n count(distinct value) as unique_assignee_count\n from calculate_metrics\n\n group by 1\n\n), window_group as (\n select\n calculate_metrics.ticket_id,\n calculate_metrics.first_agent_assignment_date,\n calculate_metrics.first_assignee_id,\n calculate_metrics.last_agent_assignment_date,\n calculate_metrics.last_assignee_id,\n calculate_metrics.assignee_stations_count\n from calculate_metrics\n\n {{ dbt_utils.group_by(n=6) }}\n\n), final as (\n select\n window_group.*,\n unassigned_time.unique_assignee_count,\n unassigned_time.ticket_unassigned_duration_calendar_minutes\n from window_group\n\n left join unassigned_time\n using(ticket_id)\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt_utils.group_by"], "nodes": ["model.zendesk.int_zendesk__updates"]}, 
"compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__ticket_historical_assignee.sql", "compiled": true, "compiled_code": "with assignee_updates as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'assignee_id'\n\n), calculate_metrics as (\n select\n ticket_id,\n field_name as assignee_id,\n value,\n ticket_created_date,\n valid_starting_at,\n lag(valid_starting_at) over (partition by ticket_id order by valid_starting_at) as previous_update,\n lag(value) over (partition by ticket_id order by valid_starting_at) as previous_assignee,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_agent_assignment_date,\n first_value(value) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_assignee_id,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_agent_assignment_date,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_assignee_id,\n count(value) over (partition by ticket_id) as assignee_stations_count\n from assignee_updates\n\n), unassigned_time as (\n select\n ticket_id,\n sum(case when assignee_id is not null and previous_assignee is null \n then \n (\n (\n (\n ((valid_starting_at)::date - (coalesce(previous_update, ticket_created_date))::date)\n * 24 + date_part('hour', (valid_starting_at)::timestamp) - date_part('hour', (coalesce(previous_update, ticket_created_date))::timestamp))\n * 60 + date_part('minute', (valid_starting_at)::timestamp) - date_part('minute', (coalesce(previous_update, ticket_created_date))::timestamp))\n * 60 + floor(date_part('second', (valid_starting_at)::timestamp)) - floor(date_part('second', (coalesce(previous_update, ticket_created_date))::timestamp)))\n / 60\n else 0\n end) as ticket_unassigned_duration_calendar_minutes,\n count(distinct value) as unique_assignee_count\n from calculate_metrics\n\n group by 1\n\n), window_group as (\n select\n calculate_metrics.ticket_id,\n calculate_metrics.first_agent_assignment_date,\n calculate_metrics.first_assignee_id,\n calculate_metrics.last_agent_assignment_date,\n calculate_metrics.last_assignee_id,\n calculate_metrics.assignee_stations_count\n from calculate_metrics\n\n group by 1,2,3,4,5,6\n\n), final as (\n select\n window_group.*,\n unassigned_time.unique_assignee_count,\n unassigned_time.ticket_unassigned_duration_calendar_minutes\n from window_group\n\n left join unassigned_time\n using(ticket_id)\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_historical_status": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__ticket_historical_status", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__ticket_historical_status.sql", "original_file_path": "models/intermediate/int_zendesk__ticket_historical_status.sql", "unique_id": "model.zendesk.int_zendesk__ticket_historical_status", "fqn": ["zendesk", "intermediate", "int_zendesk__ticket_historical_status"], "alias": "int_zendesk__ticket_historical_status", 
"checksum": {"name": "sha256", "checksum": "1c8a86acea05e857271d7f0a2b8f3ec6c09481ecdb9278f922b2df35d5410a48"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1724705297.753907, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_status\"", "raw_code": "-- To do -- can we delete ticket_status_counter and unique_status_counter?\n\nwith ticket_status_history as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n where field_name = 'status'\n\n)\n\n select\n \n ticket_id,\n valid_starting_at,\n valid_ending_at,\n {{ dbt.datediff(\n 'valid_starting_at',\n \"coalesce(valid_ending_at, \" ~ dbt.current_timestamp_backcompat() ~ \")\",\n 'minute') }} as status_duration_calendar_minutes,\n value as status,\n -- MIGHT BE ABLE TO DELETE ROWS BELOW\n row_number() over (partition by ticket_id order by valid_starting_at) as ticket_status_counter,\n row_number() over (partition by ticket_id, value order by valid_starting_at) as unique_status_counter\n\n from ticket_status_history", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.current_timestamp_backcompat", "macro.dbt.datediff"], "nodes": ["model.zendesk.int_zendesk__updates"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__ticket_historical_status.sql", "compiled": true, "compiled_code": "-- To do -- can we delete ticket_status_counter and unique_status_counter?\n\nwith ticket_status_history as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'status'\n\n)\n\n select\n \n ticket_id,\n valid_starting_at,\n valid_ending_at,\n \n (\n (\n ((coalesce(valid_ending_at, \n current_timestamp::timestamp\n))::date - (valid_starting_at)::date)\n * 24 + date_part('hour', (coalesce(valid_ending_at, \n current_timestamp::timestamp\n))::timestamp) - date_part('hour', (valid_starting_at)::timestamp))\n * 60 + date_part('minute', (coalesce(valid_ending_at, \n current_timestamp::timestamp\n))::timestamp) - date_part('minute', (valid_starting_at)::timestamp))\n as status_duration_calendar_minutes,\n value as status,\n -- MIGHT BE ABLE TO DELETE ROWS BELOW\n row_number() over (partition by ticket_id order by valid_starting_at) as ticket_status_counter,\n row_number() over (partition by ticket_id, value order by valid_starting_at) as unique_status_counter\n\n from ticket_status_history", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, 
"model.zendesk.int_zendesk__user_aggregates": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__user_aggregates", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__user_aggregates.sql", "original_file_path": "models/intermediate/int_zendesk__user_aggregates.sql", "unique_id": "model.zendesk.int_zendesk__user_aggregates", "fqn": ["zendesk", "intermediate", "int_zendesk__user_aggregates"], "alias": "int_zendesk__user_aggregates", "checksum": {"name": "sha256", "checksum": "ae23565fdc62d13c33ddb03f3b25a5e288ec6e6ffe6b57cb01496be6ecd2b73f"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1724705297.757905, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__user_aggregates\"", "raw_code": "with users as (\n select *\n from {{ ref('stg_zendesk__user') }}\n\n--If you use user tags this will be included, if not it will be ignored.\n{% if var('using_user_tags', True) %}\n), user_tags as (\n\n select *\n from {{ ref('stg_zendesk__user_tag') }}\n \n), user_tag_aggregate as (\n select\n user_tags.user_id,\n {{ fivetran_utils.string_agg( 'user_tags.tags', \"', '\" )}} as user_tags\n from user_tags\n group by 1\n\n{% endif %}\n\n), final as (\n select \n users.*\n\n --If you use user tags this will be included, if not it will be ignored.\n {% if var('using_user_tags', True) %}\n ,user_tag_aggregate.user_tags\n {% endif %}\n from users\n\n --If you use user tags this will be included, if not it will be ignored.\n {% if var('using_user_tags', True) %}\n left join user_tag_aggregate\n using(user_id)\n {% endif %}\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__user", "package": null, "version": null}, {"name": "stg_zendesk__user_tag", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.string_agg"], "nodes": ["model.zendesk_source.stg_zendesk__user", "model.zendesk_source.stg_zendesk__user_tag"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__user_aggregates.sql", "compiled": true, "compiled_code": "with users as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user\"\n\n--If you use user tags this will be included, if not it will be ignored.\n\n), user_tags as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user_tag\"\n \n), user_tag_aggregate as (\n select\n user_tags.user_id,\n \n string_agg(user_tags.tags, ', ')\n\n as user_tags\n from user_tags\n group by 1\n\n\n\n), final as (\n select \n users.*\n\n --If you use user tags this will be included, if not it will be ignored.\n \n 
,user_tag_aggregate.user_tags\n \n from users\n\n --If you use user tags this will be included, if not it will be ignored.\n \n left join user_tag_aggregate\n using(user_id)\n \n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__schedule_spine": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__schedule_spine", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__schedule_spine.sql", "original_file_path": "models/intermediate/int_zendesk__schedule_spine.sql", "unique_id": "model.zendesk.int_zendesk__schedule_spine", "fqn": ["zendesk", "intermediate", "int_zendesk__schedule_spine"], "alias": "int_zendesk__schedule_spine", "checksum": {"name": "sha256", "checksum": "7f1a9c1fa0958132d288a2a8f980d661f6f04662b0573382c9a434574a8daf6b"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1724705297.763176, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__schedule_spine\"", "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\n/*\n The purpose of this model is to create a spine of appropriate timezone offsets to use for schedules, as offsets may change due to Daylight Savings.\n End result will include `valid_from` and `valid_until` columns which we will use downstream to determine which schedule-offset to associate with each ticket (ie standard time vs daylight time)\n*/\n\nwith timezone as (\n\n select *\n from {{ var('time_zone') }}\n\n), daylight_time as (\n\n select *\n from {{ var('daylight_time') }}\n\n), schedule as (\n\n select *\n from {{ var('schedule') }} \n\n-- in the below CTE we want to explode out each holiday period into individual days, to prevent potential fanouts downstream in joins to schedules.\n), schedule_holiday as ( \n\n select\n _fivetran_synced,\n cast(date_day as {{ dbt.type_timestamp() }} ) as holiday_start_date_at, -- For each day within a holiday we want to give it its own record. In the later CTE holiday_start_end_times, we transform these timestamps into minutes-from-beginning-of-the-week.\n cast(date_day as {{ dbt.type_timestamp() }} ) as holiday_end_date_at, -- Since each day within a holiday now gets its own record, the end_date will then be the same day as the start_date. 
In the later CTE holiday_start_end_times, we transform these timestamps into minutes-from-beginning-of-the-week.\n holiday_id,\n holiday_name,\n schedule_id\n\n from {{ var('schedule_holiday') }} \n inner join {{ ref('int_zendesk__calendar_spine') }} \n on holiday_start_date_at <= cast(date_day as {{ dbt.type_timestamp() }} )\n and holiday_end_date_at >= cast(date_day as {{ dbt.type_timestamp() }} )\n\n), timezone_with_dt as (\n\n select \n timezone.*,\n daylight_time.daylight_start_utc,\n daylight_time.daylight_end_utc,\n daylight_time.daylight_offset_minutes\n\n from timezone \n left join daylight_time \n on timezone.time_zone = daylight_time.time_zone\n\n), order_timezone_dt as (\n\n select \n *,\n -- will be null for timezones without any daylight savings records (and the first entry)\n -- we will coalesce the first entry date with .... the X years ago\n lag(daylight_end_utc, 1) over (partition by time_zone order by daylight_end_utc asc) as last_daylight_end_utc,\n -- will be null for timezones without any daylight savings records (and the last entry)\n -- we will coalesce the last entry date with the current date \n lead(daylight_start_utc, 1) over (partition by time_zone order by daylight_start_utc asc) as next_daylight_start_utc\n\n from timezone_with_dt\n\n), split_timezones as (\n\n -- standard schedule (includes timezones without DT)\n -- starts: when the last Daylight Savings ended\n -- ends: when the next Daylight Savings starts\n select \n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- last_daylight_end_utc is null for the first record of the time_zone's daylight time, or if the TZ doesn't use DT\n coalesce(last_daylight_end_utc, cast('1970-01-01' as date)) as valid_from,\n\n -- daylight_start_utc is null for timezones that don't use DT\n coalesce(daylight_start_utc, cast( {{ dbt.dateadd('year', 1, dbt.current_timestamp_backcompat()) }} as date)) as valid_until\n\n from order_timezone_dt\n\n union all \n\n -- DT schedule (excludes timezones without it)\n -- starts: when this Daylight Savings started\n -- ends: when this Daylight Savings ends\n select \n time_zone,\n -- Pacific Time is -8h during standard time and -7h during DT\n standard_offset_minutes + daylight_offset_minutes as offset_minutes,\n daylight_start_utc as valid_from,\n daylight_end_utc as valid_until\n\n from order_timezone_dt\n where daylight_offset_minutes is not null\n\n union all\n\n select\n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- Get the latest daylight_end_utc time and set that as the valid_from\n max(daylight_end_utc) as valid_from,\n\n -- If the latest_daylight_end_time_utc is less than todays timestamp, that means DST has ended. Therefore, we will make the valid_until in the future.\n cast( {{ dbt.dateadd('year', 1, dbt.current_timestamp_backcompat()) }} as date) as valid_until\n\n from order_timezone_dt\n group by 1, 2\n -- We only want to apply this logic to time_zone's that had daylight saving time and it ended at a point. 
For example, Hong Kong ended DST in 1979.\n having cast(max(daylight_end_utc) as date) < cast({{ dbt.current_timestamp_backcompat() }} as date)\n\n), calculate_schedules as (\n\n select \n schedule.schedule_id,\n schedule.time_zone,\n schedule.start_time,\n schedule.end_time,\n schedule.created_at,\n schedule.schedule_name,\n schedule.start_time - coalesce(split_timezones.offset_minutes, 0) as start_time_utc,\n schedule.end_time - coalesce(split_timezones.offset_minutes, 0) as end_time_utc,\n coalesce(split_timezones.offset_minutes, 0) as offset_minutes_to_add,\n -- we'll use these to determine which schedule version to associate tickets with\n cast(split_timezones.valid_from as {{ dbt.type_timestamp() }}) as valid_from,\n cast(split_timezones.valid_until as {{ dbt.type_timestamp() }}) as valid_until\n\n from schedule\n left join split_timezones\n on split_timezones.time_zone = schedule.time_zone\n\n-- Now we need take holiday's into consideration and perform the following transformations to account for Holidays in existing schedules\n), holiday_start_end_times as (\n\n select\n calculate_schedules.*,\n schedule_holiday.holiday_name,\n schedule_holiday.holiday_start_date_at,\n cast({{ dbt.dateadd(\"second\", \"86400\", \"schedule_holiday.holiday_end_date_at\") }} as {{ dbt.type_timestamp() }}) as holiday_end_date_at, -- add 24*60*60 seconds\n cast({{ dbt_date.week_start(\"schedule_holiday.holiday_start_date_at\") }} as {{ dbt.type_timestamp() }}) as holiday_week_start,\n cast({{ dbt_date.week_end(\"schedule_holiday.holiday_end_date_at\") }} as {{ dbt.type_timestamp() }}) as holiday_week_end\n from schedule_holiday\n inner join calculate_schedules\n on calculate_schedules.schedule_id = schedule_holiday.schedule_id\n and schedule_holiday.holiday_start_date_at >= calculate_schedules.valid_from \n and schedule_holiday.holiday_start_date_at < calculate_schedules.valid_until\n\n-- Let's calculate the start and end date of the Holiday in terms of minutes from Sunday (like other Zendesk schedules)\n), holiday_minutes as(\n\n select\n holiday_start_end_times.*,\n {{ dbt.datediff(\"holiday_week_start\", \"holiday_start_date_at\", \"minute\") }} - coalesce(timezone.standard_offset_minutes, 0) as minutes_from_sunday_start,\n {{ dbt.datediff(\"holiday_week_start\", \"holiday_end_date_at\", \"minute\") }} - coalesce(timezone.standard_offset_minutes, 0) as minutes_from_sunday_end\n from holiday_start_end_times\n left join timezone\n on timezone.time_zone = holiday_start_end_times.time_zone\n\n-- Determine which schedule days include a holiday\n), holiday_check as (\n\n select\n *,\n case when minutes_from_sunday_start < start_time_utc and minutes_from_sunday_end > end_time_utc \n then holiday_name \n end as holiday_name_check\n from holiday_minutes\n\n-- Consolidate the holiday records that were just created\n), holiday_consolidated as (\n\n select \n schedule_id, \n time_zone, \n schedule_name, \n valid_from, \n valid_until, \n start_time_utc, \n end_time_utc, \n holiday_week_start,\n cast({{ dbt.dateadd(\"second\", \"86400\", \"holiday_week_end\") }} as {{ dbt.type_timestamp() }}) as holiday_week_end,\n max(holiday_name_check) as holiday_name_check\n from holiday_check\n {{ dbt_utils.group_by(n=9) }}\n\n-- Since we have holiday schedules and normal schedules, we need to union them into a holistic schedule spine\n), spine_union as (\n\n select\n schedule_id, \n time_zone, \n schedule_name, \n valid_from, \n valid_until, \n start_time_utc, \n end_time_utc, \n holiday_week_start,\n holiday_week_end,\n 
holiday_name_check\n from holiday_consolidated\n\n union all\n\n select\n schedule_id, \n time_zone, \n schedule_name, \n valid_from, \n valid_until, \n start_time_utc, \n end_time_utc, \n null as holiday_week_start,\n null as holiday_week_end,\n null as holiday_name_check\n from calculate_schedules\n\n-- Now that we have an understanding of which weeks are holiday's let's consolidate them with non holiday weeks\n), all_periods as (\n\n select distinct\n schedule_id,\n holiday_week_start as period_start,\n holiday_week_end as period_end,\n start_time_utc,\n end_time_utc,\n holiday_name_check,\n true as is_holiday_week\n from spine_union\n where holiday_week_start is not null\n and holiday_week_end is not null\n\n union all\n\n select distinct\n schedule_id,\n valid_from as period_start,\n valid_until as period_end,\n start_time_utc,\n end_time_utc,\n cast(null as {{ dbt.type_string() }}) as holiday_name_check,\n false as is_holiday_week\n from spine_union\n\n-- We have holiday and non holiday schedules together, now let's sort them to understand the previous end and next start of neighboring schedules\n), sorted_periods as (\n\n select distinct\n *,\n lag(period_end) over (partition by schedule_id order by period_start, start_time_utc) as prev_end,\n lead(period_start) over (partition by schedule_id order by period_start, start_time_utc) as next_start\n from all_periods\n\n-- We need to adjust some non holiday schedules in order to properly fill holiday gaps in the schedules later down the transformation\n), non_holiday_period_adjustments as (\n\n select\n schedule_id, \n period_start, \n period_end,\n prev_end,\n next_start,\n -- taking first_value/last_value because prev_end and next_start are inconsistent within the schedule partitions -- they all include a record that is outside the partition. so we need to ignore those erroneous records that slip in\n coalesce(greatest(case \n when not is_holiday_week and prev_end is not null then first_value(prev_end) over (partition by schedule_id, period_start order by start_time_utc rows between unbounded preceding and unbounded following)\n else period_start\n end, period_start), period_start) as valid_from,\n coalesce(case \n when not is_holiday_week and next_start is not null then last_value(next_start) over (partition by schedule_id, period_start order by start_time_utc rows between unbounded preceding and unbounded following)\n else period_end\n end, period_end) as valid_until,\n start_time_utc,\n end_time_utc,\n holiday_name_check,\n is_holiday_week\n from sorted_periods\n\n-- A few window function results will be leveraged downstream. Let's generate them now.\n), gap_starter as (\n select \n *,\n max(period_end) over (partition by schedule_id) as max_valid_until,\n last_value(next_start) over (partition by schedule_id, period_start order by valid_until rows between unbounded preceding and unbounded following) as lead_next_start,\n first_value(prev_end) over (partition by schedule_id, valid_from order by start_time_utc rows between unbounded preceding and unbounded following) as first_prev_end\n from non_holiday_period_adjustments\n\n-- There may be gaps in holiday and non holiday schedules, so we need to identify where these gaps are\n), gap_adjustments as(\n\n select \n *,\n -- In order to identify the gaps we check to see if the valid_from and previous valid_until are right next to one. 
If we add two hours to the previous valid_until it should always be greater than the current valid_from.\n -- However, if the valid_from is greater instead then we can identify that this period has a gap that needs to be filled.\n case \n when cast({{ dbt.dateadd(\"hour\", \"2\", \"valid_until\") }} as {{ dbt.type_timestamp() }}) < cast(lead_next_start as {{ dbt.type_timestamp() }})\n then 'gap'\n when (lead_next_start is null and valid_from < max_valid_until and period_end != max_valid_until)\n then 'gap'\n else null\n end as is_schedule_gap\n\n from gap_starter\n\n-- We know where the gaps are, so now lets prime the data to fill those gaps\n), schedule_spine_primer as (\n\n select \n schedule_id, \n valid_from,\n valid_until,\n start_time_utc,\n end_time_utc,\n lead_next_start,\n max_valid_until,\n holiday_name_check,\n is_holiday_week,\n max(is_schedule_gap) over (partition by schedule_id, valid_until) as is_gap_period,\n lead(valid_from) over (partition by schedule_id order by valid_from, start_time_utc) as fill_primer\n from gap_adjustments\n\n-- We know the gaps and where they are, so let's fill them with the following union\n), final_union as (\n\n -- For all gap periods, let's properly create a schedule filled before the holiday.\n select \n schedule_id,\n valid_until as valid_from,\n coalesce(last_value(fill_primer) over (partition by schedule_id, valid_until order by start_time_utc rows between unbounded preceding and unbounded following), max_valid_until) as valid_until,\n start_time_utc, \n end_time_utc, \n cast(null as {{ dbt.type_string() }}) as holiday_name_check,\n false as is_holiday_week\n from schedule_spine_primer\n where is_gap_period is not null\n\n union all\n\n -- Fill all other normal schedules.\n select\n schedule_id, \n valid_from,\n valid_until,\n start_time_utc,\n end_time_utc,\n holiday_name_check,\n is_holiday_week\n from schedule_spine_primer\n\n-- We can finally filter out the holiday_name_check results as the gap filling properly filled in the gaps for holidays\n), final as(\n\n select\n schedule_id, \n valid_from,\n valid_until,\n start_time_utc,\n end_time_utc,\n is_holiday_week\n from final_union\n where holiday_name_check is null\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__time_zone", "package": null, "version": null}, {"name": "stg_zendesk__daylight_time", "package": null, "version": null}, {"name": "stg_zendesk__schedule", "package": null, "version": null}, {"name": "stg_zendesk__schedule_holiday", "package": null, "version": null}, {"name": "int_zendesk__calendar_spine", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.current_timestamp_backcompat", "macro.dbt.dateadd", "macro.dbt_date.week_start", "macro.dbt_date.week_end", "macro.dbt.datediff", "macro.dbt_utils.group_by", "macro.dbt.type_string"], "nodes": ["model.zendesk_source.stg_zendesk__time_zone", "model.zendesk_source.stg_zendesk__daylight_time", "model.zendesk_source.stg_zendesk__schedule", "model.zendesk_source.stg_zendesk__schedule_holiday", "model.zendesk.int_zendesk__calendar_spine"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__schedule_spine.sql", "compiled": true, "compiled_code": "\n\n/*\n The purpose of this model is to create a spine of appropriate timezone offsets to use for schedules, as offsets may change due to Daylight Savings.\n End result will include `valid_from` and `valid_until` columns which we will use downstream to 
determine which schedule-offset to associate with each ticket (ie standard time vs daylight time)\n*/\n\nwith __dbt__cte__int_zendesk__calendar_spine as (\n-- depends_on: \"postgres\".\"zendesk_integration_tests_55\".\"ticket_data\"\n\nwith spine as (\n\n \n \n \n\n \n \n \n \n\n \n\n \n\n\n\n\n\n\n\n\nwith rawdata as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n + \n \n p8.generated_number * power(2, 8)\n + \n \n p9.generated_number * power(2, 9)\n + \n \n p10.generated_number * power(2, 10)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n cross join \n \n p as p8\n cross join \n \n p as p9\n cross join \n \n p as p10\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 1663\n order by generated_number\n\n\n\n),\n\nall_periods as (\n\n select (\n \n\n cast('2020-02-13' as date) + ((interval '1 day') * (row_number() over (order by 1) - 1))\n\n\n ) as date_day\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_day <= \n\n current_date + ((interval '1 week') * (1))\n\n\n\n)\n\nselect * from filtered\n\n\n\n), recast as (\n\n select cast(date_day as date) as date_day\n from spine\n\n)\n\nselect *\nfrom recast\n), timezone as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__time_zone\"\n\n), daylight_time as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__daylight_time\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__schedule\" \n\n-- in the below CTE we want to explode out each holiday period into individual days, to prevent potential fanouts downstream in joins to schedules.\n), schedule_holiday as ( \n\n select\n _fivetran_synced,\n cast(date_day as timestamp ) as holiday_start_date_at, -- For each day within a holiday we want to give it its own record. In the later CTE holiday_start_end_times, we transform these timestamps into minutes-from-beginning-of-the-week.\n cast(date_day as timestamp ) as holiday_end_date_at, -- Since each day within a holiday now gets its own record, the end_date will then be the same day as the start_date. 
In the later CTE holiday_start_end_times, we transform these timestamps into minutes-from-beginning-of-the-week.\n holiday_id,\n holiday_name,\n schedule_id\n\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__schedule_holiday\" \n inner join __dbt__cte__int_zendesk__calendar_spine \n on holiday_start_date_at <= cast(date_day as timestamp )\n and holiday_end_date_at >= cast(date_day as timestamp )\n\n), timezone_with_dt as (\n\n select \n timezone.*,\n daylight_time.daylight_start_utc,\n daylight_time.daylight_end_utc,\n daylight_time.daylight_offset_minutes\n\n from timezone \n left join daylight_time \n on timezone.time_zone = daylight_time.time_zone\n\n), order_timezone_dt as (\n\n select \n *,\n -- will be null for timezones without any daylight savings records (and the first entry)\n -- we will coalesce the first entry date with .... the X years ago\n lag(daylight_end_utc, 1) over (partition by time_zone order by daylight_end_utc asc) as last_daylight_end_utc,\n -- will be null for timezones without any daylight savings records (and the last entry)\n -- we will coalesce the last entry date with the current date \n lead(daylight_start_utc, 1) over (partition by time_zone order by daylight_start_utc asc) as next_daylight_start_utc\n\n from timezone_with_dt\n\n), split_timezones as (\n\n -- standard schedule (includes timezones without DT)\n -- starts: when the last Daylight Savings ended\n -- ends: when the next Daylight Savings starts\n select \n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- last_daylight_end_utc is null for the first record of the time_zone's daylight time, or if the TZ doesn't use DT\n coalesce(last_daylight_end_utc, cast('1970-01-01' as date)) as valid_from,\n\n -- daylight_start_utc is null for timezones that don't use DT\n coalesce(daylight_start_utc, cast( \n\n \n current_timestamp::timestamp\n + ((interval '1 year') * (1))\n\n as date)) as valid_until\n\n from order_timezone_dt\n\n union all \n\n -- DT schedule (excludes timezones without it)\n -- starts: when this Daylight Savings started\n -- ends: when this Daylight Savings ends\n select \n time_zone,\n -- Pacific Time is -8h during standard time and -7h during DT\n standard_offset_minutes + daylight_offset_minutes as offset_minutes,\n daylight_start_utc as valid_from,\n daylight_end_utc as valid_until\n\n from order_timezone_dt\n where daylight_offset_minutes is not null\n\n union all\n\n select\n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- Get the latest daylight_end_utc time and set that as the valid_from\n max(daylight_end_utc) as valid_from,\n\n -- If the latest_daylight_end_time_utc is less than todays timestamp, that means DST has ended. Therefore, we will make the valid_until in the future.\n cast( \n\n \n current_timestamp::timestamp\n + ((interval '1 year') * (1))\n\n as date) as valid_until\n\n from order_timezone_dt\n group by 1, 2\n -- We only want to apply this logic to time_zone's that had daylight saving time and it ended at a point. 
For example, Hong Kong ended DST in 1979.\n having cast(max(daylight_end_utc) as date) < cast(\n current_timestamp::timestamp\n as date)\n\n), calculate_schedules as (\n\n select \n schedule.schedule_id,\n schedule.time_zone,\n schedule.start_time,\n schedule.end_time,\n schedule.created_at,\n schedule.schedule_name,\n schedule.start_time - coalesce(split_timezones.offset_minutes, 0) as start_time_utc,\n schedule.end_time - coalesce(split_timezones.offset_minutes, 0) as end_time_utc,\n coalesce(split_timezones.offset_minutes, 0) as offset_minutes_to_add,\n -- we'll use these to determine which schedule version to associate tickets with\n cast(split_timezones.valid_from as timestamp) as valid_from,\n cast(split_timezones.valid_until as timestamp) as valid_until\n\n from schedule\n left join split_timezones\n on split_timezones.time_zone = schedule.time_zone\n\n-- Now we need take holiday's into consideration and perform the following transformations to account for Holidays in existing schedules\n), holiday_start_end_times as (\n\n select\n calculate_schedules.*,\n schedule_holiday.holiday_name,\n schedule_holiday.holiday_start_date_at,\n cast(\n\n schedule_holiday.holiday_end_date_at + ((interval '1 second') * (86400))\n\n as timestamp) as holiday_end_date_at, -- add 24*60*60 seconds\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n schedule_holiday.holiday_start_date_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) as holiday_week_start,\n cast(cast(\n\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n schedule_holiday.holiday_end_date_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) + ((interval '1 day') * (6))\n\n as date) as timestamp) as holiday_week_end\n from schedule_holiday\n inner join calculate_schedules\n on calculate_schedules.schedule_id = schedule_holiday.schedule_id\n and schedule_holiday.holiday_start_date_at >= calculate_schedules.valid_from \n and schedule_holiday.holiday_start_date_at < calculate_schedules.valid_until\n\n-- Let's calculate the start and end date of the Holiday in terms of minutes from Sunday (like other Zendesk schedules)\n), holiday_minutes as(\n\n select\n holiday_start_end_times.*,\n \n (\n (\n ((holiday_start_date_at)::date - (holiday_week_start)::date)\n * 24 + date_part('hour', (holiday_start_date_at)::timestamp) - date_part('hour', (holiday_week_start)::timestamp))\n * 60 + date_part('minute', (holiday_start_date_at)::timestamp) - date_part('minute', (holiday_week_start)::timestamp))\n - coalesce(timezone.standard_offset_minutes, 0) as minutes_from_sunday_start,\n \n (\n (\n ((holiday_end_date_at)::date - (holiday_week_start)::date)\n * 24 + date_part('hour', (holiday_end_date_at)::timestamp) - date_part('hour', (holiday_week_start)::timestamp))\n * 60 + date_part('minute', (holiday_end_date_at)::timestamp) - date_part('minute', (holiday_week_start)::timestamp))\n - coalesce(timezone.standard_offset_minutes, 0) as minutes_from_sunday_end\n from holiday_start_end_times\n left join timezone\n on timezone.time_zone = holiday_start_end_times.time_zone\n\n-- Determine which schedule days include a holiday\n), holiday_check as (\n\n select\n *,\n case when minutes_from_sunday_start < start_time_utc and minutes_from_sunday_end > end_time_utc \n then holiday_name \n end as holiday_name_check\n from holiday_minutes\n\n-- Consolidate the holiday records that were just created\n), holiday_consolidated as (\n\n select \n schedule_id, \n time_zone, 
\n schedule_name, \n valid_from, \n valid_until, \n start_time_utc, \n end_time_utc, \n holiday_week_start,\n cast(\n\n holiday_week_end + ((interval '1 second') * (86400))\n\n as timestamp) as holiday_week_end,\n max(holiday_name_check) as holiday_name_check\n from holiday_check\n group by 1,2,3,4,5,6,7,8,9\n\n-- Since we have holiday schedules and normal schedules, we need to union them into a holistic schedule spine\n), spine_union as (\n\n select\n schedule_id, \n time_zone, \n schedule_name, \n valid_from, \n valid_until, \n start_time_utc, \n end_time_utc, \n holiday_week_start,\n holiday_week_end,\n holiday_name_check\n from holiday_consolidated\n\n union all\n\n select\n schedule_id, \n time_zone, \n schedule_name, \n valid_from, \n valid_until, \n start_time_utc, \n end_time_utc, \n null as holiday_week_start,\n null as holiday_week_end,\n null as holiday_name_check\n from calculate_schedules\n\n-- Now that we have an understanding of which weeks are holiday's let's consolidate them with non holiday weeks\n), all_periods as (\n\n select distinct\n schedule_id,\n holiday_week_start as period_start,\n holiday_week_end as period_end,\n start_time_utc,\n end_time_utc,\n holiday_name_check,\n true as is_holiday_week\n from spine_union\n where holiday_week_start is not null\n and holiday_week_end is not null\n\n union all\n\n select distinct\n schedule_id,\n valid_from as period_start,\n valid_until as period_end,\n start_time_utc,\n end_time_utc,\n cast(null as TEXT) as holiday_name_check,\n false as is_holiday_week\n from spine_union\n\n-- We have holiday and non holiday schedules together, now let's sort them to understand the previous end and next start of neighboring schedules\n), sorted_periods as (\n\n select distinct\n *,\n lag(period_end) over (partition by schedule_id order by period_start, start_time_utc) as prev_end,\n lead(period_start) over (partition by schedule_id order by period_start, start_time_utc) as next_start\n from all_periods\n\n-- We need to adjust some non holiday schedules in order to properly fill holiday gaps in the schedules later down the transformation\n), non_holiday_period_adjustments as (\n\n select\n schedule_id, \n period_start, \n period_end,\n prev_end,\n next_start,\n -- taking first_value/last_value because prev_end and next_start are inconsistent within the schedule partitions -- they all include a record that is outside the partition. so we need to ignore those erroneous records that slip in\n coalesce(greatest(case \n when not is_holiday_week and prev_end is not null then first_value(prev_end) over (partition by schedule_id, period_start order by start_time_utc rows between unbounded preceding and unbounded following)\n else period_start\n end, period_start), period_start) as valid_from,\n coalesce(case \n when not is_holiday_week and next_start is not null then last_value(next_start) over (partition by schedule_id, period_start order by start_time_utc rows between unbounded preceding and unbounded following)\n else period_end\n end, period_end) as valid_until,\n start_time_utc,\n end_time_utc,\n holiday_name_check,\n is_holiday_week\n from sorted_periods\n\n-- A few window function results will be leveraged downstream. 
Let's generate them now.\n), gap_starter as (\n select \n *,\n max(period_end) over (partition by schedule_id) as max_valid_until,\n last_value(next_start) over (partition by schedule_id, period_start order by valid_until rows between unbounded preceding and unbounded following) as lead_next_start,\n first_value(prev_end) over (partition by schedule_id, valid_from order by start_time_utc rows between unbounded preceding and unbounded following) as first_prev_end\n from non_holiday_period_adjustments\n\n-- There may be gaps in holiday and non holiday schedules, so we need to identify where these gaps are\n), gap_adjustments as(\n\n select \n *,\n -- In order to identify the gaps we check to see if the valid_from and previous valid_until are right next to one. If we add two hours to the previous valid_until it should always be greater than the current valid_from.\n -- However, if the valid_from is greater instead then we can identify that this period has a gap that needs to be filled.\n case \n when cast(\n\n valid_until + ((interval '1 hour') * (2))\n\n as timestamp) < cast(lead_next_start as timestamp)\n then 'gap'\n when (lead_next_start is null and valid_from < max_valid_until and period_end != max_valid_until)\n then 'gap'\n else null\n end as is_schedule_gap\n\n from gap_starter\n\n-- We know where the gaps are, so now lets prime the data to fill those gaps\n), schedule_spine_primer as (\n\n select \n schedule_id, \n valid_from,\n valid_until,\n start_time_utc,\n end_time_utc,\n lead_next_start,\n max_valid_until,\n holiday_name_check,\n is_holiday_week,\n max(is_schedule_gap) over (partition by schedule_id, valid_until) as is_gap_period,\n lead(valid_from) over (partition by schedule_id order by valid_from, start_time_utc) as fill_primer\n from gap_adjustments\n\n-- We know the gaps and where they are, so let's fill them with the following union\n), final_union as (\n\n -- For all gap periods, let's properly create a schedule filled before the holiday.\n select \n schedule_id,\n valid_until as valid_from,\n coalesce(last_value(fill_primer) over (partition by schedule_id, valid_until order by start_time_utc rows between unbounded preceding and unbounded following), max_valid_until) as valid_until,\n start_time_utc, \n end_time_utc, \n cast(null as TEXT) as holiday_name_check,\n false as is_holiday_week\n from schedule_spine_primer\n where is_gap_period is not null\n\n union all\n\n -- Fill all other normal schedules.\n select\n schedule_id, \n valid_from,\n valid_until,\n start_time_utc,\n end_time_utc,\n holiday_name_check,\n is_holiday_week\n from schedule_spine_primer\n\n-- We can finally filter out the holiday_name_check results as the gap filling properly filled in the gaps for holidays\n), final as(\n\n select\n schedule_id, \n valid_from,\n valid_until,\n start_time_utc,\n end_time_utc,\n is_holiday_week\n from final_union\n where holiday_name_check is null\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__calendar_spine", "sql": " __dbt__cte__int_zendesk__calendar_spine as (\n-- depends_on: \"postgres\".\"zendesk_integration_tests_55\".\"ticket_data\"\n\nwith spine as (\n\n \n \n \n\n \n \n \n \n\n \n\n \n\n\n\n\n\n\n\n\nwith rawdata as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n 
p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n + \n \n p8.generated_number * power(2, 8)\n + \n \n p9.generated_number * power(2, 9)\n + \n \n p10.generated_number * power(2, 10)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n cross join \n \n p as p8\n cross join \n \n p as p9\n cross join \n \n p as p10\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 1663\n order by generated_number\n\n\n\n),\n\nall_periods as (\n\n select (\n \n\n cast('2020-02-13' as date) + ((interval '1 day') * (row_number() over (order by 1) - 1))\n\n\n ) as date_day\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_day <= \n\n current_date + ((interval '1 week') * (1))\n\n\n\n)\n\nselect * from filtered\n\n\n\n), recast as (\n\n select cast(date_day as date) as date_day\n from spine\n\n)\n\nselect *\nfrom recast\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_schedules": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__ticket_schedules", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__ticket_schedules.sql", "original_file_path": "models/intermediate/int_zendesk__ticket_schedules.sql", "unique_id": "model.zendesk.int_zendesk__ticket_schedules", "fqn": ["zendesk", "intermediate", "int_zendesk__ticket_schedules"], "alias": "int_zendesk__ticket_schedules", "checksum": {"name": "sha256", "checksum": "cb5b35bbbe0e39c8ef56fcffc75db481246fe4863cd5c80d4a6dd43d956f93af"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1724705297.77689, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_schedules\"", "raw_code": "{{ config(enabled=var('using_schedules', True)) }}\n\nwith ticket as (\n \n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), ticket_schedule as (\n \n select *\n from {{ ref('stg_zendesk__ticket_schedule') }}\n\n), schedule as (\n \n select *\n from {{ ref('stg_zendesk__schedule') }}\n\n\n), default_schedule_events as (\n-- Goal: understand the working schedules applied to tickets, so that we can then determine the applicable business hours/schedule.\n-- Your default schedule is used for all tickets, unless you set up a trigger to apply a specific schedule 
to specific tickets.\n\n-- This portion of the query creates ticket_schedules for these \"default\" schedules, as the ticket_schedule table only includes\n-- trigger schedules\n\n{% if execute %}\n\n {% set default_schedule_id_query %}\n with set_default_schedule_flag as (\n select \n row_number() over (order by created_at) = 1 as is_default_schedule,\n id\n from {{ source('zendesk','schedule') }}\n where not coalesce(_fivetran_deleted, false)\n )\n select \n id\n from set_default_schedule_flag\n where is_default_schedule\n\n {% endset %}\n\n {% set default_schedule_id = run_query(default_schedule_id_query).columns[0][0]|string %}\n\n {% endif %}\n\n select\n ticket.ticket_id,\n ticket.created_at as schedule_created_at,\n '{{default_schedule_id}}' as schedule_id\n from ticket\n left join ticket_schedule as first_schedule\n on first_schedule.ticket_id = ticket.ticket_id\n and {{ fivetran_utils.timestamp_add('second', -5, 'first_schedule.created_at') }} <= ticket.created_at\n and first_schedule.created_at >= ticket.created_at \n where first_schedule.ticket_id is null\n\n), schedule_events as (\n \n select\n *\n from default_schedule_events\n \n union all\n \n select \n ticket_id,\n created_at as schedule_created_at,\n schedule_id\n from ticket_schedule\n\n), ticket_schedules as (\n \n select \n ticket_id,\n schedule_id,\n schedule_created_at,\n coalesce(lead(schedule_created_at) over (partition by ticket_id order by schedule_created_at)\n , {{ fivetran_utils.timestamp_add(\"hour\", 1000, \"\" ~ dbt.current_timestamp_backcompat() ~ \"\") }} ) as schedule_invalidated_at\n from schedule_events\n\n)\nselect\n *\nfrom ticket_schedules", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "stg_zendesk__ticket_schedule", "package": null, "version": null}, {"name": "stg_zendesk__schedule", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.timestamp_add", "macro.dbt.current_timestamp_backcompat", "macro.dbt.run_query"], "nodes": ["model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__ticket_schedule", "model.zendesk_source.stg_zendesk__schedule"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__ticket_schedules.sql", "compiled": true, "compiled_code": "\n\nwith ticket as (\n \n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_schedule as (\n \n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_schedule\"\n\n), schedule as (\n \n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__schedule\"\n\n\n), default_schedule_events as (\n-- Goal: understand the working schedules applied to tickets, so that we can then determine the applicable business hours/schedule.\n-- Your default schedule is used for all tickets, unless you set up a trigger to apply a specific schedule to specific tickets.\n\n-- This portion of the query creates ticket_schedules for these \"default\" schedules, as the ticket_schedule table only includes\n-- trigger schedules\n\n\n\n \n\n \n\n \n\n select\n ticket.ticket_id,\n ticket.created_at as schedule_created_at,\n '360000310393' as schedule_id\n from ticket\n left join ticket_schedule as first_schedule\n on first_schedule.ticket_id = ticket.ticket_id\n and \n\n first_schedule.created_at + ((interval '1 second') * (-5))\n\n <= ticket.created_at\n and first_schedule.created_at >= 
ticket.created_at \n where first_schedule.ticket_id is null\n\n), schedule_events as (\n \n select\n *\n from default_schedule_events\n \n union all\n \n select \n ticket_id,\n created_at as schedule_created_at,\n schedule_id\n from ticket_schedule\n\n), ticket_schedules as (\n \n select \n ticket_id,\n schedule_id,\n schedule_created_at,\n coalesce(lead(schedule_created_at) over (partition by ticket_id order by schedule_created_at)\n , \n\n \n current_timestamp::timestamp\n + ((interval '1 hour') * (1000))\n\n ) as schedule_invalidated_at\n from schedule_events\n\n)\nselect\n *\nfrom ticket_schedules", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__assignee_updates": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__assignee_updates", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__assignee_updates.sql", "original_file_path": "models/intermediate/int_zendesk__assignee_updates.sql", "unique_id": "model.zendesk.int_zendesk__assignee_updates", "fqn": ["zendesk", "intermediate", "int_zendesk__assignee_updates"], "alias": "int_zendesk__assignee_updates", "checksum": {"name": "sha256", "checksum": "951ec2d4f8c9a7470a50cfc6e01838a090472a9f18fccd2dd65097d309d43aed"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1724705297.782212, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__assignee_updates\"", "raw_code": "with ticket_updates as (\n select *\n from {{ ref('int_zendesk__updates') }}\n\n), ticket as (\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), ticket_requester as (\n select\n ticket.ticket_id,\n ticket.assignee_id,\n ticket_updates.valid_starting_at\n\n from ticket\n\n left join ticket_updates\n on ticket_updates.ticket_id = ticket.ticket_id\n and ticket_updates.user_id = ticket.assignee_id\n\n), final as (\n select \n ticket_id,\n assignee_id,\n max(valid_starting_at) as last_updated,\n count(*) as total_updates\n from ticket_requester\n\n group by 1, 2\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "stg_zendesk__ticket", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__assignee_updates.sql", "compiled": true, "compiled_code": "with ticket_updates as (\n select *\n from 
\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__updates\"\n\n), ticket as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_requester as (\n select\n ticket.ticket_id,\n ticket.assignee_id,\n ticket_updates.valid_starting_at\n\n from ticket\n\n left join ticket_updates\n on ticket_updates.ticket_id = ticket.ticket_id\n and ticket_updates.user_id = ticket.assignee_id\n\n), final as (\n select \n ticket_id,\n assignee_id,\n max(valid_starting_at) as last_updated,\n count(*) as total_updates\n from ticket_requester\n\n group by 1, 2\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__comment_metrics": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__comment_metrics", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__comment_metrics.sql", "original_file_path": "models/intermediate/int_zendesk__comment_metrics.sql", "unique_id": "model.zendesk.int_zendesk__comment_metrics", "fqn": ["zendesk", "intermediate", "int_zendesk__comment_metrics"], "alias": "int_zendesk__comment_metrics", "checksum": {"name": "sha256", "checksum": "b82ef2f9d10d6344cd46dcce904fe263a3b5b2cc12fd9b5c662e8b477a4b5f95"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1724705297.783582, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__comment_metrics\"", "raw_code": "with ticket_comments as (\n\n select *\n from {{ ref('int_zendesk__comments_enriched') }}\n),\n\ncomment_counts as (\n select\n ticket_id,\n last_comment_added_at,\n sum(case when commenter_role = 'internal_comment' and is_public = true\n then 1\n else 0\n end) as count_public_agent_comments,\n sum(case when commenter_role = 'internal_comment'\n then 1\n else 0\n end) as count_agent_comments,\n sum(case when commenter_role = 'external_comment'\n then 1\n else 0\n end) as count_end_user_comments,\n sum(case when is_public = true\n then 1\n else 0\n end) as count_public_comments,\n sum(case when is_public = false\n then 1\n else 0\n end) as count_internal_comments,\n count(*) as total_comments,\n count(distinct case when commenter_role = 'internal_comment'\n then user_id\n end) as count_ticket_handoffs,\n sum(case when commenter_role = 'internal_comment' and is_public = true and previous_commenter_role != 'first_comment'\n then 1\n else 0\n end) as count_agent_replies\n from ticket_comments\n\n group by 1, 2\n),\n\nfinal as (\n select\n *,\n 
count_public_agent_comments = 1 as is_one_touch_resolution,\n count_public_agent_comments = 2 as is_two_touch_resolution\n from comment_counts\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__comments_enriched", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__comments_enriched"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__comment_metrics.sql", "compiled": true, "compiled_code": "with __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n), ticket_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n),\n\ncomment_counts as (\n select\n ticket_id,\n last_comment_added_at,\n sum(case when commenter_role = 'internal_comment' and is_public = true\n then 1\n else 0\n end) as count_public_agent_comments,\n sum(case when commenter_role = 'internal_comment'\n then 1\n else 0\n end) as count_agent_comments,\n sum(case when commenter_role = 'external_comment'\n then 1\n else 0\n end) as count_end_user_comments,\n sum(case when is_public = true\n then 1\n else 0\n end) as count_public_comments,\n sum(case when is_public = false\n then 1\n else 0\n end) as count_internal_comments,\n count(*) as total_comments,\n count(distinct case when commenter_role = 'internal_comment'\n then user_id\n end) as count_ticket_handoffs,\n sum(case when commenter_role = 'internal_comment' and is_public = true and previous_commenter_role != 'first_comment'\n then 1\n else 0\n end) as count_agent_replies\n from ticket_comments\n\n group by 1, 2\n),\n\nfinal as (\n select\n *,\n count_public_agent_comments = 1 as is_one_touch_resolution,\n count_public_agent_comments = 2 as is_two_touch_resolution\n from comment_counts\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [{"id": "model.zendesk.int_zendesk__comments_enriched", "sql": " __dbt__cte__int_zendesk__comments_enriched as (\nwith 
ticket_comment as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n)"}], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_historical_group": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__ticket_historical_group", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__ticket_historical_group.sql", "original_file_path": "models/intermediate/int_zendesk__ticket_historical_group.sql", "unique_id": "model.zendesk.int_zendesk__ticket_historical_group", "fqn": ["zendesk", "intermediate", "int_zendesk__ticket_historical_group"], "alias": "int_zendesk__ticket_historical_group", "checksum": {"name": "sha256", "checksum": "7d4d72f5d6a7ef73a23ad4be966b00683532fe2a11c9729a8d640752ebee1adc"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1724705297.78475, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_group\"", "raw_code": "with ticket_group_history as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n where field_name = 'group_id'\n\n), 
group_breakdown as (\n select\n \n ticket_id,\n valid_starting_at,\n valid_ending_at,\n value as group_id\n from ticket_group_history\n\n), final as (\n select\n ticket_id,\n count(group_id) as group_stations_count\n from group_breakdown\n\n group by 1\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__updates"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__ticket_historical_group.sql", "compiled": true, "compiled_code": "with ticket_group_history as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'group_id'\n\n), group_breakdown as (\n select\n \n ticket_id,\n valid_starting_at,\n valid_ending_at,\n value as group_id\n from ticket_group_history\n\n), final as (\n select\n ticket_id,\n count(group_id) as group_stations_count\n from group_breakdown\n\n group by 1\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__requester_updates": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__requester_updates", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__requester_updates.sql", "original_file_path": "models/intermediate/int_zendesk__requester_updates.sql", "unique_id": "model.zendesk.int_zendesk__requester_updates", "fqn": ["zendesk", "intermediate", "int_zendesk__requester_updates"], "alias": "int_zendesk__requester_updates", "checksum": {"name": "sha256", "checksum": "b2d14b09db3cadfb56e4b3dcb55c4f9000e670e3c7c29ef89b249e626e8ba103"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1724705297.78609, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__requester_updates\"", "raw_code": "with ticket_updates as (\n select *\n from {{ ref('int_zendesk__updates') }}\n\n), ticket as (\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), ticket_requester as (\n select\n ticket.ticket_id,\n ticket.requester_id,\n ticket_updates.valid_starting_at\n\n from ticket\n\n left join ticket_updates\n on ticket_updates.ticket_id = ticket.ticket_id\n and ticket_updates.user_id = ticket.requester_id\n\n), final as (\n select \n ticket_id,\n requester_id,\n max(valid_starting_at) as last_updated,\n count(*) as total_updates\n from ticket_requester\n\n group by 1, 2\n)\n\nselect * \nfrom final", "language": "sql", 
"refs": [{"name": "int_zendesk__updates", "package": null, "version": null}, {"name": "stg_zendesk__ticket", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__requester_updates.sql", "compiled": true, "compiled_code": "with ticket_updates as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__updates\"\n\n), ticket as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_requester as (\n select\n ticket.ticket_id,\n ticket.requester_id,\n ticket_updates.valid_starting_at\n\n from ticket\n\n left join ticket_updates\n on ticket_updates.ticket_id = ticket.ticket_id\n and ticket_updates.user_id = ticket.requester_id\n\n), final as (\n select \n ticket_id,\n requester_id,\n max(valid_starting_at) as last_updated,\n count(*) as total_updates\n from ticket_requester\n\n group by 1, 2\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_historical_satisfaction": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__ticket_historical_satisfaction", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__ticket_historical_satisfaction.sql", "original_file_path": "models/intermediate/int_zendesk__ticket_historical_satisfaction.sql", "unique_id": "model.zendesk.int_zendesk__ticket_historical_satisfaction", "fqn": ["zendesk", "intermediate", "int_zendesk__ticket_historical_satisfaction"], "alias": "int_zendesk__ticket_historical_satisfaction", "checksum": {"name": "sha256", "checksum": "dce9b5b8705d72688802f99250a8f8a34b8791c3cb440f85efa11f09ebfe3e1d"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1724705297.787384, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_satisfaction\"", "raw_code": "with satisfaction_updates as (\n\n select *\n from {{ ref('int_zendesk__updates') }}\n where field_name in ('satisfaction_score', 'satisfaction_comment', 'satisfaction_reason_code') \n\n), latest_reason as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_reason\n from satisfaction_updates\n\n where field_name = 'satisfaction_reason_code'\n\n), 
latest_comment as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_comment\n from satisfaction_updates\n\n where field_name = 'satisfaction_comment'\n\n), first_and_latest_score as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_satisfaction_score,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_score\n from satisfaction_updates\n\n where field_name = 'satisfaction_score' and value != 'offered'\n\n), satisfaction_scores as (\n select\n ticket_id,\n count(value) over (partition by ticket_id) as count_satisfaction_scores,\n case when lag(value) over (partition by ticket_id order by valid_starting_at desc) = 'good' and value = 'bad'\n then 1\n else 0\n end as good_to_bad_score,\n case when lag(value) over (partition by ticket_id order by valid_starting_at desc) = 'bad' and value = 'good'\n then 1\n else 0\n end as bad_to_good_score\n from satisfaction_updates\n where field_name = 'satisfaction_score'\n\n), score_group as (\n select\n ticket_id,\n count_satisfaction_scores,\n sum(good_to_bad_score) as total_good_to_bad_score,\n sum(bad_to_good_score) as total_bad_to_good_score\n from satisfaction_scores\n\n group by 1, 2\n\n), window_group as (\n select\n satisfaction_updates.ticket_id,\n latest_reason.latest_satisfaction_reason,\n latest_comment.latest_satisfaction_comment,\n first_and_latest_score.first_satisfaction_score,\n first_and_latest_score.latest_satisfaction_score,\n score_group.count_satisfaction_scores,\n score_group.total_good_to_bad_score,\n score_group.total_bad_to_good_score\n\n from satisfaction_updates\n\n left join latest_reason\n on satisfaction_updates.ticket_id = latest_reason.ticket_id\n\n left join latest_comment\n on satisfaction_updates.ticket_id = latest_comment.ticket_id\n\n left join first_and_latest_score\n on satisfaction_updates.ticket_id = first_and_latest_score.ticket_id\n\n left join score_group\n on satisfaction_updates.ticket_id = score_group.ticket_id\n\n group by 1, 2, 3, 4, 5, 6, 7, 8\n\n), final as (\n select\n ticket_id,\n latest_satisfaction_reason,\n latest_satisfaction_comment,\n first_satisfaction_score,\n latest_satisfaction_score,\n case when count_satisfaction_scores > 0\n then (count_satisfaction_scores - 1) --Subtracting one as the first score is always \"offered\".\n else count_satisfaction_scores\n end as count_satisfaction_scores,\n case when total_good_to_bad_score > 0\n then true\n else false\n end as is_good_to_bad_satisfaction_score,\n case when total_bad_to_good_score > 0\n then true\n else false\n end as is_bad_to_good_satisfaction_score\n from window_group\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "int_zendesk__updates", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk.int_zendesk__updates"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__ticket_historical_satisfaction.sql", "compiled": true, "compiled_code": "with satisfaction_updates as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__updates\"\n where field_name in ('satisfaction_score', 'satisfaction_comment', 'satisfaction_reason_code') \n\n), latest_reason as (\n select\n ticket_id,\n first_value(value) 
over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_reason\n from satisfaction_updates\n\n where field_name = 'satisfaction_reason_code'\n\n), latest_comment as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_comment\n from satisfaction_updates\n\n where field_name = 'satisfaction_comment'\n\n), first_and_latest_score as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_satisfaction_score,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_score\n from satisfaction_updates\n\n where field_name = 'satisfaction_score' and value != 'offered'\n\n), satisfaction_scores as (\n select\n ticket_id,\n count(value) over (partition by ticket_id) as count_satisfaction_scores,\n case when lag(value) over (partition by ticket_id order by valid_starting_at desc) = 'good' and value = 'bad'\n then 1\n else 0\n end as good_to_bad_score,\n case when lag(value) over (partition by ticket_id order by valid_starting_at desc) = 'bad' and value = 'good'\n then 1\n else 0\n end as bad_to_good_score\n from satisfaction_updates\n where field_name = 'satisfaction_score'\n\n), score_group as (\n select\n ticket_id,\n count_satisfaction_scores,\n sum(good_to_bad_score) as total_good_to_bad_score,\n sum(bad_to_good_score) as total_bad_to_good_score\n from satisfaction_scores\n\n group by 1, 2\n\n), window_group as (\n select\n satisfaction_updates.ticket_id,\n latest_reason.latest_satisfaction_reason,\n latest_comment.latest_satisfaction_comment,\n first_and_latest_score.first_satisfaction_score,\n first_and_latest_score.latest_satisfaction_score,\n score_group.count_satisfaction_scores,\n score_group.total_good_to_bad_score,\n score_group.total_bad_to_good_score\n\n from satisfaction_updates\n\n left join latest_reason\n on satisfaction_updates.ticket_id = latest_reason.ticket_id\n\n left join latest_comment\n on satisfaction_updates.ticket_id = latest_comment.ticket_id\n\n left join first_and_latest_score\n on satisfaction_updates.ticket_id = first_and_latest_score.ticket_id\n\n left join score_group\n on satisfaction_updates.ticket_id = score_group.ticket_id\n\n group by 1, 2, 3, 4, 5, 6, 7, 8\n\n), final as (\n select\n ticket_id,\n latest_satisfaction_reason,\n latest_satisfaction_comment,\n first_satisfaction_score,\n latest_satisfaction_score,\n case when count_satisfaction_scores > 0\n then (count_satisfaction_scores - 1) --Subtracting one as the first score is always \"offered\".\n else count_satisfaction_scores\n end as count_satisfaction_scores,\n case when total_good_to_bad_score > 0\n then true\n else false\n end as is_good_to_bad_satisfaction_score,\n case when total_bad_to_good_score > 0\n then true\n else false\n end as is_bad_to_good_satisfaction_score\n from window_group\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__latest_ticket_form": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__latest_ticket_form", "resource_type": "model", "package_name": 
"zendesk", "path": "intermediate/int_zendesk__latest_ticket_form.sql", "original_file_path": "models/intermediate/int_zendesk__latest_ticket_form.sql", "unique_id": "model.zendesk.int_zendesk__latest_ticket_form", "fqn": ["zendesk", "intermediate", "int_zendesk__latest_ticket_form"], "alias": "int_zendesk__latest_ticket_form", "checksum": {"name": "sha256", "checksum": "906a97576bff9f4fead3b0ed4632aa8a04b94f523e62b0e05425770213f78ea5"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table", "enabled": true}, "created_at": 1724705297.788588, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__latest_ticket_form\"", "raw_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_ticket_form_history', True)) }}\n\nwith ticket_form_history as (\n select *\n from {{ ref('stg_zendesk__ticket_form_history') }}\n),\n\nlatest_ticket_form as (\n select\n *,\n row_number() over(partition by ticket_form_id order by updated_at desc) as latest_form_index\n from ticket_form_history\n),\n\nfinal as (\n select \n ticket_form_id,\n created_at,\n updated_at,\n display_name,\n is_active,\n name,\n latest_form_index\n from latest_ticket_form\n\n where latest_form_index = 1\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_form_history", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": ["model.zendesk_source.stg_zendesk__ticket_form_history"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__latest_ticket_form.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n\n\nwith ticket_form_history as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_form_history\"\n),\n\nlatest_ticket_form as (\n select\n *,\n row_number() over(partition by ticket_form_id order by updated_at desc) as latest_form_index\n from ticket_form_history\n),\n\nfinal as (\n select \n ticket_form_id,\n created_at,\n updated_at,\n display_name,\n is_active,\n name,\n latest_form_index\n from latest_ticket_form\n\n where latest_form_index = 1\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__ticket_aggregates": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__ticket_aggregates", "resource_type": "model", "package_name": "zendesk", 
"path": "intermediate/int_zendesk__ticket_aggregates.sql", "original_file_path": "models/intermediate/int_zendesk__ticket_aggregates.sql", "unique_id": "model.zendesk.int_zendesk__ticket_aggregates", "fqn": ["zendesk", "intermediate", "int_zendesk__ticket_aggregates"], "alias": "int_zendesk__ticket_aggregates", "checksum": {"name": "sha256", "checksum": "cef0c080fae7a2b361b077473aa1ccfd4bfa472469b9006038aa3866a5bf8b50"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1724705297.7920718, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_aggregates\"", "raw_code": "with tickets as (\n select *\n from {{ ref('stg_zendesk__ticket') }}\n\n), ticket_tags as (\n\n select *\n from {{ ref('stg_zendesk__ticket_tag') }}\n\n), brands as (\n\n select *\n from {{ ref('stg_zendesk__brand') }}\n \n), ticket_tag_aggregate as (\n select\n ticket_tags.ticket_id,\n {{ fivetran_utils.string_agg( 'ticket_tags.tags', \"', '\" )}} as ticket_tags\n from ticket_tags\n group by 1\n\n), final as (\n select \n tickets.*,\n case when lower(tickets.type) = 'incident'\n then true\n else false\n end as is_incident,\n brands.name as ticket_brand_name,\n ticket_tag_aggregate.ticket_tags\n from tickets\n\n left join ticket_tag_aggregate\n using(ticket_id)\n\n left join brands\n on brands.brand_id = tickets.brand_id\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}, {"name": "stg_zendesk__ticket_tag", "package": null, "version": null}, {"name": "stg_zendesk__brand", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.string_agg"], "nodes": ["model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__ticket_tag", "model.zendesk_source.stg_zendesk__brand"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__ticket_aggregates.sql", "compiled": true, "compiled_code": "with tickets as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_tags as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_tag\"\n\n), brands as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__brand\"\n \n), ticket_tag_aggregate as (\n select\n ticket_tags.ticket_id,\n \n string_agg(ticket_tags.tags, ', ')\n\n as ticket_tags\n from ticket_tags\n group by 1\n\n), final as (\n select \n tickets.*,\n case when lower(tickets.type) = 'incident'\n then true\n else false\n end as is_incident,\n brands.name as ticket_brand_name,\n ticket_tag_aggregate.ticket_tags\n from tickets\n\n left join ticket_tag_aggregate\n 
using(ticket_id)\n\n left join brands\n on brands.brand_id = tickets.brand_id\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk.int_zendesk__organization_aggregates": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__organization_aggregates", "resource_type": "model", "package_name": "zendesk", "path": "intermediate/int_zendesk__organization_aggregates.sql", "original_file_path": "models/intermediate/int_zendesk__organization_aggregates.sql", "unique_id": "model.zendesk.int_zendesk__organization_aggregates", "fqn": ["zendesk", "intermediate", "int_zendesk__organization_aggregates"], "alias": "int_zendesk__organization_aggregates", "checksum": {"name": "sha256", "checksum": "a16300f45d2cb0bd1c26dfec62e967a047095b92f340974bfef56178bfff6cf9"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1724705297.795234, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__organization_aggregates\"", "raw_code": "with organizations as (\n select * \n from {{ ref('stg_zendesk__organization') }}\n\n--If you use organization tags this will be included, if not it will be ignored.\n{% if var('using_organization_tags', True) %}\n), organization_tags as (\n select * \n from {{ ref('stg_zendesk__organization_tag') }}\n\n), tag_aggregates as (\n select\n organizations.organization_id,\n {{ fivetran_utils.string_agg('organization_tags.tags', \"', '\" ) }} as organization_tags\n from organizations\n\n left join organization_tags\n using (organization_id)\n\n group by 1\n{% endif %}\n\n--If you use using_domain_names tags this will be included, if not it will be ignored.\n{% if var('using_domain_names', True) %}\n), domain_names as (\n\n select *\n from {{ ref('stg_zendesk__domain_name') }}\n\n), domain_aggregates as (\n select\n organizations.organization_id,\n {{ fivetran_utils.string_agg('domain_names.domain_name', \"', '\" ) }} as domain_names\n from organizations\n\n left join domain_names\n using(organization_id)\n \n group by 1\n{% endif %}\n\n\n), final as (\n select\n organizations.*\n\n --If you use organization tags this will be included, if not it will be ignored.\n {% if var('using_organization_tags', True) %}\n ,tag_aggregates.organization_tags\n {% endif %}\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n {% if var('using_domain_names', True) %}\n ,domain_aggregates.domain_names\n {% endif %}\n\n from organizations\n\n --If you use using_domain_names tags this will be included, if not it will 
be ignored.\n {% if var('using_domain_names', True) %}\n left join domain_aggregates\n using(organization_id)\n {% endif %}\n\n --If you use organization tags this will be included, if not it will be ignored.\n {% if var('using_organization_tags', True) %}\n left join tag_aggregates\n using(organization_id)\n {% endif %}\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__organization", "package": null, "version": null}, {"name": "stg_zendesk__organization_tag", "package": null, "version": null}, {"name": "stg_zendesk__domain_name", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.string_agg"], "nodes": ["model.zendesk_source.stg_zendesk__organization", "model.zendesk_source.stg_zendesk__organization_tag", "model.zendesk_source.stg_zendesk__domain_name"]}, "compiled_path": "target/compiled/zendesk/models/intermediate/int_zendesk__organization_aggregates.sql", "compiled": true, "compiled_code": "with organizations as (\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__organization\"\n\n--If you use organization tags this will be included, if not it will be ignored.\n\n), organization_tags as (\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__organization_tag\"\n\n), tag_aggregates as (\n select\n organizations.organization_id,\n \n string_agg(organization_tags.tags, ', ')\n\n as organization_tags\n from organizations\n\n left join organization_tags\n using (organization_id)\n\n group by 1\n\n\n--If you use using_domain_names tags this will be included, if not it will be ignored.\n\n), domain_names as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__domain_name\"\n\n), domain_aggregates as (\n select\n organizations.organization_id,\n \n string_agg(domain_names.domain_name, ', ')\n\n as domain_names\n from organizations\n\n left join domain_names\n using(organization_id)\n \n group by 1\n\n\n\n), final as (\n select\n organizations.*\n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,tag_aggregates.organization_tags\n \n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n ,domain_aggregates.domain_names\n \n\n from organizations\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n left join domain_aggregates\n using(organization_id)\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n left join tag_aggregates\n using(organization_id)\n \n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "operation.zendesk.zendesk-on-run-start-0": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "zendesk-on-run-start-0", "resource_type": "operation", "package_name": "zendesk", "path": "hooks/zendesk-on-run-start-0.sql", "original_file_path": "./dbt_project.yml", "unique_id": "operation.zendesk.zendesk-on-run-start-0", "fqn": ["zendesk", "hooks", "zendesk-on-run-start-0"], "alias": "zendesk-on-run-start-0", "checksum": {"name": "sha256", "checksum": "5fb1f3a675292d5e65c697d8e481dc79bd1fdf221ab22ad95bdc1d16b0a29fa1"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", 
"database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}}, "tags": ["on-run-start"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "table"}, "created_at": 1724705297.839394, "relation_name": null, "raw_code": "{{ fivetran_utils.empty_variable_warning(\"ticket_field_history_columns\", \"zendesk_ticket_field_history\") }}", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.fivetran_utils.empty_variable_warning"], "nodes": []}, "compiled_path": "target/compiled/zendesk/./dbt_project.yml/hooks/zendesk-on-run-start-0.sql", "compiled": true, "compiled_code": "\n\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "index": 0}, "model.zendesk_source.stg_zendesk__user_tag": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__user_tag", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__user_tag.sql", "original_file_path": "models/stg_zendesk__user_tag.sql", "unique_id": "model.zendesk_source.stg_zendesk__user_tag", "fqn": ["zendesk_source", "stg_zendesk__user_tag"], "alias": "stg_zendesk__user_tag", "checksum": {"name": "sha256", "checksum": "0aabe5c461e492bc7afb162a0dcb6e3334cca4c60093eb5be52b74e5dbfa429b"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Table containing all tags associated with a user. 
Only present if your account has user tagging enabled.", "columns": {"user_id": {"name": "user_id", "description": "Reference to the user", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tag": {"name": "tag", "description": "Tag associated with the user", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1724705298.226197, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user_tag\"", "raw_code": "--To disable this model, set the using_user_tags variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_user_tags', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__user_tag_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__user_tag_tmp')),\n staging_columns=get_user_tag_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n user_id,\n {% if target.type == 'redshift' %}\n 'tag'\n {% else %}\n tag\n {% endif %}\n as tags\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__user_tag_tmp", "package": null, "version": null}, {"name": "stg_zendesk__user_tag_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_user_tag_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__user_tag_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__user_tag.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_user_tags variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user_tag_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n user_id\n \n as \n \n user_id\n \n, \n \n \n tag\n \n as \n \n tag\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n user_id,\n \n tag\n \n as tags\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_tag": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__ticket_tag", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__ticket_tag.sql", "original_file_path": "models/stg_zendesk__ticket_tag.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_tag", "fqn": ["zendesk_source", "stg_zendesk__ticket_tag"], "alias": "stg_zendesk__ticket_tag", "checksum": {"name": "sha256", "checksum": "41ea7cea80e135bf87adfff97bfadecd5c8ee0622d74f9904759305fd6cb7541"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Tags are words, or combinations of words, you can use to add more context to tickets. The table lists all tags currently associated with a ticket.\n", "columns": {"ticket_id": {"name": "ticket_id", "description": "The ID of the ticket associated with the tag", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tags": {"name": "tags", "description": "The tag, or word(s), associated with the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1724705298.229683, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_tag\"", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__ticket_tag_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__ticket_tag_tmp')),\n staging_columns=get_ticket_tag_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n {% if target.type == 'redshift' %}\n \"tag\" as tags\n {% else %}\n tag as tags\n {% endif %}\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_tag_tmp", "package": null, "version": null}, {"name": "stg_zendesk__ticket_tag_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_ticket_tag_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_tag_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__ticket_tag.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_tag_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n ticket_id\n \n as \n \n ticket_id\n \n, \n \n \n tag\n \n as \n \n tag\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n \n tag as tags\n \n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_field_history": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__ticket_field_history", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__ticket_field_history.sql", "original_file_path": "models/stg_zendesk__ticket_field_history.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_field_history", "fqn": ["zendesk_source", "stg_zendesk__ticket_field_history"], "alias": "stg_zendesk__ticket_field_history", "checksum": {"name": "sha256", "checksum": "5c165700bdcc50383952e4c645b4d6c42d5410205205c5de889b009dad3b0a10"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "All fields and field values 
associated with tickets.", "columns": {"ticket_id": {"name": "ticket_id", "description": "The ID of the ticket associated with the field", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "field_name": {"name": "field_name", "description": "The name of the ticket field", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "valid_starting_at": {"name": "valid_starting_at", "description": "The time the ticket field value became valid", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "valid_ending_at": {"name": "valid_ending_at", "description": "The time the ticket field value became invalidated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "value": {"name": "value", "description": "The value of the field", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "user_id": {"name": "user_id", "description": "The id of the user who made the update", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1724705298.230457, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_field_history\"", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__ticket_field_history_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__ticket_field_history_tmp')),\n staging_columns=get_ticket_field_history_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n field_name,\n cast(updated as {{ dbt.type_timestamp() }}) as valid_starting_at,\n cast(lead(updated) over (partition by ticket_id, field_name order by updated) as {{ dbt.type_timestamp() }}) as valid_ending_at,\n value,\n user_id\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_field_history_tmp", "package": null, "version": null}, {"name": "stg_zendesk__ticket_field_history_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_ticket_field_history_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_field_history_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__ticket_field_history.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_field_history_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n field_name\n \n as \n \n field_name\n \n, \n \n \n ticket_id\n \n as \n \n ticket_id\n \n, \n \n \n updated\n \n as \n \n updated\n \n, \n \n \n user_id\n \n as \n \n user_id\n \n, \n \n \n value\n \n as \n \n value\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n field_name,\n cast(updated as timestamp) as valid_starting_at,\n cast(lead(updated) over (partition by ticket_id, field_name order by updated) as timestamp) as valid_ending_at,\n value,\n user_id\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__schedule_holiday": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__schedule_holiday", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__schedule_holiday.sql", "original_file_path": "models/stg_zendesk__schedule_holiday.sql", "unique_id": "model.zendesk_source.stg_zendesk__schedule_holiday", "fqn": ["zendesk_source", "stg_zendesk__schedule_holiday"], "alias": "stg_zendesk__schedule_holiday", "checksum": {"name": "sha256", "checksum": "154109fa9fd9dc5e3b0b034929ac3e3ddb591755d52a78f64ab2bb7d6cfe2476"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Information about holidays for each specified schedule.", "columns": {"end_date_at": {"name": "end_date_at", "description": "ISO 8601 representation of the holiday end date.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "holiday_id": {"name": "holiday_id", "description": "The ID of the scheduled holiday.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "holiday_name": {"name": "holiday_name", "description": "Name of the holiday.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "schedule_id": {"name": "schedule_id", "description": "The ID of the schedule.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "start_date_at": {"name": "start_date_at", "description": "ISO 8601 representation of the holiday start date.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": 
true}, "created_at": 1724705298.233509, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__schedule_holiday\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__schedule_holiday_tmp') }}\n),\n\nfields as (\n\n select\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__schedule_holiday_tmp')),\n staging_columns=get_schedule_holiday_columns()\n )\n }}\n from base\n),\n\nfinal as (\n \n select\n _fivetran_deleted,\n cast(_fivetran_synced as {{ dbt.type_timestamp() }} ) as _fivetran_synced,\n cast(end_date as {{ dbt.type_timestamp() }} ) as holiday_end_date_at,\n cast(id as {{ dbt.type_string() }} ) as holiday_id,\n name as holiday_name,\n cast(schedule_id as {{ dbt.type_string() }} ) as schedule_id,\n cast(start_date as {{ dbt.type_timestamp() }} ) as holiday_start_date_at\n from fields\n)\n\nselect *\nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__schedule_holiday_tmp", "package": null, "version": null}, {"name": "stg_zendesk__schedule_holiday_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_schedule_holiday_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp", "macro.dbt.type_string"], "nodes": ["model.zendesk_source.stg_zendesk__schedule_holiday_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__schedule_holiday.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__schedule_holiday_tmp\"\n),\n\nfields as (\n\n select\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n end_date\n \n as \n \n end_date\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n schedule_id\n \n as \n \n schedule_id\n \n, \n \n \n start_date\n \n as \n \n start_date\n \n\n\n\n from base\n),\n\nfinal as (\n \n select\n _fivetran_deleted,\n cast(_fivetran_synced as timestamp ) as _fivetran_synced,\n cast(end_date as timestamp ) as holiday_end_date_at,\n cast(id as TEXT ) as holiday_id,\n name as holiday_name,\n cast(schedule_id as TEXT ) as schedule_id,\n cast(start_date as timestamp ) as holiday_start_date_at\n from fields\n)\n\nselect *\nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__daylight_time": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__daylight_time", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__daylight_time.sql", "original_file_path": "models/stg_zendesk__daylight_time.sql", "unique_id": "model.zendesk_source.stg_zendesk__daylight_time", "fqn": ["zendesk_source", "stg_zendesk__daylight_time"], "alias": "stg_zendesk__daylight_time", "checksum": {"name": "sha256", "checksum": "8bc98221c9781fc37b2424b62b5d72cd62b62c53aa887be08e98114f98530df9"}, "config": 
{"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Appropriate offsets (from UTC) for timezones that engage or have engaged with Daylight Savings at some point since 1970.\n", "columns": {"daylight_end_utc": {"name": "daylight_end_utc", "description": "UTC timestamp of when Daylight Time ended in this year.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "daylight_offset": {"name": "daylight_offset", "description": "Number of **hours** added during Daylight Savings Time.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "daylight_start_utc": {"name": "daylight_start_utc", "description": "UTC timestamp of when Daylight Time began in this year.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "time_zone": {"name": "time_zone", "description": "Name of the timezone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "year": {"name": "year", "description": "Year in which daylight savings occurred.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "daylight_offset_minutes": {"name": "daylight_offset_minutes", "description": "Number of **minutes** added during Daylight Savings Time.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1724705298.2321448, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__daylight_time\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__daylight_time_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__daylight_time_tmp')),\n staging_columns=get_daylight_time_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n daylight_end_utc,\n daylight_offset,\n daylight_start_utc,\n time_zone,\n year,\n daylight_offset * 60 as daylight_offset_minutes\n \n from fields\n)\n\nselect * from final", "language": "sql", "refs": [{"name": "stg_zendesk__daylight_time_tmp", "package": null, "version": null}, {"name": "stg_zendesk__daylight_time_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_daylight_time_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__daylight_time_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__daylight_time.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__daylight_time_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n daylight_end_utc\n \n as \n \n daylight_end_utc\n \n, \n \n \n daylight_offset\n \n as \n \n daylight_offset\n \n, \n \n \n daylight_start_utc\n \n as \n \n daylight_start_utc\n \n, \n \n \n time_zone\n \n as \n \n time_zone\n \n, \n \n \n year\n \n as \n \n year\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n daylight_end_utc,\n daylight_offset,\n daylight_start_utc,\n time_zone,\n year,\n daylight_offset * 60 as daylight_offset_minutes\n \n from fields\n)\n\nselect * from final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__organization": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__organization", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__organization.sql", "original_file_path": "models/stg_zendesk__organization.sql", "unique_id": "model.zendesk_source.stg_zendesk__organization", "fqn": ["zendesk_source", "stg_zendesk__organization"], "alias": "stg_zendesk__organization", "checksum": {"name": "sha256", "checksum": "5fb51f160efdf3ffa60e0a7be33e40e4b59f814d345558631e06fcce160f6329"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": 
{}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Just as agents can be segmented into groups in Zendesk Support, your customers (end-users) can be segmented into organizations. You can manually assign customers to an organization or automatically assign them to an organization by their email address domain. Organizations can be used in business rules to route tickets to groups of agents or to send email notifications.\n", "columns": {"organization_id": {"name": "organization_id", "description": "Automatically assigned when the organization is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "A unique name for the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "details": {"name": "details", "description": "Any details obout the organization, such as the address", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1724705298.224834, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__organization\"", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__organization_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__organization_tmp')),\n staging_columns=get_organization_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n id as organization_id,\n created_at,\n updated_at,\n details,\n name,\n external_id\n\n {{ fivetran_utils.fill_pass_through_columns('zendesk__organization_passthrough_columns') }}\n\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__organization_tmp", "package": null, "version": null}, {"name": "stg_zendesk__organization_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_organization_columns", "macro.fivetran_utils.fill_staging_columns", "macro.fivetran_utils.fill_pass_through_columns"], "nodes": ["model.zendesk_source.stg_zendesk__organization_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__organization.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__organization_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n details\n \n as \n \n details\n \n, \n \n \n external_id\n \n as \n \n external_id\n \n, \n \n \n group_id\n \n as \n \n group_id\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n notes\n \n as \n \n notes\n \n, \n \n \n shared_comments\n \n as \n \n shared_comments\n \n, \n \n \n shared_tickets\n \n as \n \n shared_tickets\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n, \n \n \n url\n \n as \n \n url\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as organization_id,\n created_at,\n updated_at,\n details,\n name,\n external_id\n\n \n\n\n\n\n\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__time_zone": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__time_zone", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__time_zone.sql", "original_file_path": "models/stg_zendesk__time_zone.sql", "unique_id": "model.zendesk_source.stg_zendesk__time_zone", "fqn": ["zendesk_source", "stg_zendesk__time_zone"], "alias": "stg_zendesk__time_zone", "checksum": {"name": "sha256", "checksum": "289f08e30f9298f5b4beed89d28c1ff6a82386ee7c9f5084499eedb8998aa137"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Offsets (from UTC) for each timezone.", "columns": {"time_zone": {"name": "time_zone", "description": "Name of the time zone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "standard_offset": {"name": "standard_offset", "description": "Standard offset of the timezone (non-daylight savings hours). 
In `+/-hh:mm` format.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "standard_offset_minutes": {"name": "standard_offset_minutes", "description": "Standard offset of the timezone (non-daylight savings hours) in minutes.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1724705298.232703, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__time_zone\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__time_zone_tmp') }}\n\n),\n\nfields as (\n\n select\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__time_zone_tmp')),\n staging_columns=get_time_zone_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n standard_offset,\n time_zone,\n -- the standard_offset is a string written as [+/-]HH:MM\n -- let's convert it to an integer value of minutes\n cast( {{ dbt.split_part(string_text='standard_offset', delimiter_text=\"':'\", part_number=1) }} as {{ dbt.type_int() }} ) * 60 +\n (cast( {{ dbt.split_part(string_text='standard_offset', delimiter_text=\"':'\", part_number=2) }} as {{ dbt.type_int() }} ) *\n (case when standard_offset like '-%' then -1 else 1 end) ) as standard_offset_minutes\n \n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__time_zone_tmp", "package": null, "version": null}, {"name": "stg_zendesk__time_zone_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_time_zone_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.split_part", "macro.dbt.type_int"], "nodes": ["model.zendesk_source.stg_zendesk__time_zone_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__time_zone.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__time_zone_tmp\"\n\n),\n\nfields as (\n\n select\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n standard_offset\n \n as \n \n standard_offset\n \n, \n \n \n time_zone\n \n as \n \n time_zone\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n standard_offset,\n time_zone,\n -- the standard_offset is a string written as [+/-]HH:MM\n -- let's convert it to an integer value of minutes\n cast( \n\n \n \n\n split_part(\n standard_offset,\n ':',\n 1\n )\n\n\n \n\n as integer ) * 60 +\n (cast( \n\n \n \n\n split_part(\n standard_offset,\n ':',\n 2\n )\n\n\n \n\n as integer ) *\n (case when standard_offset like '-%' then -1 else 1 end) ) as standard_offset_minutes\n \n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, 
"model.zendesk_source.stg_zendesk__group": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__group", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__group.sql", "original_file_path": "models/stg_zendesk__group.sql", "unique_id": "model.zendesk_source.stg_zendesk__group", "fqn": ["zendesk_source", "stg_zendesk__group"], "alias": "stg_zendesk__group", "checksum": {"name": "sha256", "checksum": "21a956af3b03e9e49e9e94ade093fa716db9f061e7eb9e209c3ff7f9986b15b9"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "When support requests arrive in Zendesk Support, they can be assigned to a Group. Groups serve as the core element of ticket workflow; support agents are organized into Groups and tickets can be assigned to a Group only, or to an assigned agent within a Group. A ticket can never be assigned to an agent without also being assigned to a Group.\n", "columns": {"group_id": {"name": "group_id", "description": "Automatically assigned when creating groups", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The name of the group", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1724705298.2239559, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__group\"", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__group_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__group_tmp')),\n staging_columns=get_group_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n id as group_id,\n name\n from fields\n \n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__group_tmp", "package": null, "version": null}, {"name": "stg_zendesk__group_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_group_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__group_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__group.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__group_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n, \n \n \n url\n \n as \n \n url\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as group_id,\n name\n from fields\n \n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_comment": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__ticket_comment", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__ticket_comment.sql", "original_file_path": "models/stg_zendesk__ticket_comment.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_comment", "fqn": ["zendesk_source", "stg_zendesk__ticket_comment"], "alias": "stg_zendesk__ticket_comment", "checksum": {"name": "sha256", "checksum": "d81e21e84092f9b0ddb806817680c774a31e35cefafd5ad15895436887156439"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": 
{"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Ticket comments represent the conversation between requesters, collaborators, and agents. Comments can be public or private.", "columns": {"ticket_comment_id": {"name": "ticket_comment_id", "description": "Automatically assigned when the comment is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "body": {"name": "body", "description": "The comment string", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the comment was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_public": {"name": "is_public", "description": "Boolean field indicating if the comment is public (true), or if it is an internal note (false)", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_id": {"name": "ticket_id", "description": "The ticket id associated with this comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "user_id": {"name": "user_id", "description": "The id of the comment author", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_facebook_comment": {"name": "is_facebook_comment", "description": "Boolean field indicating if the comment is a facebook comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_tweet": {"name": "is_tweet", "description": "Boolean field indicating if the comment is a twitter tweet", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_voice_comment": {"name": "is_voice_comment", "description": "Boolean field indicating if the comment is a voice comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1724705298.225963, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_comment\"", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__ticket_comment_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__ticket_comment_tmp')),\n staging_columns=get_ticket_comment_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_comment_id,\n _fivetran_synced,\n body,\n cast(created as {{ dbt.type_timestamp() }}) as created_at,\n public as is_public,\n ticket_id,\n user_id,\n facebook_comment as is_facebook_comment,\n tweet as is_tweet,\n voice_comment as is_voice_comment\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_comment_tmp", "package": null, "version": null}, {"name": "stg_zendesk__ticket_comment_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_ticket_comment_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_comment_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__ticket_comment.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_comment_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n body\n \n as \n \n body\n \n, \n cast(null as integer) as \n \n call_duration\n \n , \n cast(null as integer) as \n \n call_id\n \n , \n \n \n created\n \n as \n \n created\n \n, \n \n \n facebook_comment\n \n as \n \n facebook_comment\n \n, \n \n \n id\n \n as \n \n id\n \n, \n cast(null as integer) as \n \n location\n \n , \n \n \n public\n \n as \n \n public\n \n, \n cast(null as integer) as \n \n recording_url\n \n , \n cast(null as timestamp) as \n \n started_at\n \n , \n \n \n ticket_id\n \n as \n \n ticket_id\n \n, \n cast(null as integer) as \n \n transcription_status\n \n , \n cast(null as integer) as \n \n transcription_text\n \n , \n cast(null as integer) as \n \n trusted\n \n , \n \n \n tweet\n \n as \n \n tweet\n \n, \n \n \n user_id\n \n as \n \n user_id\n \n, \n \n \n voice_comment\n \n as \n \n voice_comment\n \n, \n cast(null as integer) as \n \n voice_comment_transcription_visible\n \n \n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_comment_id,\n _fivetran_synced,\n body,\n cast(created as timestamp) as created_at,\n public as is_public,\n ticket_id,\n user_id,\n facebook_comment as is_facebook_comment,\n tweet as is_tweet,\n voice_comment as is_voice_comment\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, 
"deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_schedule": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__ticket_schedule", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__ticket_schedule.sql", "original_file_path": "models/stg_zendesk__ticket_schedule.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_schedule", "fqn": ["zendesk_source", "stg_zendesk__ticket_schedule"], "alias": "stg_zendesk__ticket_schedule", "checksum": {"name": "sha256", "checksum": "69d32ac51b73241f990f8c1a08309cb42e79d0c1b26b99a7060353bfee88066e"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "The schedules applied to tickets through a trigger.", "columns": {"ticket_id": {"name": "ticket_id", "description": "The ID of the ticket assigned to the schedule", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the schedule was assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "schedule_id": {"name": "schedule_id", "description": "The ID of the schedule applied to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1724705298.228896, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_schedule\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__ticket_schedule_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__ticket_schedule_tmp')),\n staging_columns=get_ticket_schedule_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n cast(created_at as {{ dbt.type_timestamp() }}) as created_at,\n cast(schedule_id as {{ dbt.type_string() }}) as schedule_id --need to convert from numeric to string for downstream models to work properly\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_schedule_tmp", "package": null, "version": null}, {"name": "stg_zendesk__ticket_schedule_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_ticket_schedule_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp", "macro.dbt.type_string"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_schedule_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__ticket_schedule.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_schedule_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n schedule_id\n \n as \n \n schedule_id\n \n, \n \n \n ticket_id\n \n as \n \n ticket_id\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n cast(created_at as timestamp) as created_at,\n cast(schedule_id as TEXT) as schedule_id --need to convert from numeric to string for downstream models to work properly\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__schedule": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__schedule", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__schedule.sql", "original_file_path": "models/stg_zendesk__schedule.sql", "unique_id": "model.zendesk_source.stg_zendesk__schedule", "fqn": ["zendesk_source", "stg_zendesk__schedule"], "alias": "stg_zendesk__schedule", "checksum": {"name": "sha256", "checksum": "336dabaf980af5f08c6a5f43d04cdfd00146191b0927176fe4add5f65117c673"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "The support schedules created with different business hours and holidays.", "columns": {"schedule_id": {"name": "schedule_id", "description": "ID automatically assigned to the schedule upon creation", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "schedule_name": {"name": "schedule_name", "description": "Name of the schedule", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "Time the schedule was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "start_time": {"name": "start_time", "description": "Start time of the schedule, in the schedule's time zone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "end_time": {"name": "end_time", "description": "End time of the schedule, in the schedule's time zone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "time_zone": {"name": "time_zone", "description": "Timezone in which the schedule operates.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "table", "schema": 
"zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1724705298.228642, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__schedule\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__schedule_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__schedule_tmp')),\n staging_columns=get_schedule_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n cast(id as {{ dbt.type_string() }}) as schedule_id, --need to convert from numeric to string for downstream models to work properly\n end_time,\n start_time,\n name as schedule_name,\n created_at,\n time_zone\n \n from fields\n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__schedule_tmp", "package": null, "version": null}, {"name": "stg_zendesk__schedule_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_schedule_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_string"], "nodes": ["model.zendesk_source.stg_zendesk__schedule_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__schedule.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__schedule_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n end_time\n \n as \n \n end_time\n \n, \n \n \n end_time_utc\n \n as \n \n end_time_utc\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n start_time\n \n as \n \n start_time\n \n, \n \n \n start_time_utc\n \n as \n \n start_time_utc\n \n, \n \n \n time_zone\n \n as \n \n time_zone\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n cast(id as TEXT) as schedule_id, --need to convert from numeric to string for downstream models to work properly\n end_time,\n start_time,\n name as schedule_name,\n created_at,\n time_zone\n \n from fields\n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__user": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__user", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__user.sql", "original_file_path": "models/stg_zendesk__user.sql", "unique_id": "model.zendesk_source.stg_zendesk__user", "fqn": ["zendesk_source", "stg_zendesk__user"], "alias": "stg_zendesk__user", "checksum": {"name": "sha256", "checksum": "0bdebead73baf5943015b93700be6c9a96569a4a6d96ec23f2da0327082dc351"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Zendesk Support has three types of users, end-users (your customers), agents, and administrators.", "columns": {"user_id": {"name": "user_id", "description": "Automatically assigned when the user is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "email": {"name": "email", "description": "The user's primary email address. *Writeable on create only. On update, a secondary email is added. 
See Email Address", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The user's name", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_active": {"name": "is_active", "description": "false if the user has been deleted", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the user was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_id": {"name": "organization_id", "description": "The id of the user's organization. If the user has more than one organization memberships, the id of the user's default organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "phone": {"name": "phone", "description": "User's phone number.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "role": {"name": "role", "description": "The user's role. Possible values are \"end-user\", \"agent\", or \"admin\"", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "time_zone": {"name": "time_zone", "description": "The user's time zone. See Time Zone", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_restriction": {"name": "ticket_restriction", "description": "Specifies which tickets the user has access to. Possible values are organization, groups, assigned, requested and null", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_suspended": {"name": "is_suspended", "description": "Boolean representing whether the user has been suspended, meaning that they can no longer sign in and any new support requests you receive from them are sent to the suspended tickets queue.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "external_id": {"name": "external_id", "description": "A unique identifier from another system. The API treats the id as case insensitive.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "The time the user was last updated.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_login_at": {"name": "last_login_at", "description": "Last time the user signed in to Zendesk Support or made an API request using an API token or basic authentication.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "locale": {"name": "locale", "description": "The user's locale. A BCP-47 compliant tag for the locale. If both \"locale\" and \"locale_id\" are present on create or update, \"locale_id\" is ignored and only \"locale\" is used.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1724705298.227817, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user\"", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__user_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__user_tmp')),\n staging_columns=get_user_columns()\n )\n }}\n \n from base\n),\n\nfinal as ( \n \n select \n id as user_id,\n external_id,\n _fivetran_synced,\n cast(last_login_at as {{ dbt.type_timestamp() }}) as last_login_at,\n cast(created_at as {{ dbt.type_timestamp() }}) as created_at,\n cast(updated_at as {{ dbt.type_timestamp() }}) as updated_at,\n email,\n name,\n organization_id,\n phone,\n {% if var('internal_user_criteria', false) -%}\n case \n when role in ('admin', 'agent') then role\n when {{ var('internal_user_criteria', false) }} then 'agent'\n else role end as role,\n {% else -%}\n role,\n {% endif -%}\n ticket_restriction,\n time_zone,\n locale,\n active as is_active,\n suspended as is_suspended\n\n {{ fivetran_utils.fill_pass_through_columns('zendesk__user_passthrough_columns') }}\n\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__user_tmp", "package": null, "version": null}, {"name": "stg_zendesk__user_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_user_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp", "macro.fivetran_utils.fill_pass_through_columns"], "nodes": ["model.zendesk_source.stg_zendesk__user_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__user.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n active\n \n as \n \n active\n \n, \n \n \n alias\n \n as \n \n alias\n \n, \n \n \n authenticity_token\n \n as \n \n authenticity_token\n \n, \n \n \n chat_only\n \n as \n \n chat_only\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n details\n \n as \n \n details\n \n, \n \n \n email\n \n as \n \n email\n \n, \n \n \n external_id\n \n as \n \n external_id\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n last_login_at\n \n as \n \n last_login_at\n \n, \n \n \n locale\n \n as \n \n locale\n \n, \n \n \n locale_id\n \n as \n \n locale_id\n \n, \n \n \n moderator\n \n as \n \n moderator\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n notes\n \n as \n \n notes\n \n, \n \n \n only_private_comments\n \n as \n \n only_private_comments\n \n, \n \n \n organization_id\n \n as \n \n organization_id\n \n, \n \n \n phone\n \n as \n \n phone\n \n, \n \n \n remote_photo_url\n \n as \n \n remote_photo_url\n \n, \n \n \n restricted_agent\n \n as \n \n restricted_agent\n \n, \n \n \n role\n \n as \n \n role\n \n, \n \n \n shared\n \n as \n \n shared\n \n, \n \n \n shared_agent\n \n as \n \n shared_agent\n \n, \n \n \n signature\n \n as \n \n signature\n \n, \n \n \n suspended\n \n as \n \n suspended\n \n, \n \n \n ticket_restriction\n \n as \n \n ticket_restriction\n \n, \n \n \n time_zone\n \n as \n \n time_zone\n \n, \n \n \n two_factor_auth_enabled\n \n as \n \n two_factor_auth_enabled\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n, \n \n \n url\n \n as \n \n url\n \n, \n \n \n verified\n \n as \n \n verified\n \n\n\n\n \n from base\n),\n\nfinal as ( \n \n select \n id as user_id,\n external_id,\n _fivetran_synced,\n cast(last_login_at as timestamp) as last_login_at,\n cast(created_at as timestamp) as created_at,\n cast(updated_at as timestamp) as updated_at,\n email,\n name,\n organization_id,\n phone,\n role,\n ticket_restriction,\n time_zone,\n locale,\n active as is_active,\n suspended as is_suspended\n\n \n\n\n\n\n\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__brand": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__brand", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__brand.sql", "original_file_path": "models/stg_zendesk__brand.sql", "unique_id": "model.zendesk_source.stg_zendesk__brand", "fqn": ["zendesk_source", "stg_zendesk__brand"], "alias": "stg_zendesk__brand", "checksum": {"name": "sha256", "checksum": "106699200d371f2fac9fe94ce084a357331b215d4130195e1e94d2d07c6d169c"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", 
"on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Brands are your customer-facing identities. They might represent multiple products or services, or they might literally be multiple brands owned and represented by your company.\n", "columns": {"brand_id": {"name": "brand_id", "description": "The ID automatically assigned when the brand is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "brand_url": {"name": "brand_url", "description": "The url of the brand", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The name of the brand", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "subdomain": {"name": "subdomain", "description": "The subdomain of the brand", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "active": {"name": "active", "description": "If the brand is set as active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1724705298.2231581, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__brand\"", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__brand_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__brand_tmp')),\n staging_columns=get_brand_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n id as brand_id,\n brand_url,\n name,\n subdomain,\n active as is_active\n from fields\n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__brand_tmp", "package": null, "version": null}, {"name": "stg_zendesk__brand_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_brand_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__brand_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__brand.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__brand_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n active\n \n as \n \n active\n \n, \n \n \n brand_url\n \n as \n \n brand_url\n \n, \n \n \n has_help_center\n \n as \n \n has_help_center\n \n, \n \n \n help_center_state\n \n as \n \n help_center_state\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n logo_content_type\n \n as \n \n logo_content_type\n \n, \n \n \n logo_content_url\n \n as \n \n logo_content_url\n \n, \n \n \n logo_deleted\n \n as \n \n logo_deleted\n \n, \n \n \n logo_file_name\n \n as \n \n logo_file_name\n \n, \n \n \n logo_height\n \n as \n \n logo_height\n \n, \n \n \n logo_id\n \n as \n \n logo_id\n \n, \n \n \n logo_inline\n \n as \n \n logo_inline\n \n, \n \n \n logo_mapped_content_url\n \n as \n \n logo_mapped_content_url\n \n, \n \n \n logo_size\n \n as \n \n logo_size\n \n, \n \n \n logo_url\n \n as \n \n logo_url\n \n, \n \n \n logo_width\n \n as \n \n logo_width\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n subdomain\n \n as \n \n subdomain\n \n, \n \n \n url\n \n as \n \n url\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as brand_id,\n brand_url,\n name,\n subdomain,\n active as is_active\n from fields\n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_form_history": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__ticket_form_history", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__ticket_form_history.sql", "original_file_path": "models/stg_zendesk__ticket_form_history.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_form_history", "fqn": ["zendesk_source", "stg_zendesk__ticket_form_history"], "alias": "stg_zendesk__ticket_form_history", "checksum": {"name": "sha256", "checksum": "1e70e9a0b2dfce82e649a8a0507d59d6f3f2832429191ea67988ba0dfd1017cf"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Ticket forms allow an admin to define a subset of ticket fields for display to both agents and end users.", "columns": {"ticket_form_id": {"name": "ticket_form_id", "description": "Automatically assigned when creating ticket form", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the ticket form was created", "meta": {}, "data_type": null, "constraints": 
[], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "The time of the last update of the ticket form", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "display_name": {"name": "display_name", "description": "The name of the form that is displayed to an end user", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "active": {"name": "active", "description": "If the form is set as active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The name of the form", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1724705298.229339, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_form_history\"", "raw_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_ticket_form_history', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__ticket_form_history_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__ticket_form_history_tmp')),\n staging_columns=get_ticket_form_history_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_form_id,\n cast(created_at as {{ dbt.type_timestamp() }}) as created_at,\n cast(updated_at as {{ dbt.type_timestamp() }}) as updated_at,\n display_name,\n active as is_active,\n name\n from fields\n where not coalesce(_fivetran_deleted, false)\n \n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_form_history_tmp", "package": null, "version": null}, {"name": "stg_zendesk__ticket_form_history_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_ticket_form_history_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_form_history_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__ticket_form_history.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_form_history_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n active\n \n as \n \n active\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n display_name\n \n as \n \n display_name\n \n, \n \n \n end_user_visible\n \n as \n \n end_user_visible\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_form_id,\n cast(created_at as timestamp) as created_at,\n cast(updated_at as timestamp) as updated_at,\n display_name,\n active as is_active,\n name\n from fields\n where not coalesce(_fivetran_deleted, false)\n \n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__domain_name": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__domain_name", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__domain_name.sql", "original_file_path": "models/stg_zendesk__domain_name.sql", "unique_id": "model.zendesk_source.stg_zendesk__domain_name", "fqn": ["zendesk_source", "stg_zendesk__domain_name"], "alias": "stg_zendesk__domain_name", "checksum": {"name": "sha256", "checksum": "8c3a4735e0cdea5a463eefc3c6820d15d622857af45dab942410dc64a0ac4bda"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "Domain names associated with an organization. 
An organization may have multiple domain names.", "columns": {"organization_id": {"name": "organization_id", "description": "Reference to the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "domain_name": {"name": "domain_name", "description": "The name of the domain associated with the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "index": {"name": "index", "description": "Index number of the domain name associated with the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1724705298.223692, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__domain_name\"", "raw_code": "--To disable this model, set the using_domain_names variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_domain_names', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__domain_name_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__domain_name_tmp')),\n staging_columns=get_domain_name_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n organization_id,\n domain_name,\n index\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__domain_name_tmp", "package": null, "version": null}, {"name": "stg_zendesk__domain_name_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_domain_name_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__domain_name_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__domain_name.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_domain_names variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__domain_name_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n domain_name\n \n as \n \n domain_name\n \n, \n \n \n index\n \n as \n \n index\n \n, \n \n \n organization_id\n \n as \n \n organization_id\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n organization_id,\n domain_name,\n index\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__organization_tag": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__organization_tag", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__organization_tag.sql", "original_file_path": "models/stg_zendesk__organization_tag.sql", "unique_id": "model.zendesk_source.stg_zendesk__organization_tag", "fqn": ["zendesk_source", "stg_zendesk__organization_tag"], "alias": "stg_zendesk__organization_tag", "checksum": {"name": "sha256", "checksum": "15f1f4014e4ba78ae7992f28c61e3926b7cd12c6bb32efc7b516db93c1e20d82"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "The tags associated with an organization. An organization may have multiple tags.", "columns": {"organization_id": {"name": "organization_id", "description": "Reference to the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tag": {"name": "tag", "description": "Tag associated with the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1724705298.224384, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__organization_tag\"", "raw_code": "--To disable this model, set the using_organization_tags variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_organization_tags', True)) }}\n\nwith base as (\n\n select * \n from {{ ref('stg_zendesk__organization_tag_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__organization_tag_tmp')),\n staging_columns=get_organization_tag_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n organization_id,\n {% if target.type == 'redshift' %}\n 'tag'\n {% else %}\n tag\n {% endif %}\n as tags\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__organization_tag_tmp", "package": null, "version": null}, {"name": "stg_zendesk__organization_tag_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_organization_tag_columns", "macro.fivetran_utils.fill_staging_columns"], "nodes": ["model.zendesk_source.stg_zendesk__organization_tag_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__organization_tag.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_organization_tags variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__organization_tag_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n organization_id\n \n as \n \n organization_id\n \n, \n \n \n tag\n \n as \n \n tag\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n organization_id,\n \n tag\n \n as tags\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__ticket", "resource_type": "model", "package_name": "zendesk_source", "path": "stg_zendesk__ticket.sql", "original_file_path": "models/stg_zendesk__ticket.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket", "fqn": ["zendesk_source", "stg_zendesk__ticket"], "alias": "stg_zendesk__ticket", "checksum": {"name": "sha256", "checksum": "618e84a2e5a55edffebd745bc81183a58912db69c48150d984c3cd582e0a41dd"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "table", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, 
"access": "protected"}, "tags": [], "description": "Tickets are the means through which your end users (customers) communicate with agents in Zendesk Support. Tickets can originate from a number of channels, including email, Help Center, chat, phone call, Twitter, Facebook, or the API.\n", "columns": {"ticket_id": {"name": "ticket_id", "description": "Automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "url": {"name": "url", "description": "The API url of this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_id": {"name": "assignee_id", "description": "The agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "brand_id": {"name": "brand_id", "description": "Enterprise only. The id of the brand this ticket is associated with", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "When this record was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "type": {"name": "type", "description": "The type of this ticket, possible values are problem, incident, question or task", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "subject": {"name": "subject", "description": "The value of the subject field for this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "description": {"name": "description", "description": "Read-only first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "priority": {"name": "priority", "description": "The urgency with which the ticket should be addressed, possible values are urgent, high, normal and low", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "The state of the ticket, possible values are new, open, pending, hold, solved and closed", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "recipient": {"name": "recipient", "description": "The original recipient e-mail address of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_id": {"name": "requester_id", "description": "The user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_id": {"name": "submitter_id", "description": "The user who submitted the ticket. The submitter always becomes the author of the first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_id": {"name": "organization_id", "description": "The organization of the requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_id": {"name": "group_id", "description": "The group this ticket is assigned to", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "due_at": {"name": "due_at", "description": "If this is a ticket of type \"task\" it has a due date. Due date format uses ISO 8601 format.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_form_id": {"name": "ticket_form_id", "description": "Enterprise only. 
The id of the ticket form to render for the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_public": {"name": "is_public", "description": "Is true if any comments are public, false otherwise", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "When this record was last updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_channel": {"name": "created_channel", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_from_id": {"name": "source_from_id", "description": "The id of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_from_title": {"name": "source_from_title", "description": "The title of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_rel": {"name": "source_rel", "description": "The rel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_to_address": {"name": "source_to_address", "description": "The address of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "source_to_name": {"name": "source_to_name", "description": "The name of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": "zendesk_source://models/stg_zendesk.yml", "build_path": null, "deferred": false, "unrendered_config": {"materialized": "table", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1724705298.222216, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket\"", "raw_code": "with base as (\n\n select * \n from {{ ref('stg_zendesk__ticket_tmp') }}\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n {{\n fivetran_utils.fill_staging_columns(\n source_columns=adapter.get_columns_in_relation(ref('stg_zendesk__ticket_tmp')),\n staging_columns=get_ticket_columns()\n )\n }}\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_id,\n _fivetran_synced,\n assignee_id,\n brand_id,\n cast(created_at as {{ dbt.type_timestamp() }}) as created_at,\n cast(updated_at as {{ dbt.type_timestamp() }}) as updated_at,\n description,\n due_at,\n group_id,\n external_id,\n is_public,\n organization_id,\n priority,\n recipient,\n requester_id,\n status,\n subject,\n problem_id,\n submitter_id,\n ticket_form_id,\n type,\n url,\n via_channel as created_channel,\n via_source_from_id as source_from_id,\n via_source_from_title as source_from_title,\n via_source_rel as source_rel,\n via_source_to_address as source_to_address,\n via_source_to_name as source_to_name\n\n {{ fivetran_utils.fill_pass_through_columns('zendesk__ticket_passthrough_columns') }}\n\n from fields\n)\n\nselect * \nfrom final", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_tmp", "package": null, "version": null}, {"name": "stg_zendesk__ticket_tmp", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.zendesk_source.get_ticket_columns", "macro.fivetran_utils.fill_staging_columns", "macro.dbt.type_timestamp", "macro.fivetran_utils.fill_pass_through_columns"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_tmp"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk__ticket.sql", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n allow_channelback\n \n as \n \n allow_channelback\n \n, \n \n \n assignee_id\n \n as \n \n assignee_id\n \n, \n \n \n brand_id\n \n as \n \n brand_id\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n description\n \n as \n \n description\n \n, \n \n \n due_at\n \n as \n \n due_at\n \n, \n \n \n external_id\n \n as \n \n external_id\n \n, \n \n \n forum_topic_id\n \n as \n \n forum_topic_id\n \n, \n \n \n group_id\n \n as \n \n group_id\n \n, \n \n \n has_incidents\n \n as \n \n has_incidents\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n is_public\n \n as \n \n is_public\n \n, \n \n \n merged_ticket_ids\n \n as \n \n merged_ticket_ids\n \n, \n \n \n organization_id\n \n as \n \n organization_id\n \n, \n \n \n priority\n \n as \n \n priority\n \n, \n \n \n problem_id\n \n as \n \n problem_id\n \n, \n \n \n recipient\n \n as \n \n recipient\n \n, \n \n \n requester_id\n \n as \n \n requester_id\n \n, \n \n \n status\n \n as \n \n status\n \n, \n \n \n subject\n \n as \n \n subject\n \n, \n \n \n submitter_id\n \n as \n \n submitter_id\n \n, \n cast(null as integer) as \n \n system_ccs\n \n , \n \n \n system_client\n \n as \n \n system_client\n \n, \n cast(null as TEXT) as \n \n system_ip_address\n \n , \n cast(null as integer) as \n \n system_json_email_identifier\n \n , \n cast(null as float) as \n \n system_latitude\n \n , \n cast(null as TEXT) as \n \n system_location\n \n , \n cast(null as float) as \n \n system_longitude\n \n , \n cast(null as integer) as \n \n system_machine_generated\n \n , \n cast(null as integer) as \n \n system_message_id\n \n , \n cast(null as integer) as \n \n system_raw_email_identifier\n \n , \n \n \n ticket_form_id\n \n as \n \n ticket_form_id\n \n, \n \n \n type\n \n as \n \n type\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n, \n \n \n url\n \n as \n \n url\n \n, \n \n \n via_channel\n \n as \n \n via_channel\n \n, \n \n \n via_source_from_address\n \n as \n \n via_source_from_address\n \n, \n \n \n via_source_from_id\n \n as \n \n via_source_from_id\n \n, \n \n \n via_source_from_title\n \n as \n \n via_source_from_title\n \n, \n \n \n via_source_rel\n \n as \n \n via_source_rel\n \n, \n \n \n via_source_to_address\n \n as \n \n via_source_to_address\n \n, \n \n \n via_source_to_name\n \n as \n \n via_source_to_name\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_id,\n _fivetran_synced,\n assignee_id,\n brand_id,\n cast(created_at as timestamp) as created_at,\n cast(updated_at as timestamp) as updated_at,\n description,\n due_at,\n group_id,\n external_id,\n is_public,\n organization_id,\n priority,\n recipient,\n requester_id,\n status,\n subject,\n problem_id,\n submitter_id,\n ticket_form_id,\n type,\n url,\n via_channel as created_channel,\n via_source_from_id as source_from_id,\n via_source_from_title as source_from_title,\n via_source_rel as source_rel,\n via_source_to_address as source_to_address,\n via_source_to_name as source_to_name\n\n \n\n\n\n\n\n from fields\n)\n\nselect * \nfrom final", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, 
"checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__daylight_time_tmp": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__daylight_time_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__daylight_time_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__daylight_time_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__daylight_time_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__daylight_time_tmp"], "alias": "stg_zendesk__daylight_time_tmp", "checksum": {"name": "sha256", "checksum": "01afb893cce2ef776ef8c4c64dbd2cf3e40fe1f73986fdc4b78fd99ff0948ac8"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1724705297.998533, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__daylight_time_tmp\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk', 'daylight_time')) }}\nfrom {{ source('zendesk', 'daylight_time') }} as daylight_time_table", "language": "sql", "refs": [], "sources": [["zendesk", "daylight_time"], ["zendesk", "daylight_time"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.daylight_time"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__daylight_time_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nselect \"time_zone\",\n \"year\",\n \"_fivetran_synced\",\n \"daylight_end_utc\",\n \"daylight_offset\",\n \"daylight_start_utc\"\nfrom \"postgres\".\"zendesk_integration_tests_55\".\"daylight_time_data\" as daylight_time_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__user_tmp": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__user_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__user_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__user_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__user_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__user_tmp"], "alias": "stg_zendesk__user_tmp", "checksum": {"name": "sha256", "checksum": 
"606364c3b138f68707d75a04f859f28d4b0f17f99966b27a8f6087adfa091042"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1724705298.0098848, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk','user')) }} \nfrom {{ source('zendesk','user') }} as user_table", "language": "sql", "refs": [], "sources": [["zendesk", "user"], ["zendesk", "user"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.user"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__user_tmp.sql", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_synced\",\n \"active\",\n \"alias\",\n \"authenticity_token\",\n \"chat_only\",\n \"created_at\",\n \"details\",\n \"email\",\n \"external_id\",\n \"last_login_at\",\n \"locale\",\n \"locale_id\",\n \"moderator\",\n \"name\",\n \"notes\",\n \"only_private_comments\",\n \"organization_id\",\n \"phone\",\n \"remote_photo_url\",\n \"restricted_agent\",\n \"role\",\n \"shared\",\n \"shared_agent\",\n \"signature\",\n \"suspended\",\n \"ticket_restriction\",\n \"time_zone\",\n \"two_factor_auth_enabled\",\n \"updated_at\",\n \"url\",\n \"verified\" \nfrom \"postgres\".\"zendesk_integration_tests_55\".\"user_data\" as user_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__group_tmp": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__group_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__group_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__group_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__group_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__group_tmp"], "alias": "stg_zendesk__group_tmp", "checksum": {"name": "sha256", "checksum": "dc91ce1ab4b5ce5fec29b74b8f999d04fa063ab6354b7387d5875997f4db7e11"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": 
{"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1724705298.013942, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__group_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk','group')) }} \nfrom {{ source('zendesk','group') }} as group_table", "language": "sql", "refs": [], "sources": [["zendesk", "group"], ["zendesk", "group"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.group"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__group_tmp.sql", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_deleted\",\n \"_fivetran_synced\",\n \"created_at\",\n \"name\",\n \"updated_at\",\n \"url\" \nfrom \"postgres\".\"zendesk_integration_tests_55\".\"group_data\" as group_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_tmp": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__ticket_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__ticket_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__ticket_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__ticket_tmp"], "alias": "stg_zendesk__ticket_tmp", "checksum": {"name": "sha256", "checksum": "b90132a6d22e753a066ebeaaea0bc164376837b702d7886ad0d1bb1a993e6e9a"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1724705298.017261, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk', 'ticket')) }}\nfrom {{ source('zendesk', 'ticket') }} as ticket_table", "language": "sql", "refs": [], "sources": [["zendesk", "ticket"], ["zendesk", "ticket"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.ticket"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__ticket_tmp.sql", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_synced\",\n \"allow_channelback\",\n \"assignee_id\",\n \"brand_id\",\n \"created_at\",\n \"description\",\n \"due_at\",\n \"external_id\",\n \"forum_topic_id\",\n \"group_id\",\n \"has_incidents\",\n \"is_public\",\n \"organization_id\",\n \"priority\",\n \"problem_id\",\n \"recipient\",\n 
\"requester_id\",\n \"status\",\n \"subject\",\n \"submitter_id\",\n \"system_client\",\n \"ticket_form_id\",\n \"type\",\n \"updated_at\",\n \"url\",\n \"via_channel\",\n \"via_source_from_id\",\n \"via_source_from_title\",\n \"via_source_rel\",\n \"via_source_to_address\",\n \"via_source_to_name\",\n \"merged_ticket_ids\",\n \"via_source_from_address\",\n \"followup_ids\",\n \"via_followup_source_id\"\nfrom \"postgres\".\"zendesk_integration_tests_55\".\"ticket_data\" as ticket_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__brand_tmp": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__brand_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__brand_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__brand_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__brand_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__brand_tmp"], "alias": "stg_zendesk__brand_tmp", "checksum": {"name": "sha256", "checksum": "9658c9bd90fda5610067615a971eff98dc7c7b8c04827b9ab04da65f28630381"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1724705298.0205219, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__brand_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk','brand')) }} \nfrom {{ source('zendesk','brand') }} as brand_table", "language": "sql", "refs": [], "sources": [["zendesk", "brand"], ["zendesk", "brand"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.brand"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__brand_tmp.sql", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_deleted\",\n \"_fivetran_synced\",\n \"active\",\n \"brand_url\",\n \"default\",\n \"has_help_center\",\n \"help_center_state\",\n \"logo_content_type\",\n \"logo_content_url\",\n \"logo_deleted\",\n \"logo_file_name\",\n \"logo_height\",\n \"logo_id\",\n \"logo_inline\",\n \"logo_mapped_content_url\",\n \"logo_size\",\n \"logo_url\",\n \"logo_width\",\n \"name\",\n \"subdomain\",\n \"url\" \nfrom \"postgres\".\"zendesk_integration_tests_55\".\"brand_data\" as brand_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_tag_tmp": {"database": "postgres", "schema": 
"zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__ticket_tag_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__ticket_tag_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__ticket_tag_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_tag_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__ticket_tag_tmp"], "alias": "stg_zendesk__ticket_tag_tmp", "checksum": {"name": "sha256", "checksum": "d88425c9db1a948768fa8683e58654de3aab9ffc2966d829b6707c12afd94283"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1724705298.023885, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_tag_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk', 'ticket_tag')) }}\nfrom {{ source('zendesk', 'ticket_tag') }} as ticket_tag_table", "language": "sql", "refs": [], "sources": [["zendesk", "ticket_tag"], ["zendesk", "ticket_tag"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.ticket_tag"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__ticket_tag_tmp.sql", "compiled": true, "compiled_code": "select \"tag\",\n \"ticket_id\",\n \"_fivetran_synced\"\nfrom \"postgres\".\"zendesk_integration_tests_55\".\"ticket_tag_data\" as ticket_tag_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__schedule_holiday_tmp": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__schedule_holiday_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__schedule_holiday_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__schedule_holiday_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__schedule_holiday_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__schedule_holiday_tmp"], "alias": "stg_zendesk__schedule_holiday_tmp", "checksum": {"name": "sha256", "checksum": "9cd5e53ebcb7f11e55f772a7826b78a7f5f6b27ba975834c28c504181a548a3b"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, 
"tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1724705298.027918, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__schedule_holiday_tmp\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk', 'schedule_holiday')) }}\nfrom {{ source('zendesk', 'schedule_holiday') }} as schedule_holiday_table", "language": "sql", "refs": [], "sources": [["zendesk", "schedule_holiday"], ["zendesk", "schedule_holiday"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.schedule_holiday"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__schedule_holiday_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nselect \"id\",\n \"schedule_id\",\n \"_fivetran_deleted\",\n \"_fivetran_synced\",\n \"end_date\",\n \"name\",\n \"start_date\"\nfrom \"postgres\".\"zendesk_integration_tests_55\".\"schedule_holiday_data\" as schedule_holiday_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__user_tag_tmp": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__user_tag_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__user_tag_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__user_tag_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__user_tag_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__user_tag_tmp"], "alias": "stg_zendesk__user_tag_tmp", "checksum": {"name": "sha256", "checksum": "7ee78431bec698af41296439428c74a8d5f8fa607c55e9b5a9b97de8b777f490"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1724705298.031551, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user_tag_tmp\"", "raw_code": "--To disable this model, set the using_user_tags variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_user_tags', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk','user_tag')) }} \nfrom {{ source('zendesk','user_tag') }} as 
user_tag_table", "language": "sql", "refs": [], "sources": [["zendesk", "user_tag"], ["zendesk", "user_tag"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.user_tag"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__user_tag_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_user_tags variable within your dbt_project.yml file to False.\n\n\nselect \"tag\",\n \"user_id\",\n \"_fivetran_synced\" \nfrom \"postgres\".\"zendesk_integration_tests_55\".\"user_tag_data\" as user_tag_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_field_history_tmp": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__ticket_field_history_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__ticket_field_history_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__ticket_field_history_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_field_history_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__ticket_field_history_tmp"], "alias": "stg_zendesk__ticket_field_history_tmp", "checksum": {"name": "sha256", "checksum": "9dbb7257a2998c6e0d0d7a572aa7b0d301c777cea8e7085abfa42809b9312aa7"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1724705298.0351481, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_field_history_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk', 'ticket_field_history')) }}\nfrom {{ source('zendesk', 'ticket_field_history') }} as ticket_field_history_table", "language": "sql", "refs": [], "sources": [["zendesk", "ticket_field_history"], ["zendesk", "ticket_field_history"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.ticket_field_history"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__ticket_field_history_tmp.sql", "compiled": true, "compiled_code": "select \"field_name\",\n \"ticket_id\",\n \"updated\",\n \"_fivetran_synced\",\n \"user_id\",\n \"value\"\nfrom \"postgres\".\"zendesk_integration_tests_55\".\"ticket_field_history_data\" as ticket_field_history_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, 
"model.zendesk_source.stg_zendesk__ticket_form_history_tmp": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__ticket_form_history_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__ticket_form_history_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__ticket_form_history_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_form_history_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__ticket_form_history_tmp"], "alias": "stg_zendesk__ticket_form_history_tmp", "checksum": {"name": "sha256", "checksum": "0e95f65a6932c12231ef9419574fd09b287a70ca20612cce228a7fb642fe1609"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1724705298.0384269, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_form_history_tmp\"", "raw_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_ticket_form_history', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk', 'ticket_form_history')) }}\nfrom {{ source('zendesk', 'ticket_form_history') }} as ticket_form_history_table", "language": "sql", "refs": [], "sources": [["zendesk", "ticket_form_history"], ["zendesk", "ticket_form_history"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.ticket_form_history"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__ticket_form_history_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n\n\nselect \"id\",\n \"updated_at\",\n \"_fivetran_deleted\",\n \"_fivetran_synced\",\n \"active\",\n \"created_at\",\n \"display_name\",\n \"end_user_visible\",\n \"name\"\nfrom \"postgres\".\"zendesk_integration_tests_55\".\"ticket_form_history_data\" as ticket_form_history_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_comment_tmp": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__ticket_comment_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__ticket_comment_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__ticket_comment_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_comment_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__ticket_comment_tmp"], "alias": 
"stg_zendesk__ticket_comment_tmp", "checksum": {"name": "sha256", "checksum": "756209cf9e8c53e873cd7ac7a2dce2bdbafbd5a9d416e503c628b3ee57603c86"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1724705298.042859, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_comment_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk', 'ticket_comment')) }}\nfrom {{ source('zendesk', 'ticket_comment') }} as ticket_comment_table", "language": "sql", "refs": [], "sources": [["zendesk", "ticket_comment"], ["zendesk", "ticket_comment"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.ticket_comment"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__ticket_comment_tmp.sql", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_synced\",\n \"body\",\n \"created\",\n \"facebook_comment\",\n \"public\",\n \"ticket_id\",\n \"tweet\",\n \"user_id\",\n \"voice_comment\"\nfrom \"postgres\".\"zendesk_integration_tests_55\".\"ticket_comment_data\" as ticket_comment_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__organization_tag_tmp": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__organization_tag_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__organization_tag_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__organization_tag_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__organization_tag_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__organization_tag_tmp"], "alias": "stg_zendesk__organization_tag_tmp", "checksum": {"name": "sha256", "checksum": "b917812c188e64cda849a61d784cd95507c1c9187fc0ef2e083f2eee61c58231"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view", "schema": 
"zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1724705298.04627, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__organization_tag_tmp\"", "raw_code": "--To disable this model, set the using_organization_tags variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_organization_tags', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk','organization_tag')) }} \nfrom {{ source('zendesk','organization_tag') }} as organization_tag_table", "language": "sql", "refs": [], "sources": [["zendesk", "organization_tag"], ["zendesk", "organization_tag"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.organization_tag"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__organization_tag_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_organization_tags variable within your dbt_project.yml file to False.\n\n\nselect \"organization_id\",\n \"tag\",\n \"_fivetran_synced\" \nfrom \"postgres\".\"zendesk_integration_tests_55\".\"organization_tag_data\" as organization_tag_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__schedule_tmp": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__schedule_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__schedule_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__schedule_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__schedule_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__schedule_tmp"], "alias": "stg_zendesk__schedule_tmp", "checksum": {"name": "sha256", "checksum": "7d55acbaaa3cc93868bcd3fe4f945b1ecb4871da7b8bed7bf04714ce3fc11eef"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1724705298.0499258, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__schedule_tmp\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk', 'schedule')) }}\nfrom {{ source('zendesk', 'schedule') }} as schedule_table", "language": "sql", "refs": [], "sources": [["zendesk", "schedule"], ["zendesk", "schedule"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.schedule"]}, "compiled_path": 
"target/compiled/zendesk_source/models/tmp/stg_zendesk__schedule_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nselect \"end_time\",\n \"id\",\n \"start_time\",\n \"_fivetran_deleted\",\n \"_fivetran_synced\",\n \"end_time_utc\",\n \"name\",\n \"start_time_utc\",\n \"time_zone\",\n \"created_at\"\nfrom \"postgres\".\"zendesk_integration_tests_55\".\"schedule_data\" as schedule_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__organization_tmp": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__organization_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__organization_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__organization_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__organization_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__organization_tmp"], "alias": "stg_zendesk__organization_tmp", "checksum": {"name": "sha256", "checksum": "f2b39377f97f3a1a71fee168330c6971c06292c4ea702091a978eb64af9bd28f"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}"}, "created_at": 1724705298.053393, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__organization_tmp\"", "raw_code": "select {{ dbt_utils.star(source('zendesk', 'organization')) }}\nfrom {{ source('zendesk','organization') }} as organization_table", "language": "sql", "refs": [], "sources": [["zendesk", "organization"], ["zendesk", "organization"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.organization"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__organization_tmp.sql", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_synced\",\n \"created_at\",\n \"details\",\n \"external_id\",\n \"group_id\",\n \"name\",\n \"notes\",\n \"shared_comments\",\n \"shared_tickets\",\n \"updated_at\",\n \"url\"\nfrom \"postgres\".\"zendesk_integration_tests_55\".\"organization_data\" as organization_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__ticket_schedule_tmp": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__ticket_schedule_tmp", "resource_type": "model", 
"package_name": "zendesk_source", "path": "tmp/stg_zendesk__ticket_schedule_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__ticket_schedule_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__ticket_schedule_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__ticket_schedule_tmp"], "alias": "stg_zendesk__ticket_schedule_tmp", "checksum": {"name": "sha256", "checksum": "59d017b8bb4285288bd47b79a1cb1afdb64faca436f52a718f6c8051d24cf6f1"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1724705298.05666, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_schedule_tmp\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\n{%- set source_relation = adapter.get_relation(\n database=source('zendesk', 'ticket_schedule').database,\n schema=source('zendesk', 'ticket_schedule').schema,\n identifier=source('zendesk', 'ticket_schedule').name) -%}\n\n{% set table_exists=source_relation is not none %}\n\n{% if table_exists %}\n\nselect {{ dbt_utils.star(source('zendesk', 'ticket_schedule')) }}\nfrom {{ source('zendesk', 'ticket_schedule') }} as ticket_schedule_table\n\n{% else %}\n\nselect\n cast(null as {{ dbt.type_timestamp() }}) as _fivetran_synced,\n cast(null as {{ dbt.type_timestamp() }}) as created_at,\n cast(null as {{ dbt.type_int() }}) as schedule_id,\n cast(null as {{ dbt.type_int() }}) as ticket_id\n\n{% endif %}", "language": "sql", "refs": [], "sources": [["zendesk", "ticket_schedule"], ["zendesk", "ticket_schedule"], ["zendesk", "ticket_schedule"]], "metrics": [], "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.ticket_schedule"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__ticket_schedule_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\n\n\nselect \"created_at\",\n \"ticket_id\",\n \"_fivetran_synced\",\n \"schedule_id\"\nfrom \"postgres\".\"zendesk_integration_tests_55\".\"ticket_schedule_data\" as ticket_schedule_table\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__domain_name_tmp": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__domain_name_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": 
"tmp/stg_zendesk__domain_name_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__domain_name_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__domain_name_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__domain_name_tmp"], "alias": "stg_zendesk__domain_name_tmp", "checksum": {"name": "sha256", "checksum": "58ba804a3f1cf2e7abe29a28cc9064e9be0355e6b358cca9e714e5777ff11b4b"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1724705298.099234, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__domain_name_tmp\"", "raw_code": "--To disable this model, set the using_domain_names variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_domain_names', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk', 'domain_name')) }} \nfrom {{ source('zendesk', 'domain_name') }} as domain_name_table", "language": "sql", "refs": [], "sources": [["zendesk", "domain_name"], ["zendesk", "domain_name"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.domain_name"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__domain_name_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_domain_names variable within your dbt_project.yml file to False.\n\n\nselect \"index\",\n \"organization_id\",\n \"_fivetran_synced\",\n \"domain_name\" \nfrom \"postgres\".\"zendesk_integration_tests_55\".\"domain_name_data\" as domain_name_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "model.zendesk_source.stg_zendesk__time_zone_tmp": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "stg_zendesk__time_zone_tmp", "resource_type": "model", "package_name": "zendesk_source", "path": "tmp/stg_zendesk__time_zone_tmp.sql", "original_file_path": "models/tmp/stg_zendesk__time_zone_tmp.sql", "unique_id": "model.zendesk_source.stg_zendesk__time_zone_tmp", "fqn": ["zendesk_source", "tmp", "stg_zendesk__time_zone_tmp"], "alias": "stg_zendesk__time_zone_tmp", "checksum": {"name": "sha256", "checksum": "b2a214af27259564121fd0c977a7d7388bd644f797f972ed48575a4979819ec2"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "view", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, 
"packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"materialized": "view", "schema": "zendesk_{{ var('directed_schema','dev') }}", "enabled": true}, "created_at": 1724705298.1029499, "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__time_zone_tmp\"", "raw_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n{{ config(enabled=var('using_schedules', True)) }}\n\nselect {{ dbt_utils.star(source('zendesk', 'time_zone')) }} \nfrom {{ source('zendesk', 'time_zone') }} as time_zone_table", "language": "sql", "refs": [], "sources": [["zendesk", "time_zone"], ["zendesk", "time_zone"]], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.star"], "nodes": ["source.zendesk_source.zendesk.time_zone"]}, "compiled_path": "target/compiled/zendesk_source/models/tmp/stg_zendesk__time_zone_tmp.sql", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nselect \"time_zone\",\n \"_fivetran_synced\",\n \"standard_offset\" \nfrom \"postgres\".\"zendesk_integration_tests_55\".\"time_zone_data\" as time_zone_table", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}, "test.zendesk.unique_zendesk__ticket_enriched_ticket_id.7c3c6ca9ef": {"test_metadata": {"name": "unique", "kwargs": {"column_name": "ticket_id", "model": "{{ get_where_subquery(ref('zendesk__ticket_enriched')) }}"}, "namespace": null}, "database": "postgres", "schema": "zendesk_integration_tests_55_dbt_test__audit", "name": "unique_zendesk__ticket_enriched_ticket_id", "resource_type": "test", "package_name": "zendesk", "path": "unique_zendesk__ticket_enriched_ticket_id.sql", "original_file_path": "models/zendesk.yml", "unique_id": "test.zendesk.unique_zendesk__ticket_enriched_ticket_id.7c3c6ca9ef", "fqn": ["zendesk", "unique_zendesk__ticket_enriched_ticket_id"], "alias": "unique_zendesk__ticket_enriched_ticket_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1724705298.207814, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "zendesk__ticket_enriched", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk.zendesk__ticket_enriched"]}, "compiled_path": "target/compiled/zendesk/models/zendesk.yml/unique_zendesk__ticket_enriched_ticket_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n 
ticket_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"zendesk__ticket_enriched\"\nwhere ticket_id is not null\ngroup by ticket_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_id", "file_key_name": "models.zendesk__ticket_enriched", "attached_node": "model.zendesk.zendesk__ticket_enriched"}, "test.zendesk.not_null_zendesk__ticket_enriched_ticket_id.e3efc5bf0a": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "ticket_id", "model": "{{ get_where_subquery(ref('zendesk__ticket_enriched')) }}"}, "namespace": null}, "database": "postgres", "schema": "zendesk_integration_tests_55_dbt_test__audit", "name": "not_null_zendesk__ticket_enriched_ticket_id", "resource_type": "test", "package_name": "zendesk", "path": "not_null_zendesk__ticket_enriched_ticket_id.sql", "original_file_path": "models/zendesk.yml", "unique_id": "test.zendesk.not_null_zendesk__ticket_enriched_ticket_id.e3efc5bf0a", "fqn": ["zendesk", "not_null_zendesk__ticket_enriched_ticket_id"], "alias": "not_null_zendesk__ticket_enriched_ticket_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1724705298.208833, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "zendesk__ticket_enriched", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk.zendesk__ticket_enriched"]}, "compiled_path": "target/compiled/zendesk/models/zendesk.yml/not_null_zendesk__ticket_enriched_ticket_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_id\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"zendesk__ticket_enriched\"\nwhere ticket_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_id", "file_key_name": "models.zendesk__ticket_enriched", "attached_node": "model.zendesk.zendesk__ticket_enriched"}, "test.zendesk.unique_zendesk__sla_policies_sla_event_id.5daff4d2bd": {"test_metadata": {"name": "unique", "kwargs": {"column_name": "sla_event_id", "model": "{{ get_where_subquery(ref('zendesk__sla_policies')) }}"}, "namespace": null}, "database": "postgres", "schema": "zendesk_integration_tests_55_dbt_test__audit", "name": "unique_zendesk__sla_policies_sla_event_id", "resource_type": "test", "package_name": "zendesk", "path": "unique_zendesk__sla_policies_sla_event_id.sql", "original_file_path": "models/zendesk.yml", "unique_id": "test.zendesk.unique_zendesk__sla_policies_sla_event_id.5daff4d2bd", "fqn": ["zendesk", "unique_zendesk__sla_policies_sla_event_id"], "alias": "unique_zendesk__sla_policies_sla_event_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": 
null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1724705298.2096488, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "zendesk__sla_policies", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk.zendesk__sla_policies"]}, "compiled_path": "target/compiled/zendesk/models/zendesk.yml/unique_zendesk__sla_policies_sla_event_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n sla_event_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"zendesk__sla_policies\"\nwhere sla_event_id is not null\ngroup by sla_event_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "sla_event_id", "file_key_name": "models.zendesk__sla_policies", "attached_node": "model.zendesk.zendesk__sla_policies"}, "test.zendesk.unique_zendesk__ticket_metrics_ticket_id.f3dc8eba5c": {"test_metadata": {"name": "unique", "kwargs": {"column_name": "ticket_id", "model": "{{ get_where_subquery(ref('zendesk__ticket_metrics')) }}"}, "namespace": null}, "database": "postgres", "schema": "zendesk_integration_tests_55_dbt_test__audit", "name": "unique_zendesk__ticket_metrics_ticket_id", "resource_type": "test", "package_name": "zendesk", "path": "unique_zendesk__ticket_metrics_ticket_id.sql", "original_file_path": "models/zendesk.yml", "unique_id": "test.zendesk.unique_zendesk__ticket_metrics_ticket_id.f3dc8eba5c", "fqn": ["zendesk", "unique_zendesk__ticket_metrics_ticket_id"], "alias": "unique_zendesk__ticket_metrics_ticket_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1724705298.2104409, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "zendesk__ticket_metrics", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk.zendesk__ticket_metrics"]}, "compiled_path": "target/compiled/zendesk/models/zendesk.yml/unique_zendesk__ticket_metrics_ticket_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n ticket_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"zendesk__ticket_metrics\"\nwhere ticket_id is not null\ngroup by ticket_id\nhaving 
count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_id", "file_key_name": "models.zendesk__ticket_metrics", "attached_node": "model.zendesk.zendesk__ticket_metrics"}, "test.zendesk.not_null_zendesk__ticket_metrics_ticket_id.3466b76bbd": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "ticket_id", "model": "{{ get_where_subquery(ref('zendesk__ticket_metrics')) }}"}, "namespace": null}, "database": "postgres", "schema": "zendesk_integration_tests_55_dbt_test__audit", "name": "not_null_zendesk__ticket_metrics_ticket_id", "resource_type": "test", "package_name": "zendesk", "path": "not_null_zendesk__ticket_metrics_ticket_id.sql", "original_file_path": "models/zendesk.yml", "unique_id": "test.zendesk.not_null_zendesk__ticket_metrics_ticket_id.3466b76bbd", "fqn": ["zendesk", "not_null_zendesk__ticket_metrics_ticket_id"], "alias": "not_null_zendesk__ticket_metrics_ticket_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1724705298.211207, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "zendesk__ticket_metrics", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk.zendesk__ticket_metrics"]}, "compiled_path": "target/compiled/zendesk/models/zendesk.yml/not_null_zendesk__ticket_metrics_ticket_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_id\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"zendesk__ticket_metrics\"\nwhere ticket_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_id", "file_key_name": "models.zendesk__ticket_metrics", "attached_node": "model.zendesk.zendesk__ticket_metrics"}, "test.zendesk_source.unique_stg_zendesk__ticket_ticket_id.4be7124521": {"test_metadata": {"name": "unique", "kwargs": {"column_name": "ticket_id", "model": "{{ get_where_subquery(ref('stg_zendesk__ticket')) }}"}, "namespace": null}, "database": "postgres", "schema": "zendesk_integration_tests_55_dbt_test__audit", "name": "unique_stg_zendesk__ticket_ticket_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__ticket_ticket_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__ticket_ticket_id.4be7124521", "fqn": ["zendesk_source", "unique_stg_zendesk__ticket_ticket_id"], "alias": "unique_stg_zendesk__ticket_ticket_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, 
"fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1724705298.234024, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__ticket_ticket_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n ticket_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket\"\nwhere ticket_id is not null\ngroup by ticket_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_id", "file_key_name": "models.stg_zendesk__ticket", "attached_node": "model.zendesk_source.stg_zendesk__ticket"}, "test.zendesk_source.not_null_stg_zendesk__ticket_ticket_id.a8229e6981": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "ticket_id", "model": "{{ get_where_subquery(ref('stg_zendesk__ticket')) }}"}, "namespace": null}, "database": "postgres", "schema": "zendesk_integration_tests_55_dbt_test__audit", "name": "not_null_stg_zendesk__ticket_ticket_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__ticket_ticket_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__ticket_ticket_id.a8229e6981", "fqn": ["zendesk_source", "not_null_stg_zendesk__ticket_ticket_id"], "alias": "not_null_stg_zendesk__ticket_ticket_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1724705298.2350779, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__ticket", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__ticket"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__ticket_ticket_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_id\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket\"\nwhere ticket_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_id", "file_key_name": "models.stg_zendesk__ticket", "attached_node": "model.zendesk_source.stg_zendesk__ticket"}, 
"test.zendesk_source.unique_stg_zendesk__brand_brand_id.fdf8e23c9e": {"test_metadata": {"name": "unique", "kwargs": {"column_name": "brand_id", "model": "{{ get_where_subquery(ref('stg_zendesk__brand')) }}"}, "namespace": null}, "database": "postgres", "schema": "zendesk_integration_tests_55_dbt_test__audit", "name": "unique_stg_zendesk__brand_brand_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__brand_brand_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__brand_brand_id.fdf8e23c9e", "fqn": ["zendesk_source", "unique_stg_zendesk__brand_brand_id"], "alias": "unique_stg_zendesk__brand_brand_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1724705298.235883, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__brand", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__brand"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__brand_brand_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n brand_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__brand\"\nwhere brand_id is not null\ngroup by brand_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "brand_id", "file_key_name": "models.stg_zendesk__brand", "attached_node": "model.zendesk_source.stg_zendesk__brand"}, "test.zendesk_source.not_null_stg_zendesk__brand_brand_id.a2419e1741": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "brand_id", "model": "{{ get_where_subquery(ref('stg_zendesk__brand')) }}"}, "namespace": null}, "database": "postgres", "schema": "zendesk_integration_tests_55_dbt_test__audit", "name": "not_null_stg_zendesk__brand_brand_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__brand_brand_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__brand_brand_id.a2419e1741", "fqn": ["zendesk_source", "not_null_stg_zendesk__brand_brand_id"], "alias": "not_null_stg_zendesk__brand_brand_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": 
false, "unrendered_config": {}, "created_at": 1724705298.236685, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__brand", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__brand"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__brand_brand_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect brand_id\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__brand\"\nwhere brand_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "brand_id", "file_key_name": "models.stg_zendesk__brand", "attached_node": "model.zendesk_source.stg_zendesk__brand"}, "test.zendesk_source.not_null_stg_zendesk__domain_name_organization_id.a2b5ff8fd3": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "organization_id", "model": "{{ get_where_subquery(ref('stg_zendesk__domain_name')) }}"}, "namespace": null}, "database": "postgres", "schema": "zendesk_integration_tests_55_dbt_test__audit", "name": "not_null_stg_zendesk__domain_name_organization_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__domain_name_organization_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__domain_name_organization_id.a2b5ff8fd3", "fqn": ["zendesk_source", "not_null_stg_zendesk__domain_name_organization_id"], "alias": "not_null_stg_zendesk__domain_name_organization_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1724705298.2374542, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__domain_name", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__domain_name"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__domain_name_organization_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect organization_id\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__domain_name\"\nwhere organization_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "organization_id", "file_key_name": "models.stg_zendesk__domain_name", "attached_node": "model.zendesk_source.stg_zendesk__domain_name"}, "test.zendesk_source.unique_stg_zendesk__group_group_id.f0658dabcd": {"test_metadata": {"name": "unique", "kwargs": {"column_name": "group_id", "model": "{{ 
get_where_subquery(ref('stg_zendesk__group')) }}"}, "namespace": null}, "database": "postgres", "schema": "zendesk_integration_tests_55_dbt_test__audit", "name": "unique_stg_zendesk__group_group_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__group_group_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__group_group_id.f0658dabcd", "fqn": ["zendesk_source", "unique_stg_zendesk__group_group_id"], "alias": "unique_stg_zendesk__group_group_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1724705298.238424, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__group", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__group"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__group_group_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n group_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__group\"\nwhere group_id is not null\ngroup by group_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "group_id", "file_key_name": "models.stg_zendesk__group", "attached_node": "model.zendesk_source.stg_zendesk__group"}, "test.zendesk_source.not_null_stg_zendesk__group_group_id.7659ed83ec": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "group_id", "model": "{{ get_where_subquery(ref('stg_zendesk__group')) }}"}, "namespace": null}, "database": "postgres", "schema": "zendesk_integration_tests_55_dbt_test__audit", "name": "not_null_stg_zendesk__group_group_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__group_group_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__group_group_id.7659ed83ec", "fqn": ["zendesk_source", "not_null_stg_zendesk__group_group_id"], "alias": "not_null_stg_zendesk__group_group_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1724705298.239233, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": 
"sql", "refs": [{"name": "stg_zendesk__group", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__group"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__group_group_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect group_id\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__group\"\nwhere group_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "group_id", "file_key_name": "models.stg_zendesk__group", "attached_node": "model.zendesk_source.stg_zendesk__group"}, "test.zendesk_source.unique_stg_zendesk__organization_organization_id.152be1ab31": {"test_metadata": {"name": "unique", "kwargs": {"column_name": "organization_id", "model": "{{ get_where_subquery(ref('stg_zendesk__organization')) }}"}, "namespace": null}, "database": "postgres", "schema": "zendesk_integration_tests_55_dbt_test__audit", "name": "unique_stg_zendesk__organization_organization_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__organization_organization_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__organization_organization_id.152be1ab31", "fqn": ["zendesk_source", "unique_stg_zendesk__organization_organization_id"], "alias": "unique_stg_zendesk__organization_organization_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1724705298.240025, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__organization", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__organization"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__organization_organization_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n organization_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__organization\"\nwhere organization_id is not null\ngroup by organization_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "organization_id", "file_key_name": "models.stg_zendesk__organization", "attached_node": "model.zendesk_source.stg_zendesk__organization"}, "test.zendesk_source.not_null_stg_zendesk__organization_organization_id.de7b98c06a": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "organization_id", "model": "{{ get_where_subquery(ref('stg_zendesk__organization')) }}"}, "namespace": null}, "database": 
"postgres", "schema": "zendesk_integration_tests_55_dbt_test__audit", "name": "not_null_stg_zendesk__organization_organization_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__organization_organization_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__organization_organization_id.de7b98c06a", "fqn": ["zendesk_source", "not_null_stg_zendesk__organization_organization_id"], "alias": "not_null_stg_zendesk__organization_organization_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1724705298.241316, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__organization", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__organization"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__organization_organization_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect organization_id\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__organization\"\nwhere organization_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "organization_id", "file_key_name": "models.stg_zendesk__organization", "attached_node": "model.zendesk_source.stg_zendesk__organization"}, "test.zendesk_source.unique_stg_zendesk__ticket_comment_ticket_comment_id.ba353330cd": {"test_metadata": {"name": "unique", "kwargs": {"column_name": "ticket_comment_id", "model": "{{ get_where_subquery(ref('stg_zendesk__ticket_comment')) }}"}, "namespace": null}, "database": "postgres", "schema": "zendesk_integration_tests_55_dbt_test__audit", "name": "unique_stg_zendesk__ticket_comment_ticket_comment_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__ticket_comment_ticket_comment_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__ticket_comment_ticket_comment_id.ba353330cd", "fqn": ["zendesk_source", "unique_stg_zendesk__ticket_comment_ticket_comment_id"], "alias": "unique_stg_zendesk__ticket_comment_ticket_comment_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 
1724705298.242156, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_comment", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_comment"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__ticket_comment_ticket_comment_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n ticket_comment_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_comment\"\nwhere ticket_comment_id is not null\ngroup by ticket_comment_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_comment_id", "file_key_name": "models.stg_zendesk__ticket_comment", "attached_node": "model.zendesk_source.stg_zendesk__ticket_comment"}, "test.zendesk_source.not_null_stg_zendesk__ticket_comment_ticket_comment_id.b821f4a606": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "ticket_comment_id", "model": "{{ get_where_subquery(ref('stg_zendesk__ticket_comment')) }}"}, "namespace": null}, "database": "postgres", "schema": "zendesk_integration_tests_55_dbt_test__audit", "name": "not_null_stg_zendesk__ticket_comment_ticket_comment_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__ticket_comment_ticket_comment_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__ticket_comment_ticket_comment_id.b821f4a606", "fqn": ["zendesk_source", "not_null_stg_zendesk__ticket_comment_ticket_comment_id"], "alias": "not_null_stg_zendesk__ticket_comment_ticket_comment_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1724705298.243123, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_comment", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_comment"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__ticket_comment_ticket_comment_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_comment_id\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_comment\"\nwhere ticket_comment_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_comment_id", "file_key_name": "models.stg_zendesk__ticket_comment", "attached_node": "model.zendesk_source.stg_zendesk__ticket_comment"}, 
"test.zendesk_source.unique_stg_zendesk__user_user_id.3d3e346b11": {"test_metadata": {"name": "unique", "kwargs": {"column_name": "user_id", "model": "{{ get_where_subquery(ref('stg_zendesk__user')) }}"}, "namespace": null}, "database": "postgres", "schema": "zendesk_integration_tests_55_dbt_test__audit", "name": "unique_stg_zendesk__user_user_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__user_user_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__user_user_id.3d3e346b11", "fqn": ["zendesk_source", "unique_stg_zendesk__user_user_id"], "alias": "unique_stg_zendesk__user_user_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1724705298.243924, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__user", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__user"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__user_user_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n user_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user\"\nwhere user_id is not null\ngroup by user_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "user_id", "file_key_name": "models.stg_zendesk__user", "attached_node": "model.zendesk_source.stg_zendesk__user"}, "test.zendesk_source.not_null_stg_zendesk__user_user_id.102d572926": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "user_id", "model": "{{ get_where_subquery(ref('stg_zendesk__user')) }}"}, "namespace": null}, "database": "postgres", "schema": "zendesk_integration_tests_55_dbt_test__audit", "name": "not_null_stg_zendesk__user_user_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__user_user_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__user_user_id.102d572926", "fqn": ["zendesk_source", "not_null_stg_zendesk__user_user_id"], "alias": "not_null_stg_zendesk__user_user_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, 
"created_at": 1724705298.2447062, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__user", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__user"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__user_user_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect user_id\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user\"\nwhere user_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "user_id", "file_key_name": "models.stg_zendesk__user", "attached_node": "model.zendesk_source.stg_zendesk__user"}, "test.zendesk_source.not_null_stg_zendesk__ticket_form_history_ticket_form_id.1afe781a17": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "ticket_form_id", "model": "{{ get_where_subquery(ref('stg_zendesk__ticket_form_history')) }}"}, "namespace": null}, "database": "postgres", "schema": "zendesk_integration_tests_55_dbt_test__audit", "name": "not_null_stg_zendesk__ticket_form_history_ticket_form_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__ticket_form_history_ticket_form_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__ticket_form_history_ticket_form_id.1afe781a17", "fqn": ["zendesk_source", "not_null_stg_zendesk__ticket_form_history_ticket_form_id"], "alias": "not_null_stg_zendesk__ticket_form_history_ticket_form_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1724705298.245492, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_form_history", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__ticket_form_history"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__ticket_form_history_ticket_form_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_form_id\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_form_history\"\nwhere ticket_form_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "ticket_form_id", "file_key_name": "models.stg_zendesk__ticket_form_history", "attached_node": "model.zendesk_source.stg_zendesk__ticket_form_history"}, "test.zendesk_source.dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year.88227aef3d": {"test_metadata": {"name": 
"unique_combination_of_columns", "kwargs": {"combination_of_columns": ["time_zone", "year"], "model": "{{ get_where_subquery(ref('stg_zendesk__daylight_time')) }}"}, "namespace": "dbt_utils"}, "database": "postgres", "schema": "zendesk_integration_tests_55_dbt_test__audit", "name": "dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year", "resource_type": "test", "package_name": "zendesk_source", "path": "dbt_utils_unique_combination_o_54ab42208165c9c38d3147cec984eab9.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year.88227aef3d", "fqn": ["zendesk_source", "dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year"], "alias": "dbt_utils_unique_combination_o_54ab42208165c9c38d3147cec984eab9", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": "dbt_utils_unique_combination_o_54ab42208165c9c38d3147cec984eab9", "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"alias": "dbt_utils_unique_combination_o_54ab42208165c9c38d3147cec984eab9"}, "created_at": 1724705298.24647, "relation_name": null, "raw_code": "{{ dbt_utils.test_unique_combination_of_columns(**_dbt_generic_test_kwargs) }}{{ config(alias=\"dbt_utils_unique_combination_o_54ab42208165c9c38d3147cec984eab9\") }}", "language": "sql", "refs": [{"name": "stg_zendesk__daylight_time", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt_utils.test_unique_combination_of_columns", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__daylight_time"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/dbt_utils_unique_combination_o_54ab42208165c9c38d3147cec984eab9.sql", "compiled": true, "compiled_code": "\n\n\n\n\n\nwith validation_errors as (\n\n select\n time_zone, year\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__daylight_time\"\n group by time_zone, year\n having count(*) > 1\n\n)\n\nselect *\nfrom validation_errors\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": null, "file_key_name": "models.stg_zendesk__daylight_time", "attached_node": "model.zendesk_source.stg_zendesk__daylight_time"}, "test.zendesk_source.unique_stg_zendesk__time_zone_time_zone.67995adbaf": {"test_metadata": {"name": "unique", "kwargs": {"column_name": "time_zone", "model": "{{ get_where_subquery(ref('stg_zendesk__time_zone')) }}"}, "namespace": null}, "database": "postgres", "schema": "zendesk_integration_tests_55_dbt_test__audit", "name": "unique_stg_zendesk__time_zone_time_zone", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__time_zone_time_zone.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__time_zone_time_zone.67995adbaf", "fqn": ["zendesk_source", "unique_stg_zendesk__time_zone_time_zone"], "alias": 
"unique_stg_zendesk__time_zone_time_zone", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1724705298.253264, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__time_zone", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__time_zone"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__time_zone_time_zone.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n time_zone as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__time_zone\"\nwhere time_zone is not null\ngroup by time_zone\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "time_zone", "file_key_name": "models.stg_zendesk__time_zone", "attached_node": "model.zendesk_source.stg_zendesk__time_zone"}, "test.zendesk_source.not_null_stg_zendesk__time_zone_time_zone.b25b3452b1": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "time_zone", "model": "{{ get_where_subquery(ref('stg_zendesk__time_zone')) }}"}, "namespace": null}, "database": "postgres", "schema": "zendesk_integration_tests_55_dbt_test__audit", "name": "not_null_stg_zendesk__time_zone_time_zone", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__time_zone_time_zone.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__time_zone_time_zone.b25b3452b1", "fqn": ["zendesk_source", "not_null_stg_zendesk__time_zone_time_zone"], "alias": "not_null_stg_zendesk__time_zone_time_zone", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1724705298.2540832, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__time_zone", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__time_zone"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__time_zone_time_zone.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect 
time_zone\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__time_zone\"\nwhere time_zone is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "time_zone", "file_key_name": "models.stg_zendesk__time_zone", "attached_node": "model.zendesk_source.stg_zendesk__time_zone"}, "test.zendesk_source.unique_stg_zendesk__schedule_holiday_holiday_id.0341d5635a": {"test_metadata": {"name": "unique", "kwargs": {"column_name": "holiday_id", "model": "{{ get_where_subquery(ref('stg_zendesk__schedule_holiday')) }}"}, "namespace": null}, "database": "postgres", "schema": "zendesk_integration_tests_55_dbt_test__audit", "name": "unique_stg_zendesk__schedule_holiday_holiday_id", "resource_type": "test", "package_name": "zendesk_source", "path": "unique_stg_zendesk__schedule_holiday_holiday_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.unique_stg_zendesk__schedule_holiday_holiday_id.0341d5635a", "fqn": ["zendesk_source", "unique_stg_zendesk__schedule_holiday_holiday_id"], "alias": "unique_stg_zendesk__schedule_holiday_holiday_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1724705298.2548559, "relation_name": null, "raw_code": "{{ test_unique(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__schedule_holiday", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_unique", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__schedule_holiday"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/unique_stg_zendesk__schedule_holiday_holiday_id.sql", "compiled": true, "compiled_code": "\n \n \n\nselect\n holiday_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__schedule_holiday\"\nwhere holiday_id is not null\ngroup by holiday_id\nhaving count(*) > 1\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "holiday_id", "file_key_name": "models.stg_zendesk__schedule_holiday", "attached_node": "model.zendesk_source.stg_zendesk__schedule_holiday"}, "test.zendesk_source.not_null_stg_zendesk__schedule_holiday_holiday_id.52eb08f782": {"test_metadata": {"name": "not_null", "kwargs": {"column_name": "holiday_id", "model": "{{ get_where_subquery(ref('stg_zendesk__schedule_holiday')) }}"}, "namespace": null}, "database": "postgres", "schema": "zendesk_integration_tests_55_dbt_test__audit", "name": "not_null_stg_zendesk__schedule_holiday_holiday_id", "resource_type": "test", "package_name": "zendesk_source", "path": "not_null_stg_zendesk__schedule_holiday_holiday_id.sql", "original_file_path": "models/stg_zendesk.yml", "unique_id": "test.zendesk_source.not_null_stg_zendesk__schedule_holiday_holiday_id.52eb08f782", "fqn": ["zendesk_source", 
"not_null_stg_zendesk__schedule_holiday_holiday_id"], "alias": "not_null_stg_zendesk__schedule_holiday_holiday_id", "checksum": {"name": "none", "checksum": ""}, "config": {"enabled": true, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {}, "created_at": 1724705298.255708, "relation_name": null, "raw_code": "{{ test_not_null(**_dbt_generic_test_kwargs) }}", "language": "sql", "refs": [{"name": "stg_zendesk__schedule_holiday", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": ["macro.dbt.test_not_null", "macro.dbt.get_where_subquery"], "nodes": ["model.zendesk_source.stg_zendesk__schedule_holiday"]}, "compiled_path": "target/compiled/zendesk_source/models/stg_zendesk.yml/not_null_stg_zendesk__schedule_holiday_holiday_id.sql", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect holiday_id\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__schedule_holiday\"\nwhere holiday_id is null\n\n\n", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "column_name": "holiday_id", "file_key_name": "models.stg_zendesk__schedule_holiday", "attached_node": "model.zendesk_source.stg_zendesk__schedule_holiday"}, "model.zendesk.int_zendesk__calendar_spine": {"database": "postgres", "schema": "zendesk_integration_tests_55_zendesk_dev", "name": "int_zendesk__calendar_spine", "resource_type": "model", "package_name": "zendesk", "path": "utils/int_zendesk__calendar_spine.sql", "original_file_path": "models/utils/int_zendesk__calendar_spine.sql", "unique_id": "model.zendesk.int_zendesk__calendar_spine", "fqn": ["zendesk", "utils", "int_zendesk__calendar_spine"], "alias": "int_zendesk__calendar_spine", "checksum": {"name": "sha256", "checksum": "2131dbec96be6f5fee780a243b7f48940504a36a33c6fe1b66b24be1a8396928"}, "config": {"enabled": true, "alias": null, "schema": "zendesk_dev", "database": null, "tags": [], "meta": {}, "group": null, "materialized": "ephemeral", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "access": "protected"}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"schema": "zendesk_{{ var('directed_schema','dev') }}", "materialized": "ephemeral"}, "created_at": 1724705308.9281669, "relation_name": null, "raw_code": "-- depends_on: {{ source('zendesk', 'ticket') }}\n\nwith spine as (\n\n {% if execute %}\n {% set current_ts = dbt.current_timestamp_backcompat() %}\n {% set first_date_query %}\n select min( created_at ) as min_date from {{ source('zendesk', 'ticket') }}\n -- by default take all the data \n where cast(created_at as date) >= {{ dbt.dateadd('year', - 
var('ticket_field_history_timeframe_years', 50), current_ts ) }}\n {% endset %}\n\n {% set first_date = run_query(first_date_query).columns[0][0]|string %}\n \n {% if target.type == 'postgres' %}\n {% set first_date_adjust = \"cast('\" ~ first_date[0:10] ~ \"' as date)\" %}\n\n {% else %}\n {% set first_date_adjust = \"'\" ~ first_date[0:10] ~ \"'\" %}\n\n {% endif %}\n\n {% else %} {% set first_date_adjust = \"2016-01-01\" %}\n {% endif %}\n\n\n{{\n dbt_utils.date_spine(\n datepart = \"day\", \n start_date = first_date_adjust,\n end_date = dbt.dateadd(\"week\", 1, \"current_date\")\n ) \n}}\n\n), recast as (\n\n select cast(date_day as date) as date_day\n from spine\n\n)\n\nselect *\nfrom recast", "language": "sql", "refs": [], "sources": [["zendesk", "ticket"]], "metrics": [], "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt_utils.date_spine", "macro.dbt.current_timestamp_backcompat", "macro.dbt.run_query"], "nodes": ["source.zendesk_source.zendesk.ticket"]}, "compiled_path": "target/compiled/zendesk/models/utils/int_zendesk__calendar_spine.sql", "compiled": true, "compiled_code": "-- depends_on: \"postgres\".\"zendesk_integration_tests_55\".\"ticket_data\"\n\nwith spine as (\n\n \n \n \n\n \n \n \n \n\n \n\n \n\n\n\n\n\n\n\n\nwith rawdata as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n + \n \n p8.generated_number * power(2, 8)\n + \n \n p9.generated_number * power(2, 9)\n + \n \n p10.generated_number * power(2, 10)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n cross join \n \n p as p8\n cross join \n \n p as p9\n cross join \n \n p as p10\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 1663\n order by generated_number\n\n\n\n),\n\nall_periods as (\n\n select (\n \n\n cast('2020-02-13' as date) + ((interval '1 day') * (row_number() over (order by 1) - 1))\n\n\n ) as date_day\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_day <= \n\n current_date + ((interval '1 week') * (1))\n\n\n\n)\n\nselect * from filtered\n\n\n\n), recast as (\n\n select cast(date_day as date) as date_day\n from spine\n\n)\n\nselect *\nfrom recast", "extra_ctes_injected": true, "extra_ctes": [], "contract": {"enforced": false, "alias_types": true, "checksum": null}, "access": "protected", "constraints": [], "version": null, "latest_version": null, "deprecation_date": null}}, "sources": {"source.zendesk_source.zendesk.ticket": {"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "ticket", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.ticket", "fqn": ["zendesk_source", "zendesk", "ticket"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "ticket_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": 
"_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "Tickets are the means through which your end users (customers) communicate with agents in Zendesk Support. Tickets can originate from a number of channels, including email, Help Center, chat, phone call, Twitter, Facebook, or the API.\n", "columns": {"id": {"name": "id", "description": "Automatically assigned when the ticket is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "url": {"name": "url", "description": "The API url of this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "assignee_id": {"name": "assignee_id", "description": "The agent currently assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "brand_id": {"name": "brand_id", "description": "Enterprise only. The id of the brand this ticket is associated with", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "When this record was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "type": {"name": "type", "description": "The type of this ticket, possible values are problem, incident, question or task", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "subject": {"name": "subject", "description": "The value of the subject field for this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "description": {"name": "description", "description": "Read-only first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "priority": {"name": "priority", "description": "The urgency with which the ticket should be addressed, possible values are urgent, high, normal and low", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "status": {"name": "status", "description": "The state of the ticket, possible values are new, open, pending, hold, solved and closed", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "recipient": {"name": "recipient", "description": "The original recipient e-mail address of the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "requester_id": {"name": "requester_id", "description": "The user who requested this ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "submitter_id": {"name": "submitter_id", "description": "The user who submitted the ticket. The submitter always becomes the author of the first comment on the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_id": {"name": "organization_id", "description": "The organization of the requester", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_id": {"name": "group_id", "description": "The group this ticket is assigned to", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "due_at": {"name": "due_at", "description": "If this is a ticket of type \"task\" it has a due date. Due date format uses ISO 8601 format.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_form_id": {"name": "ticket_form_id", "description": "Enterprise only. 
The id of the ticket form to render for the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "is_public": {"name": "is_public", "description": "Is true if any comments are public, false otherwise", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "When this record last got updated", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "via_channel": {"name": "via_channel", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "via_source_from_id": {"name": "via_source_from_id", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "via_source_from_title": {"name": "via_source_from_title", "description": "The channel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "via_source_rel": {"name": "via_source_rel", "description": "The rel the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "via_source_to_address": {"name": "via_source_to_address", "description": "The address of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "via_source_to_name": {"name": "via_source_to_name", "description": "The name of the source the ticket was created from", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"ticket_data\"", "created_at": 1724705298.307675}, "source.zendesk_source.zendesk.brand": {"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "brand", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.brand", "fqn": ["zendesk_source", "zendesk", "brand"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "brand_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "Brands are your customer-facing identities. 
They might represent multiple products or services, or they might literally be multiple brands owned and represented by your company.\n", "columns": {"id": {"name": "id", "description": "The ID automatically assigned when the brand is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "brand_url": {"name": "brand_url", "description": "The url of the brand", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The name of the brand", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "subdomain": {"name": "subdomain", "description": "The subdomain of the brand", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "active": {"name": "active", "description": "If the brand is set as active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"brand_data\"", "created_at": 1724705298.3078332}, "source.zendesk_source.zendesk.domain_name": {"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "domain_name", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.domain_name", "fqn": ["zendesk_source", "zendesk", "domain_name"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "domain_name_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "Domain names associated with an organization. 
An organization may have multiple domain names.", "columns": {"organization_id": {"name": "organization_id", "description": "Reference to the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "domain_name": {"name": "domain_name", "description": "The name of the domain associated with the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "index": {"name": "index", "description": "Index number of the domain name associated with the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"domain_name_data\"", "created_at": 1724705298.3079429}, "source.zendesk_source.zendesk.group": {"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "group", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.group", "fqn": ["zendesk_source", "zendesk", "group"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "group_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "When support requests arrive in Zendesk Support, they can be assigned to a Group. Groups serve as the core element of ticket workflow; support agents are organized into Groups and tickets can be assigned to a Group only, or to an assigned agent within a Group. A ticket can never be assigned to an agent without also being assigned to a Group.\n", "columns": {"id": {"name": "id", "description": "Automatically assigned when creating groups", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The name of the group", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"group_data\"", "created_at": 1724705298.308075}, "source.zendesk_source.zendesk.organization_tag": {"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "organization_tag", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.organization_tag", "fqn": ["zendesk_source", "zendesk", "organization_tag"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "organization_tag_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "The tags associated with an organization. 
An organization may have multiple tags.", "columns": {"organization_id": {"name": "organization_id", "description": "Reference to the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tag": {"name": "tag", "description": "Tag associated with the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"organization_tag_data\"", "created_at": 1724705298.308238}, "source.zendesk_source.zendesk.organization": {"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "organization", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.organization", "fqn": ["zendesk_source", "zendesk", "organization"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "organization_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "Just as agents can be segmented into groups in Zendesk Support, your customers (end-users) can be segmented into organizations. You can manually assign customers to an organization or automatically assign them to an organization by their email address domain. Organizations can be used in business rules to route tickets to groups of agents or to send email notifications.\n", "columns": {"id": {"name": "id", "description": "Automatically assigned when the organization is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "A unique name for the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "details": {"name": "details", "description": "Any details about the organization, such as the address", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "url": {"name": "url", "description": "The API url of this organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "external_id": {"name": "external_id", "description": "A unique external id to associate organizations to an external record", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the organization was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "The time of the last update of the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "domain_names": {"name": "domain_names", "description": "An array of domain names associated with this organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "notes": {"name": "notes", "description": "Any notes you have about the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "group_id": {"name": "group_id", "description": "New tickets from users in this organization are automatically put in this group", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "shared_tickets": {"name": "shared_tickets",
"description": "End users in this organization are able to see each other's tickets", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "shared_comments": {"name": "shared_comments", "description": "End users in this organization are able to see each other's comments on tickets", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tags": {"name": "tags", "description": "The tags of the organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_fields": {"name": "organization_fields", "description": "Custom fields for this organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"organization_data\"", "created_at": 1724705298.308347}, "source.zendesk_source.zendesk.ticket_comment": {"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "ticket_comment", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.ticket_comment", "fqn": ["zendesk_source", "zendesk", "ticket_comment"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "ticket_comment_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "Ticket comments represent the conversation between requesters, collaborators, and agents. 
Comments can be public or private.", "columns": {"id": {"name": "id", "description": "Automatically assigned when the comment is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "body": {"name": "body", "description": "The comment string", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created": {"name": "created", "description": "The time the comment was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "public": {"name": "public", "description": "Boolean field indicating if the comment is public (true), or if it is an internal note (false)", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_id": {"name": "ticket_id", "description": "The ticket id associated with this comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "user_id": {"name": "user_id", "description": "The id of the comment author", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "facebook_comment": {"name": "facebook_comment", "description": "Boolean field indicating if the comment is a facebook comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tweet": {"name": "tweet", "description": "Boolean field indicating if the comment is a twitter tweet", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "voice_comment": {"name": "voice_comment", "description": "Boolean field indicating if the comment is a voice comment", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"ticket_comment_data\"", "created_at": 1724705298.308513}, "source.zendesk_source.zendesk.user_tag": {"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "user_tag", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.user_tag", "fqn": ["zendesk_source", "zendesk", "user_tag"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "user_tag_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "Table containing all tags associated with a user. 
Only present if your account has user tagging enabled.", "columns": {"user_id": {"name": "user_id", "description": "Reference to the user", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tag": {"name": "tag", "description": "Tag associated with the user", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"user_tag_data\"", "created_at": 1724705298.3086069}, "source.zendesk_source.zendesk.user": {"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "user", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.user", "fqn": ["zendesk_source", "zendesk", "user"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "user_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "Zendesk Support has three types of users, end-users (your customers), agents, and administrators.", "columns": {"id": {"name": "id", "description": "Automatically assigned when the user is created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "email": {"name": "email", "description": "The user's primary email address. *Writeable on create only. On update, a secondary email is added. See Email Address", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The user's name", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "active": {"name": "active", "description": "false if the user has been deleted", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the user was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "organization_id": {"name": "organization_id", "description": "The id of the user's organization. If the user has more than one organization memberships, the id of the user's default organization", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "phone": {"name": "phone", "description": "User's phone number.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "role": {"name": "role", "description": "The user's role. Possible values are \"end-user\", \"agent\", or \"admin\"", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "time_zone": {"name": "time_zone", "description": "The user's time zone. See Time Zone", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "ticket_restriction": {"name": "ticket_restriction", "description": "Specifies which tickets the user has access to. 
Possible values are organization, groups, assigned, requested and null", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "suspended": {"name": "suspended", "description": "Boolean representing whether the user has been suspended, meaning that they can no longer sign in and any new support requests you receive from them are sent to the suspended tickets queue.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "external_id": {"name": "external_id", "description": "A unique identifier from another system. The API treats the id as case insensitive.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "The time the user was last updated.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "last_login_at": {"name": "last_login_at", "description": "Last time the user signed in to Zendesk Support or made an API request using an API token or basic authentication.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "locale": {"name": "locale", "description": "The user's locale. A BCP-47 compliant tag for the locale. If both \"locale\" and \"locale_id\" are present on create or update, \"locale_id\" is ignored and only \"locale\" is used.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"user_data\"", "created_at": 1724705298.308701}, "source.zendesk_source.zendesk.schedule": {"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "schedule", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.schedule", "fqn": ["zendesk_source", "zendesk", "schedule"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "schedule_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "The support schedules created with different business hours and holidays.", "columns": {"id": {"name": "id", "description": "ID automatically assigned to the schedule upon creation", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "Name of the schedule", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "Time the schedule was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "start_time": {"name": "start_time", "description": "Start time of the schedule, in the schedule's time zone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "end_time": {"name": "end_time", "description": "End time of the schedule, in the schedule's time zone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "time_zone": {"name": "time_zone", "description": "Timezone in which the schedule operates.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, 
"relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"schedule_data\"", "created_at": 1724705298.308789}, "source.zendesk_source.zendesk.ticket_schedule": {"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "ticket_schedule", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.ticket_schedule", "fqn": ["zendesk_source", "zendesk", "ticket_schedule"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "ticket_schedule_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "The schedules applied to tickets through a trigger.", "columns": {"ticket_id": {"name": "ticket_id", "description": "The ID of the ticket assigned to the schedule", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the schedule was assigned to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "schedule_id": {"name": "schedule_id", "description": "The ID of the schedule applied to the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"ticket_schedule_data\"", "created_at": 1724705298.308866}, "source.zendesk_source.zendesk.ticket_form_history": {"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "ticket_form_history", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.ticket_form_history", "fqn": ["zendesk_source", "zendesk", "ticket_form_history"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "ticket_form_history_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": {"warn_after": {"count": 72, "period": "hour"}, "error_after": {"count": 168, "period": "hour"}, "filter": null}, "external": null, "description": "Ticket forms allow an admin to define a subset of ticket fields for display to both agents and end users.", "columns": {"id": {"name": "id", "description": "Automatically assigned when creating ticket form", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "created_at": {"name": "created_at", "description": "The time the ticket form was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated_at": {"name": "updated_at", "description": "The time of the last update of the ticket form", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "display_name": {"name": "display_name", "description": "The name of the form that is displayed to an end user", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "active": {"name": "active", "description": "If the form is set as active", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "The name of the form", "meta": {}, "data_type": null, 
"constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"ticket_form_history_data\"", "created_at": 1724705298.308958}, "source.zendesk_source.zendesk.ticket_tag": {"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "ticket_tag", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.ticket_tag", "fqn": ["zendesk_source", "zendesk", "ticket_tag"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "ticket_tag_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "Tags are words, or combinations of words, you can use to add more context to tickets. The table lists all tags currently associated with a ticket.\n", "columns": {"ticket_id": {"name": "ticket_id", "description": "The ID of the ticket associated with the tag", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "tags": {"name": "tags", "description": "The tag, or word(s), associated with the ticket", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"ticket_tag_data\"", "created_at": 1724705298.309033}, "source.zendesk_source.zendesk.ticket_field_history": {"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "ticket_field_history", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.ticket_field_history", "fqn": ["zendesk_source", "zendesk", "ticket_field_history"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "ticket_field_history_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "All fields and field values associated with tickets.", "columns": {"ticket_id": {"name": "ticket_id", "description": "The ID of the ticket associated with the field", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "field_name": {"name": "field_name", "description": "The name of the ticket field", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "updated": {"name": "updated", "description": "The time the ticket field value was created", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "value": {"name": "value", "description": "The value of the field", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "user_id": {"name": "user_id", "description": "The id of the user who made the update", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"ticket_field_history_data\"", 
"created_at": 1724705298.309112}, "source.zendesk_source.zendesk.daylight_time": {"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "daylight_time", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.daylight_time", "fqn": ["zendesk_source", "zendesk", "daylight_time"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "daylight_time_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "Appropriate offsets (from UTC) for timezones that engage or have engaged with Daylight Savings at some point since 1970.\n", "columns": {"daylight_end_utc": {"name": "daylight_end_utc", "description": "UTC timestamp of when Daylight Time ended in this year.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "daylight_offset": {"name": "daylight_offset", "description": "Number of **hours** added during Daylight Savings Time.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "daylight_start_utc": {"name": "daylight_start_utc", "description": "UTC timestamp of when Daylight Time began in this year.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "time_zone": {"name": "time_zone", "description": "Name of the timezone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "year": {"name": "year", "description": "Year in which daylight savings occurred.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"daylight_time_data\"", "created_at": 1724705298.309192}, "source.zendesk_source.zendesk.time_zone": {"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "time_zone", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.time_zone", "fqn": ["zendesk_source", "zendesk", "time_zone"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "time_zone_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "Offsets (from UTC) for each timezone.", "columns": {"time_zone": {"name": "time_zone", "description": "Name of the time zone.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "standard_offset": {"name": "standard_offset", "description": "Standard offset of the timezone (non-daylight savings hours). 
In `+/-hh:mm` format.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {}, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"time_zone_data\"", "created_at": 1724705298.309267}, "source.zendesk_source.zendesk.schedule_holiday": {"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "schedule_holiday", "resource_type": "source", "package_name": "zendesk_source", "path": "models/src_zendesk.yml", "original_file_path": "models/src_zendesk.yml", "unique_id": "source.zendesk_source.zendesk.schedule_holiday", "fqn": ["zendesk_source", "zendesk", "schedule_holiday"], "source_name": "zendesk", "source_description": "", "loader": "fivetran", "identifier": "schedule_holiday_data", "quoting": {"database": null, "schema": null, "identifier": null, "column": null}, "loaded_at_field": "_fivetran_synced", "freshness": null, "external": null, "description": "Information about holidays for each specified schedule.", "columns": {"end_date": {"name": "end_date", "description": "ISO 8601 representation of the holiday end date.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "id": {"name": "id", "description": "The ID of the scheduled holiday.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "name": {"name": "name", "description": "Name of the holiday.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "schedule_id": {"name": "schedule_id", "description": "The ID of the schedule.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}, "start_date": {"name": "start_date", "description": "ISO 8601 representation of the holiday start date.", "meta": {}, "data_type": null, "constraints": [], "quote": null, "tags": []}}, "meta": {}, "source_meta": {}, "tags": [], "config": {"enabled": true}, "patch_path": null, "unrendered_config": {"enabled": true}, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"schedule_holiday_data\"", "created_at": 1724705298.3093581}}, "macros": {"macro.zendesk_integration_tests.snowflake_seed_data": {"name": "snowflake_seed_data", "resource_type": "macro", "package_name": "zendesk_integration_tests", "path": "macros/snowflake_seed_data.sql", "original_file_path": "macros/snowflake_seed_data.sql", "unique_id": "macro.zendesk_integration_tests.snowflake_seed_data", "macro_sql": "{% macro snowflake_seed_data(seed_name) %}\n\n{% if target.type == 'snowflake' %}\n{{ return(ref(seed_name ~ '_snowflake')) }}\n{% else %}\n{{ return(ref(seed_name)) }}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.523818, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp": {"name": "postgres__current_timestamp", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp", "macro_sql": "{% macro postgres__current_timestamp() -%}\n now()\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.5240939, "supported_languages": null}, 
"macro.dbt_postgres.postgres__snapshot_string_as_time": {"name": "postgres__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_string_as_time", "macro_sql": "{% macro postgres__snapshot_string_as_time(timestamp) -%}\n {%- set result = \"'\" ~ timestamp ~ \"'::timestamp without time zone\" -%}\n {{ return(result) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.524267, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_get_time": {"name": "postgres__snapshot_get_time", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_get_time", "macro_sql": "{% macro postgres__snapshot_get_time() -%}\n {{ current_timestamp() }}::timestamp without time zone\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.524363, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_backcompat": {"name": "postgres__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_backcompat", "macro_sql": "{% macro postgres__current_timestamp_backcompat() %}\n current_timestamp::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.524461, "supported_languages": null}, "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat": {"name": "postgres__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/timestamps.sql", "original_file_path": "macros/timestamps.sql", "unique_id": "macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro postgres__current_timestamp_in_utc_backcompat() %}\n (current_timestamp at time zone 'utc')::{{ type_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.524555, "supported_languages": null}, "macro.dbt_postgres.postgres__get_catalog_relations": {"name": "postgres__get_catalog_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_postgres.postgres__get_catalog_relations", "macro_sql": "{% macro postgres__get_catalog_relations(information_schema, relations) -%}\n {%- call statement('catalog', fetch_result=True) -%}\n\n {#\n If the user has multiple databases set and the first one is wrong, this will fail.\n But we won't fail in the case where there are multiple quoting-difference-only dbs, which is better.\n #}\n {% set database = information_schema.database %}\n {{ adapter.verify_database(database) }}\n\n select\n '{{ database }}' as 
table_database,\n sch.nspname as table_schema,\n tbl.relname as table_name,\n case tbl.relkind\n when 'v' then 'VIEW'\n when 'm' then 'MATERIALIZED VIEW'\n else 'BASE TABLE'\n end as table_type,\n tbl_desc.description as table_comment,\n col.attname as column_name,\n col.attnum as column_index,\n pg_catalog.format_type(col.atttypid, col.atttypmod) as column_type,\n col_desc.description as column_comment,\n pg_get_userbyid(tbl.relowner) as table_owner\n\n from pg_catalog.pg_namespace sch\n join pg_catalog.pg_class tbl on tbl.relnamespace = sch.oid\n join pg_catalog.pg_attribute col on col.attrelid = tbl.oid\n left outer join pg_catalog.pg_description tbl_desc on (tbl_desc.objoid = tbl.oid and tbl_desc.objsubid = 0)\n left outer join pg_catalog.pg_description col_desc on (col_desc.objoid = tbl.oid and col_desc.objsubid = col.attnum)\n where (\n {%- for relation in relations -%}\n {%- if relation.identifier -%}\n (upper(sch.nspname) = upper('{{ relation.schema }}') and\n upper(tbl.relname) = upper('{{ relation.identifier }}'))\n {%- else-%}\n upper(sch.nspname) = upper('{{ relation.schema }}')\n {%- endif -%}\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n )\n and not pg_is_other_temp_schema(sch.oid) -- not a temporary schema belonging to another session\n and tbl.relpersistence in ('p', 'u') -- [p]ermanent table or [u]nlogged table. Exclude [t]emporary tables\n and tbl.relkind in ('r', 'v', 'f', 'p', 'm') -- o[r]dinary table, [v]iew, [f]oreign table, [p]artitioned table, [m]aterialized view. Other values are [i]ndex, [S]equence, [c]omposite type, [t]OAST table\n and col.attnum > 0 -- negative numbers are used for system columns such as oid\n and not col.attisdropped -- column as not been dropped\n\n order by\n sch.nspname,\n tbl.relname,\n col.attnum\n\n {%- endcall -%}\n\n {{ return(load_result('catalog').table) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.525894, "supported_languages": null}, "macro.dbt_postgres.postgres__get_catalog": {"name": "postgres__get_catalog", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/catalog.sql", "original_file_path": "macros/catalog.sql", "unique_id": "macro.dbt_postgres.postgres__get_catalog", "macro_sql": "{% macro postgres__get_catalog(information_schema, schemas) -%}\n {%- set relations = [] -%}\n {%- for schema in schemas -%}\n {%- set dummy = relations.append({'schema': schema}) -%}\n {%- endfor -%}\n {{ return(postgres__get_catalog_relations(information_schema, relations)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.526217, "supported_languages": null}, "macro.dbt_postgres.postgres__get_relations": {"name": "postgres__get_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "unique_id": "macro.dbt_postgres.postgres__get_relations", "macro_sql": "{% macro postgres__get_relations() -%}\n\n {#\n -- in pg_depend, objid is the dependent, refobjid is the referenced object\n -- > a pg_depend entry indicates that the referenced object cannot be\n -- > dropped without also dropping the dependent object.\n #}\n\n {%- call statement('relations', fetch_result=True) -%}\n 
with relation as (\n select\n pg_rewrite.ev_class as class,\n pg_rewrite.oid as id\n from pg_rewrite\n ),\n class as (\n select\n oid as id,\n relname as name,\n relnamespace as schema,\n relkind as kind\n from pg_class\n ),\n dependency as (\n select distinct\n pg_depend.objid as id,\n pg_depend.refobjid as ref\n from pg_depend\n ),\n schema as (\n select\n pg_namespace.oid as id,\n pg_namespace.nspname as name\n from pg_namespace\n where nspname != 'information_schema' and nspname not like 'pg\\_%'\n ),\n referenced as (\n select\n relation.id AS id,\n referenced_class.name ,\n referenced_class.schema ,\n referenced_class.kind\n from relation\n join class as referenced_class on relation.class=referenced_class.id\n where referenced_class.kind in ('r', 'v', 'm')\n ),\n relationships as (\n select\n referenced.name as referenced_name,\n referenced.schema as referenced_schema_id,\n dependent_class.name as dependent_name,\n dependent_class.schema as dependent_schema_id,\n referenced.kind as kind\n from referenced\n join dependency on referenced.id=dependency.id\n join class as dependent_class on dependency.ref=dependent_class.id\n where\n (referenced.name != dependent_class.name or\n referenced.schema != dependent_class.schema)\n )\n\n select\n referenced_schema.name as referenced_schema,\n relationships.referenced_name as referenced_name,\n dependent_schema.name as dependent_schema,\n relationships.dependent_name as dependent_name\n from relationships\n join schema as dependent_schema on relationships.dependent_schema_id=dependent_schema.id\n join schema as referenced_schema on relationships.referenced_schema_id=referenced_schema.id\n group by referenced_schema, referenced_name, dependent_schema, dependent_name\n order by referenced_schema, referenced_name, dependent_schema, dependent_name;\n\n {%- endcall -%}\n\n {{ return(load_result('relations').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.5267992, "supported_languages": null}, "macro.dbt_postgres.postgres_get_relations": {"name": "postgres_get_relations", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations.sql", "original_file_path": "macros/relations.sql", "unique_id": "macro.dbt_postgres.postgres_get_relations", "macro_sql": "{% macro postgres_get_relations() %}\n {{ return(postgres__get_relations()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.526913, "supported_languages": null}, "macro.dbt_postgres.postgres__create_table_as": {"name": "postgres__create_table_as", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_table_as", "macro_sql": "{% macro postgres__create_table_as(temporary, relation, sql) -%}\n {%- set unlogged = config.get('unlogged', default=false) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n\n create {% if temporary -%}\n temporary\n {%- elif unlogged -%}\n unlogged\n {%- endif %} table {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {% endif 
-%}\n {% if contract_config.enforced and (not temporary) -%}\n {{ get_table_columns_and_constraints() }} ;\n insert into {{ relation }} (\n {{ adapter.dispatch('get_column_names', 'dbt')() }}\n )\n {%- set sql = get_select_subquery(sql) %}\n {% else %}\n as\n {% endif %}\n (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.default__get_column_names", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.535181, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_index_sql": {"name": "postgres__get_create_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_create_index_sql", "macro_sql": "{% macro postgres__get_create_index_sql(relation, index_dict) -%}\n {%- set index_config = adapter.parse_index(index_dict) -%}\n {%- set comma_separated_columns = \", \".join(index_config.columns) -%}\n {%- set index_name = index_config.render(relation) -%}\n\n create {% if index_config.unique -%}\n unique\n {%- endif %} index if not exists\n \"{{ index_name }}\"\n on {{ relation }} {% if index_config.type -%}\n using {{ index_config.type }}\n {%- endif %}\n ({{ comma_separated_columns }});\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.535729, "supported_languages": null}, "macro.dbt_postgres.postgres__create_schema": {"name": "postgres__create_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__create_schema", "macro_sql": "{% macro postgres__create_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier().include(database=False) }}\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.536016, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_schema": {"name": "postgres__drop_schema", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__drop_schema", "macro_sql": "{% macro postgres__drop_schema(relation) -%}\n {% if relation.database -%}\n {{ adapter.verify_database(relation.database) }}\n {%- endif -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier().include(database=False) }} cascade\n {%- endcall -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.536296, "supported_languages": null}, "macro.dbt_postgres.postgres__get_columns_in_relation": {"name": "postgres__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", 
"original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_columns_in_relation", "macro_sql": "{% macro postgres__get_columns_in_relation(relation) -%}\n {% call statement('get_columns_in_relation', fetch_result=True) %}\n select\n column_name,\n data_type,\n character_maximum_length,\n numeric_precision,\n numeric_scale\n\n from {{ relation.information_schema('columns') }}\n where table_name = '{{ relation.identifier }}'\n {% if relation.schema %}\n and table_schema = '{{ relation.schema }}'\n {% endif %}\n order by ordinal_position\n\n {% endcall %}\n {% set table = load_result('get_columns_in_relation').table %}\n {{ return(sql_convert_columns_in_relation(table)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.sql_convert_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.53673, "supported_languages": null}, "macro.dbt_postgres.postgres__list_relations_without_caching": {"name": "postgres__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_relations_without_caching", "macro_sql": "{% macro postgres__list_relations_without_caching(schema_relation) %}\n {% call statement('list_relations_without_caching', fetch_result=True) -%}\n select\n '{{ schema_relation.database }}' as database,\n tablename as name,\n schemaname as schema,\n 'table' as type\n from pg_tables\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n viewname as name,\n schemaname as schema,\n 'view' as type\n from pg_views\n where schemaname ilike '{{ schema_relation.schema }}'\n union all\n select\n '{{ schema_relation.database }}' as database,\n matviewname as name,\n schemaname as schema,\n 'materialized_view' as type\n from pg_matviews\n where schemaname ilike '{{ schema_relation.schema }}'\n {% endcall %}\n {{ return(load_result('list_relations_without_caching').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.537136, "supported_languages": null}, "macro.dbt_postgres.postgres__information_schema_name": {"name": "postgres__information_schema_name", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__information_schema_name", "macro_sql": "{% macro postgres__information_schema_name(database) -%}\n {% if database_name -%}\n {{ adapter.verify_database(database_name) }}\n {%- endif -%}\n information_schema\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.537303, "supported_languages": null}, "macro.dbt_postgres.postgres__list_schemas": {"name": "postgres__list_schemas", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__list_schemas", "macro_sql": "{% macro postgres__list_schemas(database) %}\n {% if database -%}\n {{ adapter.verify_database(database) }}\n {%- endif -%}\n {% 
call statement('list_schemas', fetch_result=True, auto_begin=False) %}\n select distinct nspname from pg_namespace\n {% endcall %}\n {{ return(load_result('list_schemas').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.537612, "supported_languages": null}, "macro.dbt_postgres.postgres__check_schema_exists": {"name": "postgres__check_schema_exists", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__check_schema_exists", "macro_sql": "{% macro postgres__check_schema_exists(information_schema, schema) -%}\n {% if information_schema.database -%}\n {{ adapter.verify_database(information_schema.database) }}\n {%- endif -%}\n {% call statement('check_schema_exists', fetch_result=True, auto_begin=False) %}\n select count(*) from pg_namespace where nspname = '{{ schema }}'\n {% endcall %}\n {{ return(load_result('check_schema_exists').table) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.53811, "supported_languages": null}, "macro.dbt_postgres.postgres__make_relation_with_suffix": {"name": "postgres__make_relation_with_suffix", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_relation_with_suffix", "macro_sql": "{% macro postgres__make_relation_with_suffix(base_relation, suffix, dstring) %}\n {% if dstring %}\n {% set dt = modules.datetime.datetime.now() %}\n {% set dtstring = dt.strftime(\"%H%M%S%f\") %}\n {% set suffix = suffix ~ dtstring %}\n {% endif %}\n {% set suffix_length = suffix|length %}\n {% set relation_max_name_length = base_relation.relation_max_name_length() %}\n {% if suffix_length > relation_max_name_length %}\n {% do exceptions.raise_compiler_error('Relation suffix is too long (' ~ suffix_length ~ ' characters). 
Maximum length is ' ~ relation_max_name_length ~ ' characters.') %}\n {% endif %}\n {% set identifier = base_relation.identifier[:relation_max_name_length - suffix_length] ~ suffix %}\n\n {{ return(base_relation.incorporate(path={\"identifier\": identifier })) }}\n\n {% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.538953, "supported_languages": null}, "macro.dbt_postgres.postgres__make_intermediate_relation": {"name": "postgres__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_intermediate_relation", "macro_sql": "{% macro postgres__make_intermediate_relation(base_relation, suffix) %}\n {{ return(postgres__make_relation_with_suffix(base_relation, suffix, dstring=False)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.539156, "supported_languages": null}, "macro.dbt_postgres.postgres__make_temp_relation": {"name": "postgres__make_temp_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_temp_relation", "macro_sql": "{% macro postgres__make_temp_relation(base_relation, suffix) %}\n {% set temp_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=True) %}\n {{ return(temp_relation.incorporate(path={\"schema\": none,\n \"database\": none})) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.5394619, "supported_languages": null}, "macro.dbt_postgres.postgres__make_backup_relation": {"name": "postgres__make_backup_relation", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__make_backup_relation", "macro_sql": "{% macro postgres__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {% set backup_relation = postgres__make_relation_with_suffix(base_relation, suffix, dstring=False) %}\n {{ return(backup_relation.incorporate(type=backup_relation_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_relation_with_suffix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.539724, "supported_languages": null}, "macro.dbt_postgres.postgres_escape_comment": {"name": "postgres_escape_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres_escape_comment", "macro_sql": "{% macro postgres_escape_comment(comment) -%}\n {% if comment is not string %}\n {% do exceptions.raise_compiler_error('cannot escape a non-string: ' ~ comment) %}\n {% endif %}\n {%- set magic = '$dbt_comment_literal_block$' -%}\n {%- if magic in comment -%}\n {%- do exceptions.raise_compiler_error('The string ' ~ 
magic ~ ' is not allowed in comments.') -%}\n {%- endif -%}\n {{ magic }}{{ comment }}{{ magic }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.540117, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_relation_comment": {"name": "postgres__alter_relation_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_relation_comment", "macro_sql": "{% macro postgres__alter_relation_comment(relation, comment) %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on {{ relation.type }} {{ relation }} is {{ escaped_comment }};\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.5403259, "supported_languages": null}, "macro.dbt_postgres.postgres__alter_column_comment": {"name": "postgres__alter_column_comment", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__alter_column_comment", "macro_sql": "{% macro postgres__alter_column_comment(relation, column_dict) %}\n {% set existing_columns = adapter.get_columns_in_relation(relation) | map(attribute=\"name\") | list %}\n {% for column_name in column_dict if (column_name in existing_columns) %}\n {% set comment = column_dict[column_name]['description'] %}\n {% set escaped_comment = postgres_escape_comment(comment) %}\n comment on column {{ relation }}.{{ adapter.quote(column_name) if column_dict[column_name]['quote'] else column_name }} is {{ escaped_comment }};\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres_escape_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.5408719, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_grant_sql": {"name": "postgres__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_grant_sql", "macro_sql": "\n\n{%- macro postgres__get_show_grant_sql(relation) -%}\n select grantee, privilege_type\n from {{ relation.information_schema('role_table_grants') }}\n where grantor = current_role\n and grantee != current_role\n and table_schema = '{{ relation.schema }}'\n and table_name = '{{ relation.identifier }}'\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.541055, "supported_languages": null}, "macro.dbt_postgres.postgres__copy_grants": {"name": "postgres__copy_grants", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__copy_grants", "macro_sql": "{% macro postgres__copy_grants() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1724705296.541164, "supported_languages": null}, "macro.dbt_postgres.postgres__get_show_indexes_sql": {"name": "postgres__get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_show_indexes_sql", "macro_sql": "{% macro postgres__get_show_indexes_sql(relation) %}\n select\n i.relname as name,\n m.amname as method,\n ix.indisunique as \"unique\",\n array_to_string(array_agg(a.attname), ',') as column_names\n from pg_index ix\n join pg_class i\n on i.oid = ix.indexrelid\n join pg_am m\n on m.oid=i.relam\n join pg_class t\n on t.oid = ix.indrelid\n join pg_namespace n\n on n.oid = t.relnamespace\n join pg_attribute a\n on a.attrelid = t.oid\n and a.attnum = ANY(ix.indkey)\n where t.relname = '{{ relation.identifier }}'\n and n.nspname = '{{ relation.schema }}'\n and t.relkind in ('r', 'm')\n group by 1, 2, 3\n order by 1, 2, 3\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.541331, "supported_languages": null}, "macro.dbt_postgres.postgres__get_drop_index_sql": {"name": "postgres__get_drop_index_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/adapters.sql", "original_file_path": "macros/adapters.sql", "unique_id": "macro.dbt_postgres.postgres__get_drop_index_sql", "macro_sql": "\n\n\n{%- macro postgres__get_drop_index_sql(relation, index_name) -%}\n drop index if exists \"{{ relation.schema }}\".\"{{ index_name }}\"\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.5414588, "supported_languages": null}, "macro.dbt_postgres.postgres__get_incremental_default_sql": {"name": "postgres__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/incremental_strategies.sql", "original_file_path": "macros/materializations/incremental_strategies.sql", "unique_id": "macro.dbt_postgres.postgres__get_incremental_default_sql", "macro_sql": "{% macro postgres__get_incremental_default_sql(arg_dict) %}\n\n {% if arg_dict[\"unique_key\"] %}\n {% do return(get_incremental_delete_insert_sql(arg_dict)) %}\n {% else %}\n {% do return(get_incremental_append_sql(arg_dict)) %}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_delete_insert_sql", "macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.5418408, "supported_languages": null}, "macro.dbt_postgres.postgres__snapshot_merge_sql": {"name": "postgres__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/materializations/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshot_merge.sql", "unique_id": "macro.dbt_postgres.postgres__snapshot_merge_sql", "macro_sql": "{% macro postgres__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n update {{ target }}\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_scd_id::text = {{ target }}.dbt_scd_id::text\n and 
DBT_INTERNAL_SOURCE.dbt_change_type::text in ('update'::text, 'delete'::text)\n and {{ target }}.dbt_valid_to is null;\n\n insert into {{ target }} ({{ insert_cols_csv }})\n select {% for column in insert_cols -%}\n DBT_INTERNAL_SOURCE.{{ column }} {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n from {{ source }} as DBT_INTERNAL_SOURCE\n where DBT_INTERNAL_SOURCE.dbt_change_type::text = 'insert'::text;\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.542528, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_materialized_view": {"name": "postgres__drop_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/drop.sql", "original_file_path": "macros/relations/materialized_view/drop.sql", "unique_id": "macro.dbt_postgres.postgres__drop_materialized_view", "macro_sql": "{% macro postgres__drop_materialized_view(relation) -%}\n drop materialized view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.5426612, "supported_languages": null}, "macro.dbt_postgres.postgres__describe_materialized_view": {"name": "postgres__describe_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/describe.sql", "original_file_path": "macros/relations/materialized_view/describe.sql", "unique_id": "macro.dbt_postgres.postgres__describe_materialized_view", "macro_sql": "{% macro postgres__describe_materialized_view(relation) %}\n -- for now just get the indexes, we don't need the name or the query yet\n {% set _indexes = run_query(get_show_indexes_sql(relation)) %}\n {% do return({'indexes': _indexes}) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_indexes_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.542934, "supported_languages": null}, "macro.dbt_postgres.postgres__refresh_materialized_view": {"name": "postgres__refresh_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/refresh.sql", "original_file_path": "macros/relations/materialized_view/refresh.sql", "unique_id": "macro.dbt_postgres.postgres__refresh_materialized_view", "macro_sql": "{% macro postgres__refresh_materialized_view(relation) %}\n refresh materialized view {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.543061, "supported_languages": null}, "macro.dbt_postgres.postgres__get_rename_materialized_view_sql": {"name": "postgres__get_rename_materialized_view_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/rename.sql", "original_file_path": "macros/relations/materialized_view/rename.sql", "unique_id": "macro.dbt_postgres.postgres__get_rename_materialized_view_sql", "macro_sql": "{% macro postgres__get_rename_materialized_view_sql(relation, new_name) %}\n alter materialized view {{ relation }} rename to {{ new_name }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", 
"meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.543318, "supported_languages": null}, "macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql": {"name": "postgres__get_alter_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_postgres.postgres__get_alter_materialized_view_as_sql", "macro_sql": "{% macro postgres__get_alter_materialized_view_as_sql(\n relation,\n configuration_changes,\n sql,\n existing_relation,\n backup_relation,\n intermediate_relation\n) %}\n\n -- apply a full refresh immediately if needed\n {% if configuration_changes.requires_full_refresh %}\n\n {{ get_replace_sql(existing_relation, relation, sql) }}\n\n -- otherwise apply individual changes as needed\n {% else %}\n\n {{ postgres__update_indexes_on_materialized_view(relation, configuration_changes.indexes) }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_replace_sql", "macro.dbt_postgres.postgres__update_indexes_on_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.5440938, "supported_languages": null}, "macro.dbt_postgres.postgres__update_indexes_on_materialized_view": {"name": "postgres__update_indexes_on_materialized_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_postgres.postgres__update_indexes_on_materialized_view", "macro_sql": "\n\n\n{%- macro postgres__update_indexes_on_materialized_view(relation, index_changes) -%}\n {{- log(\"Applying UPDATE INDEXES to: \" ~ relation) -}}\n\n {%- for _index_change in index_changes -%}\n {%- set _index = _index_change.context -%}\n\n {%- if _index_change.action == \"drop\" -%}\n\n {{ postgres__get_drop_index_sql(relation, _index.name) }};\n\n {%- elif _index_change.action == \"create\" -%}\n\n {{ postgres__get_create_index_sql(relation, _index.as_node_config) }}\n\n {%- endif -%}\n\n {%- endfor -%}\n\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_drop_index_sql", "macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.544535, "supported_languages": null}, "macro.dbt_postgres.postgres__get_materialized_view_configuration_changes": {"name": "postgres__get_materialized_view_configuration_changes", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/alter.sql", "original_file_path": "macros/relations/materialized_view/alter.sql", "unique_id": "macro.dbt_postgres.postgres__get_materialized_view_configuration_changes", "macro_sql": "{% macro postgres__get_materialized_view_configuration_changes(existing_relation, new_config) %}\n {% set _existing_materialized_view = postgres__describe_materialized_view(existing_relation) %}\n {% set _configuration_changes = existing_relation.get_materialized_view_config_change_collection(_existing_materialized_view, new_config) %}\n {% do return(_configuration_changes) %}\n{% endmacro %}", "depends_on": {"macros": 
["macro.dbt_postgres.postgres__describe_materialized_view"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.544801, "supported_languages": null}, "macro.dbt_postgres.postgres__get_create_materialized_view_as_sql": {"name": "postgres__get_create_materialized_view_as_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/materialized_view/create.sql", "original_file_path": "macros/relations/materialized_view/create.sql", "unique_id": "macro.dbt_postgres.postgres__get_create_materialized_view_as_sql", "macro_sql": "{% macro postgres__get_create_materialized_view_as_sql(relation, sql) %}\n create materialized view if not exists {{ relation }} as {{ sql }};\n\n {% for _index_dict in config.get('indexes', []) -%}\n {{- get_create_index_sql(relation, _index_dict) -}}\n {%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.5451362, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_table": {"name": "postgres__drop_table", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/table/drop.sql", "original_file_path": "macros/relations/table/drop.sql", "unique_id": "macro.dbt_postgres.postgres__drop_table", "macro_sql": "{% macro postgres__drop_table(relation) -%}\n drop table if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.545265, "supported_languages": null}, "macro.dbt_postgres.postgres__get_replace_table_sql": {"name": "postgres__get_replace_table_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/table/replace.sql", "original_file_path": "macros/relations/table/replace.sql", "unique_id": "macro.dbt_postgres.postgres__get_replace_table_sql", "macro_sql": "{% macro postgres__get_replace_table_sql(relation, sql) -%}\n\n {%- set sql_header = config.get('sql_header', none) -%}\n {{ sql_header if sql_header is not none }}\n\n create or replace table {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {{ get_table_columns_and_constraints() }}\n {%- set sql = get_select_subquery(sql) %}\n {% endif %}\n as (\n {{ sql }}\n );\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent", "macro.dbt.get_table_columns_and_constraints", "macro.dbt.get_select_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.545894, "supported_languages": null}, "macro.dbt_postgres.postgres__get_rename_table_sql": {"name": "postgres__get_rename_table_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/table/rename.sql", "original_file_path": "macros/relations/table/rename.sql", "unique_id": "macro.dbt_postgres.postgres__get_rename_table_sql", "macro_sql": "{% macro postgres__get_rename_table_sql(relation, new_name) %}\n alter table {{ relation }} rename to {{ new_name }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1724705296.5460532, "supported_languages": null}, "macro.dbt_postgres.postgres__drop_view": {"name": "postgres__drop_view", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/view/drop.sql", "original_file_path": "macros/relations/view/drop.sql", "unique_id": "macro.dbt_postgres.postgres__drop_view", "macro_sql": "{% macro postgres__drop_view(relation) -%}\n drop view if exists {{ relation }} cascade\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.546172, "supported_languages": null}, "macro.dbt_postgres.postgres__get_replace_view_sql": {"name": "postgres__get_replace_view_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/view/replace.sql", "original_file_path": "macros/relations/view/replace.sql", "unique_id": "macro.dbt_postgres.postgres__get_replace_view_sql", "macro_sql": "{% macro postgres__get_replace_view_sql(relation, sql) -%}\n\n {%- set sql_header = config.get('sql_header', none) -%}\n {{ sql_header if sql_header is not none }}\n\n create or replace view {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {%- endif %}\n as (\n {{ sql }}\n );\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.5466812, "supported_languages": null}, "macro.dbt_postgres.postgres__get_rename_view_sql": {"name": "postgres__get_rename_view_sql", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/relations/view/rename.sql", "original_file_path": "macros/relations/view/rename.sql", "unique_id": "macro.dbt_postgres.postgres__get_rename_view_sql", "macro_sql": "{% macro postgres__get_rename_view_sql(relation, new_name) %}\n alter view {{ relation }} rename to {{ new_name }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.54685, "supported_languages": null}, "macro.dbt_postgres.postgres__dateadd": {"name": "postgres__dateadd", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt_postgres.postgres__dateadd", "macro_sql": "{% macro postgres__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n {{ from_date_or_timestamp }} + ((interval '1 {{ datepart }}') * ({{ interval }}))\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.547059, "supported_languages": null}, "macro.dbt_postgres.postgres__listagg": {"name": "postgres__listagg", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt_postgres.postgres__listagg", "macro_sql": "{% macro postgres__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n (array_agg(\n {{ measure }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n ))[1:{{ limit_num }}],\n {{ 
delimiter_text }}\n )\n {%- else %}\n string_agg(\n {{ measure }},\n {{ delimiter_text }}\n {% if order_by_clause -%}\n {{ order_by_clause }}\n {%- endif %}\n )\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.547626, "supported_languages": null}, "macro.dbt_postgres.postgres__datediff": {"name": "postgres__datediff", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt_postgres.postgres__datediff", "macro_sql": "{% macro postgres__datediff(first_date, second_date, datepart) -%}\n\n {% if datepart == 'year' %}\n (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date))\n {% elif datepart == 'quarter' %}\n ({{ datediff(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date))\n {% elif datepart == 'month' %}\n ({{ datediff(first_date, second_date, 'year') }} * 12 + date_part('month', ({{second_date}})::date) - date_part('month', ({{first_date}})::date))\n {% elif datepart == 'day' %}\n (({{second_date}})::date - ({{first_date}})::date)\n {% elif datepart == 'week' %}\n ({{ datediff(first_date, second_date, 'day') }} / 7 + case\n when date_part('dow', ({{first_date}})::timestamp) <= date_part('dow', ({{second_date}})::timestamp) then\n case when {{first_date}} <= {{second_date}} then 0 else -1 end\n else\n case when {{first_date}} <= {{second_date}} then 1 else 0 end\n end)\n {% elif datepart == 'hour' %}\n ({{ datediff(first_date, second_date, 'day') }} * 24 + date_part('hour', ({{second_date}})::timestamp) - date_part('hour', ({{first_date}})::timestamp))\n {% elif datepart == 'minute' %}\n ({{ datediff(first_date, second_date, 'hour') }} * 60 + date_part('minute', ({{second_date}})::timestamp) - date_part('minute', ({{first_date}})::timestamp))\n {% elif datepart == 'second' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60 + floor(date_part('second', ({{second_date}})::timestamp)) - floor(date_part('second', ({{first_date}})::timestamp)))\n {% elif datepart == 'millisecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000 + floor(date_part('millisecond', ({{second_date}})::timestamp)) - floor(date_part('millisecond', ({{first_date}})::timestamp)))\n {% elif datepart == 'microsecond' %}\n ({{ datediff(first_date, second_date, 'minute') }} * 60000000 + floor(date_part('microsecond', ({{second_date}})::timestamp)) - floor(date_part('microsecond', ({{first_date}})::timestamp)))\n {% else %}\n {{ exceptions.raise_compiler_error(\"Unsupported datepart for macro datediff in postgres: {!r}\".format(datepart)) }}\n {% endif %}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.550551, "supported_languages": null}, "macro.dbt_postgres.postgres__any_value": {"name": "postgres__any_value", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt_postgres.postgres__any_value", "macro_sql": "{% macro postgres__any_value(expression) -%}\n\n min({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", 
"meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.550699, "supported_languages": null}, "macro.dbt_postgres.postgres__last_day": {"name": "postgres__last_day", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt_postgres.postgres__last_day", "macro_sql": "{% macro postgres__last_day(date, datepart) -%}\n\n {%- if datepart == 'quarter' -%}\n -- postgres dateadd does not support quarter interval.\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd('month', '3', dbt.date_trunc(datepart, date))\n )}}\n as date)\n {%- else -%}\n {{dbt.default_last_day(date, datepart)}}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc", "macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.551172, "supported_languages": null}, "macro.dbt_postgres.postgres__split_part": {"name": "postgres__split_part", "resource_type": "macro", "package_name": "dbt_postgres", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt_postgres.postgres__split_part", "macro_sql": "{% macro postgres__split_part(string_text, delimiter_text, part_number) %}\n\n {% if part_number >= 0 %}\n {{ dbt.default__split_part(string_text, delimiter_text, part_number) }}\n {% else %}\n {{ dbt._split_part_negative(string_text, delimiter_text, part_number) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__split_part", "macro.dbt._split_part_negative"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.551542, "supported_languages": null}, "macro.dbt.run_hooks": {"name": "run_hooks", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.run_hooks", "macro_sql": "{% macro run_hooks(hooks, inside_transaction=True) %}\n {% for hook in hooks | selectattr('transaction', 'equalto', inside_transaction) %}\n {% if not inside_transaction and loop.first %}\n {% call statement(auto_begin=inside_transaction) %}\n commit;\n {% endcall %}\n {% endif %}\n {% set rendered = render(hook.get('sql')) | trim %}\n {% if (rendered | length) > 0 %}\n {% call statement(auto_begin=inside_transaction) %}\n {{ rendered }}\n {% endcall %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.552557, "supported_languages": null}, "macro.dbt.make_hook_config": {"name": "make_hook_config", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.make_hook_config", "macro_sql": "{% macro make_hook_config(sql, inside_transaction) %}\n {{ tojson({\"sql\": sql, \"transaction\": inside_transaction}) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.552752, "supported_languages": null}, 
"macro.dbt.before_begin": {"name": "before_begin", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.before_begin", "macro_sql": "{% macro before_begin(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.552885, "supported_languages": null}, "macro.dbt.in_transaction": {"name": "in_transaction", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.in_transaction", "macro_sql": "{% macro in_transaction(sql) %}\n {{ make_hook_config(sql, inside_transaction=True) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.553014, "supported_languages": null}, "macro.dbt.after_commit": {"name": "after_commit", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/hooks.sql", "original_file_path": "macros/materializations/hooks.sql", "unique_id": "macro.dbt.after_commit", "macro_sql": "{% macro after_commit(sql) %}\n {{ make_hook_config(sql, inside_transaction=False) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_hook_config"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.5531452, "supported_languages": null}, "macro.dbt.set_sql_header": {"name": "set_sql_header", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.set_sql_header", "macro_sql": "{% macro set_sql_header(config) -%}\n {{ config.set('sql_header', caller()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.553487, "supported_languages": null}, "macro.dbt.should_full_refresh": {"name": "should_full_refresh", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_full_refresh", "macro_sql": "{% macro should_full_refresh() %}\n {% set config_full_refresh = config.get('full_refresh') %}\n {% if config_full_refresh is none %}\n {% set config_full_refresh = flags.FULL_REFRESH %}\n {% endif %}\n {% do return(config_full_refresh) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.5537732, "supported_languages": null}, "macro.dbt.should_store_failures": {"name": "should_store_failures", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/configs.sql", "original_file_path": "macros/materializations/configs.sql", "unique_id": "macro.dbt.should_store_failures", "macro_sql": "{% macro should_store_failures() %}\n {% set config_store_failures = config.get('store_failures') %}\n {% if config_store_failures is none %}\n {% set 
config_store_failures = flags.STORE_FAILURES %}\n {% endif %}\n {% do return(config_store_failures) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.554057, "supported_languages": null}, "macro.dbt.snapshot_merge_sql": {"name": "snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.snapshot_merge_sql", "macro_sql": "{% macro snapshot_merge_sql(target, source, insert_cols) -%}\n {{ adapter.dispatch('snapshot_merge_sql', 'dbt')(target, source, insert_cols) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.554442, "supported_languages": null}, "macro.dbt.default__snapshot_merge_sql": {"name": "default__snapshot_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot_merge.sql", "original_file_path": "macros/materializations/snapshots/snapshot_merge.sql", "unique_id": "macro.dbt.default__snapshot_merge_sql", "macro_sql": "{% macro default__snapshot_merge_sql(target, source, insert_cols) -%}\n {%- set insert_cols_csv = insert_cols | join(', ') -%}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on DBT_INTERNAL_SOURCE.dbt_scd_id = DBT_INTERNAL_DEST.dbt_scd_id\n\n when matched\n and DBT_INTERNAL_DEST.dbt_valid_to is null\n and DBT_INTERNAL_SOURCE.dbt_change_type in ('update', 'delete')\n then update\n set dbt_valid_to = DBT_INTERNAL_SOURCE.dbt_valid_to\n\n when not matched\n and DBT_INTERNAL_SOURCE.dbt_change_type = 'insert'\n then insert ({{ insert_cols_csv }})\n values ({{ insert_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.554688, "supported_languages": null}, "macro.dbt.strategy_dispatch": {"name": "strategy_dispatch", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.strategy_dispatch", "macro_sql": "{% macro strategy_dispatch(name) -%}\n{% set original_name = name %}\n {% if '.' 
in name %}\n {% set package_name, name = name.split(\".\", 1) %}\n {% else %}\n {% set package_name = none %}\n {% endif %}\n\n {% if package_name is none %}\n {% set package_context = context %}\n {% elif package_name in context %}\n {% set package_context = context[package_name] %}\n {% else %}\n {% set error_msg %}\n Could not find package '{{package_name}}', called with '{{original_name}}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n\n {%- set search_name = 'snapshot_' ~ name ~ '_strategy' -%}\n\n {% if search_name not in package_context %}\n {% set error_msg %}\n The specified strategy macro '{{name}}' was not found in package '{{ package_name }}'\n {% endset %}\n {{ exceptions.raise_compiler_error(error_msg | trim) }}\n {% endif %}\n {{ return(package_context[search_name]) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.558047, "supported_languages": null}, "macro.dbt.snapshot_hash_arguments": {"name": "snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_hash_arguments", "macro_sql": "{% macro snapshot_hash_arguments(args) -%}\n {{ adapter.dispatch('snapshot_hash_arguments', 'dbt')(args) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.558212, "supported_languages": null}, "macro.dbt.default__snapshot_hash_arguments": {"name": "default__snapshot_hash_arguments", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_hash_arguments", "macro_sql": "{% macro default__snapshot_hash_arguments(args) -%}\n md5({%- for arg in args -%}\n coalesce(cast({{ arg }} as varchar ), '')\n {% if not loop.last %} || '|' || {% endif %}\n {%- endfor -%})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.5584168, "supported_languages": null}, "macro.dbt.snapshot_timestamp_strategy": {"name": "snapshot_timestamp_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_timestamp_strategy", "macro_sql": "{% macro snapshot_timestamp_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set primary_key = config['unique_key'] %}\n {% set updated_at = config['updated_at'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n\n {#/*\n The snapshot relation might not have an {{ updated_at }} value if the\n snapshot strategy is changed from `check` to `timestamp`. 
We\n should use a dbt-created column for the comparison in the snapshot\n table instead of assuming that the user-supplied {{ updated_at }}\n will be present in the historical data.\n\n See https://github.com/dbt-labs/dbt-core/issues/2350\n */ #}\n {% set row_changed_expr -%}\n ({{ snapshotted_rel }}.dbt_valid_from < {{ current_rel }}.{{ updated_at }})\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.559086, "supported_languages": null}, "macro.dbt.snapshot_string_as_time": {"name": "snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_string_as_time", "macro_sql": "{% macro snapshot_string_as_time(timestamp) -%}\n {{ adapter.dispatch('snapshot_string_as_time', 'dbt')(timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_string_as_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.559239, "supported_languages": null}, "macro.dbt.default__snapshot_string_as_time": {"name": "default__snapshot_string_as_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.default__snapshot_string_as_time", "macro_sql": "{% macro default__snapshot_string_as_time(timestamp) %}\n {% do exceptions.raise_not_implemented(\n 'snapshot_string_as_time macro not implemented for adapter '+adapter.type()\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.559396, "supported_languages": null}, "macro.dbt.snapshot_check_all_get_existing_columns": {"name": "snapshot_check_all_get_existing_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_all_get_existing_columns", "macro_sql": "{% macro snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) -%}\n {%- if not target_exists -%}\n {#-- no table yet -> return whatever the query does --#}\n {{ return((false, query_columns)) }}\n {%- endif -%}\n\n {#-- handle any schema changes --#}\n {%- set target_relation = adapter.get_relation(database=node.database, schema=node.schema, identifier=node.alias) -%}\n\n {% if check_cols_config == 'all' %}\n {%- set query_columns = get_columns_in_query(node['compiled_code']) -%}\n\n {% elif check_cols_config is iterable and (check_cols_config | length) > 0 %}\n {#-- query for proper casing/quoting, to support comparison below --#}\n {%- set select_check_cols_from_target -%}\n {#-- N.B. 
The whitespace below is necessary to avoid edge case issue with comments --#}\n {#-- See: https://github.com/dbt-labs/dbt-core/issues/6781 --#}\n select {{ check_cols_config | join(', ') }} from (\n {{ node['compiled_code'] }}\n ) subq\n {%- endset -%}\n {% set query_columns = get_columns_in_query(select_check_cols_from_target) %}\n\n {% else %}\n {% do exceptions.raise_compiler_error(\"Invalid value for 'check_cols': \" ~ check_cols_config) %}\n {% endif %}\n\n {%- set existing_cols = adapter.get_columns_in_relation(target_relation) | map(attribute = 'name') | list -%}\n {%- set ns = namespace() -%} {#-- handle for-loop scoping with a namespace --#}\n {%- set ns.column_added = false -%}\n\n {%- set intersection = [] -%}\n {%- for col in query_columns -%}\n {%- if col in existing_cols -%}\n {%- do intersection.append(adapter.quote(col)) -%}\n {%- else -%}\n {% set ns.column_added = true %}\n {%- endif -%}\n {%- endfor -%}\n {{ return((ns.column_added, intersection)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.560671, "supported_languages": null}, "macro.dbt.snapshot_check_strategy": {"name": "snapshot_check_strategy", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/strategies.sql", "original_file_path": "macros/materializations/snapshots/strategies.sql", "unique_id": "macro.dbt.snapshot_check_strategy", "macro_sql": "{% macro snapshot_check_strategy(node, snapshotted_rel, current_rel, config, target_exists) %}\n {% set check_cols_config = config['check_cols'] %}\n {% set primary_key = config['unique_key'] %}\n {% set invalidate_hard_deletes = config.get('invalidate_hard_deletes', false) %}\n {% set updated_at = config.get('updated_at', snapshot_get_time()) %}\n\n {% set column_added = false %}\n\n {% set column_added, check_cols = snapshot_check_all_get_existing_columns(node, target_exists, check_cols_config) %}\n\n {%- set row_changed_expr -%}\n (\n {%- if column_added -%}\n {{ get_true_sql() }}\n {%- else -%}\n {%- for col in check_cols -%}\n {{ snapshotted_rel }}.{{ col }} != {{ current_rel }}.{{ col }}\n or\n (\n (({{ snapshotted_rel }}.{{ col }} is null) and not ({{ current_rel }}.{{ col }} is null))\n or\n ((not {{ snapshotted_rel }}.{{ col }} is null) and ({{ current_rel }}.{{ col }} is null))\n )\n {%- if not loop.last %} or {% endif -%}\n {%- endfor -%}\n {%- endif -%}\n )\n {%- endset %}\n\n {% set scd_id_expr = snapshot_hash_arguments([primary_key, updated_at]) %}\n\n {% do return({\n \"unique_key\": primary_key,\n \"updated_at\": updated_at,\n \"row_changed\": row_changed_expr,\n \"scd_id\": scd_id_expr,\n \"invalidate_hard_deletes\": invalidate_hard_deletes\n }) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.snapshot_get_time", "macro.dbt.snapshot_check_all_get_existing_columns", "macro.dbt.get_true_sql", "macro.dbt.snapshot_hash_arguments"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.5618691, "supported_languages": null}, "macro.dbt.create_columns": {"name": "create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.create_columns", "macro_sql": "{% macro create_columns(relation, columns) %}\n {{ 
adapter.dispatch('create_columns', 'dbt')(relation, columns) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.56553, "supported_languages": null}, "macro.dbt.default__create_columns": {"name": "default__create_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__create_columns", "macro_sql": "{% macro default__create_columns(relation, columns) %}\n {% for column in columns %}\n {% call statement() %}\n alter table {{ relation }} add column \"{{ column.name }}\" {{ column.data_type }};\n {% endcall %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.565845, "supported_languages": null}, "macro.dbt.post_snapshot": {"name": "post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.post_snapshot", "macro_sql": "{% macro post_snapshot(staging_relation) %}\n {{ adapter.dispatch('post_snapshot', 'dbt')(staging_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.5660212, "supported_languages": null}, "macro.dbt.default__post_snapshot": {"name": "default__post_snapshot", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__post_snapshot", "macro_sql": "{% macro default__post_snapshot(staging_relation) %}\n {# no-op #}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.566108, "supported_languages": null}, "macro.dbt.get_true_sql": {"name": "get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.get_true_sql", "macro_sql": "{% macro get_true_sql() %}\n {{ adapter.dispatch('get_true_sql', 'dbt')() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_true_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.566252, "supported_languages": null}, "macro.dbt.default__get_true_sql": {"name": "default__get_true_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__get_true_sql", "macro_sql": "{% macro default__get_true_sql() %}\n {{ return('TRUE') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.5663638, 
"supported_languages": null}, "macro.dbt.snapshot_staging_table": {"name": "snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.snapshot_staging_table", "macro_sql": "{% macro snapshot_staging_table(strategy, source_sql, target_relation) -%}\n {{ adapter.dispatch('snapshot_staging_table', 'dbt')(strategy, source_sql, target_relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__snapshot_staging_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.566561, "supported_languages": null}, "macro.dbt.default__snapshot_staging_table": {"name": "default__snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__snapshot_staging_table", "macro_sql": "{% macro default__snapshot_staging_table(strategy, source_sql, target_relation) -%}\n\n with snapshot_query as (\n\n {{ source_sql }}\n\n ),\n\n snapshotted_data as (\n\n select *,\n {{ strategy.unique_key }} as dbt_unique_key\n\n from {{ target_relation }}\n where dbt_valid_to is null\n\n ),\n\n insertions_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to,\n {{ strategy.scd_id }} as dbt_scd_id\n\n from snapshot_query\n ),\n\n updates_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n {{ strategy.updated_at }} as dbt_valid_to\n\n from snapshot_query\n ),\n\n {%- if strategy.invalidate_hard_deletes %}\n\n deletes_source_data as (\n\n select\n *,\n {{ strategy.unique_key }} as dbt_unique_key\n from snapshot_query\n ),\n {% endif %}\n\n insertions as (\n\n select\n 'insert' as dbt_change_type,\n source_data.*\n\n from insertions_source_data as source_data\n left outer join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where snapshotted_data.dbt_unique_key is null\n or (\n snapshotted_data.dbt_unique_key is not null\n and (\n {{ strategy.row_changed }}\n )\n )\n\n ),\n\n updates as (\n\n select\n 'update' as dbt_change_type,\n source_data.*,\n snapshotted_data.dbt_scd_id\n\n from updates_source_data as source_data\n join snapshotted_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where (\n {{ strategy.row_changed }}\n )\n )\n\n {%- if strategy.invalidate_hard_deletes -%}\n ,\n\n deletes as (\n\n select\n 'delete' as dbt_change_type,\n source_data.*,\n {{ snapshot_get_time() }} as dbt_valid_from,\n {{ snapshot_get_time() }} as dbt_updated_at,\n {{ snapshot_get_time() }} as dbt_valid_to,\n snapshotted_data.dbt_scd_id\n\n from snapshotted_data\n left join deletes_source_data as source_data on snapshotted_data.dbt_unique_key = source_data.dbt_unique_key\n where source_data.dbt_unique_key is null\n )\n {%- endif %}\n\n select * from insertions\n union all\n select * from updates\n {%- if strategy.invalidate_hard_deletes %}\n union all\n select * from deletes\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": 
["macro.dbt.snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.567431, "supported_languages": null}, "macro.dbt.build_snapshot_table": {"name": "build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_table", "macro_sql": "{% macro build_snapshot_table(strategy, sql) -%}\n {{ adapter.dispatch('build_snapshot_table', 'dbt')(strategy, sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__build_snapshot_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.567608, "supported_languages": null}, "macro.dbt.default__build_snapshot_table": {"name": "default__build_snapshot_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.default__build_snapshot_table", "macro_sql": "{% macro default__build_snapshot_table(strategy, sql) %}\n\n select *,\n {{ strategy.scd_id }} as dbt_scd_id,\n {{ strategy.updated_at }} as dbt_updated_at,\n {{ strategy.updated_at }} as dbt_valid_from,\n nullif({{ strategy.updated_at }}, {{ strategy.updated_at }}) as dbt_valid_to\n from (\n {{ sql }}\n ) sbq\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.567841, "supported_languages": null}, "macro.dbt.build_snapshot_staging_table": {"name": "build_snapshot_staging_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/helpers.sql", "original_file_path": "macros/materializations/snapshots/helpers.sql", "unique_id": "macro.dbt.build_snapshot_staging_table", "macro_sql": "{% macro build_snapshot_staging_table(strategy, sql, target_relation) %}\n {% set temp_relation = make_temp_relation(target_relation) %}\n\n {% set select = snapshot_staging_table(strategy, sql, target_relation) %}\n\n {% call statement('build_snapshot_staging_relation') %}\n {{ create_table_as(True, temp_relation, select) }}\n {% endcall %}\n\n {% do return(temp_relation) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.make_temp_relation", "macro.dbt.snapshot_staging_table", "macro.dbt.statement", "macro.dbt.create_table_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.568259, "supported_languages": null}, "macro.dbt.materialization_snapshot_default": {"name": "materialization_snapshot_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/snapshots/snapshot.sql", "original_file_path": "macros/materializations/snapshots/snapshot.sql", "unique_id": "macro.dbt.materialization_snapshot_default", "macro_sql": "{% materialization snapshot, default %}\n {%- set config = model['config'] -%}\n\n {%- set target_table = model.get('alias', model.get('name')) -%}\n\n {%- set strategy_name = config.get('strategy') -%}\n {%- set unique_key = config.get('unique_key') %}\n -- grab current tables grants config for comparision later on\n {%- set grant_config = config.get('grants') -%}\n\n {% set 
target_relation_exists, target_relation = get_or_create_relation(\n database=model.database,\n schema=model.schema,\n identifier=target_table,\n type='table') -%}\n\n {%- if not target_relation.is_table -%}\n {% do exceptions.relation_wrong_type(target_relation, 'table') %}\n {%- endif -%}\n\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set strategy_macro = strategy_dispatch(strategy_name) %}\n {% set strategy = strategy_macro(model, \"snapshotted_data\", \"source_data\", config, target_relation_exists) %}\n\n {% if not target_relation_exists %}\n\n {% set build_sql = build_snapshot_table(strategy, model['compiled_code']) %}\n {% set final_sql = create_table_as(False, target_relation, build_sql) %}\n\n {% else %}\n\n {{ adapter.valid_snapshot_target(target_relation) }}\n\n {% set staging_table = build_snapshot_staging_table(strategy, sql, target_relation) %}\n\n -- this may no-op if the database does not require column expansion\n {% do adapter.expand_target_column_types(from_relation=staging_table,\n to_relation=target_relation) %}\n\n {% set missing_columns = adapter.get_missing_columns(staging_table, target_relation)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% do create_columns(target_relation, missing_columns) %}\n\n {% set source_columns = adapter.get_columns_in_relation(staging_table)\n | rejectattr('name', 'equalto', 'dbt_change_type')\n | rejectattr('name', 'equalto', 'DBT_CHANGE_TYPE')\n | rejectattr('name', 'equalto', 'dbt_unique_key')\n | rejectattr('name', 'equalto', 'DBT_UNIQUE_KEY')\n | list %}\n\n {% set quoted_source_columns = [] %}\n {% for column in source_columns %}\n {% do quoted_source_columns.append(adapter.quote(column.name)) %}\n {% endfor %}\n\n {% set final_sql = snapshot_merge_sql(\n target = target_relation,\n source = staging_table,\n insert_cols = quoted_source_columns\n )\n %}\n\n {% endif %}\n\n {% call statement('main') %}\n {{ final_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(target_relation_exists, full_refresh_mode=False) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if not target_relation_exists %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {% if staging_table is defined %}\n {% do post_snapshot(staging_table) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.get_or_create_relation", "macro.dbt.run_hooks", "macro.dbt.strategy_dispatch", "macro.dbt.build_snapshot_table", "macro.dbt.create_table_as", "macro.dbt.build_snapshot_staging_table", "macro.dbt.create_columns", "macro.dbt.snapshot_merge_sql", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes", "macro.dbt.post_snapshot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.573691, "supported_languages": ["sql"]}, "macro.dbt.materialization_test_default": {"name": "materialization_test_default", "resource_type": "macro", 
"package_name": "dbt", "path": "macros/materializations/tests/test.sql", "original_file_path": "macros/materializations/tests/test.sql", "unique_id": "macro.dbt.materialization_test_default", "macro_sql": "{%- materialization test, default -%}\n\n {% set relations = [] %}\n\n {% if should_store_failures() %}\n\n {% set identifier = model['alias'] %}\n {% set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% set store_failures_as = config.get('store_failures_as') %}\n -- if `--store-failures` is invoked via command line and `store_failures_as` is not set,\n -- config.get('store_failures_as', 'table') returns None, not 'table'\n {% if store_failures_as == none %}{% set store_failures_as = 'table' %}{% endif %}\n {% if store_failures_as not in ['table', 'view'] %}\n {{ exceptions.raise_compiler_error(\n \"'\" ~ store_failures_as ~ \"' is not a valid value for `store_failures_as`. \"\n \"Accepted values are: ['ephemeral', 'table', 'view']\"\n ) }}\n {% endif %}\n\n {% set target_relation = api.Relation.create(\n identifier=identifier, schema=schema, database=database, type=store_failures_as) -%} %}\n\n {% if old_relation %}\n {% do adapter.drop_relation(old_relation) %}\n {% endif %}\n\n {% call statement(auto_begin=True) %}\n {{ get_create_sql(target_relation, sql) }}\n {% endcall %}\n\n {% do relations.append(target_relation) %}\n\n {% set main_sql %}\n select *\n from {{ target_relation }}\n {% endset %}\n\n {{ adapter.commit() }}\n\n {% else %}\n\n {% set main_sql = sql %}\n\n {% endif %}\n\n {% set limit = config.get('limit') %}\n {% set fail_calc = config.get('fail_calc') %}\n {% set warn_if = config.get('warn_if') %}\n {% set error_if = config.get('error_if') %}\n\n {% call statement('main', fetch_result=True) -%}\n\n {{ get_test_sql(main_sql, fail_calc, warn_if, error_if, limit)}}\n\n {%- endcall %}\n\n {{ return({'relations': relations}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.should_store_failures", "macro.dbt.statement", "macro.dbt.get_create_sql", "macro.dbt.get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.576042, "supported_languages": ["sql"]}, "macro.dbt.get_test_sql": {"name": "get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.get_test_sql", "macro_sql": "{% macro get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n {{ adapter.dispatch('get_test_sql', 'dbt')(main_sql, fail_calc, warn_if, error_if, limit) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_test_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.576442, "supported_languages": null}, "macro.dbt.default__get_test_sql": {"name": "default__get_test_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/helpers.sql", "original_file_path": "macros/materializations/tests/helpers.sql", "unique_id": "macro.dbt.default__get_test_sql", "macro_sql": "{% macro default__get_test_sql(main_sql, fail_calc, warn_if, error_if, limit) -%}\n select\n {{ fail_calc }} as failures,\n {{ fail_calc }} {{ warn_if }} as should_warn,\n {{ fail_calc }} {{ error_if }} as should_error\n from (\n {{ main_sql }}\n {{ \"limit \" ~ limit if 
limit != none }}\n ) dbt_internal_test\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.57671, "supported_languages": null}, "macro.dbt.get_where_subquery": {"name": "get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.get_where_subquery", "macro_sql": "{% macro get_where_subquery(relation) -%}\n {% do return(adapter.dispatch('get_where_subquery', 'dbt')(relation)) %}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_where_subquery"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.577031, "supported_languages": null}, "macro.dbt.default__get_where_subquery": {"name": "default__get_where_subquery", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/tests/where_subquery.sql", "original_file_path": "macros/materializations/tests/where_subquery.sql", "unique_id": "macro.dbt.default__get_where_subquery", "macro_sql": "{% macro default__get_where_subquery(relation) -%}\n {% set where = config.get('where', '') %}\n {% if where %}\n {%- set filtered -%}\n (select * from {{ relation }} where {{ where }}) dbt_subquery\n {%- endset -%}\n {% do return(filtered) %}\n {%- else -%}\n {% do return(relation) %}\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.577368, "supported_languages": null}, "macro.dbt.materialization_materialized_view_default": {"name": "materialization_materialized_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialization_materialized_view_default", "macro_sql": "{% materialization materialized_view, default %}\n {% set existing_relation = load_cached_relation(this) %}\n {% set target_relation = this.incorporate(type=this.MaterializedView) %}\n {% set intermediate_relation = make_intermediate_relation(target_relation) %}\n {% set backup_relation_type = target_relation.MaterializedView if existing_relation is none else existing_relation.type %}\n {% set backup_relation = make_backup_relation(target_relation, backup_relation_type) %}\n\n {{ materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) }}\n\n {% set build_sql = materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}\n\n {% if build_sql == '' %}\n {{ materialized_view_execute_no_op(target_relation) }}\n {% else %}\n {{ materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) }}\n {% endif %}\n\n {{ materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.materialized_view_setup", "macro.dbt.materialized_view_get_build_sql", "macro.dbt.materialized_view_execute_no_op", 
"macro.dbt.materialized_view_execute_build_sql", "macro.dbt.materialized_view_teardown"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.581831, "supported_languages": ["sql"]}, "macro.dbt.materialized_view_setup": {"name": "materialized_view_setup", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_setup", "macro_sql": "{% macro materialized_view_setup(backup_relation, intermediate_relation, pre_hooks) %}\n\n -- backup_relation and intermediate_relation should not already exist in the database\n -- it's possible these exist because of a previous run that exited unexpectedly\n {% set preexisting_backup_relation = load_cached_relation(backup_relation) %}\n {% set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.582201, "supported_languages": null}, "macro.dbt.materialized_view_teardown": {"name": "materialized_view_teardown", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_teardown", "macro_sql": "{% macro materialized_view_teardown(backup_relation, intermediate_relation, post_hooks) %}\n\n -- drop the temp relations if they exist to leave the database clean for the next run\n {{ drop_relation_if_exists(backup_relation) }}\n {{ drop_relation_if_exists(intermediate_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.582428, "supported_languages": null}, "macro.dbt.materialized_view_get_build_sql": {"name": "materialized_view_get_build_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_get_build_sql", "macro_sql": "{% macro materialized_view_get_build_sql(existing_relation, target_relation, backup_relation, intermediate_relation) %}\n\n {% set full_refresh_mode = should_full_refresh() %}\n\n -- determine the scenario we're in: create, full_refresh, alter, refresh data\n {% if existing_relation is none %}\n {% set build_sql = get_create_materialized_view_as_sql(target_relation, sql) %}\n {% elif full_refresh_mode or not existing_relation.is_materialized_view %}\n {% set build_sql = get_replace_sql(existing_relation, target_relation, sql) %}\n {% else %}\n\n -- get config options\n {% set on_configuration_change = 
config.get('on_configuration_change') %}\n {% set configuration_changes = get_materialized_view_configuration_changes(existing_relation, config) %}\n\n {% if configuration_changes is none %}\n {% set build_sql = refresh_materialized_view(target_relation) %}\n\n {% elif on_configuration_change == 'apply' %}\n {% set build_sql = get_alter_materialized_view_as_sql(target_relation, configuration_changes, sql, existing_relation, backup_relation, intermediate_relation) %}\n {% elif on_configuration_change == 'continue' %}\n {% set build_sql = '' %}\n {{ exceptions.warn(\"Configuration changes were identified and `on_configuration_change` was set to `continue` for `\" ~ target_relation ~ \"`\") }}\n {% elif on_configuration_change == 'fail' %}\n {{ exceptions.raise_fail_fast_error(\"Configuration changes were identified and `on_configuration_change` was set to `fail` for `\" ~ target_relation ~ \"`\") }}\n\n {% else %}\n -- this only happens if the user provides a value other than `apply`, 'skip', 'fail'\n {{ exceptions.raise_compiler_error(\"Unexpected configuration scenario\") }}\n\n {% endif %}\n\n {% endif %}\n\n {% do return(build_sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh", "macro.dbt.get_create_materialized_view_as_sql", "macro.dbt.get_replace_sql", "macro.dbt.get_materialized_view_configuration_changes", "macro.dbt.refresh_materialized_view", "macro.dbt.get_alter_materialized_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.583591, "supported_languages": null}, "macro.dbt.materialized_view_execute_no_op": {"name": "materialized_view_execute_no_op", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_execute_no_op", "macro_sql": "{% macro materialized_view_execute_no_op(target_relation) %}\n {% do store_raw_result(\n name=\"main\",\n message=\"skip \" ~ target_relation,\n code=\"skip\",\n rows_affected=\"-1\"\n ) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.583798, "supported_languages": null}, "macro.dbt.materialized_view_execute_build_sql": {"name": "materialized_view_execute_build_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/materialized_view.sql", "original_file_path": "macros/materializations/models/materialized_view.sql", "unique_id": "macro.dbt.materialized_view_execute_build_sql", "macro_sql": "{% macro materialized_view_execute_build_sql(build_sql, existing_relation, target_relation, post_hooks) %}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set grant_config = config.get('grants') %}\n\n {% call statement(name=\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.5843961, "supported_languages": null}, "macro.dbt.materialization_view_default": {"name": "materialization_view_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/view.sql", "original_file_path": "macros/materializations/models/view.sql", "unique_id": "macro.dbt.materialization_view_default", "macro_sql": "{%- materialization view, default -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='view') -%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n This relation (probably) doesn't exist yet. If it does exist, it's a leftover from\n a previous run, and we're going to try to drop it immediately. At the end of this\n materialization, we're going to rename the \"existing_relation\" to this identifier,\n and then we're going to drop it. In order to make sure we run the correct one of:\n - drop view ...\n - drop table ...\n\n We need to set the type of this relation to be the type of the existing_relation, if it exists,\n or else \"view\" as a sane default if it does not. Note that if the existing_relation does not\n exist, then there is nothing to move out of the way and subsequentally drop. In that case,\n this relation will be effectively unused.\n */\n {%- set backup_relation_type = 'view' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_view_as_sql(intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n -- move the existing view out of the way\n {% if existing_relation is not none %}\n /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped\n since the variable was first set. 
*/\n {% set existing_relation = load_cached_relation(existing_relation) %}\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {% endif %}\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {{ adapter.commit() }}\n\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.run_hooks", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.get_create_view_as_sql", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.587113, "supported_languages": ["sql"]}, "macro.dbt.materialization_table_default": {"name": "materialization_table_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/table.sql", "original_file_path": "macros/materializations/models/table.sql", "unique_id": "macro.dbt.materialization_table_default", "macro_sql": "{% materialization table, default %}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') %}\n {%- set intermediate_relation = make_intermediate_relation(target_relation) -%}\n -- the intermediate_relation should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation) -%}\n /*\n See ../view/view.sql for more information about this relation.\n */\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n -- as above, the backup_relation should not already exist\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n\n -- drop the temp relations if they exist already in the database\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n -- build model\n {% call statement('main') -%}\n {{ get_create_table_as_sql(False, intermediate_relation, sql) }}\n {%- endcall %}\n\n -- cleanup\n {% if existing_relation is not none %}\n /* Do the equivalent of rename_if_exists. 'existing_relation' could have been dropped\n since the variable was first set. 
*/\n {% set existing_relation = load_cached_relation(existing_relation) %}\n {% if existing_relation is not none %}\n {{ adapter.rename_relation(existing_relation, backup_relation) }}\n {% endif %}\n {% endif %}\n\n {{ adapter.rename_relation(intermediate_relation, target_relation) }}\n\n {% do create_indexes(target_relation) %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n -- `COMMIT` happens here\n {{ adapter.commit() }}\n\n -- finally, drop the existing/backup relation after the commit\n {{ drop_relation_if_exists(backup_relation) }}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n{% endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.statement", "macro.dbt.get_create_table_as_sql", "macro.dbt.create_indexes", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.589822, "supported_languages": ["sql"]}, "macro.dbt.get_quoted_csv": {"name": "get_quoted_csv", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_quoted_csv", "macro_sql": "{% macro get_quoted_csv(column_names) %}\n\n {% set quoted = [] %}\n {% for col in column_names -%}\n {%- do quoted.append(adapter.quote(col)) -%}\n {%- endfor %}\n\n {%- set dest_cols_csv = quoted | join(', ') -%}\n {{ return(dest_cols_csv) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.5912828, "supported_languages": null}, "macro.dbt.diff_columns": {"name": "diff_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_columns", "macro_sql": "{% macro diff_columns(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% set source_names = source_columns | map(attribute = 'column') | list %}\n {% set target_names = target_columns | map(attribute = 'column') | list %}\n\n {# --check whether the name attribute exists in the target - this does not perform a data type check #}\n {% for sc in source_columns %}\n {% if sc.name not in target_names %}\n {{ result.append(sc) }}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.591808, "supported_languages": null}, "macro.dbt.diff_column_data_types": {"name": "diff_column_data_types", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": 
"macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.diff_column_data_types", "macro_sql": "{% macro diff_column_data_types(source_columns, target_columns) %}\n\n {% set result = [] %}\n {% for sc in source_columns %}\n {% set tc = target_columns | selectattr(\"name\", \"equalto\", sc.name) | list | first %}\n {% if tc %}\n {% if sc.data_type != tc.data_type and not sc.can_expand_to(other_column=tc) %}\n {{ result.append( { 'column_name': tc.name, 'new_type': sc.data_type } ) }}\n {% endif %}\n {% endif %}\n {% endfor %}\n\n {{ return(result) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.5923982, "supported_languages": null}, "macro.dbt.get_merge_update_columns": {"name": "get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.get_merge_update_columns", "macro_sql": "{% macro get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {{ return(adapter.dispatch('get_merge_update_columns', 'dbt')(merge_update_columns, merge_exclude_columns, dest_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.592622, "supported_languages": null}, "macro.dbt.default__get_merge_update_columns": {"name": "default__get_merge_update_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/column_helpers.sql", "original_file_path": "macros/materializations/models/incremental/column_helpers.sql", "unique_id": "macro.dbt.default__get_merge_update_columns", "macro_sql": "{% macro default__get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) %}\n {%- set default_cols = dest_columns | map(attribute=\"quoted\") | list -%}\n\n {%- if merge_update_columns and merge_exclude_columns -%}\n {{ exceptions.raise_compiler_error(\n 'Model cannot specify merge_update_columns and merge_exclude_columns. 
Please update model to use only one config'\n )}}\n {%- elif merge_update_columns -%}\n {%- set update_columns = merge_update_columns -%}\n {%- elif merge_exclude_columns -%}\n {%- set update_columns = [] -%}\n {%- for column in dest_columns -%}\n {% if column.column | lower not in merge_exclude_columns | map(\"lower\") | list %}\n {%- do update_columns.append(column.quoted) -%}\n {% endif %}\n {%- endfor -%}\n {%- else -%}\n {%- set update_columns = default_cols -%}\n {%- endif -%}\n\n {{ return(update_columns) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.593327, "supported_languages": null}, "macro.dbt.get_merge_sql": {"name": "get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_merge_sql", "macro_sql": "{% macro get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n -- back compat for old kwarg name\n {% set incremental_predicates = kwargs.get('predicates', incremental_predicates) %}\n {{ adapter.dispatch('get_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.5987031, "supported_languages": null}, "macro.dbt.default__get_merge_sql": {"name": "default__get_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_merge_sql", "macro_sql": "{% macro default__get_merge_sql(target, source, unique_key, dest_columns, incremental_predicates=none) -%}\n {%- set predicates = [] if incremental_predicates is none else [] + incremental_predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set merge_update_columns = config.get('merge_update_columns') -%}\n {%- set merge_exclude_columns = config.get('merge_exclude_columns') -%}\n {%- set update_columns = get_merge_update_columns(merge_update_columns, merge_exclude_columns, dest_columns) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not mapping and unique_key is not string %}\n {% for key in unique_key %}\n {% set this_key_match %}\n DBT_INTERNAL_SOURCE.{{ key }} = DBT_INTERNAL_DEST.{{ key }}\n {% endset %}\n {% do predicates.append(this_key_match) %}\n {% endfor %}\n {% else %}\n {% set unique_key_match %}\n DBT_INTERNAL_SOURCE.{{ unique_key }} = DBT_INTERNAL_DEST.{{ unique_key }}\n {% endset %}\n {% do predicates.append(unique_key_match) %}\n {% endif %}\n {% else %}\n {% do predicates.append('FALSE') %}\n {% endif %}\n\n {{ sql_header if sql_header is not none }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on {{\"(\" ~ predicates | join(\") and (\") ~ \")\"}}\n\n {% if unique_key %}\n when matched then update set\n {% for column_name in update_columns -%}\n {{ column_name }} = DBT_INTERNAL_SOURCE.{{ column_name }}\n {%- if not loop.last %}, {%- endif %}\n {%- endfor %}\n 
{% endif %}\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv", "macro.dbt.get_merge_update_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6002429, "supported_languages": null}, "macro.dbt.get_delete_insert_merge_sql": {"name": "get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_delete_insert_merge_sql", "macro_sql": "{% macro get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n {{ adapter.dispatch('get_delete_insert_merge_sql', 'dbt')(target, source, unique_key, dest_columns, incremental_predicates) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.600537, "supported_languages": null}, "macro.dbt.default__get_delete_insert_merge_sql": {"name": "default__get_delete_insert_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_delete_insert_merge_sql", "macro_sql": "{% macro default__get_delete_insert_merge_sql(target, source, unique_key, dest_columns, incremental_predicates) -%}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n {% if unique_key %}\n {% if unique_key is sequence and unique_key is not string %}\n delete from {{target }}\n using {{ source }}\n where (\n {% for key in unique_key %}\n {{ source }}.{{ key }} = {{ target }}.{{ key }}\n {{ \"and \" if not loop.last}}\n {% endfor %}\n {% if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {% endif %}\n );\n {% else %}\n delete from {{ target }}\n where (\n {{ unique_key }}) in (\n select ({{ unique_key }})\n from {{ source }}\n )\n {%- if incremental_predicates %}\n {% for predicate in incremental_predicates %}\n and {{ predicate }}\n {% endfor %}\n {%- endif -%};\n\n {% endif %}\n {% endif %}\n\n insert into {{ target }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ source }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6014678, "supported_languages": null}, "macro.dbt.get_insert_overwrite_merge_sql": {"name": "get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.get_insert_overwrite_merge_sql", "macro_sql": "{% macro get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header=false) -%}\n {{ adapter.dispatch('get_insert_overwrite_merge_sql', 'dbt')(target, source, dest_columns, predicates, include_sql_header) }}\n{%- endmacro %}", "depends_on": {"macros": 
["macro.dbt.default__get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6017148, "supported_languages": null}, "macro.dbt.default__get_insert_overwrite_merge_sql": {"name": "default__get_insert_overwrite_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/merge.sql", "original_file_path": "macros/materializations/models/incremental/merge.sql", "unique_id": "macro.dbt.default__get_insert_overwrite_merge_sql", "macro_sql": "{% macro default__get_insert_overwrite_merge_sql(target, source, dest_columns, predicates, include_sql_header) -%}\n {#-- The only time include_sql_header is True: --#}\n {#-- BigQuery + insert_overwrite strategy + \"static\" partitions config --#}\n {#-- We should consider including the sql header at the materialization level instead --#}\n\n {%- set predicates = [] if predicates is none else [] + predicates -%}\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none and include_sql_header }}\n\n merge into {{ target }} as DBT_INTERNAL_DEST\n using {{ source }} as DBT_INTERNAL_SOURCE\n on FALSE\n\n when not matched by source\n {% if predicates %} and {{ predicates | join(' and ') }} {% endif %}\n then delete\n\n when not matched then insert\n ({{ dest_cols_csv }})\n values\n ({{ dest_cols_csv }})\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.602294, "supported_languages": null}, "macro.dbt.is_incremental": {"name": "is_incremental", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/is_incremental.sql", "original_file_path": "macros/materializations/models/incremental/is_incremental.sql", "unique_id": "macro.dbt.is_incremental", "macro_sql": "{% macro is_incremental() %}\n {#-- do not run introspective queries in parsing #}\n {% if not execute %}\n {{ return(False) }}\n {% else %}\n {% set relation = adapter.get_relation(this.database, this.schema, this.table) %}\n {{ return(relation is not none\n and relation.type == 'table'\n and model.config.materialized == 'incremental'\n and not should_full_refresh()) }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_full_refresh"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.602869, "supported_languages": null}, "macro.dbt.get_incremental_append_sql": {"name": "get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_append_sql", "macro_sql": "{% macro get_incremental_append_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_append_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.603659, "supported_languages": null}, 
"macro.dbt.default__get_incremental_append_sql": {"name": "default__get_incremental_append_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_append_sql", "macro_sql": "{% macro default__get_incremental_append_sql(arg_dict) %}\n\n {% do return(get_insert_into_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_into_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6038752, "supported_languages": null}, "macro.dbt.get_incremental_delete_insert_sql": {"name": "get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_delete_insert_sql", "macro_sql": "{% macro get_incremental_delete_insert_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_delete_insert_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_delete_insert_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6040478, "supported_languages": null}, "macro.dbt.default__get_incremental_delete_insert_sql": {"name": "default__get_incremental_delete_insert_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_delete_insert_sql", "macro_sql": "{% macro default__get_incremental_delete_insert_sql(arg_dict) %}\n\n {% do return(get_delete_insert_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_delete_insert_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.604318, "supported_languages": null}, "macro.dbt.get_incremental_merge_sql": {"name": "get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_merge_sql", "macro_sql": "{% macro get_incremental_merge_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_merge_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6045618, "supported_languages": null}, "macro.dbt.default__get_incremental_merge_sql": {"name": "default__get_incremental_merge_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": 
"macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_merge_sql", "macro_sql": "{% macro default__get_incremental_merge_sql(arg_dict) %}\n\n {% do return(get_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"unique_key\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.60484, "supported_languages": null}, "macro.dbt.get_incremental_insert_overwrite_sql": {"name": "get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_insert_overwrite_sql", "macro_sql": "{% macro get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_insert_overwrite_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_incremental_insert_overwrite_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6050198, "supported_languages": null}, "macro.dbt.default__get_incremental_insert_overwrite_sql": {"name": "default__get_incremental_insert_overwrite_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_insert_overwrite_sql", "macro_sql": "{% macro default__get_incremental_insert_overwrite_sql(arg_dict) %}\n\n {% do return(get_insert_overwrite_merge_sql(arg_dict[\"target_relation\"], arg_dict[\"temp_relation\"], arg_dict[\"dest_columns\"], arg_dict[\"incremental_predicates\"])) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_insert_overwrite_merge_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.605254, "supported_languages": null}, "macro.dbt.get_incremental_default_sql": {"name": "get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_incremental_default_sql", "macro_sql": "{% macro get_incremental_default_sql(arg_dict) %}\n\n {{ return(adapter.dispatch('get_incremental_default_sql', 'dbt')(arg_dict)) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_incremental_default_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.605418, "supported_languages": null}, "macro.dbt.default__get_incremental_default_sql": {"name": "default__get_incremental_default_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.default__get_incremental_default_sql", "macro_sql": "{% macro 
default__get_incremental_default_sql(arg_dict) %}\n\n {% do return(get_incremental_append_sql(arg_dict)) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_incremental_append_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.605556, "supported_languages": null}, "macro.dbt.get_insert_into_sql": {"name": "get_insert_into_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/strategies.sql", "original_file_path": "macros/materializations/models/incremental/strategies.sql", "unique_id": "macro.dbt.get_insert_into_sql", "macro_sql": "{% macro get_insert_into_sql(target_relation, temp_relation, dest_columns) %}\n\n {%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute=\"name\")) -%}\n\n insert into {{ target_relation }} ({{ dest_cols_csv }})\n (\n select {{ dest_cols_csv }}\n from {{ temp_relation }}\n )\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_quoted_csv"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6058, "supported_languages": null}, "macro.dbt.materialization_incremental_default": {"name": "materialization_incremental_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/incremental.sql", "original_file_path": "macros/materializations/models/incremental/incremental.sql", "unique_id": "macro.dbt.materialization_incremental_default", "macro_sql": "{% materialization incremental, default -%}\n\n -- relations\n {%- set existing_relation = load_cached_relation(this) -%}\n {%- set target_relation = this.incorporate(type='table') -%}\n {%- set temp_relation = make_temp_relation(target_relation)-%}\n {%- set intermediate_relation = make_intermediate_relation(target_relation)-%}\n {%- set backup_relation_type = 'table' if existing_relation is none else existing_relation.type -%}\n {%- set backup_relation = make_backup_relation(target_relation, backup_relation_type) -%}\n\n -- configs\n {%- set unique_key = config.get('unique_key') -%}\n {%- set full_refresh_mode = (should_full_refresh() or existing_relation.is_view) -%}\n {%- set on_schema_change = incremental_validate_on_schema_change(config.get('on_schema_change'), default='ignore') -%}\n\n -- the temp_ and backup_ relations should not already exist in the database; get_relation\n -- will return None in that case. Otherwise, we get a relation that we can drop\n -- later, before we try to use this name for the current operation. 
This has to happen before\n -- BEGIN, in a separate transaction\n {%- set preexisting_intermediate_relation = load_cached_relation(intermediate_relation)-%}\n {%- set preexisting_backup_relation = load_cached_relation(backup_relation) -%}\n -- grab current tables grants config for comparision later on\n {% set grant_config = config.get('grants') %}\n {{ drop_relation_if_exists(preexisting_intermediate_relation) }}\n {{ drop_relation_if_exists(preexisting_backup_relation) }}\n\n {{ run_hooks(pre_hooks, inside_transaction=False) }}\n\n -- `BEGIN` happens here:\n {{ run_hooks(pre_hooks, inside_transaction=True) }}\n\n {% set to_drop = [] %}\n\n {% if existing_relation is none %}\n {% set build_sql = get_create_table_as_sql(False, target_relation, sql) %}\n {% elif full_refresh_mode %}\n {% set build_sql = get_create_table_as_sql(False, intermediate_relation, sql) %}\n {% set need_swap = true %}\n {% else %}\n {% do run_query(get_create_table_as_sql(True, temp_relation, sql)) %}\n {% do adapter.expand_target_column_types(\n from_relation=temp_relation,\n to_relation=target_relation) %}\n {#-- Process schema changes. Returns dict of changes if successful. Use source columns for upserting/merging --#}\n {% set dest_columns = process_schema_changes(on_schema_change, temp_relation, existing_relation) %}\n {% if not dest_columns %}\n {% set dest_columns = adapter.get_columns_in_relation(existing_relation) %}\n {% endif %}\n\n {#-- Get the incremental_strategy, the macro to use for the strategy, and build the sql --#}\n {% set incremental_strategy = config.get('incremental_strategy') or 'default' %}\n {% set incremental_predicates = config.get('predicates', none) or config.get('incremental_predicates', none) %}\n {% set strategy_sql_macro_func = adapter.get_incremental_strategy_macro(context, incremental_strategy) %}\n {% set strategy_arg_dict = ({'target_relation': target_relation, 'temp_relation': temp_relation, 'unique_key': unique_key, 'dest_columns': dest_columns, 'incremental_predicates': incremental_predicates }) %}\n {% set build_sql = strategy_sql_macro_func(strategy_arg_dict) %}\n\n {% endif %}\n\n {% call statement(\"main\") %}\n {{ build_sql }}\n {% endcall %}\n\n {% if need_swap %}\n {% do adapter.rename_relation(target_relation, backup_relation) %}\n {% do adapter.rename_relation(intermediate_relation, target_relation) %}\n {% do to_drop.append(backup_relation) %}\n {% endif %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n\n {% do persist_docs(target_relation, model) %}\n\n {% if existing_relation is none or existing_relation.is_view or should_full_refresh() %}\n {% do create_indexes(target_relation) %}\n {% endif %}\n\n {{ run_hooks(post_hooks, inside_transaction=True) }}\n\n -- `COMMIT` happens here\n {% do adapter.commit() %}\n\n {% for rel in to_drop %}\n {% do adapter.drop_relation(rel) %}\n {% endfor %}\n\n {{ run_hooks(post_hooks, inside_transaction=False) }}\n\n {{ return({'relations': [target_relation]}) }}\n\n{%- endmaterialization %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.make_temp_relation", "macro.dbt.make_intermediate_relation", "macro.dbt.make_backup_relation", "macro.dbt.should_full_refresh", "macro.dbt.incremental_validate_on_schema_change", "macro.dbt.drop_relation_if_exists", "macro.dbt.run_hooks", "macro.dbt.get_create_table_as_sql", "macro.dbt.run_query", "macro.dbt.process_schema_changes", 
"macro.dbt.statement", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs", "macro.dbt.create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.610355, "supported_languages": ["sql"]}, "macro.dbt.incremental_validate_on_schema_change": {"name": "incremental_validate_on_schema_change", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.incremental_validate_on_schema_change", "macro_sql": "{% macro incremental_validate_on_schema_change(on_schema_change, default='ignore') %}\n\n {% if on_schema_change not in ['sync_all_columns', 'append_new_columns', 'fail', 'ignore'] %}\n\n {% set log_message = 'Invalid value for on_schema_change (%s) specified. Setting default value of %s.' % (on_schema_change, default) %}\n {% do log(log_message) %}\n\n {{ return(default) }}\n\n {% else %}\n\n {{ return(on_schema_change) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.615222, "supported_languages": null}, "macro.dbt.check_for_schema_changes": {"name": "check_for_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.check_for_schema_changes", "macro_sql": "{% macro check_for_schema_changes(source_relation, target_relation) %}\n\n {% set schema_changed = False %}\n\n {%- set source_columns = adapter.get_columns_in_relation(source_relation) -%}\n {%- set target_columns = adapter.get_columns_in_relation(target_relation) -%}\n {%- set source_not_in_target = diff_columns(source_columns, target_columns) -%}\n {%- set target_not_in_source = diff_columns(target_columns, source_columns) -%}\n\n {% set new_target_types = diff_column_data_types(source_columns, target_columns) %}\n\n {% if source_not_in_target != [] %}\n {% set schema_changed = True %}\n {% elif target_not_in_source != [] or new_target_types != [] %}\n {% set schema_changed = True %}\n {% elif new_target_types != [] %}\n {% set schema_changed = True %}\n {% endif %}\n\n {% set changes_dict = {\n 'schema_changed': schema_changed,\n 'source_not_in_target': source_not_in_target,\n 'target_not_in_source': target_not_in_source,\n 'source_columns': source_columns,\n 'target_columns': target_columns,\n 'new_target_types': new_target_types\n } %}\n\n {% set msg %}\n In {{ target_relation }}:\n Schema changed: {{ schema_changed }}\n Source columns not in target: {{ source_not_in_target }}\n Target columns not in source: {{ target_not_in_source }}\n New column types: {{ new_target_types }}\n {% endset %}\n\n {% do log(msg) %}\n\n {{ return(changes_dict) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.diff_columns", "macro.dbt.diff_column_data_types"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.616321, "supported_languages": null}, "macro.dbt.sync_column_schemas": {"name": "sync_column_schemas", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.sync_column_schemas", "macro_sql": "{% macro sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {%- set add_to_target_arr = schema_changes_dict['source_not_in_target'] -%}\n\n {%- if on_schema_change == 'append_new_columns'-%}\n {%- if add_to_target_arr | length > 0 -%}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, none) -%}\n {%- endif -%}\n\n {% elif on_schema_change == 'sync_all_columns' %}\n {%- set remove_from_target_arr = schema_changes_dict['target_not_in_source'] -%}\n {%- set new_target_types = schema_changes_dict['new_target_types'] -%}\n\n {% if add_to_target_arr | length > 0 or remove_from_target_arr | length > 0 %}\n {%- do alter_relation_add_remove_columns(target_relation, add_to_target_arr, remove_from_target_arr) -%}\n {% endif %}\n\n {% if new_target_types != [] %}\n {% for ntt in new_target_types %}\n {% set column_name = ntt['column_name'] %}\n {% set new_type = ntt['new_type'] %}\n {% do alter_column_type(target_relation, column_name, new_type) %}\n {% endfor %}\n {% endif %}\n\n {% endif %}\n\n {% set schema_change_message %}\n In {{ target_relation }}:\n Schema change approach: {{ on_schema_change }}\n Columns added: {{ add_to_target_arr }}\n Columns removed: {{ remove_from_target_arr }}\n Data types changed: {{ new_target_types }}\n {% endset %}\n\n {% do log(schema_change_message) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.alter_relation_add_remove_columns", "macro.dbt.alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.617441, "supported_languages": null}, "macro.dbt.process_schema_changes": {"name": "process_schema_changes", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/incremental/on_schema_change.sql", "original_file_path": "macros/materializations/models/incremental/on_schema_change.sql", "unique_id": "macro.dbt.process_schema_changes", "macro_sql": "{% macro process_schema_changes(on_schema_change, source_relation, target_relation) %}\n\n {% if on_schema_change == 'ignore' %}\n\n {{ return({}) }}\n\n {% else %}\n\n {% set schema_changes_dict = check_for_schema_changes(source_relation, target_relation) %}\n\n {% if schema_changes_dict['schema_changed'] %}\n\n {% if on_schema_change == 'fail' %}\n\n {% set fail_msg %}\n The source and target schemas on this incremental model are out of sync!\n They can be reconciled in several ways:\n - set the `on_schema_change` config to either append_new_columns or sync_all_columns, depending on your situation.\n - Re-run the incremental model with `full_refresh: True` to update the target schema.\n - update the schema manually and re-run the process.\n\n Additional troubleshooting context:\n Source columns not in target: {{ schema_changes_dict['source_not_in_target'] }}\n Target columns not in source: {{ schema_changes_dict['target_not_in_source'] }}\n New column types: {{ schema_changes_dict['new_target_types'] }}\n {% endset %}\n\n {% do exceptions.raise_compiler_error(fail_msg) %}\n\n {# -- unless we ignore, run the sync operation per the config #}\n {% else %}\n\n {% do sync_column_schemas(on_schema_change, target_relation, schema_changes_dict) %}\n\n {% endif %}\n\n {% endif %}\n\n {{ 
return(schema_changes_dict['source_columns']) }}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.check_for_schema_changes", "macro.dbt.sync_column_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.61823, "supported_languages": null}, "macro.dbt.can_clone_table": {"name": "can_clone_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/can_clone_table.sql", "original_file_path": "macros/materializations/models/clone/can_clone_table.sql", "unique_id": "macro.dbt.can_clone_table", "macro_sql": "{% macro can_clone_table() %}\n {{ return(adapter.dispatch('can_clone_table', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__can_clone_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.618465, "supported_languages": null}, "macro.dbt.default__can_clone_table": {"name": "default__can_clone_table", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/can_clone_table.sql", "original_file_path": "macros/materializations/models/clone/can_clone_table.sql", "unique_id": "macro.dbt.default__can_clone_table", "macro_sql": "{% macro default__can_clone_table() %}\n {{ return(False) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.618575, "supported_languages": null}, "macro.dbt.create_or_replace_clone": {"name": "create_or_replace_clone", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/create_or_replace_clone.sql", "original_file_path": "macros/materializations/models/clone/create_or_replace_clone.sql", "unique_id": "macro.dbt.create_or_replace_clone", "macro_sql": "{% macro create_or_replace_clone(this_relation, defer_relation) %}\n {{ return(adapter.dispatch('create_or_replace_clone', 'dbt')(this_relation, defer_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_or_replace_clone"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6188521, "supported_languages": null}, "macro.dbt.default__create_or_replace_clone": {"name": "default__create_or_replace_clone", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/create_or_replace_clone.sql", "original_file_path": "macros/materializations/models/clone/create_or_replace_clone.sql", "unique_id": "macro.dbt.default__create_or_replace_clone", "macro_sql": "{% macro default__create_or_replace_clone(this_relation, defer_relation) %}\n create or replace table {{ this_relation }} clone {{ defer_relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.618978, "supported_languages": null}, "macro.dbt.materialization_clone_default": {"name": "materialization_clone_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/models/clone/clone.sql", "original_file_path": "macros/materializations/models/clone/clone.sql", "unique_id": "macro.dbt.materialization_clone_default", "macro_sql": "{%- materialization clone, default 
-%}\n\n {%- set relations = {'relations': []} -%}\n\n {%- if not defer_relation -%}\n -- nothing to do\n {{ log(\"No relation found in state manifest for \" ~ model.unique_id, info=True) }}\n {{ return(relations) }}\n {%- endif -%}\n\n {%- set existing_relation = load_cached_relation(this) -%}\n\n {%- if existing_relation and not flags.FULL_REFRESH -%}\n -- noop!\n {{ log(\"Relation \" ~ existing_relation ~ \" already exists\", info=True) }}\n {{ return(relations) }}\n {%- endif -%}\n\n {%- set other_existing_relation = load_cached_relation(defer_relation) -%}\n\n -- If this is a database that can do zero-copy cloning of tables, and the other relation is a table, then this will be a table\n -- Otherwise, this will be a view\n\n {% set can_clone_table = can_clone_table() %}\n\n {%- if other_existing_relation and other_existing_relation.type == 'table' and can_clone_table -%}\n\n {%- set target_relation = this.incorporate(type='table') -%}\n {% if existing_relation is not none and not existing_relation.is_table %}\n {{ log(\"Dropping relation \" ~ existing_relation ~ \" because it is of type \" ~ existing_relation.type) }}\n {{ drop_relation_if_exists(existing_relation) }}\n {% endif %}\n\n -- as a general rule, data platforms that can clone tables can also do atomic 'create or replace'\n {% call statement('main') %}\n {% if target_relation and defer_relation and target_relation == defer_relation %}\n {{ log(\"Target relation and defer relation are the same, skipping clone for relation: \" ~ target_relation) }}\n {% else %}\n {{ create_or_replace_clone(target_relation, defer_relation) }}\n {% endif %}\n\n {% endcall %}\n\n {% set should_revoke = should_revoke(existing_relation, full_refresh_mode=True) %}\n {% do apply_grants(target_relation, grant_config, should_revoke=should_revoke) %}\n {% do persist_docs(target_relation, model) %}\n\n {{ return({'relations': [target_relation]}) }}\n\n {%- else -%}\n\n {%- set target_relation = this.incorporate(type='view') -%}\n\n -- reuse the view materialization\n -- TODO: support actual dispatch for materialization macros\n -- Tracking ticket: https://github.com/dbt-labs/dbt-core/issues/7799\n {% set search_name = \"materialization_view_\" ~ adapter.type() %}\n {% if not search_name in context %}\n {% set search_name = \"materialization_view_default\" %}\n {% endif %}\n {% set materialization_macro = context[search_name] %}\n {% set relations = materialization_macro() %}\n {{ return(relations) }}\n\n {%- endif -%}\n\n{%- endmaterialization -%}", "depends_on": {"macros": ["macro.dbt.load_cached_relation", "macro.dbt.can_clone_table", "macro.dbt.drop_relation_if_exists", "macro.dbt.statement", "macro.dbt.create_or_replace_clone", "macro.dbt.should_revoke", "macro.dbt.apply_grants", "macro.dbt.persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.622278, "supported_languages": ["sql"]}, "macro.dbt.materialization_seed_default": {"name": "materialization_seed_default", "resource_type": "macro", "package_name": "dbt", "path": "macros/materializations/seeds/seed.sql", "original_file_path": "macros/materializations/seeds/seed.sql", "unique_id": "macro.dbt.materialization_seed_default", "macro_sql": "{% materialization seed, default %}\n\n {%- set identifier = model['alias'] -%}\n {%- set full_refresh_mode = (should_full_refresh()) -%}\n\n {%- set old_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) -%}\n\n {%- set 
[docs artifact, continued: a single minified, machine-generated JSON line produced by `dbt docs generate`. This stretch of the regenerated file embeds stock dbt-core macro sources rather than anything specific to dbt_zendesk: the tail of the seed materialization, the CSV seed helpers (`create_csv_table`, `reset_csv_table`, `get_csv_sql`, `get_binding_char`, `get_batch_size`, `get_seed_column_quoted_csv`, `load_csv_rows`), the custom alias/schema/database name generators (`generate_alias_name`, `generate_schema_name`, `generate_database_name`), and the relation-management macros (drop, rename, replace, backup, and intermediate handling for tables, views, and materialized views). The raw JSON is omitted here, as it is not meant to be reviewed by hand.]
exceptions.raise_compiler_error(\n \"`get_rename_view_sql` has not been implemented for this adapter.\"\n ) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.658209, "supported_languages": null}, "macro.dbt.get_create_view_as_sql": {"name": "get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.get_create_view_as_sql", "macro_sql": "{% macro get_create_view_as_sql(relation, sql) -%}\n {{ adapter.dispatch('get_create_view_as_sql', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_create_view_as_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.658587, "supported_languages": null}, "macro.dbt.default__get_create_view_as_sql": {"name": "default__get_create_view_as_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.default__get_create_view_as_sql", "macro_sql": "{% macro default__get_create_view_as_sql(relation, sql) -%}\n {{ return(create_view_as(relation, sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.658738, "supported_languages": null}, "macro.dbt.create_view_as": {"name": "create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.create_view_as", "macro_sql": "{% macro create_view_as(relation, sql) -%}\n {{ adapter.dispatch('create_view_as', 'dbt')(relation, sql) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_view_as"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.658904, "supported_languages": null}, "macro.dbt.default__create_view_as": {"name": "default__create_view_as", "resource_type": "macro", "package_name": "dbt", "path": "macros/relations/view/create.sql", "original_file_path": "macros/relations/view/create.sql", "unique_id": "macro.dbt.default__create_view_as", "macro_sql": "{% macro default__create_view_as(relation, sql) -%}\n {%- set sql_header = config.get('sql_header', none) -%}\n\n {{ sql_header if sql_header is not none }}\n create view {{ relation }}\n {% set contract_config = config.get('contract') %}\n {% if contract_config.enforced %}\n {{ get_assert_columns_equivalent(sql) }}\n {%- endif %}\n as (\n {{ sql }}\n );\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.get_assert_columns_equivalent"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6592908, "supported_languages": null}, "macro.dbt.default__test_relationships": {"name": "default__test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/relationships.sql", "original_file_path": "macros/generic_test_sql/relationships.sql", "unique_id": "macro.dbt.default__test_relationships", "macro_sql": "{% macro 
default__test_relationships(model, column_name, to, field) %}\n\nwith child as (\n select {{ column_name }} as from_field\n from {{ model }}\n where {{ column_name }} is not null\n),\n\nparent as (\n select {{ field }} as to_field\n from {{ to }}\n)\n\nselect\n from_field\n\nfrom child\nleft join parent\n on child.from_field = parent.to_field\n\nwhere parent.to_field is null\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.659623, "supported_languages": null}, "macro.dbt.default__test_not_null": {"name": "default__test_not_null", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/not_null.sql", "original_file_path": "macros/generic_test_sql/not_null.sql", "unique_id": "macro.dbt.default__test_not_null", "macro_sql": "{% macro default__test_not_null(model, column_name) %}\n\n{% set column_list = '*' if should_store_failures() else column_name %}\n\nselect {{ column_list }}\nfrom {{ model }}\nwhere {{ column_name }} is null\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_store_failures"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6598961, "supported_languages": null}, "macro.dbt.default__test_unique": {"name": "default__test_unique", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/unique.sql", "original_file_path": "macros/generic_test_sql/unique.sql", "unique_id": "macro.dbt.default__test_unique", "macro_sql": "{% macro default__test_unique(model, column_name) %}\n\nselect\n {{ column_name }} as unique_field,\n count(*) as n_records\n\nfrom {{ model }}\nwhere {{ column_name }} is not null\ngroup by {{ column_name }}\nhaving count(*) > 1\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.660125, "supported_languages": null}, "macro.dbt.default__test_accepted_values": {"name": "default__test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "macros/generic_test_sql/accepted_values.sql", "original_file_path": "macros/generic_test_sql/accepted_values.sql", "unique_id": "macro.dbt.default__test_accepted_values", "macro_sql": "{% macro default__test_accepted_values(model, column_name, values, quote=True) %}\n\nwith all_values as (\n\n select\n {{ column_name }} as value_field,\n count(*) as n_records\n\n from {{ model }}\n group by {{ column_name }}\n\n)\n\nselect *\nfrom all_values\nwhere value_field not in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n)\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.660655, "supported_languages": null}, "macro.dbt.statement": {"name": "statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.statement", "macro_sql": "\n{%- macro statement(name=None, fetch_result=False, auto_begin=True, language='sql') -%}\n {%- if execute: -%}\n {%- set compiled_code = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime {} for 
node \"{}\"'.format(language, model['unique_id'])) }}\n {{ write(compiled_code) }}\n {%- endif -%}\n {%- if language == 'sql'-%}\n {%- set res, table = adapter.execute(compiled_code, auto_begin=auto_begin, fetch=fetch_result) -%}\n {%- elif language == 'python' -%}\n {%- set res = submit_python_job(model, compiled_code) -%}\n {#-- TODO: What should table be for python models? --#}\n {%- set table = None -%}\n {%- else -%}\n {% do exceptions.raise_compiler_error(\"statement macro didn't get supported language\") %}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_result(name, response=res, agate_table=table) }}\n {%- endif -%}\n\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.662014, "supported_languages": null}, "macro.dbt.noop_statement": {"name": "noop_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.noop_statement", "macro_sql": "{% macro noop_statement(name=None, message=None, code=None, rows_affected=None, res=None) -%}\n {%- set sql = caller() -%}\n\n {%- if name == 'main' -%}\n {{ log('Writing runtime SQL for node \"{}\"'.format(model['unique_id'])) }}\n {{ write(sql) }}\n {%- endif -%}\n\n {%- if name is not none -%}\n {{ store_raw_result(name, message=message, code=code, rows_affected=rows_affected, agate_table=res) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6625369, "supported_languages": null}, "macro.dbt.run_query": {"name": "run_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/statement.sql", "original_file_path": "macros/etc/statement.sql", "unique_id": "macro.dbt.run_query", "macro_sql": "{% macro run_query(sql) %}\n {% call statement(\"run_query_statement\", fetch_result=true, auto_begin=false) %}\n {{ sql }}\n {% endcall %}\n\n {% do return(load_result(\"run_query_statement\").table) %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.662805, "supported_languages": null}, "macro.dbt.convert_datetime": {"name": "convert_datetime", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.convert_datetime", "macro_sql": "{% macro convert_datetime(date_str, date_fmt) %}\n\n {% set error_msg -%}\n The provided partition date '{{ date_str }}' does not match the expected format '{{ date_fmt }}'\n {%- endset %}\n\n {% set res = try_or_compiler_error(error_msg, modules.datetime.datetime.strptime, date_str.strip(), date_fmt) %}\n {{ return(res) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.664503, "supported_languages": null}, "macro.dbt.dates_in_range": {"name": "dates_in_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.dates_in_range", "macro_sql": "{% macro dates_in_range(start_date_str, 
end_date_str=none, in_fmt=\"%Y%m%d\", out_fmt=\"%Y%m%d\") %}\n {% set end_date_str = start_date_str if end_date_str is none else end_date_str %}\n\n {% set start_date = convert_datetime(start_date_str, in_fmt) %}\n {% set end_date = convert_datetime(end_date_str, in_fmt) %}\n\n {% set day_count = (end_date - start_date).days %}\n {% if day_count < 0 %}\n {% set msg -%}\n Partition start date is after the end date ({{ start_date }}, {{ end_date }})\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg, model) }}\n {% endif %}\n\n {% set date_list = [] %}\n {% for i in range(0, day_count + 1) %}\n {% set the_date = (modules.datetime.timedelta(days=i) + start_date) %}\n {% if not out_fmt %}\n {% set _ = date_list.append(the_date) %}\n {% else %}\n {% set _ = date_list.append(the_date.strftime(out_fmt)) %}\n {% endif %}\n {% endfor %}\n\n {{ return(date_list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.convert_datetime"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.665689, "supported_languages": null}, "macro.dbt.partition_range": {"name": "partition_range", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.partition_range", "macro_sql": "{% macro partition_range(raw_partition_date, date_fmt='%Y%m%d') %}\n {% set partition_range = (raw_partition_date | string).split(\",\") %}\n\n {% if (partition_range | length) == 1 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = none %}\n {% elif (partition_range | length) == 2 %}\n {% set start_date = partition_range[0] %}\n {% set end_date = partition_range[1] %}\n {% else %}\n {{ exceptions.raise_compiler_error(\"Invalid partition time. Expected format: {Start Date}[,{End Date}]. 
Got: \" ~ raw_partition_date) }}\n {% endif %}\n\n {{ return(dates_in_range(start_date, end_date, in_fmt=date_fmt)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dates_in_range"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.666361, "supported_languages": null}, "macro.dbt.py_current_timestring": {"name": "py_current_timestring", "resource_type": "macro", "package_name": "dbt", "path": "macros/etc/datetime.sql", "original_file_path": "macros/etc/datetime.sql", "unique_id": "macro.dbt.py_current_timestring", "macro_sql": "{% macro py_current_timestring() %}\n {% set dt = modules.datetime.datetime.now() %}\n {% do return(dt.strftime(\"%Y%m%d%H%M%S%f\")) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6665869, "supported_languages": null}, "macro.dbt.except": {"name": "except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.except", "macro_sql": "{% macro except() %}\n {{ return(adapter.dispatch('except', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.666821, "supported_languages": null}, "macro.dbt.default__except": {"name": "default__except", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/except.sql", "original_file_path": "macros/utils/except.sql", "unique_id": "macro.dbt.default__except", "macro_sql": "{% macro default__except() %}\n\n except\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6668959, "supported_languages": null}, "macro.dbt.get_intervals_between": {"name": "get_intervals_between", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.get_intervals_between", "macro_sql": "{% macro get_intervals_between(start_date, end_date, datepart) -%}\n {{ return(adapter.dispatch('get_intervals_between', 'dbt')(start_date, end_date, datepart)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_intervals_between"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.667653, "supported_languages": null}, "macro.dbt.default__get_intervals_between": {"name": "default__get_intervals_between", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.default__get_intervals_between", "macro_sql": "{% macro default__get_intervals_between(start_date, end_date, datepart) -%}\n {%- call statement('get_intervals_between', fetch_result=True) %}\n\n select {{ dbt.datediff(start_date, end_date, datepart) }}\n\n {%- endcall -%}\n\n {%- set value_list = load_result('get_intervals_between') -%}\n\n {%- if value_list and value_list['data'] -%}\n {%- set values = value_list['data'] | map(attribute=0) | list %}\n {{ return(values[0]) }}\n {%- else -%}\n {{ return(1) }}\n {%- endif -%}\n\n{%- endmacro %}", 
"depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.668221, "supported_languages": null}, "macro.dbt.date_spine": {"name": "date_spine", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.date_spine", "macro_sql": "{% macro date_spine(datepart, start_date, end_date) %}\n {{ return(adapter.dispatch('date_spine', 'dbt')(datepart, start_date, end_date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.668431, "supported_languages": null}, "macro.dbt.default__date_spine": {"name": "default__date_spine", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_spine.sql", "original_file_path": "macros/utils/date_spine.sql", "unique_id": "macro.dbt.default__date_spine", "macro_sql": "{% macro default__date_spine(datepart, start_date, end_date) %}\n\n\n {# call as follows:\n\n date_spine(\n \"day\",\n \"to_date('01/01/2016', 'mm/dd/yyyy')\",\n \"dbt.dateadd(week, 1, current_date)\"\n ) #}\n\n\n with rawdata as (\n\n {{dbt.generate_series(\n dbt.get_intervals_between(start_date, end_date, datepart)\n )}}\n\n ),\n\n all_periods as (\n\n select (\n {{\n dbt.dateadd(\n datepart,\n \"row_number() over (order by 1) - 1\",\n start_date\n )\n }}\n ) as date_{{datepart}}\n from rawdata\n\n ),\n\n filtered as (\n\n select *\n from all_periods\n where date_{{datepart}} <= {{ end_date }}\n\n )\n\n select * from filtered\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.generate_series", "macro.dbt.get_intervals_between", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.668757, "supported_languages": null}, "macro.dbt.replace": {"name": "replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.replace", "macro_sql": "{% macro replace(field, old_chars, new_chars) -%}\n {{ return(adapter.dispatch('replace', 'dbt') (field, old_chars, new_chars)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__replace"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.669051, "supported_languages": null}, "macro.dbt.default__replace": {"name": "default__replace", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/replace.sql", "original_file_path": "macros/utils/replace.sql", "unique_id": "macro.dbt.default__replace", "macro_sql": "{% macro default__replace(field, old_chars, new_chars) %}\n\n replace(\n {{ field }},\n {{ old_chars }},\n {{ new_chars }}\n )\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.669194, "supported_languages": null}, "macro.dbt.concat": {"name": "concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.concat", "macro_sql": "{% macro concat(fields) -%}\n 
{{ return(adapter.dispatch('concat', 'dbt')(fields)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.66941, "supported_languages": null}, "macro.dbt.default__concat": {"name": "default__concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/concat.sql", "original_file_path": "macros/utils/concat.sql", "unique_id": "macro.dbt.default__concat", "macro_sql": "{% macro default__concat(fields) -%}\n {{ fields|join(' || ') }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6695218, "supported_languages": null}, "macro.dbt.get_powers_of_two": {"name": "get_powers_of_two", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.get_powers_of_two", "macro_sql": "{% macro get_powers_of_two(upper_bound) %}\n {{ return(adapter.dispatch('get_powers_of_two', 'dbt')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.670244, "supported_languages": null}, "macro.dbt.default__get_powers_of_two": {"name": "default__get_powers_of_two", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.default__get_powers_of_two", "macro_sql": "{% macro default__get_powers_of_two(upper_bound) %}\n\n {% if upper_bound <= 0 %}\n {{ exceptions.raise_compiler_error(\"upper bound must be positive\") }}\n {% endif %}\n\n {% for _ in range(1, 100) %}\n {% if upper_bound <= 2 ** loop.index %}{{ return(loop.index) }}{% endif %}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.670611, "supported_languages": null}, "macro.dbt.generate_series": {"name": "generate_series", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.generate_series", "macro_sql": "{% macro generate_series(upper_bound) %}\n {{ return(adapter.dispatch('generate_series', 'dbt')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__generate_series"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.670777, "supported_languages": null}, "macro.dbt.default__generate_series": {"name": "default__generate_series", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/generate_series.sql", "original_file_path": "macros/utils/generate_series.sql", "unique_id": "macro.dbt.default__generate_series", "macro_sql": "{% macro default__generate_series(upper_bound) %}\n\n {% set n = dbt.get_powers_of_two(upper_bound) %}\n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n {% for i in range(n) %}\n p{{i}}.generated_number * power(2, {{i}})\n {% if not loop.last %} + {% endif %}\n {% endfor 
%}\n + 1\n as generated_number\n\n from\n\n {% for i in range(n) %}\n p as p{{i}}\n {% if not loop.last %} cross join {% endif %}\n {% endfor %}\n\n )\n\n select *\n from unioned\n where generated_number <= {{upper_bound}}\n order by generated_number\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.671223, "supported_languages": null}, "macro.dbt.length": {"name": "length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.length", "macro_sql": "{% macro length(expression) -%}\n {{ return(adapter.dispatch('length', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__length"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6714518, "supported_languages": null}, "macro.dbt.default__length": {"name": "default__length", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/length.sql", "original_file_path": "macros/utils/length.sql", "unique_id": "macro.dbt.default__length", "macro_sql": "{% macro default__length(expression) %}\n\n length(\n {{ expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6715488, "supported_languages": null}, "macro.dbt.dateadd": {"name": "dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.dateadd", "macro_sql": "{% macro dateadd(datepart, interval, from_date_or_timestamp) %}\n {{ return(adapter.dispatch('dateadd', 'dbt')(datepart, interval, from_date_or_timestamp)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.671851, "supported_languages": null}, "macro.dbt.default__dateadd": {"name": "default__dateadd", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/dateadd.sql", "original_file_path": "macros/utils/dateadd.sql", "unique_id": "macro.dbt.default__dateadd", "macro_sql": "{% macro default__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n dateadd(\n {{ datepart }},\n {{ interval }},\n {{ from_date_or_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.671998, "supported_languages": null}, "macro.dbt.intersect": {"name": "intersect", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.intersect", "macro_sql": "{% macro intersect() %}\n {{ return(adapter.dispatch('intersect', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__intersect"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.672199, "supported_languages": null}, "macro.dbt.default__intersect": {"name": "default__intersect", "resource_type": "macro", 
"package_name": "dbt", "path": "macros/utils/intersect.sql", "original_file_path": "macros/utils/intersect.sql", "unique_id": "macro.dbt.default__intersect", "macro_sql": "{% macro default__intersect() %}\n\n intersect\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.672344, "supported_languages": null}, "macro.dbt.escape_single_quotes": {"name": "escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.escape_single_quotes", "macro_sql": "{% macro escape_single_quotes(expression) %}\n {{ return(adapter.dispatch('escape_single_quotes', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__escape_single_quotes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.672582, "supported_languages": null}, "macro.dbt.default__escape_single_quotes": {"name": "default__escape_single_quotes", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/escape_single_quotes.sql", "original_file_path": "macros/utils/escape_single_quotes.sql", "unique_id": "macro.dbt.default__escape_single_quotes", "macro_sql": "{% macro default__escape_single_quotes(expression) -%}\n{{ expression | replace(\"'\",\"''\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.67271, "supported_languages": null}, "macro.dbt.right": {"name": "right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.right", "macro_sql": "{% macro right(string_text, length_expression) -%}\n {{ return(adapter.dispatch('right', 'dbt') (string_text, length_expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__right"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6729732, "supported_languages": null}, "macro.dbt.default__right": {"name": "default__right", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/right.sql", "original_file_path": "macros/utils/right.sql", "unique_id": "macro.dbt.default__right", "macro_sql": "{% macro default__right(string_text, length_expression) %}\n\n right(\n {{ string_text }},\n {{ length_expression }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.673098, "supported_languages": null}, "macro.dbt.listagg": {"name": "listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.listagg", "macro_sql": "{% macro listagg(measure, delimiter_text=\"','\", order_by_clause=none, limit_num=none) -%}\n {{ return(adapter.dispatch('listagg', 'dbt') (measure, delimiter_text, order_by_clause, limit_num)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__listagg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1724705296.673653, "supported_languages": null}, "macro.dbt.default__listagg": {"name": "default__listagg", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/listagg.sql", "original_file_path": "macros/utils/listagg.sql", "unique_id": "macro.dbt.default__listagg", "macro_sql": "{% macro default__listagg(measure, delimiter_text, order_by_clause, limit_num) -%}\n\n {% if limit_num -%}\n array_to_string(\n array_slice(\n array_agg(\n {{ measure }}\n ){% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n ,0\n ,{{ limit_num }}\n ),\n {{ delimiter_text }}\n )\n {%- else %}\n listagg(\n {{ measure }},\n {{ delimiter_text }}\n )\n {% if order_by_clause -%}\n within group ({{ order_by_clause }})\n {%- endif %}\n {%- endif %}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6740842, "supported_languages": null}, "macro.dbt.datediff": {"name": "datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.datediff", "macro_sql": "{% macro datediff(first_date, second_date, datepart) %}\n {{ return(adapter.dispatch('datediff', 'dbt')(first_date, second_date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6745, "supported_languages": null}, "macro.dbt.default__datediff": {"name": "default__datediff", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/datediff.sql", "original_file_path": "macros/utils/datediff.sql", "unique_id": "macro.dbt.default__datediff", "macro_sql": "{% macro default__datediff(first_date, second_date, datepart) -%}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.67467, "supported_languages": null}, "macro.dbt.safe_cast": {"name": "safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.safe_cast", "macro_sql": "{% macro safe_cast(field, type) %}\n {{ return(adapter.dispatch('safe_cast', 'dbt') (field, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__safe_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.674952, "supported_languages": null}, "macro.dbt.default__safe_cast": {"name": "default__safe_cast", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/safe_cast.sql", "original_file_path": "macros/utils/safe_cast.sql", "unique_id": "macro.dbt.default__safe_cast", "macro_sql": "{% macro default__safe_cast(field, type) %}\n {# most databases don't support this function yet\n so we just need to use cast #}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6750898, "supported_languages": null}, "macro.dbt.hash": {"name": 
"hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.hash", "macro_sql": "{% macro hash(field) -%}\n {{ return(adapter.dispatch('hash', 'dbt') (field)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__hash"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.675332, "supported_languages": null}, "macro.dbt.default__hash": {"name": "default__hash", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/hash.sql", "original_file_path": "macros/utils/hash.sql", "unique_id": "macro.dbt.default__hash", "macro_sql": "{% macro default__hash(field) -%}\n md5(cast({{ field }} as {{ api.Column.translate_type('string') }}))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.675483, "supported_languages": null}, "macro.dbt.cast_bool_to_text": {"name": "cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.cast_bool_to_text", "macro_sql": "{% macro cast_bool_to_text(field) %}\n {{ adapter.dispatch('cast_bool_to_text', 'dbt') (field) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__cast_bool_to_text"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6757052, "supported_languages": null}, "macro.dbt.default__cast_bool_to_text": {"name": "default__cast_bool_to_text", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/cast_bool_to_text.sql", "original_file_path": "macros/utils/cast_bool_to_text.sql", "unique_id": "macro.dbt.default__cast_bool_to_text", "macro_sql": "{% macro default__cast_bool_to_text(field) %}\n cast({{ field }} as {{ api.Column.translate_type('string') }})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.675851, "supported_languages": null}, "macro.dbt.any_value": {"name": "any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.any_value", "macro_sql": "{% macro any_value(expression) -%}\n {{ return(adapter.dispatch('any_value', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__any_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.676121, "supported_languages": null}, "macro.dbt.default__any_value": {"name": "default__any_value", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/any_value.sql", "original_file_path": "macros/utils/any_value.sql", "unique_id": "macro.dbt.default__any_value", "macro_sql": "{% macro default__any_value(expression) -%}\n\n any_value({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.67623, "supported_languages": null}, "macro.dbt.position": {"name": "position", 
"resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.position", "macro_sql": "{% macro position(substring_text, string_text) -%}\n {{ return(adapter.dispatch('position', 'dbt') (substring_text, string_text)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__position"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.676497, "supported_languages": null}, "macro.dbt.default__position": {"name": "default__position", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/position.sql", "original_file_path": "macros/utils/position.sql", "unique_id": "macro.dbt.default__position", "macro_sql": "{% macro default__position(substring_text, string_text) %}\n\n position(\n {{ substring_text }} in {{ string_text }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.676619, "supported_languages": null}, "macro.dbt.string_literal": {"name": "string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.string_literal", "macro_sql": "{%- macro string_literal(value) -%}\n {{ return(adapter.dispatch('string_literal', 'dbt') (value)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__string_literal"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.676833, "supported_languages": null}, "macro.dbt.default__string_literal": {"name": "default__string_literal", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/literal.sql", "original_file_path": "macros/utils/literal.sql", "unique_id": "macro.dbt.default__string_literal", "macro_sql": "{% macro default__string_literal(value) -%}\n '{{ value }}'\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.677006, "supported_languages": null}, "macro.dbt.type_string": {"name": "type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_string", "macro_sql": "\n\n{%- macro type_string() -%}\n {{ return(adapter.dispatch('type_string', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.677828, "supported_languages": null}, "macro.dbt.default__type_string": {"name": "default__type_string", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_string", "macro_sql": "{% macro default__type_string() %}\n {{ return(api.Column.translate_type(\"string\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.677964, "supported_languages": null}, "macro.dbt.type_timestamp": {"name": 
"type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_timestamp", "macro_sql": "\n\n{%- macro type_timestamp() -%}\n {{ return(adapter.dispatch('type_timestamp', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.678108, "supported_languages": null}, "macro.dbt.default__type_timestamp": {"name": "default__type_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_timestamp", "macro_sql": "{% macro default__type_timestamp() %}\n {{ return(api.Column.translate_type(\"timestamp\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.678247, "supported_languages": null}, "macro.dbt.type_float": {"name": "type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_float", "macro_sql": "\n\n{%- macro type_float() -%}\n {{ return(adapter.dispatch('type_float', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_float"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.678394, "supported_languages": null}, "macro.dbt.default__type_float": {"name": "default__type_float", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_float", "macro_sql": "{% macro default__type_float() %}\n {{ return(api.Column.translate_type(\"float\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.678528, "supported_languages": null}, "macro.dbt.type_numeric": {"name": "type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_numeric", "macro_sql": "\n\n{%- macro type_numeric() -%}\n {{ return(adapter.dispatch('type_numeric', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6786718, "supported_languages": null}, "macro.dbt.default__type_numeric": {"name": "default__type_numeric", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_numeric", "macro_sql": "{% macro default__type_numeric() %}\n {{ return(api.Column.numeric_type(\"numeric\", 28, 6)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.678831, "supported_languages": null}, "macro.dbt.type_bigint": {"name": 
"type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_bigint", "macro_sql": "\n\n{%- macro type_bigint() -%}\n {{ return(adapter.dispatch('type_bigint', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6789818, "supported_languages": null}, "macro.dbt.default__type_bigint": {"name": "default__type_bigint", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_bigint", "macro_sql": "{% macro default__type_bigint() %}\n {{ return(api.Column.translate_type(\"bigint\")) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6791182, "supported_languages": null}, "macro.dbt.type_int": {"name": "type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_int", "macro_sql": "\n\n{%- macro type_int() -%}\n {{ return(adapter.dispatch('type_int', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.679261, "supported_languages": null}, "macro.dbt.default__type_int": {"name": "default__type_int", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_int", "macro_sql": "{%- macro default__type_int() -%}\n {{ return(api.Column.translate_type(\"integer\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.679392, "supported_languages": null}, "macro.dbt.type_boolean": {"name": "type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.type_boolean", "macro_sql": "\n\n{%- macro type_boolean() -%}\n {{ return(adapter.dispatch('type_boolean', 'dbt')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.default__type_boolean"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.679533, "supported_languages": null}, "macro.dbt.default__type_boolean": {"name": "default__type_boolean", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/data_types.sql", "original_file_path": "macros/utils/data_types.sql", "unique_id": "macro.dbt.default__type_boolean", "macro_sql": "{%- macro default__type_boolean() -%}\n {{ return(api.Column.translate_type(\"boolean\")) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.679661, "supported_languages": null}, "macro.dbt.array_concat": {"name": "array_concat", "resource_type": 
"macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.array_concat", "macro_sql": "{% macro array_concat(array_1, array_2) -%}\n {{ return(adapter.dispatch('array_concat', 'dbt')(array_1, array_2)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.679913, "supported_languages": null}, "macro.dbt.default__array_concat": {"name": "default__array_concat", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_concat.sql", "original_file_path": "macros/utils/array_concat.sql", "unique_id": "macro.dbt.default__array_concat", "macro_sql": "{% macro default__array_concat(array_1, array_2) -%}\n array_cat({{ array_1 }}, {{ array_2 }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.680036, "supported_languages": null}, "macro.dbt.bool_or": {"name": "bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.bool_or", "macro_sql": "{% macro bool_or(expression) -%}\n {{ return(adapter.dispatch('bool_or', 'dbt') (expression)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__bool_or"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.680257, "supported_languages": null}, "macro.dbt.default__bool_or": {"name": "default__bool_or", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/bool_or.sql", "original_file_path": "macros/utils/bool_or.sql", "unique_id": "macro.dbt.default__bool_or", "macro_sql": "{% macro default__bool_or(expression) -%}\n\n bool_or({{ expression }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.680353, "supported_languages": null}, "macro.dbt.last_day": {"name": "last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.last_day", "macro_sql": "{% macro last_day(date, datepart) %}\n {{ return(adapter.dispatch('last_day', 'dbt') (date, datepart)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.680725, "supported_languages": null}, "macro.dbt.default_last_day": {"name": "default_last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default_last_day", "macro_sql": "\n\n{%- macro default_last_day(date, datepart) -%}\n cast(\n {{dbt.dateadd('day', '-1',\n dbt.dateadd(datepart, '1', dbt.date_trunc(datepart, date))\n )}}\n as date)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.680969, "supported_languages": 
null}, "macro.dbt.default__last_day": {"name": "default__last_day", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/last_day.sql", "original_file_path": "macros/utils/last_day.sql", "unique_id": "macro.dbt.default__last_day", "macro_sql": "{% macro default__last_day(date, datepart) -%}\n {{dbt.default_last_day(date, datepart)}}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default_last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.681099, "supported_languages": null}, "macro.dbt.split_part": {"name": "split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.split_part", "macro_sql": "{% macro split_part(string_text, delimiter_text, part_number) %}\n {{ return(adapter.dispatch('split_part', 'dbt') (string_text, delimiter_text, part_number)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__split_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.681547, "supported_languages": null}, "macro.dbt.default__split_part": {"name": "default__split_part", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt.default__split_part", "macro_sql": "{% macro default__split_part(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.681692, "supported_languages": null}, "macro.dbt._split_part_negative": {"name": "_split_part_negative", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/split_part.sql", "original_file_path": "macros/utils/split_part.sql", "unique_id": "macro.dbt._split_part_negative", "macro_sql": "{% macro _split_part_negative(string_text, delimiter_text, part_number) %}\n\n split_part(\n {{ string_text }},\n {{ delimiter_text }},\n length({{ string_text }})\n - length(\n replace({{ string_text }}, {{ delimiter_text }}, '')\n ) + 2 + {{ part_number }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.681886, "supported_languages": null}, "macro.dbt.date_trunc": {"name": "date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.date_trunc", "macro_sql": "{% macro date_trunc(datepart, date) -%}\n {{ return(adapter.dispatch('date_trunc', 'dbt') (datepart, date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6821241, "supported_languages": null}, "macro.dbt.default__date_trunc": {"name": "default__date_trunc", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/date_trunc.sql", "original_file_path": "macros/utils/date_trunc.sql", "unique_id": "macro.dbt.default__date_trunc", "macro_sql": "{% 
macro default__date_trunc(datepart, date) -%}\n date_trunc('{{datepart}}', {{date}})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.682239, "supported_languages": null}, "macro.dbt.array_construct": {"name": "array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.array_construct", "macro_sql": "{% macro array_construct(inputs=[], data_type=api.Column.translate_type('integer')) -%}\n {{ return(adapter.dispatch('array_construct', 'dbt')(inputs, data_type)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_construct"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6825678, "supported_languages": null}, "macro.dbt.default__array_construct": {"name": "default__array_construct", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_construct.sql", "original_file_path": "macros/utils/array_construct.sql", "unique_id": "macro.dbt.default__array_construct", "macro_sql": "{% macro default__array_construct(inputs, data_type) -%}\n {% if inputs|length > 0 %}\n array[ {{ inputs|join(' , ') }} ]\n {% else %}\n array[]::{{data_type}}[]\n {% endif %}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.682782, "supported_languages": null}, "macro.dbt.array_append": {"name": "array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.array_append", "macro_sql": "{% macro array_append(array, new_element) -%}\n {{ return(adapter.dispatch('array_append', 'dbt')(array, new_element)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__array_append"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6830301, "supported_languages": null}, "macro.dbt.default__array_append": {"name": "default__array_append", "resource_type": "macro", "package_name": "dbt", "path": "macros/utils/array_append.sql", "original_file_path": "macros/utils/array_append.sql", "unique_id": "macro.dbt.default__array_append", "macro_sql": "{% macro default__array_append(array, new_element) -%}\n array_append({{ array }}, {{ new_element }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.683146, "supported_languages": null}, "macro.dbt.create_schema": {"name": "create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.create_schema", "macro_sql": "{% macro create_schema(relation) -%}\n {{ adapter.dispatch('create_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__create_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.683469, "supported_languages": null}, 
"macro.dbt.default__create_schema": {"name": "default__create_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__create_schema", "macro_sql": "{% macro default__create_schema(relation) -%}\n {%- call statement('create_schema') -%}\n create schema if not exists {{ relation.without_identifier() }}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.683629, "supported_languages": null}, "macro.dbt.drop_schema": {"name": "drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.drop_schema", "macro_sql": "{% macro drop_schema(relation) -%}\n {{ adapter.dispatch('drop_schema', 'dbt')(relation) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__drop_schema"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6837718, "supported_languages": null}, "macro.dbt.default__drop_schema": {"name": "default__drop_schema", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/schema.sql", "original_file_path": "macros/adapters/schema.sql", "unique_id": "macro.dbt.default__drop_schema", "macro_sql": "{% macro default__drop_schema(relation) -%}\n {%- call statement('drop_schema') -%}\n drop schema if exists {{ relation.without_identifier() }} cascade\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.683934, "supported_languages": null}, "macro.dbt.current_timestamp": {"name": "current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp", "macro_sql": "{%- macro current_timestamp() -%}\n {{ adapter.dispatch('current_timestamp', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.684586, "supported_languages": null}, "macro.dbt.default__current_timestamp": {"name": "default__current_timestamp", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp", "macro_sql": "{% macro default__current_timestamp() -%}\n {{ exceptions.raise_not_implemented(\n 'current_timestamp macro not implemented for adapter ' + adapter.type()) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.684761, "supported_languages": null}, "macro.dbt.snapshot_get_time": {"name": "snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.snapshot_get_time", "macro_sql": "\n\n{%- macro 
snapshot_get_time() -%}\n {{ adapter.dispatch('snapshot_get_time', 'dbt')() }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_postgres.postgres__snapshot_get_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.684905, "supported_languages": null}, "macro.dbt.default__snapshot_get_time": {"name": "default__snapshot_get_time", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__snapshot_get_time", "macro_sql": "{% macro default__snapshot_get_time() %}\n {{ current_timestamp() }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.685009, "supported_languages": null}, "macro.dbt.current_timestamp_backcompat": {"name": "current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_backcompat", "macro_sql": "{% macro current_timestamp_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6852841, "supported_languages": null}, "macro.dbt.default__current_timestamp_backcompat": {"name": "default__current_timestamp_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_backcompat", "macro_sql": "{% macro default__current_timestamp_backcompat() %}\n current_timestamp::timestamp\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.685355, "supported_languages": null}, "macro.dbt.current_timestamp_in_utc_backcompat": {"name": "current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.current_timestamp_in_utc_backcompat", "macro_sql": "{% macro current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_in_utc_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__current_timestamp_in_utc_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.68551, "supported_languages": null}, "macro.dbt.default__current_timestamp_in_utc_backcompat": {"name": "default__current_timestamp_in_utc_backcompat", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/timestamps.sql", "original_file_path": "macros/adapters/timestamps.sql", "unique_id": "macro.dbt.default__current_timestamp_in_utc_backcompat", "macro_sql": "{% macro default__current_timestamp_in_utc_backcompat() %}\n {{ return(adapter.dispatch('current_timestamp_backcompat', 'dbt')()) }}\n{% endmacro %}", "depends_on": 
{"macros": ["macro.dbt.current_timestamp_backcompat", "macro.dbt_postgres.postgres__current_timestamp_backcompat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.685666, "supported_languages": null}, "macro.dbt.get_create_index_sql": {"name": "get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_create_index_sql", "macro_sql": "{% macro get_create_index_sql(relation, index_dict) -%}\n {{ return(adapter.dispatch('get_create_index_sql', 'dbt')(relation, index_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_create_index_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.68644, "supported_languages": null}, "macro.dbt.default__get_create_index_sql": {"name": "default__get_create_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_create_index_sql", "macro_sql": "{% macro default__get_create_index_sql(relation, index_dict) -%}\n {% do return(None) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.68657, "supported_languages": null}, "macro.dbt.create_indexes": {"name": "create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.create_indexes", "macro_sql": "{% macro create_indexes(relation) -%}\n {{ adapter.dispatch('create_indexes', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.default__create_indexes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.686713, "supported_languages": null}, "macro.dbt.default__create_indexes": {"name": "default__create_indexes", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__create_indexes", "macro_sql": "{% macro default__create_indexes(relation) -%}\n {%- set _indexes = config.get('indexes', default=[]) -%}\n\n {% for _index_dict in _indexes %}\n {% set create_index_sql = get_create_index_sql(relation, _index_dict) %}\n {% if create_index_sql %}\n {% do run_query(create_index_sql) %}\n {% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_create_index_sql", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.687092, "supported_languages": null}, "macro.dbt.get_drop_index_sql": {"name": "get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_drop_index_sql", "macro_sql": "{% macro get_drop_index_sql(relation, index_name) -%}\n {{ adapter.dispatch('get_drop_index_sql', 'dbt')(relation, index_name) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_drop_index_sql"]}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.687262, "supported_languages": null}, "macro.dbt.default__get_drop_index_sql": {"name": "default__get_drop_index_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_drop_index_sql", "macro_sql": "{% macro default__get_drop_index_sql(relation, index_name) -%}\n {{ exceptions.raise_compiler_error(\"`get_drop_index_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.687381, "supported_languages": null}, "macro.dbt.get_show_indexes_sql": {"name": "get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.get_show_indexes_sql", "macro_sql": "{% macro get_show_indexes_sql(relation) -%}\n {{ adapter.dispatch('get_show_indexes_sql', 'dbt')(relation) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_indexes_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.68752, "supported_languages": null}, "macro.dbt.default__get_show_indexes_sql": {"name": "default__get_show_indexes_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/indexes.sql", "original_file_path": "macros/adapters/indexes.sql", "unique_id": "macro.dbt.default__get_show_indexes_sql", "macro_sql": "{% macro default__get_show_indexes_sql(relation) -%}\n {{ exceptions.raise_compiler_error(\"`get_show_indexes_sql has not been implemented for this adapter.\") }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.687642, "supported_languages": null}, "macro.dbt.make_intermediate_relation": {"name": "make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_intermediate_relation", "macro_sql": "{% macro make_intermediate_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_intermediate_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_intermediate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6894069, "supported_languages": null}, "macro.dbt.default__make_intermediate_relation": {"name": "default__make_intermediate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_intermediate_relation", "macro_sql": "{% macro default__make_intermediate_relation(base_relation, suffix) %}\n {{ return(default__make_temp_relation(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, 
"patch_path": null, "arguments": [], "created_at": 1724705296.689565, "supported_languages": null}, "macro.dbt.make_temp_relation": {"name": "make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_temp_relation", "macro_sql": "{% macro make_temp_relation(base_relation, suffix='__dbt_tmp') %}\n {{ return(adapter.dispatch('make_temp_relation', 'dbt')(base_relation, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_temp_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6897569, "supported_languages": null}, "macro.dbt.default__make_temp_relation": {"name": "default__make_temp_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_temp_relation", "macro_sql": "{% macro default__make_temp_relation(base_relation, suffix) %}\n {%- set temp_identifier = base_relation.identifier ~ suffix -%}\n {%- set temp_relation = base_relation.incorporate(\n path={\"identifier\": temp_identifier}) -%}\n\n {{ return(temp_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.690014, "supported_languages": null}, "macro.dbt.make_backup_relation": {"name": "make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.make_backup_relation", "macro_sql": "{% macro make_backup_relation(base_relation, backup_relation_type, suffix='__dbt_backup') %}\n {{ return(adapter.dispatch('make_backup_relation', 'dbt')(base_relation, backup_relation_type, suffix)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__make_backup_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6902359, "supported_languages": null}, "macro.dbt.default__make_backup_relation": {"name": "default__make_backup_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__make_backup_relation", "macro_sql": "{% macro default__make_backup_relation(base_relation, backup_relation_type, suffix) %}\n {%- set backup_identifier = base_relation.identifier ~ suffix -%}\n {%- set backup_relation = base_relation.incorporate(\n path={\"identifier\": backup_identifier},\n type=backup_relation_type\n ) -%}\n {{ return(backup_relation) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6905222, "supported_languages": null}, "macro.dbt.truncate_relation": {"name": "truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.truncate_relation", "macro_sql": "{% macro truncate_relation(relation) -%}\n {{ return(adapter.dispatch('truncate_relation', 'dbt')(relation)) }}\n{% endmacro %}", 
"depends_on": {"macros": ["macro.dbt.default__truncate_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.690687, "supported_languages": null}, "macro.dbt.default__truncate_relation": {"name": "default__truncate_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__truncate_relation", "macro_sql": "{% macro default__truncate_relation(relation) -%}\n {% call statement('truncate_relation') -%}\n truncate table {{ relation }}\n {%- endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6908998, "supported_languages": null}, "macro.dbt.get_or_create_relation": {"name": "get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.get_or_create_relation", "macro_sql": "{% macro get_or_create_relation(database, schema, identifier, type) -%}\n {{ return(adapter.dispatch('get_or_create_relation', 'dbt')(database, schema, identifier, type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_or_create_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.691124, "supported_languages": null}, "macro.dbt.default__get_or_create_relation": {"name": "default__get_or_create_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.default__get_or_create_relation", "macro_sql": "{% macro default__get_or_create_relation(database, schema, identifier, type) %}\n {%- set target_relation = adapter.get_relation(database=database, schema=schema, identifier=identifier) %}\n\n {% if target_relation %}\n {% do return([true, target_relation]) %}\n {% endif %}\n\n {%- set new_relation = api.Relation.create(\n database=database,\n schema=schema,\n identifier=identifier,\n type=type\n ) -%}\n {% do return([false, new_relation]) %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.691608, "supported_languages": null}, "macro.dbt.load_cached_relation": {"name": "load_cached_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_cached_relation", "macro_sql": "{% macro load_cached_relation(relation) %}\n {% do return(adapter.get_relation(\n database=relation.database,\n schema=relation.schema,\n identifier=relation.identifier\n )) -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.691814, "supported_languages": null}, "macro.dbt.load_relation": {"name": "load_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/relation.sql", "original_file_path": "macros/adapters/relation.sql", "unique_id": "macro.dbt.load_relation", "macro_sql": "{% macro 
load_relation(relation) %}\n {{ return(load_cached_relation(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.load_cached_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.691943, "supported_languages": null}, "macro.dbt.collect_freshness": {"name": "collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness', 'dbt')(source, loaded_at_field, filter))}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.692323, "supported_languages": null}, "macro.dbt.default__collect_freshness": {"name": "default__collect_freshness", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/freshness.sql", "original_file_path": "macros/adapters/freshness.sql", "unique_id": "macro.dbt.default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n select\n max({{ loaded_at_field }}) as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endcall %}\n {{ return(load_result('collect_freshness')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.692698, "supported_languages": null}, "macro.dbt.validate_sql": {"name": "validate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/validate_sql.sql", "original_file_path": "macros/adapters/validate_sql.sql", "unique_id": "macro.dbt.validate_sql", "macro_sql": "{% macro validate_sql(sql) -%}\n {{ return(adapter.dispatch('validate_sql', 'dbt')(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__validate_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.692996, "supported_languages": null}, "macro.dbt.default__validate_sql": {"name": "default__validate_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/validate_sql.sql", "original_file_path": "macros/adapters/validate_sql.sql", "unique_id": "macro.dbt.default__validate_sql", "macro_sql": "{% macro default__validate_sql(sql) -%}\n {% call statement('validate_sql') -%}\n explain {{ sql }}\n {% endcall %}\n {{ return(load_result('validate_sql')) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6932049, "supported_languages": null}, "macro.dbt.copy_grants": {"name": "copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.copy_grants", "macro_sql": "{% macro copy_grants() %}\n {{ 
return(adapter.dispatch('copy_grants', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.694747, "supported_languages": null}, "macro.dbt.default__copy_grants": {"name": "default__copy_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__copy_grants", "macro_sql": "{% macro default__copy_grants() %}\n {{ return(True) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6948538, "supported_languages": null}, "macro.dbt.support_multiple_grantees_per_dcl_statement": {"name": "support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.support_multiple_grantees_per_dcl_statement", "macro_sql": "{% macro support_multiple_grantees_per_dcl_statement() %}\n {{ return(adapter.dispatch('support_multiple_grantees_per_dcl_statement', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6950111, "supported_languages": null}, "macro.dbt.default__support_multiple_grantees_per_dcl_statement": {"name": "default__support_multiple_grantees_per_dcl_statement", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__support_multiple_grantees_per_dcl_statement", "macro_sql": "\n\n{%- macro default__support_multiple_grantees_per_dcl_statement() -%}\n {{ return(True) }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.695117, "supported_languages": null}, "macro.dbt.should_revoke": {"name": "should_revoke", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.should_revoke", "macro_sql": "{% macro should_revoke(existing_relation, full_refresh_mode=True) %}\n\n {% if not existing_relation %}\n {#-- The table doesn't already exist, so no grants to copy over --#}\n {{ return(False) }}\n {% elif full_refresh_mode %}\n {#-- The object is being REPLACED -- whether grants are copied over depends on the value of user config --#}\n {{ return(copy_grants()) }}\n {% else %}\n {#-- The table is being merged/upserted/inserted -- grants will be carried over --#}\n {{ return(True) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.copy_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.695442, "supported_languages": null}, "macro.dbt.get_show_grant_sql": {"name": "get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": 
"macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_show_grant_sql", "macro_sql": "{% macro get_show_grant_sql(relation) %}\n {{ return(adapter.dispatch(\"get_show_grant_sql\", \"dbt\")(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_show_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6956239, "supported_languages": null}, "macro.dbt.default__get_show_grant_sql": {"name": "default__get_show_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_show_grant_sql", "macro_sql": "{% macro default__get_show_grant_sql(relation) %}\n show grants on {{ relation }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.695719, "supported_languages": null}, "macro.dbt.get_grant_sql": {"name": "get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_grant_sql", "macro_sql": "{% macro get_grant_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_grant_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_grant_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.695931, "supported_languages": null}, "macro.dbt.default__get_grant_sql": {"name": "default__get_grant_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_grant_sql", "macro_sql": "\n\n{%- macro default__get_grant_sql(relation, privilege, grantees) -%}\n grant {{ privilege }} on {{ relation }} to {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6961122, "supported_languages": null}, "macro.dbt.get_revoke_sql": {"name": "get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_revoke_sql", "macro_sql": "{% macro get_revoke_sql(relation, privilege, grantees) %}\n {{ return(adapter.dispatch('get_revoke_sql', 'dbt')(relation, privilege, grantees)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_revoke_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.696327, "supported_languages": null}, "macro.dbt.default__get_revoke_sql": {"name": "default__get_revoke_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_revoke_sql", "macro_sql": "\n\n{%- macro default__get_revoke_sql(relation, privilege, grantees) -%}\n revoke {{ privilege }} on {{ relation }} from {{ grantees | join(', ') }}\n{%- endmacro -%}\n\n\n", "depends_on": 
{"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6965828, "supported_languages": null}, "macro.dbt.get_dcl_statement_list": {"name": "get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.get_dcl_statement_list", "macro_sql": "{% macro get_dcl_statement_list(relation, grant_config, get_dcl_macro) %}\n {{ return(adapter.dispatch('get_dcl_statement_list', 'dbt')(relation, grant_config, get_dcl_macro)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_dcl_statement_list"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6968, "supported_languages": null}, "macro.dbt.default__get_dcl_statement_list": {"name": "default__get_dcl_statement_list", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__get_dcl_statement_list", "macro_sql": "\n\n{%- macro default__get_dcl_statement_list(relation, grant_config, get_dcl_macro) -%}\n {#\n -- Unpack grant_config into specific privileges and the set of users who need them granted/revoked.\n -- Depending on whether this database supports multiple grantees per statement, pass in the list of\n -- all grantees per privilege, or (if not) template one statement per privilege-grantee pair.\n -- `get_dcl_macro` will be either `get_grant_sql` or `get_revoke_sql`\n #}\n {%- set dcl_statements = [] -%}\n {%- for privilege, grantees in grant_config.items() %}\n {%- if support_multiple_grantees_per_dcl_statement() and grantees -%}\n {%- set dcl = get_dcl_macro(relation, privilege, grantees) -%}\n {%- do dcl_statements.append(dcl) -%}\n {%- else -%}\n {%- for grantee in grantees -%}\n {% set dcl = get_dcl_macro(relation, privilege, [grantee]) %}\n {%- do dcl_statements.append(dcl) -%}\n {% endfor -%}\n {%- endif -%}\n {%- endfor -%}\n {{ return(dcl_statements) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.support_multiple_grantees_per_dcl_statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.697421, "supported_languages": null}, "macro.dbt.call_dcl_statements": {"name": "call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.call_dcl_statements", "macro_sql": "{% macro call_dcl_statements(dcl_statement_list) %}\n {{ return(adapter.dispatch(\"call_dcl_statements\", \"dbt\")(dcl_statement_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.697601, "supported_languages": null}, "macro.dbt.default__call_dcl_statements": {"name": "default__call_dcl_statements", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__call_dcl_statements", "macro_sql": "{% macro default__call_dcl_statements(dcl_statement_list) %}\n {#\n -- By default, supply 
all grant + revoke statements in a single semicolon-separated block,\n -- so that they're all processed together.\n\n -- Some databases do not support this. Those adapters will need to override this macro\n -- to run each statement individually.\n #}\n {% call statement('grants') %}\n {% for dcl_statement in dcl_statement_list %}\n {{ dcl_statement }};\n {% endfor %}\n {% endcall %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.697828, "supported_languages": null}, "macro.dbt.apply_grants": {"name": "apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.apply_grants", "macro_sql": "{% macro apply_grants(relation, grant_config, should_revoke) %}\n {{ return(adapter.dispatch(\"apply_grants\", \"dbt\")(relation, grant_config, should_revoke)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__apply_grants"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.6980422, "supported_languages": null}, "macro.dbt.default__apply_grants": {"name": "default__apply_grants", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/apply_grants.sql", "original_file_path": "macros/adapters/apply_grants.sql", "unique_id": "macro.dbt.default__apply_grants", "macro_sql": "{% macro default__apply_grants(relation, grant_config, should_revoke=True) %}\n {#-- If grant_config is {} or None, this is a no-op --#}\n {% if grant_config %}\n {% if should_revoke %}\n {#-- We think previous grants may have carried over --#}\n {#-- Show current grants and calculate diffs --#}\n {% set current_grants_table = run_query(get_show_grant_sql(relation)) %}\n {% set current_grants_dict = adapter.standardize_grants_dict(current_grants_table) %}\n {% set needs_granting = diff_of_two_dicts(grant_config, current_grants_dict) %}\n {% set needs_revoking = diff_of_two_dicts(current_grants_dict, grant_config) %}\n {% if not (needs_granting or needs_revoking) %}\n {{ log('On ' ~ relation ~': All grants are in place, no revocation or granting needed.')}}\n {% endif %}\n {% else %}\n {#-- We don't think there's any chance of previous grants having carried over. --#}\n {#-- Jump straight to granting what the user has configured. 
--#}\n {% set needs_revoking = {} %}\n {% set needs_granting = grant_config %}\n {% endif %}\n {% if needs_granting or needs_revoking %}\n {% set revoke_statement_list = get_dcl_statement_list(relation, needs_revoking, get_revoke_sql) %}\n {% set grant_statement_list = get_dcl_statement_list(relation, needs_granting, get_grant_sql) %}\n {% set dcl_statement_list = revoke_statement_list + grant_statement_list %}\n {% if dcl_statement_list %}\n {{ call_dcl_statements(dcl_statement_list) }}\n {% endif %}\n {% endif %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.get_show_grant_sql", "macro.dbt.get_dcl_statement_list", "macro.dbt.call_dcl_statements"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.699203, "supported_languages": null}, "macro.dbt.get_show_sql": {"name": "get_show_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.get_show_sql", "macro_sql": "{% macro get_show_sql(compiled_code, sql_header, limit) -%}\n {%- if sql_header -%}\n {{ sql_header }}\n {%- endif -%}\n {%- if limit is not none -%}\n {{ get_limit_subquery_sql(compiled_code, limit) }}\n {%- else -%}\n {{ compiled_code }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_limit_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.699723, "supported_languages": null}, "macro.dbt.get_limit_subquery_sql": {"name": "get_limit_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.get_limit_subquery_sql", "macro_sql": "{% macro get_limit_subquery_sql(sql, limit) %}\n {{ adapter.dispatch('get_limit_subquery_sql', 'dbt')(sql, limit) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_limit_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.699908, "supported_languages": null}, "macro.dbt.default__get_limit_subquery_sql": {"name": "default__get_limit_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/show.sql", "original_file_path": "macros/adapters/show.sql", "unique_id": "macro.dbt.default__get_limit_subquery_sql", "macro_sql": "{% macro default__get_limit_subquery_sql(sql, limit) %}\n select *\n from (\n {{ sql }}\n ) as model_limit_subq\n limit {{ limit }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.700032, "supported_languages": null}, "macro.dbt.alter_column_comment": {"name": "alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_column_comment", "macro_sql": "{% macro alter_column_comment(relation, column_dict) -%}\n {{ return(adapter.dispatch('alter_column_comment', 'dbt')(relation, column_dict)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, 
"arguments": [], "created_at": 1724705296.700662, "supported_languages": null}, "macro.dbt.default__alter_column_comment": {"name": "default__alter_column_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_column_comment", "macro_sql": "{% macro default__alter_column_comment(relation, column_dict) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_column_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.700824, "supported_languages": null}, "macro.dbt.alter_relation_comment": {"name": "alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.alter_relation_comment", "macro_sql": "{% macro alter_relation_comment(relation, relation_comment) -%}\n {{ return(adapter.dispatch('alter_relation_comment', 'dbt')(relation, relation_comment)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__alter_relation_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.701016, "supported_languages": null}, "macro.dbt.default__alter_relation_comment": {"name": "default__alter_relation_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__alter_relation_comment", "macro_sql": "{% macro default__alter_relation_comment(relation, relation_comment) -%}\n {{ exceptions.raise_not_implemented(\n 'alter_relation_comment macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.701173, "supported_languages": null}, "macro.dbt.persist_docs": {"name": "persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.persist_docs", "macro_sql": "{% macro persist_docs(relation, model, for_relation=true, for_columns=true) -%}\n {{ return(adapter.dispatch('persist_docs', 'dbt')(relation, model, for_relation, for_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__persist_docs"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.701428, "supported_languages": null}, "macro.dbt.default__persist_docs": {"name": "default__persist_docs", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/persist_docs.sql", "original_file_path": "macros/adapters/persist_docs.sql", "unique_id": "macro.dbt.default__persist_docs", "macro_sql": "{% macro default__persist_docs(relation, model, for_relation, for_columns) -%}\n {% if for_relation and config.persist_relation_docs() and model.description %}\n {% do run_query(alter_relation_comment(relation, model.description)) %}\n {% endif %}\n\n {% if for_columns and config.persist_column_docs() and model.columns %}\n {% do 
run_query(alter_column_comment(relation, model.columns)) %}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query", "macro.dbt.alter_relation_comment", "macro.dbt.alter_column_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.701879, "supported_languages": null}, "macro.dbt.get_catalog_relations": {"name": "get_catalog_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog_relations", "macro_sql": "{% macro get_catalog_relations(information_schema, relations) -%}\n {{ return(adapter.dispatch('get_catalog_relations', 'dbt')(information_schema, relations)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.704512, "supported_languages": null}, "macro.dbt.default__get_catalog_relations": {"name": "default__get_catalog_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog_relations", "macro_sql": "{% macro default__get_catalog_relations(information_schema, relations) -%}\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog_relations not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.704797, "supported_languages": null}, "macro.dbt.get_catalog": {"name": "get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_catalog", "macro_sql": "{% macro get_catalog(information_schema, schemas) -%}\n {{ return(adapter.dispatch('get_catalog', 'dbt')(information_schema, schemas)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_catalog"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.70499, "supported_languages": null}, "macro.dbt.default__get_catalog": {"name": "default__get_catalog", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_catalog", "macro_sql": "{% macro default__get_catalog(information_schema, schemas) -%}\n\n {% set typename = adapter.type() %}\n {% set msg -%}\n get_catalog not implemented for {{ typename }}\n {%- endset %}\n\n {{ exceptions.raise_compiler_error(msg) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.705319, "supported_languages": null}, "macro.dbt.information_schema_name": {"name": "information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.information_schema_name", "macro_sql": "{% macro 
information_schema_name(database) %}\n {{ return(adapter.dispatch('information_schema_name', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__information_schema_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7054799, "supported_languages": null}, "macro.dbt.default__information_schema_name": {"name": "default__information_schema_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__information_schema_name", "macro_sql": "{% macro default__information_schema_name(database) -%}\n {%- if database -%}\n {{ database }}.INFORMATION_SCHEMA\n {%- else -%}\n INFORMATION_SCHEMA\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7056189, "supported_languages": null}, "macro.dbt.list_schemas": {"name": "list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_schemas", "macro_sql": "{% macro list_schemas(database) -%}\n {{ return(adapter.dispatch('list_schemas', 'dbt')(database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_schemas"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.70578, "supported_languages": null}, "macro.dbt.default__list_schemas": {"name": "default__list_schemas", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_schemas", "macro_sql": "{% macro default__list_schemas(database) -%}\n {% set sql %}\n select distinct schema_name\n from {{ information_schema_name(database) }}.SCHEMATA\n where catalog_name ilike '{{ database }}'\n {% endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.information_schema_name", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.706003, "supported_languages": null}, "macro.dbt.check_schema_exists": {"name": "check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.check_schema_exists", "macro_sql": "{% macro check_schema_exists(information_schema, schema) -%}\n {{ return(adapter.dispatch('check_schema_exists', 'dbt')(information_schema, schema)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__check_schema_exists"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.70619, "supported_languages": null}, "macro.dbt.default__check_schema_exists": {"name": "default__check_schema_exists", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__check_schema_exists", "macro_sql": "{% macro default__check_schema_exists(information_schema, schema) -%}\n 
{% set sql -%}\n select count(*)\n from {{ information_schema.replace(information_schema_view='SCHEMATA') }}\n where catalog_name='{{ information_schema.database }}'\n and schema_name='{{ schema }}'\n {%- endset %}\n {{ return(run_query(sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.replace", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7064698, "supported_languages": null}, "macro.dbt.list_relations_without_caching": {"name": "list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.list_relations_without_caching", "macro_sql": "{% macro list_relations_without_caching(schema_relation) %}\n {{ return(adapter.dispatch('list_relations_without_caching', 'dbt')(schema_relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__list_relations_without_caching"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.706635, "supported_languages": null}, "macro.dbt.default__list_relations_without_caching": {"name": "default__list_relations_without_caching", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__list_relations_without_caching", "macro_sql": "{% macro default__list_relations_without_caching(schema_relation) %}\n {{ exceptions.raise_not_implemented(\n 'list_relations_without_caching macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.70678, "supported_languages": null}, "macro.dbt.get_relations": {"name": "get_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_relations", "macro_sql": "{% macro get_relations() %}\n {{ return(adapter.dispatch('get_relations', 'dbt')()) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7069309, "supported_languages": null}, "macro.dbt.default__get_relations": {"name": "default__get_relations", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_relations", "macro_sql": "{% macro default__get_relations() %}\n {{ exceptions.raise_not_implemented(\n 'get_relations macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.707078, "supported_languages": null}, "macro.dbt.get_relation_last_modified": {"name": "get_relation_last_modified", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.get_relation_last_modified", "macro_sql": "{% macro 
get_relation_last_modified(information_schema, relations) %}\n {{ return(adapter.dispatch('get_relation_last_modified', 'dbt')(information_schema, relations)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_relation_last_modified"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.70728, "supported_languages": null}, "macro.dbt.default__get_relation_last_modified": {"name": "default__get_relation_last_modified", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/metadata.sql", "original_file_path": "macros/adapters/metadata.sql", "unique_id": "macro.dbt.default__get_relation_last_modified", "macro_sql": "{% macro default__get_relation_last_modified(information_schema, relations) %}\n {{ exceptions.raise_not_implemented(\n 'get_relation_last_modified macro not implemented for adapter ' + adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.707444, "supported_languages": null}, "macro.dbt.get_columns_in_relation": {"name": "get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_relation", "macro_sql": "{% macro get_columns_in_relation(relation) -%}\n {{ return(adapter.dispatch('get_columns_in_relation', 'dbt')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_postgres.postgres__get_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.709505, "supported_languages": null}, "macro.dbt.default__get_columns_in_relation": {"name": "default__get_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_relation", "macro_sql": "{% macro default__get_columns_in_relation(relation) -%}\n {{ exceptions.raise_not_implemented(\n 'get_columns_in_relation macro not implemented for adapter '+adapter.type()) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.709662, "supported_languages": null}, "macro.dbt.sql_convert_columns_in_relation": {"name": "sql_convert_columns_in_relation", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.sql_convert_columns_in_relation", "macro_sql": "{% macro sql_convert_columns_in_relation(table) -%}\n {% set columns = [] %}\n {% for row in table %}\n {% do columns.append(api.Column(*row)) %}\n {% endfor %}\n {{ return(columns) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.709961, "supported_languages": null}, "macro.dbt.get_empty_subquery_sql": {"name": "get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_subquery_sql", "macro_sql": "{% macro 
get_empty_subquery_sql(select_sql, select_sql_header=none) -%}\n {{ return(adapter.dispatch('get_empty_subquery_sql', 'dbt')(select_sql, select_sql_header)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.710169, "supported_languages": null}, "macro.dbt.default__get_empty_subquery_sql": {"name": "default__get_empty_subquery_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_subquery_sql", "macro_sql": "{% macro default__get_empty_subquery_sql(select_sql, select_sql_header=none) %}\n {%- if select_sql_header is not none -%}\n {{ select_sql_header }}\n {%- endif -%}\n select * from (\n {{ select_sql }}\n ) as __dbt_sbq\n where false\n limit 0\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.710361, "supported_languages": null}, "macro.dbt.get_empty_schema_sql": {"name": "get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_empty_schema_sql", "macro_sql": "{% macro get_empty_schema_sql(columns) -%}\n {{ return(adapter.dispatch('get_empty_schema_sql', 'dbt')(columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_empty_schema_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.710528, "supported_languages": null}, "macro.dbt.default__get_empty_schema_sql": {"name": "default__get_empty_schema_sql", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_empty_schema_sql", "macro_sql": "{% macro default__get_empty_schema_sql(columns) %}\n {%- set col_err = [] -%}\n {%- set col_naked_numeric = [] -%}\n select\n {% for i in columns %}\n {%- set col = columns[i] -%}\n {%- if col['data_type'] is not defined -%}\n {%- do col_err.append(col['name']) -%}\n {#-- If this column's type is just 'numeric' then it is missing precision/scale, raise a warning --#}\n {%- elif col['data_type'].strip().lower() in ('numeric', 'decimal', 'number') -%}\n {%- do col_naked_numeric.append(col['name']) -%}\n {%- endif -%}\n {% set col_name = adapter.quote(col['name']) if col.get('quote') else col['name'] %}\n cast(null as {{ col['data_type'] }}) as {{ col_name }}{{ \", \" if not loop.last }}\n {%- endfor -%}\n {%- if (col_err | length) > 0 -%}\n {{ exceptions.column_type_missing(column_names=col_err) }}\n {%- elif (col_naked_numeric | length) > 0 -%}\n {{ exceptions.warn(\"Detected columns with numeric type and unspecified precision/scale, this can lead to unintended rounding: \" ~ col_naked_numeric ~ \"`\") }}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.71188, "supported_languages": null}, "macro.dbt.get_column_schema_from_query": {"name": "get_column_schema_from_query", "resource_type": "macro", "package_name": "dbt", "path": 
"macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_column_schema_from_query", "macro_sql": "{% macro get_column_schema_from_query(select_sql, select_sql_header=none) -%}\n {% set columns = [] %}\n {# -- Using an 'empty subquery' here to get the same schema as the given select_sql statement, without necessitating a data scan.#}\n {% set sql = get_empty_subquery_sql(select_sql, select_sql_header) %}\n {% set column_schema = adapter.get_column_schema_from_query(sql) %}\n {{ return(column_schema) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.712207, "supported_languages": null}, "macro.dbt.get_columns_in_query": {"name": "get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.get_columns_in_query", "macro_sql": "{% macro get_columns_in_query(select_sql) -%}\n {{ return(adapter.dispatch('get_columns_in_query', 'dbt')(select_sql)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__get_columns_in_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.71238, "supported_languages": null}, "macro.dbt.default__get_columns_in_query": {"name": "default__get_columns_in_query", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__get_columns_in_query", "macro_sql": "{% macro default__get_columns_in_query(select_sql) %}\n {% call statement('get_columns_in_query', fetch_result=True, auto_begin=False) -%}\n {{ get_empty_subquery_sql(select_sql) }}\n {% endcall %}\n {{ return(load_result('get_columns_in_query').table.columns | map(attribute='name') | list) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.get_empty_subquery_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.712703, "supported_languages": null}, "macro.dbt.alter_column_type": {"name": "alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_column_type", "macro_sql": "{% macro alter_column_type(relation, column_name, new_column_type) -%}\n {{ return(adapter.dispatch('alter_column_type', 'dbt')(relation, column_name, new_column_type)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_column_type"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.712914, "supported_languages": null}, "macro.dbt.default__alter_column_type": {"name": "default__alter_column_type", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_column_type", "macro_sql": "{% macro default__alter_column_type(relation, column_name, new_column_type) -%}\n {#\n 1. Create a new column (w/ temp name and correct type)\n 2. Copy data over to it\n 3. Drop the existing column (cascade!)\n 4. 
Rename the new column to existing column\n #}\n {%- set tmp_column = column_name + \"__dbt_alter\" -%}\n\n {% call statement('alter_column_type') %}\n alter table {{ relation }} add column {{ adapter.quote(tmp_column) }} {{ new_column_type }};\n update {{ relation }} set {{ adapter.quote(tmp_column) }} = {{ adapter.quote(column_name) }};\n alter table {{ relation }} drop column {{ adapter.quote(column_name) }} cascade;\n alter table {{ relation }} rename column {{ adapter.quote(tmp_column) }} to {{ adapter.quote(column_name) }}\n {% endcall %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.713475, "supported_languages": null}, "macro.dbt.alter_relation_add_remove_columns": {"name": "alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.alter_relation_add_remove_columns", "macro_sql": "{% macro alter_relation_add_remove_columns(relation, add_columns = none, remove_columns = none) -%}\n {{ return(adapter.dispatch('alter_relation_add_remove_columns', 'dbt')(relation, add_columns, remove_columns)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__alter_relation_add_remove_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7137122, "supported_languages": null}, "macro.dbt.default__alter_relation_add_remove_columns": {"name": "default__alter_relation_add_remove_columns", "resource_type": "macro", "package_name": "dbt", "path": "macros/adapters/columns.sql", "original_file_path": "macros/adapters/columns.sql", "unique_id": "macro.dbt.default__alter_relation_add_remove_columns", "macro_sql": "{% macro default__alter_relation_add_remove_columns(relation, add_columns, remove_columns) %}\n\n {% if add_columns is none %}\n {% set add_columns = [] %}\n {% endif %}\n {% if remove_columns is none %}\n {% set remove_columns = [] %}\n {% endif %}\n\n {% set sql -%}\n\n alter {{ relation.type }} {{ relation }}\n\n {% for column in add_columns %}\n add column {{ column.name }} {{ column.data_type }}{{ ',' if not loop.last }}\n {% endfor %}{{ ',' if add_columns and remove_columns }}\n\n {% for column in remove_columns %}\n drop column {{ column.name }}{{ ',' if not loop.last }}\n {% endfor %}\n\n {%- endset -%}\n\n {% do run_query(sql) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7144241, "supported_languages": null}, "macro.dbt.resolve_model_name": {"name": "resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.resolve_model_name", "macro_sql": "{% macro resolve_model_name(input_model_name) %}\n {{ return(adapter.dispatch('resolve_model_name', 'dbt')(input_model_name)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.default__resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.715914, "supported_languages": null}, "macro.dbt.default__resolve_model_name": {"name": 
"default__resolve_model_name", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.default__resolve_model_name", "macro_sql": "\n\n{%- macro default__resolve_model_name(input_model_name) -%}\n {{ input_model_name | string | replace('\"', '\\\"') }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.716056, "supported_languages": null}, "macro.dbt.build_ref_function": {"name": "build_ref_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_ref_function", "macro_sql": "{% macro build_ref_function(model) %}\n\n {%- set ref_dict = {} -%}\n {%- for _ref in model.refs -%}\n {% set _ref_args = [_ref.get('package'), _ref['name']] if _ref.get('package') else [_ref['name'],] %}\n {%- set resolved = ref(*_ref_args, v=_ref.get('version')) -%}\n {%- if _ref.get('version') -%}\n {% do _ref_args.extend([\"v\" ~ _ref['version']]) %}\n {%- endif -%}\n {%- do ref_dict.update({_ref_args | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef ref(*args, **kwargs):\n refs = {{ ref_dict | tojson }}\n key = '.'.join(args)\n version = kwargs.get(\"v\") or kwargs.get(\"version\")\n if version:\n key += f\".v{version}\"\n dbt_load_df_function = kwargs.get(\"dbt_load_df_function\")\n return dbt_load_df_function(refs[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.716869, "supported_languages": null}, "macro.dbt.build_source_function": {"name": "build_source_function", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_source_function", "macro_sql": "{% macro build_source_function(model) %}\n\n {%- set source_dict = {} -%}\n {%- for _source in model.sources -%}\n {%- set resolved = source(*_source) -%}\n {%- do source_dict.update({_source | join('.'): resolve_model_name(resolved)}) -%}\n {%- endfor -%}\n\ndef source(*args, dbt_load_df_function):\n sources = {{ source_dict | tojson }}\n key = '.'.join(args)\n return dbt_load_df_function(sources[key])\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.resolve_model_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.717248, "supported_languages": null}, "macro.dbt.build_config_dict": {"name": "build_config_dict", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.build_config_dict", "macro_sql": "{% macro build_config_dict(model) %}\n {%- set config_dict = {} -%}\n {% set config_dbt_used = zip(model.config.config_keys_used, model.config.config_keys_defaults) | list %}\n {%- for key, default in config_dbt_used -%}\n {# weird type testing with enum, would be much easier to write this logic in Python! 
#}\n {%- if key == \"language\" -%}\n {%- set value = \"python\" -%}\n {%- endif -%}\n {%- set value = model.config.get(key, default) -%}\n {%- do config_dict.update({key: value}) -%}\n {%- endfor -%}\nconfig_dict = {{ config_dict }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.717772, "supported_languages": null}, "macro.dbt.py_script_postfix": {"name": "py_script_postfix", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_postfix", "macro_sql": "{% macro py_script_postfix(model) %}\n# This part is user provided model code\n# you will need to copy the next section to run the code\n# COMMAND ----------\n# this part is dbt logic for get ref work, do not modify\n\n{{ build_ref_function(model ) }}\n{{ build_source_function(model ) }}\n{{ build_config_dict(model) }}\n\nclass config:\n def __init__(self, *args, **kwargs):\n pass\n\n @staticmethod\n def get(key, default=None):\n return config_dict.get(key, default)\n\nclass this:\n \"\"\"dbt.this() or dbt.this.identifier\"\"\"\n database = \"{{ this.database }}\"\n schema = \"{{ this.schema }}\"\n identifier = \"{{ this.identifier }}\"\n {% set this_relation_name = resolve_model_name(this) %}\n def __repr__(self):\n return '{{ this_relation_name }}'\n\n\nclass dbtObj:\n def __init__(self, load_df_function) -> None:\n self.source = lambda *args: source(*args, dbt_load_df_function=load_df_function)\n self.ref = lambda *args, **kwargs: ref(*args, **kwargs, dbt_load_df_function=load_df_function)\n self.config = config\n self.this = this()\n self.is_incremental = {{ is_incremental() }}\n\n# COMMAND ----------\n{{py_script_comment()}}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.build_ref_function", "macro.dbt.build_source_function", "macro.dbt.build_config_dict", "macro.dbt.resolve_model_name", "macro.dbt.is_incremental", "macro.dbt.py_script_comment"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.718205, "supported_languages": null}, "macro.dbt.py_script_comment": {"name": "py_script_comment", "resource_type": "macro", "package_name": "dbt", "path": "macros/python_model/python.sql", "original_file_path": "macros/python_model/python.sql", "unique_id": "macro.dbt.py_script_comment", "macro_sql": "{%macro py_script_comment()%}\n{%endmacro%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.718274, "supported_languages": null}, "macro.dbt.test_unique": {"name": "test_unique", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_unique", "macro_sql": "{% test unique(model, column_name) %}\n {% set macro = adapter.dispatch('test_unique', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_unique"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.718741, "supported_languages": null}, "macro.dbt.test_not_null": {"name": "test_not_null", "resource_type": "macro", "package_name": "dbt", "path": 
"tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_not_null", "macro_sql": "{% test not_null(model, column_name) %}\n {% set macro = adapter.dispatch('test_not_null', 'dbt') %}\n {{ macro(model, column_name) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.718944, "supported_languages": null}, "macro.dbt.test_accepted_values": {"name": "test_accepted_values", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_accepted_values", "macro_sql": "{% test accepted_values(model, column_name, values, quote=True) %}\n {% set macro = adapter.dispatch('test_accepted_values', 'dbt') %}\n {{ macro(model, column_name, values, quote) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7192, "supported_languages": null}, "macro.dbt.test_relationships": {"name": "test_relationships", "resource_type": "macro", "package_name": "dbt", "path": "tests/generic/builtin.sql", "original_file_path": "tests/generic/builtin.sql", "unique_id": "macro.dbt.test_relationships", "macro_sql": "{% test relationships(model, column_name, to, field) %}\n {% set macro = adapter.dispatch('test_relationships', 'dbt') %}\n {{ macro(model, column_name, to, field) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt.default__test_relationships"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.719442, "supported_languages": null}, "macro.dbt_utils.get_url_host": {"name": "get_url_host", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/web/get_url_host.sql", "original_file_path": "macros/web/get_url_host.sql", "unique_id": "macro.dbt_utils.get_url_host", "macro_sql": "{% macro get_url_host(field) -%}\n {{ return(adapter.dispatch('get_url_host', 'dbt_utils')(field)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_url_host"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.719751, "supported_languages": null}, "macro.dbt_utils.default__get_url_host": {"name": "default__get_url_host", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/web/get_url_host.sql", "original_file_path": "macros/web/get_url_host.sql", "unique_id": "macro.dbt_utils.default__get_url_host", "macro_sql": "{% macro default__get_url_host(field) -%}\n\n{%- set parsed =\n dbt.split_part(\n dbt.split_part(\n dbt.replace(\n dbt.replace(\n dbt.replace(field, \"'android-app://'\", \"''\"\n ), \"'http://'\", \"''\"\n ), \"'https://'\", \"''\"\n ), \"'/'\", 1\n ), \"'?'\", 1\n )\n\n-%}\n\n\n {{ dbt.safe_cast(\n parsed,\n dbt.type_string()\n )}}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.split_part", "macro.dbt.replace", "macro.dbt.safe_cast", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.720192, "supported_languages": null}, "macro.dbt_utils.get_url_path": {"name": "get_url_path", 
"resource_type": "macro", "package_name": "dbt_utils", "path": "macros/web/get_url_path.sql", "original_file_path": "macros/web/get_url_path.sql", "unique_id": "macro.dbt_utils.get_url_path", "macro_sql": "{% macro get_url_path(field) -%}\n {{ return(adapter.dispatch('get_url_path', 'dbt_utils')(field)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_url_path"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.720623, "supported_languages": null}, "macro.dbt_utils.default__get_url_path": {"name": "default__get_url_path", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/web/get_url_path.sql", "original_file_path": "macros/web/get_url_path.sql", "unique_id": "macro.dbt_utils.default__get_url_path", "macro_sql": "{% macro default__get_url_path(field) -%}\n\n {%- set stripped_url =\n dbt.replace(\n dbt.replace(field, \"'http://'\", \"''\"), \"'https://'\", \"''\")\n -%}\n\n {%- set first_slash_pos -%}\n coalesce(\n nullif({{ dbt.position(\"'/'\", stripped_url) }}, 0),\n {{ dbt.position(\"'?'\", stripped_url) }} - 1\n )\n {%- endset -%}\n\n {%- set parsed_path =\n dbt.split_part(\n dbt.right(\n stripped_url,\n dbt.length(stripped_url) ~ \"-\" ~ first_slash_pos\n ),\n \"'?'\", 1\n )\n -%}\n\n {{ dbt.safe_cast(\n parsed_path,\n dbt.type_string()\n )}}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.replace", "macro.dbt.position", "macro.dbt.split_part", "macro.dbt.right", "macro.dbt.length", "macro.dbt.safe_cast", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7213068, "supported_languages": null}, "macro.dbt_utils.get_url_parameter": {"name": "get_url_parameter", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/web/get_url_parameter.sql", "original_file_path": "macros/web/get_url_parameter.sql", "unique_id": "macro.dbt_utils.get_url_parameter", "macro_sql": "{% macro get_url_parameter(field, url_parameter) -%}\n {{ return(adapter.dispatch('get_url_parameter', 'dbt_utils')(field, url_parameter)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_url_parameter"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7216141, "supported_languages": null}, "macro.dbt_utils.default__get_url_parameter": {"name": "default__get_url_parameter", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/web/get_url_parameter.sql", "original_file_path": "macros/web/get_url_parameter.sql", "unique_id": "macro.dbt_utils.default__get_url_parameter", "macro_sql": "{% macro default__get_url_parameter(field, url_parameter) -%}\n\n{%- set formatted_url_parameter = \"'\" + url_parameter + \"='\" -%}\n\n{%- set split = dbt.split_part(dbt.split_part(field, formatted_url_parameter, 2), \"'&'\", 1) -%}\n\nnullif({{ split }},'')\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.split_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7219179, "supported_languages": null}, "macro.dbt_utils.test_fewer_rows_than": {"name": "test_fewer_rows_than", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/fewer_rows_than.sql", "original_file_path": "macros/generic_tests/fewer_rows_than.sql", 
"unique_id": "macro.dbt_utils.test_fewer_rows_than", "macro_sql": "{% test fewer_rows_than(model, compare_model, group_by_columns = []) %}\n {{ return(adapter.dispatch('test_fewer_rows_than', 'dbt_utils')(model, compare_model, group_by_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_fewer_rows_than"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.722913, "supported_languages": null}, "macro.dbt_utils.default__test_fewer_rows_than": {"name": "default__test_fewer_rows_than", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/fewer_rows_than.sql", "original_file_path": "macros/generic_tests/fewer_rows_than.sql", "unique_id": "macro.dbt_utils.default__test_fewer_rows_than", "macro_sql": "{% macro default__test_fewer_rows_than(model, compare_model, group_by_columns) %}\n\n{{ config(fail_calc = 'sum(coalesce(row_count_delta, 0))') }}\n\n{% if group_by_columns|length() > 0 %}\n {% set select_gb_cols = group_by_columns|join(' ,') + ', ' %}\n {% set join_gb_cols %}\n {% for c in group_by_columns %}\n and a.{{c}} = b.{{c}}\n {% endfor %}\n {% endset %}\n {% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n{% endif %}\n\n{#-- We must add a fake join key in case additional grouping variables are not provided --#}\n{#-- Redshift does not allow for dynamically created join conditions (e.g. full join on 1 = 1 --#}\n{#-- The same logic is used in equal_rowcount. In case of changes, maintain consistent logic --#}\n{% set group_by_columns = ['id_dbtutils_test_fewer_rows_than'] + group_by_columns %}\n{% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n\n\nwith a as (\n\n select \n {{select_gb_cols}}\n 1 as id_dbtutils_test_fewer_rows_than,\n count(*) as count_our_model \n from {{ model }}\n {{ groupby_gb_cols }}\n\n),\nb as (\n\n select \n {{select_gb_cols}}\n 1 as id_dbtutils_test_fewer_rows_than,\n count(*) as count_comparison_model \n from {{ compare_model }}\n {{ groupby_gb_cols }}\n\n),\ncounts as (\n\n select\n\n {% for c in group_by_columns -%}\n a.{{c}} as {{c}}_a,\n b.{{c}} as {{c}}_b,\n {% endfor %}\n\n count_our_model,\n count_comparison_model\n from a\n full join b on \n a.id_dbtutils_test_fewer_rows_than = b.id_dbtutils_test_fewer_rows_than\n {{ join_gb_cols }}\n\n),\nfinal as (\n\n select *,\n case\n -- fail the test if we have more rows than the reference model and return the row count delta\n when count_our_model > count_comparison_model then (count_our_model - count_comparison_model)\n -- fail the test if they are the same number\n when count_our_model = count_comparison_model then 1\n -- pass the test if the delta is positive (i.e. 
return the number 0)\n else 0\n end as row_count_delta\n from counts\n\n)\n\nselect * from final\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.723862, "supported_languages": null}, "macro.dbt_utils.test_equal_rowcount": {"name": "test_equal_rowcount", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/equal_rowcount.sql", "original_file_path": "macros/generic_tests/equal_rowcount.sql", "unique_id": "macro.dbt_utils.test_equal_rowcount", "macro_sql": "{% test equal_rowcount(model, compare_model, group_by_columns = []) %}\n {{ return(adapter.dispatch('test_equal_rowcount', 'dbt_utils')(model, compare_model, group_by_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_equal_rowcount"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7246552, "supported_languages": null}, "macro.dbt_utils.default__test_equal_rowcount": {"name": "default__test_equal_rowcount", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/equal_rowcount.sql", "original_file_path": "macros/generic_tests/equal_rowcount.sql", "unique_id": "macro.dbt_utils.default__test_equal_rowcount", "macro_sql": "{% macro default__test_equal_rowcount(model, compare_model, group_by_columns) %}\n\n{#-- Needs to be set at parse time, before we return '' below --#}\n{{ config(fail_calc = 'sum(coalesce(diff_count, 0))') }}\n\n{#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. #}\n{%- if not execute -%}\n {{ return('') }}\n{% endif %}\n\n{% if group_by_columns|length() > 0 %}\n {% set select_gb_cols = group_by_columns|join(', ') + ', ' %}\n {% set join_gb_cols %}\n {% for c in group_by_columns %}\n and a.{{c}} = b.{{c}}\n {% endfor %}\n {% endset %}\n {% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n{% endif %}\n\n{#-- We must add a fake join key in case additional grouping variables are not provided --#}\n{#-- Redshift does not allow for dynamically created join conditions (e.g. full join on 1 = 1 --#}\n{#-- The same logic is used in fewer_rows_than. 
In case of changes, maintain consistent logic --#}\n{% set group_by_columns = ['id_dbtutils_test_equal_rowcount'] + group_by_columns %}\n{% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n\nwith a as (\n\n select \n {{select_gb_cols}}\n 1 as id_dbtutils_test_equal_rowcount,\n count(*) as count_a \n from {{ model }}\n {{groupby_gb_cols}}\n\n\n),\nb as (\n\n select \n {{select_gb_cols}}\n 1 as id_dbtutils_test_equal_rowcount,\n count(*) as count_b \n from {{ compare_model }}\n {{groupby_gb_cols}}\n\n),\nfinal as (\n\n select\n \n {% for c in group_by_columns -%}\n a.{{c}} as {{c}}_a,\n b.{{c}} as {{c}}_b,\n {% endfor %}\n\n count_a,\n count_b,\n abs(count_a - count_b) as diff_count\n\n from a\n full join b\n on\n a.id_dbtutils_test_equal_rowcount = b.id_dbtutils_test_equal_rowcount\n {{join_gb_cols}}\n\n\n)\n\nselect * from final\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.725646, "supported_languages": null}, "macro.dbt_utils.test_relationships_where": {"name": "test_relationships_where", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/relationships_where.sql", "original_file_path": "macros/generic_tests/relationships_where.sql", "unique_id": "macro.dbt_utils.test_relationships_where", "macro_sql": "{% test relationships_where(model, column_name, to, field, from_condition=\"1=1\", to_condition=\"1=1\") %}\n {{ return(adapter.dispatch('test_relationships_where', 'dbt_utils')(model, column_name, to, field, from_condition, to_condition)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_relationships_where"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.726353, "supported_languages": null}, "macro.dbt_utils.default__test_relationships_where": {"name": "default__test_relationships_where", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/relationships_where.sql", "original_file_path": "macros/generic_tests/relationships_where.sql", "unique_id": "macro.dbt_utils.default__test_relationships_where", "macro_sql": "{% macro default__test_relationships_where(model, column_name, to, field, from_condition=\"1=1\", to_condition=\"1=1\") %}\n\n{# T-SQL has no boolean data type so we use 1=1 which returns TRUE #}\n{# ref https://stackoverflow.com/a/7170753/3842610 #}\n\nwith left_table as (\n\n select\n {{column_name}} as id\n\n from {{model}}\n\n where {{column_name}} is not null\n and {{from_condition}}\n\n),\n\nright_table as (\n\n select\n {{field}} as id\n\n from {{to}}\n\n where {{field}} is not null\n and {{to_condition}}\n\n),\n\nexceptions as (\n\n select\n left_table.id,\n right_table.id as right_id\n\n from left_table\n\n left join right_table\n on left_table.id = right_table.id\n\n where right_table.id is null\n\n)\n\nselect * from exceptions\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.726698, "supported_languages": null}, "macro.dbt_utils.test_recency": {"name": "test_recency", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/recency.sql", "original_file_path": "macros/generic_tests/recency.sql", "unique_id": "macro.dbt_utils.test_recency", "macro_sql": "{% test 
recency(model, field, datepart, interval, ignore_time_component=False, group_by_columns = []) %}\n {{ return(adapter.dispatch('test_recency', 'dbt_utils')(model, field, datepart, interval, ignore_time_component, group_by_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_recency"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.727364, "supported_languages": null}, "macro.dbt_utils.default__test_recency": {"name": "default__test_recency", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/recency.sql", "original_file_path": "macros/generic_tests/recency.sql", "unique_id": "macro.dbt_utils.default__test_recency", "macro_sql": "{% macro default__test_recency(model, field, datepart, interval, ignore_time_component, group_by_columns) %}\n\n{% set threshold = 'cast(' ~ dbt.dateadd(datepart, interval * -1, dbt.current_timestamp()) ~ ' as ' ~ ('date' if ignore_time_component else dbt.type_timestamp()) ~ ')' %}\n\n{% if group_by_columns|length() > 0 %}\n {% set select_gb_cols = group_by_columns|join(' ,') + ', ' %}\n {% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n{% endif %}\n\n\nwith recency as (\n\n select \n\n {{ select_gb_cols }}\n {% if ignore_time_component %}\n cast(max({{ field }}) as date) as most_recent\n {%- else %}\n max({{ field }}) as most_recent\n {%- endif %}\n\n from {{ model }}\n\n {{ groupby_gb_cols }}\n\n)\n\nselect\n\n {{ select_gb_cols }}\n most_recent,\n {{ threshold }} as threshold\n\nfrom recency\nwhere most_recent < {{ threshold }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.current_timestamp", "macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.72812, "supported_languages": null}, "macro.dbt_utils.test_not_constant": {"name": "test_not_constant", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_constant.sql", "original_file_path": "macros/generic_tests/not_constant.sql", "unique_id": "macro.dbt_utils.test_not_constant", "macro_sql": "{% test not_constant(model, column_name, group_by_columns = []) %}\n {{ return(adapter.dispatch('test_not_constant', 'dbt_utils')(model, column_name, group_by_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_not_constant"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7285252, "supported_languages": null}, "macro.dbt_utils.default__test_not_constant": {"name": "default__test_not_constant", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_constant.sql", "original_file_path": "macros/generic_tests/not_constant.sql", "unique_id": "macro.dbt_utils.default__test_not_constant", "macro_sql": "{% macro default__test_not_constant(model, column_name, group_by_columns) %}\n\n{% if group_by_columns|length() > 0 %}\n {% set select_gb_cols = group_by_columns|join(' ,') + ', ' %}\n {% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n{% endif %}\n\n\nselect\n {# In TSQL, subquery aggregate columns need aliases #}\n {# thus: a filler col name, 'filler_column' #}\n {{select_gb_cols}}\n count(distinct {{ column_name }}) as filler_column\n\nfrom {{ model }}\n\n {{groupby_gb_cols}}\n\nhaving count(distinct {{ 
column_name }}) = 1\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.728927, "supported_languages": null}, "macro.dbt_utils.test_accepted_range": {"name": "test_accepted_range", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/accepted_range.sql", "original_file_path": "macros/generic_tests/accepted_range.sql", "unique_id": "macro.dbt_utils.test_accepted_range", "macro_sql": "{% test accepted_range(model, column_name, min_value=none, max_value=none, inclusive=true) %}\n {{ return(adapter.dispatch('test_accepted_range', 'dbt_utils')(model, column_name, min_value, max_value, inclusive)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_accepted_range"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.729463, "supported_languages": null}, "macro.dbt_utils.default__test_accepted_range": {"name": "default__test_accepted_range", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/accepted_range.sql", "original_file_path": "macros/generic_tests/accepted_range.sql", "unique_id": "macro.dbt_utils.default__test_accepted_range", "macro_sql": "{% macro default__test_accepted_range(model, column_name, min_value=none, max_value=none, inclusive=true) %}\n\nwith meet_condition as(\n select *\n from {{ model }}\n),\n\nvalidation_errors as (\n select *\n from meet_condition\n where\n -- never true, defaults to an empty result set. Exists to ensure any combo of the `or` clauses below succeeds\n 1 = 2\n\n {%- if min_value is not none %}\n -- records with a value >= min_value are permitted. The `not` flips this to find records that don't meet the rule.\n or not {{ column_name }} > {{- \"=\" if inclusive }} {{ min_value }}\n {%- endif %}\n\n {%- if max_value is not none %}\n -- records with a value <= max_value are permitted. 
The `not` flips this to find records that don't meet the rule.\n or not {{ column_name }} < {{- \"=\" if inclusive }} {{ max_value }}\n {%- endif %}\n)\n\nselect *\nfrom validation_errors\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.729876, "supported_languages": null}, "macro.dbt_utils.test_not_accepted_values": {"name": "test_not_accepted_values", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_accepted_values.sql", "original_file_path": "macros/generic_tests/not_accepted_values.sql", "unique_id": "macro.dbt_utils.test_not_accepted_values", "macro_sql": "{% test not_accepted_values(model, column_name, values, quote=True) %}\n {{ return(adapter.dispatch('test_not_accepted_values', 'dbt_utils')(model, column_name, values, quote)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_not_accepted_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.730346, "supported_languages": null}, "macro.dbt_utils.default__test_not_accepted_values": {"name": "default__test_not_accepted_values", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_accepted_values.sql", "original_file_path": "macros/generic_tests/not_accepted_values.sql", "unique_id": "macro.dbt_utils.default__test_not_accepted_values", "macro_sql": "{% macro default__test_not_accepted_values(model, column_name, values, quote=True) %}\nwith all_values as (\n\n select distinct\n {{ column_name }} as value_field\n\n from {{ model }}\n\n),\n\nvalidation_errors as (\n\n select\n value_field\n\n from all_values\n where value_field in (\n {% for value in values -%}\n {% if quote -%}\n '{{ value }}'\n {%- else -%}\n {{ value }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {%- endfor %}\n )\n\n)\n\nselect *\nfrom validation_errors\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7306828, "supported_languages": null}, "macro.dbt_utils.test_at_least_one": {"name": "test_at_least_one", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/at_least_one.sql", "original_file_path": "macros/generic_tests/at_least_one.sql", "unique_id": "macro.dbt_utils.test_at_least_one", "macro_sql": "{% test at_least_one(model, column_name, group_by_columns = []) %}\n {{ return(adapter.dispatch('test_at_least_one', 'dbt_utils')(model, column_name, group_by_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_at_least_one"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.731261, "supported_languages": null}, "macro.dbt_utils.default__test_at_least_one": {"name": "default__test_at_least_one", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/at_least_one.sql", "original_file_path": "macros/generic_tests/at_least_one.sql", "unique_id": "macro.dbt_utils.default__test_at_least_one", "macro_sql": "{% macro default__test_at_least_one(model, column_name, group_by_columns) %}\n\n{% set pruned_cols = [column_name] %}\n\n{% if group_by_columns|length() > 0 %}\n\n {% set select_gb_cols = group_by_columns|join(' 
,') + ', ' %}\n {% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n {% set pruned_cols = group_by_columns %}\n\n {% if column_name not in pruned_cols %}\n {% do pruned_cols.append(column_name) %}\n {% endif %}\n\n{% endif %}\n\n{% set select_pruned_cols = pruned_cols|join(' ,') %}\n\nselect *\nfrom (\n with pruned_rows as (\n select\n {{ select_pruned_cols }}\n from {{ model }}\n where {{ column_name }} is not null\n limit 1\n )\n select\n {# In TSQL, subquery aggregate columns need aliases #}\n {# thus: a filler col name, 'filler_column' #}\n {{select_gb_cols}}\n count({{ column_name }}) as filler_column\n\n from pruned_rows\n\n {{groupby_gb_cols}}\n\n having count({{ column_name }}) = 0\n\n) validation_errors\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.732036, "supported_languages": null}, "macro.dbt_utils.test_unique_combination_of_columns": {"name": "test_unique_combination_of_columns", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/unique_combination_of_columns.sql", "original_file_path": "macros/generic_tests/unique_combination_of_columns.sql", "unique_id": "macro.dbt_utils.test_unique_combination_of_columns", "macro_sql": "{% test unique_combination_of_columns(model, combination_of_columns, quote_columns=false) %}\n {{ return(adapter.dispatch('test_unique_combination_of_columns', 'dbt_utils')(model, combination_of_columns, quote_columns)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_unique_combination_of_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.732575, "supported_languages": null}, "macro.dbt_utils.default__test_unique_combination_of_columns": {"name": "default__test_unique_combination_of_columns", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/unique_combination_of_columns.sql", "original_file_path": "macros/generic_tests/unique_combination_of_columns.sql", "unique_id": "macro.dbt_utils.default__test_unique_combination_of_columns", "macro_sql": "{% macro default__test_unique_combination_of_columns(model, combination_of_columns, quote_columns=false) %}\n\n{% if not quote_columns %}\n {%- set column_list=combination_of_columns %}\n{% elif quote_columns %}\n {%- set column_list=[] %}\n {% for column in combination_of_columns -%}\n {% set column_list = column_list.append( adapter.quote(column) ) %}\n {%- endfor %}\n{% else %}\n {{ exceptions.raise_compiler_error(\n \"`quote_columns` argument for unique_combination_of_columns test must be one of [True, False] Got: '\" ~ quote ~\"'.'\"\n ) }}\n{% endif %}\n\n{%- set columns_csv=column_list | join(', ') %}\n\n\nwith validation_errors as (\n\n select\n {{ columns_csv }}\n from {{ model }}\n group by {{ columns_csv }}\n having count(*) > 1\n\n)\n\nselect *\nfrom validation_errors\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.73314, "supported_languages": null}, "macro.dbt_utils.test_cardinality_equality": {"name": "test_cardinality_equality", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/cardinality_equality.sql", "original_file_path": "macros/generic_tests/cardinality_equality.sql", 
"unique_id": "macro.dbt_utils.test_cardinality_equality", "macro_sql": "{% test cardinality_equality(model, column_name, to, field) %}\n {{ return(adapter.dispatch('test_cardinality_equality', 'dbt_utils')(model, column_name, to, field)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_cardinality_equality"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7336361, "supported_languages": null}, "macro.dbt_utils.default__test_cardinality_equality": {"name": "default__test_cardinality_equality", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/cardinality_equality.sql", "original_file_path": "macros/generic_tests/cardinality_equality.sql", "unique_id": "macro.dbt_utils.default__test_cardinality_equality", "macro_sql": "{% macro default__test_cardinality_equality(model, column_name, to, field) %}\n\n{# T-SQL does not let you use numbers as aliases for columns #}\n{# Thus, no \"GROUP BY 1\" #}\n\nwith table_a as (\nselect\n {{ column_name }},\n count(*) as num_rows\nfrom {{ model }}\ngroup by {{ column_name }}\n),\n\ntable_b as (\nselect\n {{ field }},\n count(*) as num_rows\nfrom {{ to }}\ngroup by {{ field }}\n),\n\nexcept_a as (\n select *\n from table_a\n {{ dbt.except() }}\n select *\n from table_b\n),\n\nexcept_b as (\n select *\n from table_b\n {{ dbt.except() }}\n select *\n from table_a\n),\n\nunioned as (\n select *\n from except_a\n union all\n select *\n from except_b\n)\n\nselect *\nfrom unioned\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.733936, "supported_languages": null}, "macro.dbt_utils.test_expression_is_true": {"name": "test_expression_is_true", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/expression_is_true.sql", "original_file_path": "macros/generic_tests/expression_is_true.sql", "unique_id": "macro.dbt_utils.test_expression_is_true", "macro_sql": "{% test expression_is_true(model, expression, column_name=None) %}\n {{ return(adapter.dispatch('test_expression_is_true', 'dbt_utils')(model, expression, column_name)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_expression_is_true"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.734284, "supported_languages": null}, "macro.dbt_utils.default__test_expression_is_true": {"name": "default__test_expression_is_true", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/expression_is_true.sql", "original_file_path": "macros/generic_tests/expression_is_true.sql", "unique_id": "macro.dbt_utils.default__test_expression_is_true", "macro_sql": "{% macro default__test_expression_is_true(model, expression, column_name) %}\n\n{% set column_list = '*' if should_store_failures() else \"1\" %}\n\nselect\n {{ column_list }}\nfrom {{ model }}\n{% if column_name is none %}\nwhere not({{ expression }})\n{%- else %}\nwhere not({{ column_name }} {{ expression }})\n{%- endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.should_store_failures"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7345998, "supported_languages": null}, 
"macro.dbt_utils.test_not_null_proportion": {"name": "test_not_null_proportion", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_null_proportion.sql", "original_file_path": "macros/generic_tests/not_null_proportion.sql", "unique_id": "macro.dbt_utils.test_not_null_proportion", "macro_sql": "{% macro test_not_null_proportion(model, group_by_columns = []) %}\n {{ return(adapter.dispatch('test_not_null_proportion', 'dbt_utils')(model, group_by_columns, **kwargs)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_not_null_proportion"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.735168, "supported_languages": null}, "macro.dbt_utils.default__test_not_null_proportion": {"name": "default__test_not_null_proportion", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_null_proportion.sql", "original_file_path": "macros/generic_tests/not_null_proportion.sql", "unique_id": "macro.dbt_utils.default__test_not_null_proportion", "macro_sql": "{% macro default__test_not_null_proportion(model, group_by_columns) %}\n\n{% set column_name = kwargs.get('column_name', kwargs.get('arg')) %}\n{% set at_least = kwargs.get('at_least', kwargs.get('arg')) %}\n{% set at_most = kwargs.get('at_most', kwargs.get('arg', 1)) %}\n\n{% if group_by_columns|length() > 0 %}\n {% set select_gb_cols = group_by_columns|join(' ,') + ', ' %}\n {% set groupby_gb_cols = 'group by ' + group_by_columns|join(',') %}\n{% endif %}\n\nwith validation as (\n select\n {{select_gb_cols}}\n sum(case when {{ column_name }} is null then 0 else 1 end) / cast(count(*) as numeric) as not_null_proportion\n from {{ model }}\n {{groupby_gb_cols}}\n),\nvalidation_errors as (\n select\n {{select_gb_cols}}\n not_null_proportion\n from validation\n where not_null_proportion < {{ at_least }} or not_null_proportion > {{ at_most }}\n)\nselect\n *\nfrom validation_errors\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.735883, "supported_languages": null}, "macro.dbt_utils.test_sequential_values": {"name": "test_sequential_values", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/sequential_values.sql", "original_file_path": "macros/generic_tests/sequential_values.sql", "unique_id": "macro.dbt_utils.test_sequential_values", "macro_sql": "{% test sequential_values(model, column_name, interval=1, datepart=None, group_by_columns = []) %}\n\n {{ return(adapter.dispatch('test_sequential_values', 'dbt_utils')(model, column_name, interval, datepart, group_by_columns)) }}\n\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_sequential_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.736922, "supported_languages": null}, "macro.dbt_utils.default__test_sequential_values": {"name": "default__test_sequential_values", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/sequential_values.sql", "original_file_path": "macros/generic_tests/sequential_values.sql", "unique_id": "macro.dbt_utils.default__test_sequential_values", "macro_sql": "{% macro default__test_sequential_values(model, column_name, interval=1, datepart=None, group_by_columns = []) %}\n\n{% 
set previous_column_name = \"previous_\" ~ dbt_utils.slugify(column_name) %}\n\n{% if group_by_columns|length() > 0 %}\n {% set select_gb_cols = group_by_columns|join(',') + ', ' %}\n {% set partition_gb_cols = 'partition by ' + group_by_columns|join(',') %}\n{% endif %}\n\nwith windowed as (\n\n select\n {{ select_gb_cols }}\n {{ column_name }},\n lag({{ column_name }}) over (\n {{partition_gb_cols}}\n order by {{ column_name }}\n ) as {{ previous_column_name }}\n from {{ model }}\n),\n\nvalidation_errors as (\n select\n *\n from windowed\n {% if datepart %}\n where not(cast({{ column_name }} as {{ dbt.type_timestamp() }})= cast({{ dbt.dateadd(datepart, interval, previous_column_name) }} as {{ dbt.type_timestamp() }}))\n {% else %}\n where not({{ column_name }} = {{ previous_column_name }} + {{ interval }})\n {% endif %}\n)\n\nselect *\nfrom validation_errors\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.slugify", "macro.dbt.type_timestamp", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.737819, "supported_languages": null}, "macro.dbt_utils.test_equality": {"name": "test_equality", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/equality.sql", "original_file_path": "macros/generic_tests/equality.sql", "unique_id": "macro.dbt_utils.test_equality", "macro_sql": "{% test equality(model, compare_model, compare_columns=None, exclude_columns=None, precision = None) %}\n {{ return(adapter.dispatch('test_equality', 'dbt_utils')(model, compare_model, compare_columns, exclude_columns, precision)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_equality"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.739666, "supported_languages": null}, "macro.dbt_utils.default__test_equality": {"name": "default__test_equality", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/equality.sql", "original_file_path": "macros/generic_tests/equality.sql", "unique_id": "macro.dbt_utils.default__test_equality", "macro_sql": "{% macro default__test_equality(model, compare_model, compare_columns=None, exclude_columns=None, precision = None) %}\n\n{%- if compare_columns and exclude_columns -%}\n {{ exceptions.raise_compiler_error(\"Both a compare and an ignore list were provided to the `equality` macro. Only one is allowed\") }}\n{%- endif -%}\n\n{% set set_diff %}\n count(*) + coalesce(abs(\n sum(case when which_diff = 'a_minus_b' then 1 else 0 end) -\n sum(case when which_diff = 'b_minus_a' then 1 else 0 end)\n ), 0)\n{% endset %}\n\n{#-- Needs to be set at parse time, before we return '' below --#}\n{{ config(fail_calc = set_diff) }}\n\n{#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. 
#}\n{%- if not execute -%}\n {{ return('') }}\n{% endif %}\n\n\n\n-- setup\n{%- do dbt_utils._is_relation(model, 'test_equality') -%}\n\n{# Ensure there are no extra columns in the compare_model vs model #}\n{%- if not compare_columns -%}\n {%- do dbt_utils._is_ephemeral(model, 'test_equality') -%}\n {%- do dbt_utils._is_ephemeral(compare_model, 'test_equality') -%}\n\n {%- set model_columns = adapter.get_columns_in_relation(model) -%}\n {%- set compare_model_columns = adapter.get_columns_in_relation(compare_model) -%}\n\n\n {%- if exclude_columns -%}\n {#-- Lower case ignore columns for easier comparison --#}\n {%- set exclude_columns = exclude_columns | map(\"lower\") | list %}\n\n {# Filter out the excluded columns #}\n {%- set include_columns = [] %}\n {%- set include_model_columns = [] %}\n {%- for column in model_columns -%}\n {%- if column.name | lower not in exclude_columns -%}\n {% do include_columns.append(column) %}\n {%- endif %}\n {%- endfor %}\n {%- for column in compare_model_columns -%}\n {%- if column.name | lower not in exclude_columns -%}\n {% do include_model_columns.append(column) %}\n {%- endif %}\n {%- endfor %}\n\n {%- set compare_columns_set = set(include_columns | map(attribute='quoted') | map(\"lower\")) %}\n {%- set compare_model_columns_set = set(include_model_columns | map(attribute='quoted') | map(\"lower\")) %}\n {%- else -%}\n {%- set compare_columns_set = set(model_columns | map(attribute='quoted') | map(\"lower\")) %}\n {%- set compare_model_columns_set = set(compare_model_columns | map(attribute='quoted') | map(\"lower\")) %}\n {%- endif -%}\n\n {% if compare_columns_set != compare_model_columns_set %}\n {{ exceptions.raise_compiler_error(compare_model ~\" has less columns than \" ~ model ~ \", please ensure they have the same columns or use the `compare_columns` or `exclude_columns` arguments to subset them.\") }}\n {% endif %}\n\n\n{% endif %}\n\n{%- if not precision -%}\n {%- if not compare_columns -%}\n {# \n You cannot get the columns in an ephemeral model (due to not existing in the information schema),\n so if the user does not provide an explicit list of columns we must error in the case it is ephemeral\n #}\n {%- do dbt_utils._is_ephemeral(model, 'test_equality') -%}\n {%- set compare_columns = adapter.get_columns_in_relation(model)-%}\n\n {%- if exclude_columns -%}\n {#-- Lower case ignore columns for easier comparison --#}\n {%- set exclude_columns = exclude_columns | map(\"lower\") | list %}\n\n {# Filter out the excluded columns #}\n {%- set include_columns = [] %}\n {%- for column in compare_columns -%}\n {%- if column.name | lower not in exclude_columns -%}\n {% do include_columns.append(column) %}\n {%- endif %}\n {%- endfor %}\n\n {%- set compare_columns = include_columns | map(attribute='quoted') %}\n {%- else -%} {# Compare columns provided #}\n {%- set compare_columns = compare_columns | map(attribute='quoted') %}\n {%- endif -%}\n {%- endif -%}\n\n {% set compare_cols_csv = compare_columns | join(', ') %}\n\n{% else %} {# Precision required #}\n {#-\n If rounding is required, we need to get the types, so it cannot be ephemeral even if they provide column names\n -#}\n {%- do dbt_utils._is_ephemeral(model, 'test_equality') -%}\n {%- set columns = adapter.get_columns_in_relation(model) -%}\n\n {% set columns_list = [] %}\n {%- for col in columns -%}\n {%- if (\n (col.name|lower in compare_columns|map('lower') or not compare_columns) and\n (col.name|lower not in exclude_columns|map('lower') or not exclude_columns)\n ) -%}\n {# 
Databricks double type is not picked up by any number type checks in dbt #}\n {%- if col.is_float() or col.is_numeric() or col.data_type == 'double' -%}\n {# Cast is required due to postgres not having round for a double precision number #}\n {%- do columns_list.append('round(cast(' ~ col.quoted ~ ' as ' ~ dbt.type_numeric() ~ '),' ~ precision ~ ') as ' ~ col.quoted) -%}\n {%- else -%} {# Non-numeric type #}\n {%- do columns_list.append(col.quoted) -%}\n {%- endif -%}\n {% endif %}\n {%- endfor -%}\n\n {% set compare_cols_csv = columns_list | join(', ') %}\n\n{% endif %}\n\nwith a as (\n\n select * from {{ model }}\n\n),\n\nb as (\n\n select * from {{ compare_model }}\n\n),\n\na_minus_b as (\n\n select {{compare_cols_csv}} from a\n {{ dbt.except() }}\n select {{compare_cols_csv}} from b\n\n),\n\nb_minus_a as (\n\n select {{compare_cols_csv}} from b\n {{ dbt.except() }}\n select {{compare_cols_csv}} from a\n\n),\n\nunioned as (\n\n select 'a_minus_b' as which_diff, a_minus_b.* from a_minus_b\n union all\n select 'b_minus_a' as which_diff, b_minus_a.* from b_minus_a\n\n)\n\nselect * from unioned\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt_utils._is_ephemeral", "macro.dbt.type_numeric", "macro.dbt.except"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.743366, "supported_languages": null}, "macro.dbt_utils.test_not_empty_string": {"name": "test_not_empty_string", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_empty_string.sql", "original_file_path": "macros/generic_tests/not_empty_string.sql", "unique_id": "macro.dbt_utils.test_not_empty_string", "macro_sql": "{% test not_empty_string(model, column_name, trim_whitespace=true) %}\n\n {{ return(adapter.dispatch('test_not_empty_string', 'dbt_utils')(model, column_name, trim_whitespace)) }}\n\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_not_empty_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.743863, "supported_languages": null}, "macro.dbt_utils.default__test_not_empty_string": {"name": "default__test_not_empty_string", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/not_empty_string.sql", "original_file_path": "macros/generic_tests/not_empty_string.sql", "unique_id": "macro.dbt_utils.default__test_not_empty_string", "macro_sql": "{% macro default__test_not_empty_string(model, column_name, trim_whitespace=true) %}\n\n with\n \n all_values as (\n\n select \n\n\n {% if trim_whitespace == true -%}\n\n trim({{ column_name }}) as {{ column_name }}\n\n {%- else -%}\n\n {{ column_name }}\n\n {%- endif %}\n \n from {{ model }}\n\n ),\n\n errors as (\n\n select * from all_values\n where {{ column_name }} = ''\n\n )\n\n select * from errors\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.744139, "supported_languages": null}, "macro.dbt_utils.test_mutually_exclusive_ranges": {"name": "test_mutually_exclusive_ranges", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/mutually_exclusive_ranges.sql", "original_file_path": "macros/generic_tests/mutually_exclusive_ranges.sql", "unique_id": "macro.dbt_utils.test_mutually_exclusive_ranges", "macro_sql": 
"{% test mutually_exclusive_ranges(model, lower_bound_column, upper_bound_column, partition_by=None, gaps='allowed', zero_length_range_allowed=False) %}\n {{ return(adapter.dispatch('test_mutually_exclusive_ranges', 'dbt_utils')(model, lower_bound_column, upper_bound_column, partition_by, gaps, zero_length_range_allowed)) }}\n{% endtest %}", "depends_on": {"macros": ["macro.dbt_utils.default__test_mutually_exclusive_ranges"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.747255, "supported_languages": null}, "macro.dbt_utils.default__test_mutually_exclusive_ranges": {"name": "default__test_mutually_exclusive_ranges", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/generic_tests/mutually_exclusive_ranges.sql", "original_file_path": "macros/generic_tests/mutually_exclusive_ranges.sql", "unique_id": "macro.dbt_utils.default__test_mutually_exclusive_ranges", "macro_sql": "{% macro default__test_mutually_exclusive_ranges(model, lower_bound_column, upper_bound_column, partition_by=None, gaps='allowed', zero_length_range_allowed=False) %}\n{% if gaps == 'not_allowed' %}\n {% set allow_gaps_operator='=' %}\n {% set allow_gaps_operator_in_words='equal_to' %}\n{% elif gaps == 'allowed' %}\n {% set allow_gaps_operator='<=' %}\n {% set allow_gaps_operator_in_words='less_than_or_equal_to' %}\n{% elif gaps == 'required' %}\n {% set allow_gaps_operator='<' %}\n {% set allow_gaps_operator_in_words='less_than' %}\n{% else %}\n {{ exceptions.raise_compiler_error(\n \"`gaps` argument for mutually_exclusive_ranges test must be one of ['not_allowed', 'allowed', 'required'] Got: '\" ~ gaps ~\"'.'\"\n ) }}\n{% endif %}\n{% if not zero_length_range_allowed %}\n {% set allow_zero_length_operator='<' %}\n {% set allow_zero_length_operator_in_words='less_than' %}\n{% elif zero_length_range_allowed %}\n {% set allow_zero_length_operator='<=' %}\n {% set allow_zero_length_operator_in_words='less_than_or_equal_to' %}\n{% else %}\n {{ exceptions.raise_compiler_error(\n \"`zero_length_range_allowed` argument for mutually_exclusive_ranges test must be one of [true, false] Got: '\" ~ zero_length_range_allowed ~\"'.'\"\n ) }}\n{% endif %}\n\n{% set partition_clause=\"partition by \" ~ partition_by if partition_by else '' %}\n\nwith window_functions as (\n\n select\n {% if partition_by %}\n {{ partition_by }} as partition_by_col,\n {% endif %}\n {{ lower_bound_column }} as lower_bound,\n {{ upper_bound_column }} as upper_bound,\n\n lead({{ lower_bound_column }}) over (\n {{ partition_clause }}\n order by {{ lower_bound_column }}, {{ upper_bound_column }}\n ) as next_lower_bound,\n\n row_number() over (\n {{ partition_clause }}\n order by {{ lower_bound_column }} desc, {{ upper_bound_column }} desc\n ) = 1 as is_last_record\n\n from {{ model }}\n\n),\n\ncalc as (\n -- We want to return records where one of our assumptions fails, so we'll use\n -- the `not` function with `and` statements so we can write our assumptions more cleanly\n select\n *,\n\n -- For each record: lower_bound should be < upper_bound.\n -- Coalesce it to return an error on the null case (implicit assumption\n -- these columns are not_null)\n coalesce(\n lower_bound {{ allow_zero_length_operator }} upper_bound,\n false\n ) as lower_bound_{{ allow_zero_length_operator_in_words }}_upper_bound,\n\n -- For each record: upper_bound {{ allow_gaps_operator }} the next lower_bound.\n -- Coalesce it to handle null cases for the last record.\n 
coalesce(\n upper_bound {{ allow_gaps_operator }} next_lower_bound,\n is_last_record,\n false\n ) as upper_bound_{{ allow_gaps_operator_in_words }}_next_lower_bound\n\n from window_functions\n\n),\n\nvalidation_errors as (\n\n select\n *\n from calc\n\n where not(\n -- THE FOLLOWING SHOULD BE TRUE --\n lower_bound_{{ allow_zero_length_operator_in_words }}_upper_bound\n and upper_bound_{{ allow_gaps_operator_in_words }}_next_lower_bound\n )\n)\n\nselect * from validation_errors\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.748681, "supported_languages": null}, "macro.dbt_utils.pretty_log_format": {"name": "pretty_log_format", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/pretty_log_format.sql", "original_file_path": "macros/jinja_helpers/pretty_log_format.sql", "unique_id": "macro.dbt_utils.pretty_log_format", "macro_sql": "{% macro pretty_log_format(message) %}\n {{ return(adapter.dispatch('pretty_log_format', 'dbt_utils')(message)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__pretty_log_format"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.74895, "supported_languages": null}, "macro.dbt_utils.default__pretty_log_format": {"name": "default__pretty_log_format", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/pretty_log_format.sql", "original_file_path": "macros/jinja_helpers/pretty_log_format.sql", "unique_id": "macro.dbt_utils.default__pretty_log_format", "macro_sql": "{% macro default__pretty_log_format(message) %}\n {{ return( dbt_utils.pretty_time() ~ ' + ' ~ message) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.pretty_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.749098, "supported_languages": null}, "macro.dbt_utils._is_relation": {"name": "_is_relation", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/_is_relation.sql", "original_file_path": "macros/jinja_helpers/_is_relation.sql", "unique_id": "macro.dbt_utils._is_relation", "macro_sql": "{% macro _is_relation(obj, macro) %}\n {%- if not (obj is mapping and obj.get('metadata', {}).get('type', '').endswith('Relation')) -%}\n {%- do exceptions.raise_compiler_error(\"Macro \" ~ macro ~ \" expected a Relation but received the value: \" ~ obj) -%}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7494872, "supported_languages": null}, "macro.dbt_utils.pretty_time": {"name": "pretty_time", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/pretty_time.sql", "original_file_path": "macros/jinja_helpers/pretty_time.sql", "unique_id": "macro.dbt_utils.pretty_time", "macro_sql": "{% macro pretty_time(format='%H:%M:%S') %}\n {{ return(adapter.dispatch('pretty_time', 'dbt_utils')(format)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__pretty_time"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.749731, "supported_languages": null}, 
"macro.dbt_utils.default__pretty_time": {"name": "default__pretty_time", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/pretty_time.sql", "original_file_path": "macros/jinja_helpers/pretty_time.sql", "unique_id": "macro.dbt_utils.default__pretty_time", "macro_sql": "{% macro default__pretty_time(format='%H:%M:%S') %}\n {{ return(modules.datetime.datetime.now().strftime(format)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.749908, "supported_languages": null}, "macro.dbt_utils.log_info": {"name": "log_info", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/log_info.sql", "original_file_path": "macros/jinja_helpers/log_info.sql", "unique_id": "macro.dbt_utils.log_info", "macro_sql": "{% macro log_info(message) %}\n {{ return(adapter.dispatch('log_info', 'dbt_utils')(message)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__log_info"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.75014, "supported_languages": null}, "macro.dbt_utils.default__log_info": {"name": "default__log_info", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/log_info.sql", "original_file_path": "macros/jinja_helpers/log_info.sql", "unique_id": "macro.dbt_utils.default__log_info", "macro_sql": "{% macro default__log_info(message) %}\n {{ log(dbt_utils.pretty_log_format(message), info=True) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.pretty_log_format"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7503018, "supported_languages": null}, "macro.dbt_utils.slugify": {"name": "slugify", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/slugify.sql", "original_file_path": "macros/jinja_helpers/slugify.sql", "unique_id": "macro.dbt_utils.slugify", "macro_sql": "{% macro slugify(string) %}\n\n{#- Lower case the string -#}\n{% set string = string | lower %}\n{#- Replace spaces and dashes with underscores -#}\n{% set string = modules.re.sub('[ -]+', '_', string) %}\n{#- Only take letters, numbers, and underscores -#}\n{% set string = modules.re.sub('[^a-z0-9_]+', '', string) %}\n{#- Prepends \"_\" if string begins with a number -#}\n{% set string = modules.re.sub('^[0-9]', '_' + string[0], string) %}\n\n{{ return(string) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7508538, "supported_languages": null}, "macro.dbt_utils._is_ephemeral": {"name": "_is_ephemeral", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/jinja_helpers/_is_ephemeral.sql", "original_file_path": "macros/jinja_helpers/_is_ephemeral.sql", "unique_id": "macro.dbt_utils._is_ephemeral", "macro_sql": "{% macro _is_ephemeral(obj, macro) %}\n {%- if obj.is_cte -%}\n {% set ephemeral_prefix = api.Relation.add_ephemeral_prefix('') %}\n {% if obj.name.startswith(ephemeral_prefix) %}\n {% set model_name = obj.name[(ephemeral_prefix|length):] %}\n {% else %}\n {% set model_name = obj.name %}\n {%- endif -%}\n {% set error_message %}\nThe `{{ macro }}` macro cannot be used with ephemeral models, as it 
relies on the information schema.\n\n`{{ model_name }}` is an ephemeral model. Consider making it a view or table instead.\n {% endset %}\n {%- do exceptions.raise_compiler_error(error_message) -%}\n {%- endif -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.751576, "supported_languages": null}, "macro.dbt_utils.get_intervals_between": {"name": "get_intervals_between", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/date_spine.sql", "original_file_path": "macros/sql/date_spine.sql", "unique_id": "macro.dbt_utils.get_intervals_between", "macro_sql": "{% macro get_intervals_between(start_date, end_date, datepart) -%}\n {{ return(adapter.dispatch('get_intervals_between', 'dbt_utils')(start_date, end_date, datepart)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_intervals_between"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7522078, "supported_languages": null}, "macro.dbt_utils.default__get_intervals_between": {"name": "default__get_intervals_between", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/date_spine.sql", "original_file_path": "macros/sql/date_spine.sql", "unique_id": "macro.dbt_utils.default__get_intervals_between", "macro_sql": "{% macro default__get_intervals_between(start_date, end_date, datepart) -%}\n {%- call statement('get_intervals_between', fetch_result=True) %}\n\n select {{ dbt.datediff(start_date, end_date, datepart) }}\n\n {%- endcall -%}\n\n {%- set value_list = load_result('get_intervals_between') -%}\n\n {%- if value_list and value_list['data'] -%}\n {%- set values = value_list['data'] | map(attribute=0) | list %}\n {{ return(values[0]) }}\n {%- else -%}\n {{ return(1) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.752757, "supported_languages": null}, "macro.dbt_utils.date_spine": {"name": "date_spine", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/date_spine.sql", "original_file_path": "macros/sql/date_spine.sql", "unique_id": "macro.dbt_utils.date_spine", "macro_sql": "{% macro date_spine(datepart, start_date, end_date) %}\n {{ return(adapter.dispatch('date_spine', 'dbt_utils')(datepart, start_date, end_date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.752977, "supported_languages": null}, "macro.dbt_utils.default__date_spine": {"name": "default__date_spine", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/date_spine.sql", "original_file_path": "macros/sql/date_spine.sql", "unique_id": "macro.dbt_utils.default__date_spine", "macro_sql": "{% macro default__date_spine(datepart, start_date, end_date) %}\n\n\n{# call as follows:\n\ndate_spine(\n \"day\",\n \"to_date('01/01/2016', 'mm/dd/yyyy')\",\n \"dbt.dateadd(week, 1, current_date)\"\n) #}\n\n\nwith rawdata as (\n\n {{dbt_utils.generate_series(\n dbt_utils.get_intervals_between(start_date, end_date, datepart)\n )}}\n\n),\n\nall_periods as (\n\n select (\n {{\n 
dbt.dateadd(\n datepart,\n \"row_number() over (order by 1) - 1\",\n start_date\n )\n }}\n ) as date_{{datepart}}\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_{{datepart}} <= {{ end_date }}\n\n)\n\nselect * from filtered\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.generate_series", "macro.dbt_utils.get_intervals_between", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.753319, "supported_languages": null}, "macro.dbt_utils.safe_subtract": {"name": "safe_subtract", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/safe_subtract.sql", "original_file_path": "macros/sql/safe_subtract.sql", "unique_id": "macro.dbt_utils.safe_subtract", "macro_sql": "{%- macro safe_subtract(field_list) -%}\n {{ return(adapter.dispatch('safe_subtract', 'dbt_utils')(field_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__safe_subtract"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7536771, "supported_languages": null}, "macro.dbt_utils.default__safe_subtract": {"name": "default__safe_subtract", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/safe_subtract.sql", "original_file_path": "macros/sql/safe_subtract.sql", "unique_id": "macro.dbt_utils.default__safe_subtract", "macro_sql": "\n\n{%- macro default__safe_subtract(field_list) -%}\n\n{%- if field_list is not iterable or field_list is string or field_list is mapping -%}\n\n{%- set error_message = '\nWarning: the `safe_subtract` macro takes a single list argument instead of \\\nstring arguments. The {}.{} model triggered this warning. 
\\\n'.format(model.package_name, model.name) -%}\n\n{%- do exceptions.raise_compiler_error(error_message) -%}\n\n{%- endif -%}\n\n{% set fields = [] %}\n\n{%- for field in field_list -%}\n\n {% do fields.append(\"coalesce(\" ~ field ~ \", 0)\") %}\n\n{%- endfor -%}\n\n{{ fields|join(' -\\n ') }}\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.754251, "supported_languages": null}, "macro.dbt_utils.nullcheck_table": {"name": "nullcheck_table", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/nullcheck_table.sql", "original_file_path": "macros/sql/nullcheck_table.sql", "unique_id": "macro.dbt_utils.nullcheck_table", "macro_sql": "{% macro nullcheck_table(relation) %}\n {{ return(adapter.dispatch('nullcheck_table', 'dbt_utils')(relation)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__nullcheck_table"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.754538, "supported_languages": null}, "macro.dbt_utils.default__nullcheck_table": {"name": "default__nullcheck_table", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/nullcheck_table.sql", "original_file_path": "macros/sql/nullcheck_table.sql", "unique_id": "macro.dbt_utils.default__nullcheck_table", "macro_sql": "{% macro default__nullcheck_table(relation) %}\n\n {%- do dbt_utils._is_relation(relation, 'nullcheck_table') -%}\n {%- do dbt_utils._is_ephemeral(relation, 'nullcheck_table') -%}\n {% set cols = adapter.get_columns_in_relation(relation) %}\n\n select {{ dbt_utils.nullcheck(cols) }}\n from {{relation}}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt_utils._is_ephemeral", "macro.dbt_utils.nullcheck"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.75492, "supported_languages": null}, "macro.dbt_utils.get_relations_by_pattern": {"name": "get_relations_by_pattern", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_relations_by_pattern.sql", "original_file_path": "macros/sql/get_relations_by_pattern.sql", "unique_id": "macro.dbt_utils.get_relations_by_pattern", "macro_sql": "{% macro get_relations_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database) %}\n {{ return(adapter.dispatch('get_relations_by_pattern', 'dbt_utils')(schema_pattern, table_pattern, exclude, database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_relations_by_pattern"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7555652, "supported_languages": null}, "macro.dbt_utils.default__get_relations_by_pattern": {"name": "default__get_relations_by_pattern", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_relations_by_pattern.sql", "original_file_path": "macros/sql/get_relations_by_pattern.sql", "unique_id": "macro.dbt_utils.default__get_relations_by_pattern", "macro_sql": "{% macro default__get_relations_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database) %}\n\n {%- call statement('get_tables', fetch_result=True) %}\n\n {{ dbt_utils.get_tables_by_pattern_sql(schema_pattern, table_pattern, exclude, database) 
}}\n\n {%- endcall -%}\n\n {%- set table_list = load_result('get_tables') -%}\n\n {%- if table_list and table_list['table'] -%}\n {%- set tbl_relations = [] -%}\n {%- for row in table_list['table'] -%}\n {%- set tbl_relation = api.Relation.create(\n database=database,\n schema=row.table_schema,\n identifier=row.table_name,\n type=row.table_type\n ) -%}\n {%- do tbl_relations.append(tbl_relation) -%}\n {%- endfor -%}\n\n {{ return(tbl_relations) }}\n {%- else -%}\n {{ return([]) }}\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt_utils.get_tables_by_pattern_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.756333, "supported_languages": null}, "macro.dbt_utils.get_powers_of_two": {"name": "get_powers_of_two", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/generate_series.sql", "original_file_path": "macros/sql/generate_series.sql", "unique_id": "macro.dbt_utils.get_powers_of_two", "macro_sql": "{% macro get_powers_of_two(upper_bound) %}\n {{ return(adapter.dispatch('get_powers_of_two', 'dbt_utils')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7570822, "supported_languages": null}, "macro.dbt_utils.default__get_powers_of_two": {"name": "default__get_powers_of_two", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/generate_series.sql", "original_file_path": "macros/sql/generate_series.sql", "unique_id": "macro.dbt_utils.default__get_powers_of_two", "macro_sql": "{% macro default__get_powers_of_two(upper_bound) %}\n\n {% if upper_bound <= 0 %}\n {{ exceptions.raise_compiler_error(\"upper bound must be positive\") }}\n {% endif %}\n\n {% for _ in range(1, 100) %}\n {% if upper_bound <= 2 ** loop.index %}{{ return(loop.index) }}{% endif %}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7574458, "supported_languages": null}, "macro.dbt_utils.generate_series": {"name": "generate_series", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/generate_series.sql", "original_file_path": "macros/sql/generate_series.sql", "unique_id": "macro.dbt_utils.generate_series", "macro_sql": "{% macro generate_series(upper_bound) %}\n {{ return(adapter.dispatch('generate_series', 'dbt_utils')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__generate_series"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.757619, "supported_languages": null}, "macro.dbt_utils.default__generate_series": {"name": "default__generate_series", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/generate_series.sql", "original_file_path": "macros/sql/generate_series.sql", "unique_id": "macro.dbt_utils.default__generate_series", "macro_sql": "{% macro default__generate_series(upper_bound) %}\n\n {% set n = dbt_utils.get_powers_of_two(upper_bound) %}\n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n {% for i in range(n) %}\n p{{i}}.generated_number * power(2, {{i}})\n {% if not 
loop.last %} + {% endif %}\n {% endfor %}\n + 1\n as generated_number\n\n from\n\n {% for i in range(n) %}\n p as p{{i}}\n {% if not loop.last %} cross join {% endif %}\n {% endfor %}\n\n )\n\n select *\n from unioned\n where generated_number <= {{upper_bound}}\n order by generated_number\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7580988, "supported_languages": null}, "macro.dbt_utils.get_relations_by_prefix": {"name": "get_relations_by_prefix", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_relations_by_prefix.sql", "original_file_path": "macros/sql/get_relations_by_prefix.sql", "unique_id": "macro.dbt_utils.get_relations_by_prefix", "macro_sql": "{% macro get_relations_by_prefix(schema, prefix, exclude='', database=target.database) %}\n {{ return(adapter.dispatch('get_relations_by_prefix', 'dbt_utils')(schema, prefix, exclude, database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_relations_by_prefix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7587252, "supported_languages": null}, "macro.dbt_utils.default__get_relations_by_prefix": {"name": "default__get_relations_by_prefix", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_relations_by_prefix.sql", "original_file_path": "macros/sql/get_relations_by_prefix.sql", "unique_id": "macro.dbt_utils.default__get_relations_by_prefix", "macro_sql": "{% macro default__get_relations_by_prefix(schema, prefix, exclude='', database=target.database) %}\n\n {%- call statement('get_tables', fetch_result=True) %}\n\n {{ dbt_utils.get_tables_by_prefix_sql(schema, prefix, exclude, database) }}\n\n {%- endcall -%}\n\n {%- set table_list = load_result('get_tables') -%}\n\n {%- if table_list and table_list['table'] -%}\n {%- set tbl_relations = [] -%}\n {%- for row in table_list['table'] -%}\n {%- set tbl_relation = api.Relation.create(\n database=database,\n schema=row.table_schema,\n identifier=row.table_name,\n type=row.table_type\n ) -%}\n {%- do tbl_relations.append(tbl_relation) -%}\n {%- endfor -%}\n\n {{ return(tbl_relations) }}\n {%- else -%}\n {{ return([]) }}\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt_utils.get_tables_by_prefix_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.75952, "supported_languages": null}, "macro.dbt_utils.get_tables_by_prefix_sql": {"name": "get_tables_by_prefix_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_prefix_sql.sql", "original_file_path": "macros/sql/get_tables_by_prefix_sql.sql", "unique_id": "macro.dbt_utils.get_tables_by_prefix_sql", "macro_sql": "{% macro get_tables_by_prefix_sql(schema, prefix, exclude='', database=target.database) %}\n {{ return(adapter.dispatch('get_tables_by_prefix_sql', 'dbt_utils')(schema, prefix, exclude, database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_tables_by_prefix_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.759901, "supported_languages": null}, 
"macro.dbt_utils.default__get_tables_by_prefix_sql": {"name": "default__get_tables_by_prefix_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_prefix_sql.sql", "original_file_path": "macros/sql/get_tables_by_prefix_sql.sql", "unique_id": "macro.dbt_utils.default__get_tables_by_prefix_sql", "macro_sql": "{% macro default__get_tables_by_prefix_sql(schema, prefix, exclude='', database=target.database) %}\n\n {{ dbt_utils.get_tables_by_pattern_sql(\n schema_pattern = schema,\n table_pattern = prefix ~ '%',\n exclude = exclude,\n database = database\n ) }}\n \n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.get_tables_by_pattern_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7601652, "supported_languages": null}, "macro.dbt_utils.star": {"name": "star", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/star.sql", "original_file_path": "macros/sql/star.sql", "unique_id": "macro.dbt_utils.star", "macro_sql": "{% macro star(from, relation_alias=False, except=[], prefix='', suffix='', quote_identifiers=True) -%}\r\n {{ return(adapter.dispatch('star', 'dbt_utils')(from, relation_alias, except, prefix, suffix, quote_identifiers)) }}\r\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__star"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7612422, "supported_languages": null}, "macro.dbt_utils.default__star": {"name": "default__star", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/star.sql", "original_file_path": "macros/sql/star.sql", "unique_id": "macro.dbt_utils.default__star", "macro_sql": "{% macro default__star(from, relation_alias=False, except=[], prefix='', suffix='', quote_identifiers=True) -%}\r\n {%- do dbt_utils._is_relation(from, 'star') -%}\r\n {%- do dbt_utils._is_ephemeral(from, 'star') -%}\r\n\r\n {#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. #}\r\n {%- if not execute -%}\r\n {% do return('*') %}\r\n {%- endif -%}\r\n\r\n {% set cols = dbt_utils.get_filtered_columns_in_relation(from, except) %}\r\n\r\n {%- if cols|length <= 0 -%}\r\n {% if flags.WHICH == 'compile' %}\r\n {% set response %}\r\n*\r\n/* No columns were returned. Maybe the relation doesn't exist yet \r\nor all columns were excluded. This star is only output during \r\ndbt compile, and exists to keep SQLFluff happy. 
*/\r\n {% endset %}\r\n {% do return(response) %}\r\n {% else %}\r\n {% do return(\"/* no columns returned from star() macro */\") %}\r\n {% endif %}\r\n {%- else -%}\r\n {%- for col in cols %}\r\n {%- if relation_alias %}{{ relation_alias }}.{% else %}{%- endif -%}\r\n {%- if quote_identifiers -%}\r\n {{ adapter.quote(col)|trim }} {%- if prefix!='' or suffix!='' %} as {{ adapter.quote(prefix ~ col ~ suffix)|trim }} {%- endif -%}\r\n {%- else -%}\r\n {{ col|trim }} {%- if prefix!='' or suffix!='' %} as {{ (prefix ~ col ~ suffix)|trim }} {%- endif -%}\r\n {% endif %}\r\n {%- if not loop.last %},{{ '\\n ' }}{%- endif -%}\r\n {%- endfor -%}\r\n {% endif %}\r\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt_utils._is_ephemeral", "macro.dbt_utils.get_filtered_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.762581, "supported_languages": null}, "macro.dbt_utils.unpivot": {"name": "unpivot", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/unpivot.sql", "original_file_path": "macros/sql/unpivot.sql", "unique_id": "macro.dbt_utils.unpivot", "macro_sql": "{% macro unpivot(relation=none, cast_to='varchar', exclude=none, remove=none, field_name='field_name', value_name='value') -%}\n {{ return(adapter.dispatch('unpivot', 'dbt_utils')(relation, cast_to, exclude, remove, field_name, value_name)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__unpivot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7637608, "supported_languages": null}, "macro.dbt_utils.default__unpivot": {"name": "default__unpivot", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/unpivot.sql", "original_file_path": "macros/sql/unpivot.sql", "unique_id": "macro.dbt_utils.default__unpivot", "macro_sql": "{% macro default__unpivot(relation=none, cast_to='varchar', exclude=none, remove=none, field_name='field_name', value_name='value') -%}\n\n {% if not relation %}\n {{ exceptions.raise_compiler_error(\"Error: argument `relation` is required for `unpivot` macro.\") }}\n {% endif %}\n\n {%- set exclude = exclude if exclude is not none else [] %}\n {%- set remove = remove if remove is not none else [] %}\n\n {%- set include_cols = [] %}\n\n {%- set table_columns = {} %}\n\n {%- do table_columns.update({relation: []}) %}\n\n {%- do dbt_utils._is_relation(relation, 'unpivot') -%}\n {%- do dbt_utils._is_ephemeral(relation, 'unpivot') -%}\n {%- set cols = adapter.get_columns_in_relation(relation) %}\n\n {%- for col in cols -%}\n {%- if col.column.lower() not in remove|map('lower') and col.column.lower() not in exclude|map('lower') -%}\n {% do include_cols.append(col) %}\n {%- endif %}\n {%- endfor %}\n\n\n {%- for col in include_cols -%}\n select\n {%- for exclude_col in exclude %}\n {{ exclude_col }},\n {%- endfor %}\n\n cast('{{ col.column }}' as {{ dbt.type_string() }}) as {{ field_name }},\n cast( {% if col.data_type == 'boolean' %}\n {{ dbt.cast_bool_to_text(col.column) }}\n {% else %}\n {{ col.column }}\n {% endif %}\n as {{ cast_to }}) as {{ value_name }}\n\n from {{ relation }}\n\n {% if not loop.last -%}\n union all\n {% endif -%}\n {%- endfor -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt_utils._is_ephemeral", "macro.dbt.type_string", "macro.dbt.cast_bool_to_text"]}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7652988, "supported_languages": null}, "macro.dbt_utils.safe_divide": {"name": "safe_divide", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/safe_divide.sql", "original_file_path": "macros/sql/safe_divide.sql", "unique_id": "macro.dbt_utils.safe_divide", "macro_sql": "{% macro safe_divide(numerator, denominator) -%}\n {{ return(adapter.dispatch('safe_divide', 'dbt_utils')(numerator, denominator)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__safe_divide"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7655911, "supported_languages": null}, "macro.dbt_utils.default__safe_divide": {"name": "default__safe_divide", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/safe_divide.sql", "original_file_path": "macros/sql/safe_divide.sql", "unique_id": "macro.dbt_utils.default__safe_divide", "macro_sql": "{% macro default__safe_divide(numerator, denominator) %}\n ( {{ numerator }} ) / nullif( ( {{ denominator }} ), 0)\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.765719, "supported_languages": null}, "macro.dbt_utils.union_relations": {"name": "union_relations", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/union.sql", "original_file_path": "macros/sql/union.sql", "unique_id": "macro.dbt_utils.union_relations", "macro_sql": "{%- macro union_relations(relations, column_override=none, include=[], exclude=[], source_column_name='_dbt_source_relation', where=none) -%}\n {{ return(adapter.dispatch('union_relations', 'dbt_utils')(relations, column_override, include, exclude, source_column_name, where)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__union_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.768469, "supported_languages": null}, "macro.dbt_utils.default__union_relations": {"name": "default__union_relations", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/union.sql", "original_file_path": "macros/sql/union.sql", "unique_id": "macro.dbt_utils.default__union_relations", "macro_sql": "\n\n{%- macro default__union_relations(relations, column_override=none, include=[], exclude=[], source_column_name='_dbt_source_relation', where=none) -%}\n\n {%- if exclude and include -%}\n {{ exceptions.raise_compiler_error(\"Both an exclude and include list were provided to the `union` macro. Only one is allowed\") }}\n {%- endif -%}\n\n {#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. 
-#}\n {%- if not execute %}\n {{ return('') }}\n {% endif -%}\n\n {%- set column_override = column_override if column_override is not none else {} -%}\n\n {%- set relation_columns = {} -%}\n {%- set column_superset = {} -%}\n {%- set all_excludes = [] -%}\n {%- set all_includes = [] -%}\n\n {%- if exclude -%}\n {%- for exc in exclude -%}\n {%- do all_excludes.append(exc | lower) -%}\n {%- endfor -%}\n {%- endif -%}\n\n {%- if include -%}\n {%- for inc in include -%}\n {%- do all_includes.append(inc | lower) -%}\n {%- endfor -%}\n {%- endif -%}\n\n {%- for relation in relations -%}\n\n {%- do relation_columns.update({relation: []}) -%}\n\n {%- do dbt_utils._is_relation(relation, 'union_relations') -%}\n {%- do dbt_utils._is_ephemeral(relation, 'union_relations') -%}\n {%- set cols = adapter.get_columns_in_relation(relation) -%}\n {%- for col in cols -%}\n\n {#- If an exclude list was provided and the column is in the list, do nothing -#}\n {%- if exclude and col.column | lower in all_excludes -%}\n\n {#- If an include list was provided and the column is not in the list, do nothing -#}\n {%- elif include and col.column | lower not in all_includes -%}\n\n {#- Otherwise add the column to the column superset -#}\n {%- else -%}\n\n {#- update the list of columns in this relation -#}\n {%- do relation_columns[relation].append(col.column) -%}\n\n {%- if col.column in column_superset -%}\n\n {%- set stored = column_superset[col.column] -%}\n {%- if col.is_string() and stored.is_string() and col.string_size() > stored.string_size() -%}\n\n {%- do column_superset.update({col.column: col}) -%}\n\n {%- endif %}\n\n {%- else -%}\n\n {%- do column_superset.update({col.column: col}) -%}\n\n {%- endif -%}\n\n {%- endif -%}\n\n {%- endfor -%}\n {%- endfor -%}\n\n {%- set ordered_column_names = column_superset.keys() -%}\n {%- set dbt_command = flags.WHICH -%}\n\n\n {% if dbt_command in ['run', 'build'] %}\n {% if (include | length > 0 or exclude | length > 0) and not column_superset.keys() %}\n {%- set relations_string -%}\n {%- for relation in relations -%}\n {{ relation.name }}\n {%- if not loop.last %}, {% endif -%}\n {%- endfor -%}\n {%- endset -%}\n\n {%- set error_message -%}\n There were no columns found to union for relations {{ relations_string }}\n {%- endset -%}\n\n {{ exceptions.raise_compiler_error(error_message) }}\n {%- endif -%}\n {%- endif -%}\n\n {%- for relation in relations %}\n\n (\n select\n\n {%- if source_column_name is not none %}\n cast({{ dbt.string_literal(relation) }} as {{ dbt.type_string() }}) as {{ source_column_name }},\n {%- endif %}\n\n {% for col_name in ordered_column_names -%}\n\n {%- set col = column_superset[col_name] %}\n {%- set col_type = column_override.get(col.column, col.data_type) %}\n {%- set col_name = adapter.quote(col_name) if col_name in relation_columns[relation] else 'null' %}\n cast({{ col_name }} as {{ col_type }}) as {{ col.quoted }} {% if not loop.last %},{% endif -%}\n\n {%- endfor %}\n\n from {{ relation }}\n\n {% if where -%}\n where {{ where }}\n {%- endif %}\n )\n\n {% if not loop.last -%}\n union all\n {% endif -%}\n\n {%- endfor -%}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt_utils._is_ephemeral", "macro.dbt.string_literal", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.77169, "supported_languages": null}, "macro.dbt_utils.group_by": {"name": "group_by", "resource_type": "macro", 
"package_name": "dbt_utils", "path": "macros/sql/groupby.sql", "original_file_path": "macros/sql/groupby.sql", "unique_id": "macro.dbt_utils.group_by", "macro_sql": "{%- macro group_by(n) -%}\n {{ return(adapter.dispatch('group_by', 'dbt_utils')(n)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__group_by"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.771987, "supported_languages": null}, "macro.dbt_utils.default__group_by": {"name": "default__group_by", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/groupby.sql", "original_file_path": "macros/sql/groupby.sql", "unique_id": "macro.dbt_utils.default__group_by", "macro_sql": "\n\n{%- macro default__group_by(n) -%}\n\n group by {% for i in range(1, n + 1) -%}\n {{ i }}{{ ',' if not loop.last }} \n {%- endfor -%}\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.77222, "supported_languages": null}, "macro.dbt_utils.deduplicate": {"name": "deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.deduplicate", "macro_sql": "{%- macro deduplicate(relation, partition_by, order_by) -%}\n {{ return(adapter.dispatch('deduplicate', 'dbt_utils')(relation, partition_by, order_by)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.postgres__deduplicate"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.773088, "supported_languages": null}, "macro.dbt_utils.default__deduplicate": {"name": "default__deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.default__deduplicate", "macro_sql": "\n\n{%- macro default__deduplicate(relation, partition_by, order_by) -%}\n\n with row_numbered as (\n select\n _inner.*,\n row_number() over (\n partition by {{ partition_by }}\n order by {{ order_by }}\n ) as rn\n from {{ relation }} as _inner\n )\n\n select\n distinct data.*\n from {{ relation }} as data\n {#\n -- Not all DBs will support natural joins but the ones that do include:\n -- Oracle, MySQL, SQLite, Redshift, Teradata, Materialize, Databricks\n -- Apache Spark, SingleStore, Vertica\n -- Those that do not appear to support natural joins include:\n -- SQLServer, Trino, Presto, Rockset, Athena\n #}\n natural join row_numbered\n where row_numbered.rn = 1\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7732868, "supported_languages": null}, "macro.dbt_utils.redshift__deduplicate": {"name": "redshift__deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.redshift__deduplicate", "macro_sql": "{% macro redshift__deduplicate(relation, partition_by, order_by) -%}\n\n select *\n from {{ relation }} as tt\n qualify\n row_number() over (\n partition by {{ partition_by }}\n order by {{ order_by }}\n ) = 1\n\n{% endmacro %}", "depends_on": {"macros": []}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.773439, "supported_languages": null}, "macro.dbt_utils.postgres__deduplicate": {"name": "postgres__deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.postgres__deduplicate", "macro_sql": "\n{%- macro postgres__deduplicate(relation, partition_by, order_by) -%}\n\n select\n distinct on ({{ partition_by }}) *\n from {{ relation }}\n order by {{ partition_by }}{{ ',' ~ order_by }}\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.773613, "supported_languages": null}, "macro.dbt_utils.snowflake__deduplicate": {"name": "snowflake__deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.snowflake__deduplicate", "macro_sql": "\n{%- macro snowflake__deduplicate(relation, partition_by, order_by) -%}\n\n select *\n from {{ relation }}\n qualify\n row_number() over (\n partition by {{ partition_by }}\n order by {{ order_by }}\n ) = 1\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7737632, "supported_languages": null}, "macro.dbt_utils.databricks__deduplicate": {"name": "databricks__deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.databricks__deduplicate", "macro_sql": "\n{%- macro databricks__deduplicate(relation, partition_by, order_by) -%}\n\n select *\n from {{ relation }}\n qualify\n row_number() over (\n partition by {{ partition_by }}\n order by {{ order_by }}\n ) = 1\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.773916, "supported_languages": null}, "macro.dbt_utils.bigquery__deduplicate": {"name": "bigquery__deduplicate", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/deduplicate.sql", "original_file_path": "macros/sql/deduplicate.sql", "unique_id": "macro.dbt_utils.bigquery__deduplicate", "macro_sql": "\n{%- macro bigquery__deduplicate(relation, partition_by, order_by) -%}\n\n select unique.*\n from (\n select\n array_agg (\n original\n order by {{ order_by }}\n limit 1\n )[offset(0)] unique\n from {{ relation }} original\n group by {{ partition_by }}\n )\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7740731, "supported_languages": null}, "macro.dbt_utils.surrogate_key": {"name": "surrogate_key", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/surrogate_key.sql", "original_file_path": "macros/sql/surrogate_key.sql", "unique_id": "macro.dbt_utils.surrogate_key", "macro_sql": "{%- macro surrogate_key(field_list) -%}\n {% set frustrating_jinja_feature = varargs %}\n {{ return(adapter.dispatch('surrogate_key', 'dbt_utils')(field_list, 
*varargs)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__surrogate_key"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7744389, "supported_languages": null}, "macro.dbt_utils.default__surrogate_key": {"name": "default__surrogate_key", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/surrogate_key.sql", "original_file_path": "macros/sql/surrogate_key.sql", "unique_id": "macro.dbt_utils.default__surrogate_key", "macro_sql": "\n\n{%- macro default__surrogate_key(field_list) -%}\n\n{%- set error_message = '\nWarning: `dbt_utils.surrogate_key` has been replaced by \\\n`dbt_utils.generate_surrogate_key`. The new macro treats null values \\\ndifferently to empty strings. To restore the behaviour of the original \\\nmacro, add a global variable in dbt_project.yml called \\\n`surrogate_key_treat_nulls_as_empty_strings` to your \\\ndbt_project.yml file with a value of True. \\\nThe {}.{} model triggered this warning. \\\n'.format(model.package_name, model.name) -%}\n\n{%- do exceptions.raise_compiler_error(error_message) -%}\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.774668, "supported_languages": null}, "macro.dbt_utils.safe_add": {"name": "safe_add", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/safe_add.sql", "original_file_path": "macros/sql/safe_add.sql", "unique_id": "macro.dbt_utils.safe_add", "macro_sql": "{%- macro safe_add(field_list) -%}\n {{ return(adapter.dispatch('safe_add', 'dbt_utils')(field_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__safe_add"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.775021, "supported_languages": null}, "macro.dbt_utils.default__safe_add": {"name": "default__safe_add", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/safe_add.sql", "original_file_path": "macros/sql/safe_add.sql", "unique_id": "macro.dbt_utils.default__safe_add", "macro_sql": "\n\n{%- macro default__safe_add(field_list) -%}\n\n{%- if field_list is not iterable or field_list is string or field_list is mapping -%}\n\n{%- set error_message = '\nWarning: the `safe_add` macro now takes a single list argument instead of \\\nstring arguments. The {}.{} model triggered this warning. 
\\\n'.format(model.package_name, model.name) -%}\n\n{%- do exceptions.warn(error_message) -%}\n\n{%- endif -%}\n\n{% set fields = [] %}\n\n{%- for field in field_list -%}\n\n {% do fields.append(\"coalesce(\" ~ field ~ \", 0)\") %}\n\n{%- endfor -%}\n\n{{ fields|join(' +\\n ') }}\n\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.775519, "supported_languages": null}, "macro.dbt_utils.nullcheck": {"name": "nullcheck", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/nullcheck.sql", "original_file_path": "macros/sql/nullcheck.sql", "unique_id": "macro.dbt_utils.nullcheck", "macro_sql": "{% macro nullcheck(cols) %}\n {{ return(adapter.dispatch('nullcheck', 'dbt_utils')(cols)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__nullcheck"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.775828, "supported_languages": null}, "macro.dbt_utils.default__nullcheck": {"name": "default__nullcheck", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/nullcheck.sql", "original_file_path": "macros/sql/nullcheck.sql", "unique_id": "macro.dbt_utils.default__nullcheck", "macro_sql": "{% macro default__nullcheck(cols) %}\n{%- for col in cols %}\n\n {% if col.is_string() -%}\n\n nullif({{col.name}},'') as {{col.name}}\n\n {%- else -%}\n\n {{col.name}}\n\n {%- endif -%}\n\n{%- if not loop.last -%} , {%- endif -%}\n\n{%- endfor -%}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.776129, "supported_languages": null}, "macro.dbt_utils.get_tables_by_pattern_sql": {"name": "get_tables_by_pattern_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_pattern_sql.sql", "original_file_path": "macros/sql/get_tables_by_pattern_sql.sql", "unique_id": "macro.dbt_utils.get_tables_by_pattern_sql", "macro_sql": "{% macro get_tables_by_pattern_sql(schema_pattern, table_pattern, exclude='', database=target.database) %}\n {{ return(adapter.dispatch('get_tables_by_pattern_sql', 'dbt_utils')\n (schema_pattern, table_pattern, exclude, database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_tables_by_pattern_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.778878, "supported_languages": null}, "macro.dbt_utils.default__get_tables_by_pattern_sql": {"name": "default__get_tables_by_pattern_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_pattern_sql.sql", "original_file_path": "macros/sql/get_tables_by_pattern_sql.sql", "unique_id": "macro.dbt_utils.default__get_tables_by_pattern_sql", "macro_sql": "{% macro default__get_tables_by_pattern_sql(schema_pattern, table_pattern, exclude='', database=target.database) %}\n\n select distinct\n table_schema as {{ adapter.quote('table_schema') }},\n table_name as {{ adapter.quote('table_name') }},\n {{ dbt_utils.get_table_types_sql() }}\n from {{ database }}.information_schema.tables\n where table_schema ilike '{{ schema_pattern }}'\n and table_name ilike '{{ table_pattern }}'\n and table_name not ilike '{{ exclude }}'\n\n{% endmacro %}", "depends_on": 
{"macros": ["macro.dbt_utils.get_table_types_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7792919, "supported_languages": null}, "macro.dbt_utils.redshift__get_tables_by_pattern_sql": {"name": "redshift__get_tables_by_pattern_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_pattern_sql.sql", "original_file_path": "macros/sql/get_tables_by_pattern_sql.sql", "unique_id": "macro.dbt_utils.redshift__get_tables_by_pattern_sql", "macro_sql": "{% macro redshift__get_tables_by_pattern_sql(schema_pattern, table_pattern, exclude='', database=target.database) %}\n\n {% set sql %}\n select distinct\n table_schema as {{ adapter.quote('table_schema') }},\n table_name as {{ adapter.quote('table_name') }},\n {{ dbt_utils.get_table_types_sql() }}\n from \"{{ database }}\".\"information_schema\".\"tables\"\n where table_schema ilike '{{ schema_pattern }}'\n and table_name ilike '{{ table_pattern }}'\n and table_name not ilike '{{ exclude }}'\n union all\n select distinct\n schemaname as {{ adapter.quote('table_schema') }},\n tablename as {{ adapter.quote('table_name') }},\n 'external' as {{ adapter.quote('table_type') }}\n from svv_external_tables\n where redshift_database_name = '{{ database }}'\n and schemaname ilike '{{ schema_pattern }}'\n and table_name ilike '{{ table_pattern }}'\n and table_name not ilike '{{ exclude }}'\n {% endset %}\n\n {{ return(sql) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.get_table_types_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.779999, "supported_languages": null}, "macro.dbt_utils.bigquery__get_tables_by_pattern_sql": {"name": "bigquery__get_tables_by_pattern_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_pattern_sql.sql", "original_file_path": "macros/sql/get_tables_by_pattern_sql.sql", "unique_id": "macro.dbt_utils.bigquery__get_tables_by_pattern_sql", "macro_sql": "{% macro bigquery__get_tables_by_pattern_sql(schema_pattern, table_pattern, exclude='', database=target.database) %}\n\n {% if '%' in schema_pattern %}\n {% set schemata=dbt_utils._bigquery__get_matching_schemata(schema_pattern, database) %}\n {% else %}\n {% set schemata=[schema_pattern] %}\n {% endif %}\n\n {% set sql %}\n {% for schema in schemata %}\n select distinct\n table_schema,\n table_name,\n {{ dbt_utils.get_table_types_sql() }}\n\n from {{ adapter.quote(database) }}.{{ schema }}.INFORMATION_SCHEMA.TABLES\n where lower(table_name) like lower ('{{ table_pattern }}')\n and lower(table_name) not like lower ('{{ exclude }}')\n\n {% if not loop.last %} union all {% endif %}\n\n {% endfor %}\n {% endset %}\n\n {{ return(sql) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._bigquery__get_matching_schemata", "macro.dbt_utils.get_table_types_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.780661, "supported_languages": null}, "macro.dbt_utils._bigquery__get_matching_schemata": {"name": "_bigquery__get_matching_schemata", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_tables_by_pattern_sql.sql", "original_file_path": "macros/sql/get_tables_by_pattern_sql.sql", "unique_id": "macro.dbt_utils._bigquery__get_matching_schemata", "macro_sql": "{% macro 
_bigquery__get_matching_schemata(schema_pattern, database) %}\n {% if execute %}\n\n {% set sql %}\n select schema_name from {{ adapter.quote(database) }}.INFORMATION_SCHEMA.SCHEMATA\n where lower(schema_name) like lower('{{ schema_pattern }}')\n {% endset %}\n\n {% set results=run_query(sql) %}\n\n {% set schemata=results.columns['schema_name'].values() %}\n\n {{ return(schemata) }}\n\n {% else %}\n\n {{ return([]) }}\n\n {% endif %}\n\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7812212, "supported_languages": null}, "macro.dbt_utils.get_column_values": {"name": "get_column_values", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_column_values.sql", "original_file_path": "macros/sql/get_column_values.sql", "unique_id": "macro.dbt_utils.get_column_values", "macro_sql": "{% macro get_column_values(table, column, order_by='count(*) desc', max_records=none, default=none, where=none) -%}\n {{ return(adapter.dispatch('get_column_values', 'dbt_utils')(table, column, order_by, max_records, default, where)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_column_values"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.782284, "supported_languages": null}, "macro.dbt_utils.default__get_column_values": {"name": "default__get_column_values", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_column_values.sql", "original_file_path": "macros/sql/get_column_values.sql", "unique_id": "macro.dbt_utils.default__get_column_values", "macro_sql": "{% macro default__get_column_values(table, column, order_by='count(*) desc', max_records=none, default=none, where=none) -%}\n {#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. #}\n {%- if not execute -%}\n {% set default = [] if not default %}\n {{ return(default) }}\n {% endif %}\n\n {%- do dbt_utils._is_ephemeral(table, 'get_column_values') -%}\n\n {# Not all relations are tables. Renaming for internal clarity without breaking functionality for anyone using named arguments #}\n {# TODO: Change the method signature in a future 0.x.0 release #}\n {%- set target_relation = table -%}\n\n {# adapter.load_relation is a convenience wrapper to avoid building a Relation when we already have one #}\n {% set relation_exists = (load_relation(target_relation)) is not none %}\n\n {%- call statement('get_column_values', fetch_result=true) %}\n\n {%- if not relation_exists and default is none -%}\n\n {{ exceptions.raise_compiler_error(\"In get_column_values(): relation \" ~ target_relation ~ \" does not exist and no default value was provided.\") }}\n\n {%- elif not relation_exists and default is not none -%}\n\n {{ log(\"Relation \" ~ target_relation ~ \" does not exist. 
Returning the default value: \" ~ default) }}\n\n {{ return(default) }}\n\n {%- else -%}\n\n\n select\n {{ column }} as value\n\n from {{ target_relation }}\n\n {% if where is not none %}\n where {{ where }}\n {% endif %}\n\n group by {{ column }}\n order by {{ order_by }}\n\n {% if max_records is not none %}\n limit {{ max_records }}\n {% endif %}\n\n {% endif %}\n\n {%- endcall -%}\n\n {%- set value_list = load_result('get_column_values') -%}\n\n {%- if value_list and value_list['data'] -%}\n {%- set values = value_list['data'] | map(attribute=0) | list %}\n {{ return(values) }}\n {%- else -%}\n {{ return(default) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._is_ephemeral", "macro.dbt.load_relation", "macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7837708, "supported_languages": null}, "macro.dbt_utils.pivot": {"name": "pivot", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/pivot.sql", "original_file_path": "macros/sql/pivot.sql", "unique_id": "macro.dbt_utils.pivot", "macro_sql": "{% macro pivot(column,\n values,\n alias=True,\n agg='sum',\n cmp='=',\n prefix='',\n suffix='',\n then_value=1,\n else_value=0,\n quote_identifiers=True,\n distinct=False) %}\n {{ return(adapter.dispatch('pivot', 'dbt_utils')(column, values, alias, agg, cmp, prefix, suffix, then_value, else_value, quote_identifiers, distinct)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__pivot"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.784794, "supported_languages": null}, "macro.dbt_utils.default__pivot": {"name": "default__pivot", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/pivot.sql", "original_file_path": "macros/sql/pivot.sql", "unique_id": "macro.dbt_utils.default__pivot", "macro_sql": "{% macro default__pivot(column,\n values,\n alias=True,\n agg='sum',\n cmp='=',\n prefix='',\n suffix='',\n then_value=1,\n else_value=0,\n quote_identifiers=True,\n distinct=False) %}\n {% for value in values %}\n {{ agg }}(\n {% if distinct %} distinct {% endif %}\n case\n when {{ column }} {{ cmp }} '{{ dbt.escape_single_quotes(value) }}'\n then {{ then_value }}\n else {{ else_value }}\n end\n )\n {% if alias %}\n {% if quote_identifiers %}\n as {{ adapter.quote(prefix ~ value ~ suffix) }}\n {% else %}\n as {{ dbt_utils.slugify(prefix ~ value ~ suffix) }}\n {% endif %}\n {% endif %}\n {% if not loop.last %},{% endif %}\n {% endfor %}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.escape_single_quotes", "macro.dbt_utils.slugify"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.785568, "supported_languages": null}, "macro.dbt_utils.get_filtered_columns_in_relation": {"name": "get_filtered_columns_in_relation", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_filtered_columns_in_relation.sql", "original_file_path": "macros/sql/get_filtered_columns_in_relation.sql", "unique_id": "macro.dbt_utils.get_filtered_columns_in_relation", "macro_sql": "{% macro get_filtered_columns_in_relation(from, except=[]) -%}\n {{ return(adapter.dispatch('get_filtered_columns_in_relation', 'dbt_utils')(from, except)) }}\n{% endmacro %}", "depends_on": {"macros": 
["macro.dbt_utils.default__get_filtered_columns_in_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.786006, "supported_languages": null}, "macro.dbt_utils.default__get_filtered_columns_in_relation": {"name": "default__get_filtered_columns_in_relation", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_filtered_columns_in_relation.sql", "original_file_path": "macros/sql/get_filtered_columns_in_relation.sql", "unique_id": "macro.dbt_utils.default__get_filtered_columns_in_relation", "macro_sql": "{% macro default__get_filtered_columns_in_relation(from, except=[]) -%}\n {%- do dbt_utils._is_relation(from, 'get_filtered_columns_in_relation') -%}\n {%- do dbt_utils._is_ephemeral(from, 'get_filtered_columns_in_relation') -%}\n\n {# -- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. #}\n {%- if not execute -%}\n {{ return('') }}\n {% endif %}\n\n {%- set include_cols = [] %}\n {%- set cols = adapter.get_columns_in_relation(from) -%}\n {%- set except = except | map(\"lower\") | list %}\n {%- for col in cols -%}\n {%- if col.column|lower not in except -%}\n {% do include_cols.append(col.column) %}\n {%- endif %}\n {%- endfor %}\n\n {{ return(include_cols) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt_utils._is_ephemeral"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.786693, "supported_languages": null}, "macro.dbt_utils.width_bucket": {"name": "width_bucket", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/width_bucket.sql", "original_file_path": "macros/sql/width_bucket.sql", "unique_id": "macro.dbt_utils.width_bucket", "macro_sql": "{% macro width_bucket(expr, min_value, max_value, num_buckets) %}\n {{ return(adapter.dispatch('width_bucket', 'dbt_utils') (expr, min_value, max_value, num_buckets)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__width_bucket"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7873971, "supported_languages": null}, "macro.dbt_utils.default__width_bucket": {"name": "default__width_bucket", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/width_bucket.sql", "original_file_path": "macros/sql/width_bucket.sql", "unique_id": "macro.dbt_utils.default__width_bucket", "macro_sql": "{% macro default__width_bucket(expr, min_value, max_value, num_buckets) -%}\n\n {% set bin_size -%}\n (( {{ max_value }} - {{ min_value }} ) / {{ num_buckets }} )\n {%- endset %}\n (\n -- to break ties when the amount is eaxtly at the bucket egde\n case\n when\n mod(\n {{ dbt.safe_cast(expr, dbt.type_numeric() ) }},\n {{ dbt.safe_cast(bin_size, dbt.type_numeric() ) }}\n ) = 0\n then 1\n else 0\n end\n ) +\n -- Anything over max_value goes the N+1 bucket\n least(\n ceil(\n ({{ expr }} - {{ min_value }})/{{ bin_size }}\n ),\n {{ num_buckets }} + 1\n )\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.safe_cast", "macro.dbt.type_numeric"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.787829, "supported_languages": null}, "macro.dbt_utils.snowflake__width_bucket": {"name": "snowflake__width_bucket", "resource_type": 
"macro", "package_name": "dbt_utils", "path": "macros/sql/width_bucket.sql", "original_file_path": "macros/sql/width_bucket.sql", "unique_id": "macro.dbt_utils.snowflake__width_bucket", "macro_sql": "{% macro snowflake__width_bucket(expr, min_value, max_value, num_buckets) %}\n width_bucket({{ expr }}, {{ min_value }}, {{ max_value }}, {{ num_buckets }} )\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.788011, "supported_languages": null}, "macro.dbt_utils.get_query_results_as_dict": {"name": "get_query_results_as_dict", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_query_results_as_dict.sql", "original_file_path": "macros/sql/get_query_results_as_dict.sql", "unique_id": "macro.dbt_utils.get_query_results_as_dict", "macro_sql": "{% macro get_query_results_as_dict(query) %}\n {{ return(adapter.dispatch('get_query_results_as_dict', 'dbt_utils')(query)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_query_results_as_dict"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.788376, "supported_languages": null}, "macro.dbt_utils.default__get_query_results_as_dict": {"name": "default__get_query_results_as_dict", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_query_results_as_dict.sql", "original_file_path": "macros/sql/get_query_results_as_dict.sql", "unique_id": "macro.dbt_utils.default__get_query_results_as_dict", "macro_sql": "{% macro default__get_query_results_as_dict(query) %}\n\n{# This macro returns a dictionary of the form {column_name: (tuple_of_results)} #}\n\n {%- call statement('get_query_results', fetch_result=True,auto_begin=false) -%}\n\n {{ query }}\n\n {%- endcall -%}\n\n {% set sql_results={} %}\n\n {%- if execute -%}\n {% set sql_results_table = load_result('get_query_results').table.columns %}\n {% for column_name, column in sql_results_table.items() %}\n {% do sql_results.update({column_name: column.values()}) %}\n {% endfor %}\n {%- endif -%}\n\n {{ return(sql_results) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.788948, "supported_languages": null}, "macro.dbt_utils.generate_surrogate_key": {"name": "generate_surrogate_key", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/generate_surrogate_key.sql", "original_file_path": "macros/sql/generate_surrogate_key.sql", "unique_id": "macro.dbt_utils.generate_surrogate_key", "macro_sql": "{%- macro generate_surrogate_key(field_list) -%}\n {{ return(adapter.dispatch('generate_surrogate_key', 'dbt_utils')(field_list)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__generate_surrogate_key"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.789359, "supported_languages": null}, "macro.dbt_utils.default__generate_surrogate_key": {"name": "default__generate_surrogate_key", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/generate_surrogate_key.sql", "original_file_path": "macros/sql/generate_surrogate_key.sql", "unique_id": "macro.dbt_utils.default__generate_surrogate_key", "macro_sql": "\n\n{%- 
macro default__generate_surrogate_key(field_list) -%}\n\n{%- if var('surrogate_key_treat_nulls_as_empty_strings', False) -%}\n {%- set default_null_value = \"\" -%}\n{%- else -%}\n {%- set default_null_value = '_dbt_utils_surrogate_key_null_' -%}\n{%- endif -%}\n\n{%- set fields = [] -%}\n\n{%- for field in field_list -%}\n\n {%- do fields.append(\n \"coalesce(cast(\" ~ field ~ \" as \" ~ dbt.type_string() ~ \"), '\" ~ default_null_value ~\"')\"\n ) -%}\n\n {%- if not loop.last %}\n {%- do fields.append(\"'-'\") -%}\n {%- endif -%}\n\n{%- endfor -%}\n\n{{ dbt.hash(dbt.concat(fields)) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.type_string", "macro.dbt.hash", "macro.dbt.concat"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.789917, "supported_languages": null}, "macro.dbt_utils.get_table_types_sql": {"name": "get_table_types_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_table_types_sql.sql", "original_file_path": "macros/sql/get_table_types_sql.sql", "unique_id": "macro.dbt_utils.get_table_types_sql", "macro_sql": "{%- macro get_table_types_sql() -%}\n {{ return(adapter.dispatch('get_table_types_sql', 'dbt_utils')()) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_utils.postgres__get_table_types_sql"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.790376, "supported_languages": null}, "macro.dbt_utils.default__get_table_types_sql": {"name": "default__get_table_types_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_table_types_sql.sql", "original_file_path": "macros/sql/get_table_types_sql.sql", "unique_id": "macro.dbt_utils.default__get_table_types_sql", "macro_sql": "{% macro default__get_table_types_sql() %}\n case table_type\n when 'BASE TABLE' then 'table'\n when 'EXTERNAL TABLE' then 'external'\n when 'MATERIALIZED VIEW' then 'materializedview'\n else lower(table_type)\n end as {{ adapter.quote('table_type') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.790504, "supported_languages": null}, "macro.dbt_utils.postgres__get_table_types_sql": {"name": "postgres__get_table_types_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_table_types_sql.sql", "original_file_path": "macros/sql/get_table_types_sql.sql", "unique_id": "macro.dbt_utils.postgres__get_table_types_sql", "macro_sql": "{% macro postgres__get_table_types_sql() %}\n case table_type\n when 'BASE TABLE' then 'table'\n when 'FOREIGN' then 'external'\n when 'MATERIALIZED VIEW' then 'materializedview'\n else lower(table_type)\n end as {{ adapter.quote('table_type') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7906392, "supported_languages": null}, "macro.dbt_utils.databricks__get_table_types_sql": {"name": "databricks__get_table_types_sql", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_table_types_sql.sql", "original_file_path": "macros/sql/get_table_types_sql.sql", "unique_id": "macro.dbt_utils.databricks__get_table_types_sql", "macro_sql": "{% macro databricks__get_table_types_sql() %}\n case 
table_type\n when 'MANAGED' then 'table'\n when 'BASE TABLE' then 'table'\n when 'MATERIALIZED VIEW' then 'materializedview'\n else lower(table_type)\n end as {{ adapter.quote('table_type') }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.790771, "supported_languages": null}, "macro.dbt_utils.get_single_value": {"name": "get_single_value", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_single_value.sql", "original_file_path": "macros/sql/get_single_value.sql", "unique_id": "macro.dbt_utils.get_single_value", "macro_sql": "{% macro get_single_value(query, default=none) %}\n {{ return(adapter.dispatch('get_single_value', 'dbt_utils')(query, default)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__get_single_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.791299, "supported_languages": null}, "macro.dbt_utils.default__get_single_value": {"name": "default__get_single_value", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/get_single_value.sql", "original_file_path": "macros/sql/get_single_value.sql", "unique_id": "macro.dbt_utils.default__get_single_value", "macro_sql": "{% macro default__get_single_value(query, default) %}\n\n{# This macro returns the (0, 0) record in a query, i.e. the first row of the first column #}\n\n {%- call statement('get_query_result', fetch_result=True, auto_begin=false) -%}\n\n {{ query }}\n\n {%- endcall -%}\n\n {%- if execute -%}\n\n {% set r = load_result('get_query_result').table.columns[0].values() %}\n {% if r | length == 0 %}\n {% do print('Query `' ~ query ~ '` returned no rows. 
Using the default value: ' ~ default) %}\n {% set sql_result = default %}\n {% else %}\n {% set sql_result = r[0] %}\n {% endif %}\n \n {%- else -%}\n \n {% set sql_result = default %}\n \n {%- endif -%}\n\n {% do return(sql_result) %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.792031, "supported_languages": null}, "macro.dbt_utils.degrees_to_radians": {"name": "degrees_to_radians", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/haversine_distance.sql", "original_file_path": "macros/sql/haversine_distance.sql", "unique_id": "macro.dbt_utils.degrees_to_radians", "macro_sql": "{% macro degrees_to_radians(degrees) -%}\n acos(-1) * {{degrees}} / 180\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.793067, "supported_languages": null}, "macro.dbt_utils.haversine_distance": {"name": "haversine_distance", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/haversine_distance.sql", "original_file_path": "macros/sql/haversine_distance.sql", "unique_id": "macro.dbt_utils.haversine_distance", "macro_sql": "{% macro haversine_distance(lat1, lon1, lat2, lon2, unit='mi') -%}\n {{ return(adapter.dispatch('haversine_distance', 'dbt_utils')(lat1,lon1,lat2,lon2,unit)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.default__haversine_distance"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.793343, "supported_languages": null}, "macro.dbt_utils.default__haversine_distance": {"name": "default__haversine_distance", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/haversine_distance.sql", "original_file_path": "macros/sql/haversine_distance.sql", "unique_id": "macro.dbt_utils.default__haversine_distance", "macro_sql": "{% macro default__haversine_distance(lat1, lon1, lat2, lon2, unit='mi') -%}\n{%- if unit == 'mi' %}\n {% set conversion_rate = 1 %}\n{% elif unit == 'km' %}\n {% set conversion_rate = 1.60934 %}\n{% else %}\n {{ exceptions.raise_compiler_error(\"unit input must be one of 'mi' or 'km'. 
Got \" ~ unit) }}\n{% endif %}\n\n 2 * 3961 * asin(sqrt(power((sin(radians(({{ lat2 }} - {{ lat1 }}) / 2))), 2) +\n cos(radians({{lat1}})) * cos(radians({{lat2}})) *\n power((sin(radians(({{ lon2 }} - {{ lon1 }}) / 2))), 2))) * {{ conversion_rate }}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.7938812, "supported_languages": null}, "macro.dbt_utils.bigquery__haversine_distance": {"name": "bigquery__haversine_distance", "resource_type": "macro", "package_name": "dbt_utils", "path": "macros/sql/haversine_distance.sql", "original_file_path": "macros/sql/haversine_distance.sql", "unique_id": "macro.dbt_utils.bigquery__haversine_distance", "macro_sql": "{% macro bigquery__haversine_distance(lat1, lon1, lat2, lon2, unit='mi') -%}\n{% set radians_lat1 = dbt_utils.degrees_to_radians(lat1) %}\n{% set radians_lat2 = dbt_utils.degrees_to_radians(lat2) %}\n{% set radians_lon1 = dbt_utils.degrees_to_radians(lon1) %}\n{% set radians_lon2 = dbt_utils.degrees_to_radians(lon2) %}\n{%- if unit == 'mi' %}\n {% set conversion_rate = 1 %}\n{% elif unit == 'km' %}\n {% set conversion_rate = 1.60934 %}\n{% else %}\n {{ exceptions.raise_compiler_error(\"unit input must be one of 'mi' or 'km'. Got \" ~ unit) }}\n{% endif %}\n 2 * 3961 * asin(sqrt(power(sin(({{ radians_lat2 }} - {{ radians_lat1 }}) / 2), 2) +\n cos({{ radians_lat1 }}) * cos({{ radians_lat2 }}) *\n power(sin(({{ radians_lon2 }} - {{ radians_lon1 }}) / 2), 2))) * {{ conversion_rate }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.degrees_to_radians"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.794626, "supported_languages": null}, "macro.spark_utils.get_tables": {"name": "get_tables", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/maintenance_operation.sql", "original_file_path": "macros/maintenance_operation.sql", "unique_id": "macro.spark_utils.get_tables", "macro_sql": "{% macro get_tables(table_regex_pattern='.*') %}\n\n {% set tables = [] %}\n {% for database in spark__list_schemas('not_used') %}\n {% for table in spark__list_relations_without_caching(database[0]) %}\n {% set db_tablename = database[0] ~ \".\" ~ table[1] %}\n {% set is_match = modules.re.match(table_regex_pattern, db_tablename) %}\n {% if is_match %}\n {% call statement('table_detail', fetch_result=True) -%}\n describe extended {{ db_tablename }}\n {% endcall %}\n\n {% set table_type = load_result('table_detail').table|reverse|selectattr(0, 'in', ('type', 'TYPE', 'Type'))|first %}\n {% if table_type[1]|lower != 'view' %}\n {{ tables.append(db_tablename) }}\n {% endif %}\n {% endif %}\n {% endfor %}\n {% endfor %}\n {{ return(tables) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.799356, "supported_languages": null}, "macro.spark_utils.get_delta_tables": {"name": "get_delta_tables", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/maintenance_operation.sql", "original_file_path": "macros/maintenance_operation.sql", "unique_id": "macro.spark_utils.get_delta_tables", "macro_sql": "{% macro get_delta_tables(table_regex_pattern='.*') %}\n\n {% set delta_tables = [] %}\n {% for db_tablename in 
get_tables(table_regex_pattern) %}\n {% call statement('table_detail', fetch_result=True) -%}\n describe extended {{ db_tablename }}\n {% endcall %}\n\n {% set table_type = load_result('table_detail').table|reverse|selectattr(0, 'in', ('provider', 'PROVIDER', 'Provider'))|first %}\n {% if table_type[1]|lower == 'delta' %}\n {{ delta_tables.append(db_tablename) }}\n {% endif %}\n {% endfor %}\n {{ return(delta_tables) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.get_tables", "macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.799982, "supported_languages": null}, "macro.spark_utils.get_statistic_columns": {"name": "get_statistic_columns", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/maintenance_operation.sql", "original_file_path": "macros/maintenance_operation.sql", "unique_id": "macro.spark_utils.get_statistic_columns", "macro_sql": "{% macro get_statistic_columns(table) %}\n\n {% call statement('input_columns', fetch_result=True) %}\n SHOW COLUMNS IN {{ table }}\n {% endcall %}\n {% set input_columns = load_result('input_columns').table %}\n\n {% set output_columns = [] %}\n {% for column in input_columns %}\n {% call statement('column_information', fetch_result=True) %}\n DESCRIBE TABLE {{ table }} `{{ column[0] }}`\n {% endcall %}\n {% if not load_result('column_information').table[1][1].startswith('struct') and not load_result('column_information').table[1][1].startswith('array') %}\n {{ output_columns.append('`' ~ column[0] ~ '`') }}\n {% endif %}\n {% endfor %}\n {{ return(output_columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8007889, "supported_languages": null}, "macro.spark_utils.spark_optimize_delta_tables": {"name": "spark_optimize_delta_tables", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/maintenance_operation.sql", "original_file_path": "macros/maintenance_operation.sql", "unique_id": "macro.spark_utils.spark_optimize_delta_tables", "macro_sql": "{% macro spark_optimize_delta_tables(table_regex_pattern='.*') %}\n\n {% for table in get_delta_tables(table_regex_pattern) %}\n {% set start=modules.datetime.datetime.now() %}\n {% set message_prefix=loop.index ~ \" of \" ~ loop.length %}\n {{ dbt_utils.log_info(message_prefix ~ \" Optimizing \" ~ table) }}\n {% do run_query(\"optimize \" ~ table) %}\n {% set end=modules.datetime.datetime.now() %}\n {% set total_seconds = (end - start).total_seconds() | round(2) %}\n {{ dbt_utils.log_info(message_prefix ~ \" Finished \" ~ table ~ \" in \" ~ total_seconds ~ \"s\") }}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.get_delta_tables", "macro.dbt_utils.log_info", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.801431, "supported_languages": null}, "macro.spark_utils.spark_vacuum_delta_tables": {"name": "spark_vacuum_delta_tables", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/maintenance_operation.sql", "original_file_path": "macros/maintenance_operation.sql", "unique_id": "macro.spark_utils.spark_vacuum_delta_tables", "macro_sql": "{% macro spark_vacuum_delta_tables(table_regex_pattern='.*') %}\n\n {% for 
table in get_delta_tables(table_regex_pattern) %}\n {% set start=modules.datetime.datetime.now() %}\n {% set message_prefix=loop.index ~ \" of \" ~ loop.length %}\n {{ dbt_utils.log_info(message_prefix ~ \" Vacuuming \" ~ table) }}\n {% do run_query(\"vacuum \" ~ table) %}\n {% set end=modules.datetime.datetime.now() %}\n {% set total_seconds = (end - start).total_seconds() | round(2) %}\n {{ dbt_utils.log_info(message_prefix ~ \" Finished \" ~ table ~ \" in \" ~ total_seconds ~ \"s\") }}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.get_delta_tables", "macro.dbt_utils.log_info", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.802132, "supported_languages": null}, "macro.spark_utils.spark_analyze_tables": {"name": "spark_analyze_tables", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/maintenance_operation.sql", "original_file_path": "macros/maintenance_operation.sql", "unique_id": "macro.spark_utils.spark_analyze_tables", "macro_sql": "{% macro spark_analyze_tables(table_regex_pattern='.*') %}\n\n {% for table in get_tables(table_regex_pattern) %}\n {% set start=modules.datetime.datetime.now() %}\n {% set columns = get_statistic_columns(table) | join(',') %}\n {% set message_prefix=loop.index ~ \" of \" ~ loop.length %}\n {{ dbt_utils.log_info(message_prefix ~ \" Analyzing \" ~ table) }}\n {% if columns != '' %}\n {% do run_query(\"analyze table \" ~ table ~ \" compute statistics for columns \" ~ columns) %}\n {% endif %}\n {% set end=modules.datetime.datetime.now() %}\n {% set total_seconds = (end - start).total_seconds() | round(2) %}\n {{ dbt_utils.log_info(message_prefix ~ \" Finished \" ~ table ~ \" in \" ~ total_seconds ~ \"s\") }}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.get_tables", "macro.spark_utils.get_statistic_columns", "macro.dbt_utils.log_info", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.803093, "supported_languages": null}, "macro.spark_utils.spark__concat": {"name": "spark__concat", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/cross_db_utils/concat.sql", "original_file_path": "macros/dbt_utils/cross_db_utils/concat.sql", "unique_id": "macro.spark_utils.spark__concat", "macro_sql": "{% macro spark__concat(fields) -%}\n concat({{ fields|join(', ') }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.803285, "supported_languages": null}, "macro.spark_utils.spark__type_numeric": {"name": "spark__type_numeric", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/cross_db_utils/datatypes.sql", "original_file_path": "macros/dbt_utils/cross_db_utils/datatypes.sql", "unique_id": "macro.spark_utils.spark__type_numeric", "macro_sql": "{% macro spark__type_numeric() %}\n decimal(28, 6)\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8033931, "supported_languages": null}, "macro.spark_utils.spark__dateadd": {"name": "spark__dateadd", "resource_type": "macro", "package_name": "spark_utils", "path": 
"macros/dbt_utils/cross_db_utils/dateadd.sql", "original_file_path": "macros/dbt_utils/cross_db_utils/dateadd.sql", "unique_id": "macro.spark_utils.spark__dateadd", "macro_sql": "{% macro spark__dateadd(datepart, interval, from_date_or_timestamp) %}\n\n {%- set clock_component -%}\n {# make sure the dates + timestamps are real, otherwise raise an error asap #}\n to_unix_timestamp({{ spark_utils.assert_not_null('to_timestamp', from_date_or_timestamp) }})\n - to_unix_timestamp({{ spark_utils.assert_not_null('date', from_date_or_timestamp) }})\n {%- endset -%}\n\n {%- if datepart in ['day', 'week'] -%}\n \n {%- set multiplier = 7 if datepart == 'week' else 1 -%}\n\n to_timestamp(\n to_unix_timestamp(\n date_add(\n {{ spark_utils.assert_not_null('date', from_date_or_timestamp) }},\n cast({{interval}} * {{multiplier}} as int)\n )\n ) + {{clock_component}}\n )\n\n {%- elif datepart in ['month', 'quarter', 'year'] -%}\n \n {%- set multiplier -%} \n {%- if datepart == 'month' -%} 1\n {%- elif datepart == 'quarter' -%} 3\n {%- elif datepart == 'year' -%} 12\n {%- endif -%}\n {%- endset -%}\n\n to_timestamp(\n to_unix_timestamp(\n add_months(\n {{ spark_utils.assert_not_null('date', from_date_or_timestamp) }},\n cast({{interval}} * {{multiplier}} as int)\n )\n ) + {{clock_component}}\n )\n\n {%- elif datepart in ('hour', 'minute', 'second', 'millisecond', 'microsecond') -%}\n \n {%- set multiplier -%} \n {%- if datepart == 'hour' -%} 3600\n {%- elif datepart == 'minute' -%} 60\n {%- elif datepart == 'second' -%} 1\n {%- elif datepart == 'millisecond' -%} (1/1000000)\n {%- elif datepart == 'microsecond' -%} (1/1000000)\n {%- endif -%}\n {%- endset -%}\n\n to_timestamp(\n {{ spark_utils.assert_not_null('to_unix_timestamp', from_date_or_timestamp) }}\n + cast({{interval}} * {{multiplier}} as int)\n )\n\n {%- else -%}\n\n {{ exceptions.raise_compiler_error(\"macro dateadd not implemented for datepart ~ '\" ~ datepart ~ \"' ~ on Spark\") }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.assert_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.805989, "supported_languages": null}, "macro.spark_utils.spark__datediff": {"name": "spark__datediff", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/cross_db_utils/datediff.sql", "original_file_path": "macros/dbt_utils/cross_db_utils/datediff.sql", "unique_id": "macro.spark_utils.spark__datediff", "macro_sql": "{% macro spark__datediff(first_date, second_date, datepart) %}\n\n {%- if datepart in ['day', 'week', 'month', 'quarter', 'year'] -%}\n \n {# make sure the dates are real, otherwise raise an error asap #}\n {% set first_date = spark_utils.assert_not_null('date', first_date) %}\n {% set second_date = spark_utils.assert_not_null('date', second_date) %}\n \n {%- endif -%}\n \n {%- if datepart == 'day' -%}\n \n datediff({{second_date}}, {{first_date}})\n \n {%- elif datepart == 'week' -%}\n \n case when {{first_date}} < {{second_date}}\n then floor(datediff({{second_date}}, {{first_date}})/7)\n else ceil(datediff({{second_date}}, {{first_date}})/7)\n end\n \n -- did we cross a week boundary (Sunday)?\n + case\n when {{first_date}} < {{second_date}} and dayofweek({{second_date}}) < dayofweek({{first_date}}) then 1\n when {{first_date}} > {{second_date}} and dayofweek({{second_date}}) > dayofweek({{first_date}}) then -1\n else 0 end\n\n {%- elif datepart == 'month' -%}\n\n case when 
{{first_date}} < {{second_date}}\n then floor(months_between(date({{second_date}}), date({{first_date}})))\n else ceil(months_between(date({{second_date}}), date({{first_date}})))\n end\n \n -- did we cross a month boundary?\n + case\n when {{first_date}} < {{second_date}} and dayofmonth({{second_date}}) < dayofmonth({{first_date}}) then 1\n when {{first_date}} > {{second_date}} and dayofmonth({{second_date}}) > dayofmonth({{first_date}}) then -1\n else 0 end\n \n {%- elif datepart == 'quarter' -%}\n \n case when {{first_date}} < {{second_date}}\n then floor(months_between(date({{second_date}}), date({{first_date}}))/3)\n else ceil(months_between(date({{second_date}}), date({{first_date}}))/3)\n end\n \n -- did we cross a quarter boundary?\n + case\n when {{first_date}} < {{second_date}} and (\n (dayofyear({{second_date}}) - (quarter({{second_date}}) * 365/4))\n < (dayofyear({{first_date}}) - (quarter({{first_date}}) * 365/4))\n ) then 1\n when {{first_date}} > {{second_date}} and (\n (dayofyear({{second_date}}) - (quarter({{second_date}}) * 365/4))\n > (dayofyear({{first_date}}) - (quarter({{first_date}}) * 365/4))\n ) then -1\n else 0 end\n\n {%- elif datepart == 'year' -%}\n \n year({{second_date}}) - year({{first_date}})\n\n {%- elif datepart in ('hour', 'minute', 'second', 'millisecond', 'microsecond') -%}\n \n {%- set divisor -%} \n {%- if datepart == 'hour' -%} 3600\n {%- elif datepart == 'minute' -%} 60\n {%- elif datepart == 'second' -%} 1\n {%- elif datepart == 'millisecond' -%} (1/1000)\n {%- elif datepart == 'microsecond' -%} (1/1000000)\n {%- endif -%}\n {%- endset -%}\n\n case when {{first_date}} < {{second_date}}\n then ceil((\n {# make sure the timestamps are real, otherwise raise an error asap #}\n {{ spark_utils.assert_not_null('to_unix_timestamp', spark_utils.assert_not_null('to_timestamp', second_date)) }}\n - {{ spark_utils.assert_not_null('to_unix_timestamp', spark_utils.assert_not_null('to_timestamp', first_date)) }}\n ) / {{divisor}})\n else floor((\n {{ spark_utils.assert_not_null('to_unix_timestamp', spark_utils.assert_not_null('to_timestamp', second_date)) }}\n - {{ spark_utils.assert_not_null('to_unix_timestamp', spark_utils.assert_not_null('to_timestamp', first_date)) }}\n ) / {{divisor}})\n end\n \n {% if datepart == 'millisecond' %}\n + cast(date_format({{second_date}}, 'SSS') as int)\n - cast(date_format({{first_date}}, 'SSS') as int)\n {% endif %}\n \n {% if datepart == 'microsecond' %} \n {% set capture_str = '[0-9]{4}-[0-9]{2}-[0-9]{2}.[0-9]{2}:[0-9]{2}:[0-9]{2}.([0-9]{6})' %}\n -- Spark doesn't really support microseconds, so this is a massive hack!\n -- It will only work if the timestamp-string is of the format\n -- 'yyyy-MM-dd-HH mm.ss.SSSSSS'\n + cast(regexp_extract({{second_date}}, '{{capture_str}}', 1) as int)\n - cast(regexp_extract({{first_date}}, '{{capture_str}}', 1) as int) \n {% endif %}\n\n {%- else -%}\n\n {{ exceptions.raise_compiler_error(\"macro datediff not implemented for datepart ~ '\" ~ datepart ~ \"' ~ on Spark\") }}\n\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.assert_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.812663, "supported_languages": null}, "macro.spark_utils.spark__current_timestamp": {"name": "spark__current_timestamp", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/cross_db_utils/current_timestamp.sql", "original_file_path": 
"macros/dbt_utils/cross_db_utils/current_timestamp.sql", "unique_id": "macro.spark_utils.spark__current_timestamp", "macro_sql": "{% macro spark__current_timestamp() %}\n current_timestamp()\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8128169, "supported_languages": null}, "macro.spark_utils.spark__current_timestamp_in_utc": {"name": "spark__current_timestamp_in_utc", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/cross_db_utils/current_timestamp.sql", "original_file_path": "macros/dbt_utils/cross_db_utils/current_timestamp.sql", "unique_id": "macro.spark_utils.spark__current_timestamp_in_utc", "macro_sql": "{% macro spark__current_timestamp_in_utc() %}\n unix_timestamp()\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.812896, "supported_languages": null}, "macro.spark_utils.spark__split_part": {"name": "spark__split_part", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/cross_db_utils/split_part.sql", "original_file_path": "macros/dbt_utils/cross_db_utils/split_part.sql", "unique_id": "macro.spark_utils.spark__split_part", "macro_sql": "{% macro spark__split_part(string_text, delimiter_text, part_number) %}\n\n {% set delimiter_expr %}\n \n -- escape if starts with a special character\n case when regexp_extract({{ delimiter_text }}, '([^A-Za-z0-9])(.*)', 1) != '_'\n then concat('\\\\', {{ delimiter_text }})\n else {{ delimiter_text }} end\n \n {% endset %}\n\n {% set split_part_expr %}\n \n split(\n {{ string_text }},\n {{ delimiter_expr }}\n )[({{ part_number - 1 }})]\n \n {% endset %}\n \n {{ return(split_part_expr) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.81343, "supported_languages": null}, "macro.spark_utils.spark__get_relations_by_pattern": {"name": "spark__get_relations_by_pattern", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "original_file_path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "unique_id": "macro.spark_utils.spark__get_relations_by_pattern", "macro_sql": "{% macro spark__get_relations_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database) %}\n\n {%- call statement('get_tables', fetch_result=True) %}\n\n show table extended in {{ schema_pattern }} like '{{ table_pattern }}'\n\n {%- endcall -%}\n\n {%- set table_list = load_result('get_tables') -%}\n\n {%- if table_list and table_list['table'] -%}\n {%- set tbl_relations = [] -%}\n {%- for row in table_list['table'] -%}\n {%- set tbl_relation = api.Relation.create(\n database=None,\n schema=row[0],\n identifier=row[1],\n type=('view' if 'Type: VIEW' in row[3] else 'table')\n ) -%}\n {%- do tbl_relations.append(tbl_relation) -%}\n {%- endfor -%}\n\n {{ return(tbl_relations) }}\n {%- else -%}\n {{ return([]) }}\n {%- endif -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.814923, "supported_languages": null}, "macro.spark_utils.spark__get_relations_by_prefix": {"name": 
"spark__get_relations_by_prefix", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "original_file_path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "unique_id": "macro.spark_utils.spark__get_relations_by_prefix", "macro_sql": "{% macro spark__get_relations_by_prefix(schema_pattern, table_pattern, exclude='', database=target.database) %}\n {% set table_pattern = table_pattern ~ '*' %}\n {{ return(spark_utils.spark__get_relations_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.spark__get_relations_by_pattern"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.815233, "supported_languages": null}, "macro.spark_utils.spark__get_tables_by_pattern": {"name": "spark__get_tables_by_pattern", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "original_file_path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "unique_id": "macro.spark_utils.spark__get_tables_by_pattern", "macro_sql": "{% macro spark__get_tables_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database) %}\n {{ return(spark_utils.spark__get_relations_by_pattern(schema_pattern, table_pattern, exclude='', database=target.database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.spark__get_relations_by_pattern"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.815493, "supported_languages": null}, "macro.spark_utils.spark__get_tables_by_prefix": {"name": "spark__get_tables_by_prefix", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "original_file_path": "macros/dbt_utils/sql/get_relations_by_prefix.sql", "unique_id": "macro.spark_utils.spark__get_tables_by_prefix", "macro_sql": "{% macro spark__get_tables_by_prefix(schema_pattern, table_pattern, exclude='', database=target.database) %}\n {{ return(spark_utils.spark__get_relations_by_prefix(schema_pattern, table_pattern, exclude='', database=target.database)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.spark_utils.spark__get_relations_by_prefix"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.815742, "supported_languages": null}, "macro.spark_utils.assert_not_null": {"name": "assert_not_null", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/etc/assert_not_null.sql", "original_file_path": "macros/etc/assert_not_null.sql", "unique_id": "macro.spark_utils.assert_not_null", "macro_sql": "{% macro assert_not_null(function, arg) -%}\n {{ return(adapter.dispatch('assert_not_null', 'spark_utils')(function, arg)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.spark_utils.default__assert_not_null"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.816043, "supported_languages": null}, "macro.spark_utils.default__assert_not_null": {"name": "default__assert_not_null", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/etc/assert_not_null.sql", "original_file_path": "macros/etc/assert_not_null.sql", "unique_id": 
"macro.spark_utils.default__assert_not_null", "macro_sql": "{% macro default__assert_not_null(function, arg) %}\n\n coalesce({{function}}({{arg}}), nvl2({{function}}({{arg}}), assert_true({{function}}({{arg}}) is not null), null))\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8162282, "supported_languages": null}, "macro.spark_utils.spark__convert_timezone": {"name": "spark__convert_timezone", "resource_type": "macro", "package_name": "spark_utils", "path": "macros/snowplow/convert_timezone.sql", "original_file_path": "macros/snowplow/convert_timezone.sql", "unique_id": "macro.spark_utils.spark__convert_timezone", "macro_sql": "{% macro spark__convert_timezone(in_tz, out_tz, in_timestamp) %}\n from_utc_timestamp(to_utc_timestamp({{in_timestamp}}, {{in_tz}}), {{out_tz}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.816422, "supported_languages": null}, "macro.dbt_date.get_date_dimension": {"name": "get_date_dimension", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_date_dimension.sql", "original_file_path": "macros/get_date_dimension.sql", "unique_id": "macro.dbt_date.get_date_dimension", "macro_sql": "{% macro get_date_dimension(start_date, end_date) %}\n {{ adapter.dispatch('get_date_dimension', 'dbt_date') (start_date, end_date) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__get_date_dimension"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.824498, "supported_languages": null}, "macro.dbt_date.default__get_date_dimension": {"name": "default__get_date_dimension", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_date_dimension.sql", "original_file_path": "macros/get_date_dimension.sql", "unique_id": "macro.dbt_date.default__get_date_dimension", "macro_sql": "{% macro default__get_date_dimension(start_date, end_date) %}\nwith base_dates as (\n {{ dbt_date.get_base_dates(start_date, end_date) }}\n),\ndates_with_prior_year_dates as (\n\n select\n cast(d.date_day as date) as date_day,\n cast({{ dbt.dateadd('year', -1 , 'd.date_day') }} as date) as prior_year_date_day,\n cast({{ dbt.dateadd('day', -364 , 'd.date_day') }} as date) as prior_year_over_year_date_day\n from\n \tbase_dates d\n\n)\nselect\n d.date_day,\n {{ dbt_date.yesterday('d.date_day') }} as prior_date_day,\n {{ dbt_date.tomorrow('d.date_day') }} as next_date_day,\n d.prior_year_date_day as prior_year_date_day,\n d.prior_year_over_year_date_day,\n {{ dbt_date.day_of_week('d.date_day', isoweek=false) }} as day_of_week,\n {{ dbt_date.day_of_week('d.date_day', isoweek=true) }} as day_of_week_iso,\n {{ dbt_date.day_name('d.date_day', short=false) }} as day_of_week_name,\n {{ dbt_date.day_name('d.date_day', short=true) }} as day_of_week_name_short,\n {{ dbt_date.day_of_month('d.date_day') }} as day_of_month,\n {{ dbt_date.day_of_year('d.date_day') }} as day_of_year,\n\n {{ dbt_date.week_start('d.date_day') }} as week_start_date,\n {{ dbt_date.week_end('d.date_day') }} as week_end_date,\n {{ dbt_date.week_start('d.prior_year_over_year_date_day') }} as prior_year_week_start_date,\n {{ dbt_date.week_end('d.prior_year_over_year_date_day') }} as prior_year_week_end_date,\n {{ 
dbt_date.week_of_year('d.date_day') }} as week_of_year,\n\n {{ dbt_date.iso_week_start('d.date_day') }} as iso_week_start_date,\n {{ dbt_date.iso_week_end('d.date_day') }} as iso_week_end_date,\n {{ dbt_date.iso_week_start('d.prior_year_over_year_date_day') }} as prior_year_iso_week_start_date,\n {{ dbt_date.iso_week_end('d.prior_year_over_year_date_day') }} as prior_year_iso_week_end_date,\n {{ dbt_date.iso_week_of_year('d.date_day') }} as iso_week_of_year,\n\n {{ dbt_date.week_of_year('d.prior_year_over_year_date_day') }} as prior_year_week_of_year,\n {{ dbt_date.iso_week_of_year('d.prior_year_over_year_date_day') }} as prior_year_iso_week_of_year,\n\n cast({{ dbt_date.date_part('month', 'd.date_day') }} as {{ dbt.type_int() }}) as month_of_year,\n {{ dbt_date.month_name('d.date_day', short=false) }} as month_name,\n {{ dbt_date.month_name('d.date_day', short=true) }} as month_name_short,\n\n cast({{ dbt.date_trunc('month', 'd.date_day') }} as date) as month_start_date,\n cast({{ last_day('d.date_day', 'month') }} as date) as month_end_date,\n\n cast({{ dbt.date_trunc('month', 'd.prior_year_date_day') }} as date) as prior_year_month_start_date,\n cast({{ last_day('d.prior_year_date_day', 'month') }} as date) as prior_year_month_end_date,\n\n cast({{ dbt_date.date_part('quarter', 'd.date_day') }} as {{ dbt.type_int() }}) as quarter_of_year,\n cast({{ dbt.date_trunc('quarter', 'd.date_day') }} as date) as quarter_start_date,\n cast({{ last_day('d.date_day', 'quarter') }} as date) as quarter_end_date,\n\n cast({{ dbt_date.date_part('year', 'd.date_day') }} as {{ dbt.type_int() }}) as year_number,\n cast({{ dbt.date_trunc('year', 'd.date_day') }} as date) as year_start_date,\n cast({{ last_day('d.date_day', 'year') }} as date) as year_end_date\nfrom\n dates_with_prior_year_dates d\norder by 1\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.get_base_dates", "macro.dbt.dateadd", "macro.dbt_date.yesterday", "macro.dbt_date.tomorrow", "macro.dbt_date.day_of_week", "macro.dbt_date.day_name", "macro.dbt_date.day_of_month", "macro.dbt_date.day_of_year", "macro.dbt_date.week_start", "macro.dbt_date.week_end", "macro.dbt_date.week_of_year", "macro.dbt_date.iso_week_start", "macro.dbt_date.iso_week_end", "macro.dbt_date.iso_week_of_year", "macro.dbt_date.date_part", "macro.dbt.type_int", "macro.dbt_date.month_name", "macro.dbt.date_trunc", "macro.dbt.last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.826561, "supported_languages": null}, "macro.dbt_date.postgres__get_date_dimension": {"name": "postgres__get_date_dimension", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_date_dimension.sql", "original_file_path": "macros/get_date_dimension.sql", "unique_id": "macro.dbt_date.postgres__get_date_dimension", "macro_sql": "{% macro postgres__get_date_dimension(start_date, end_date) %}\nwith base_dates as (\n {{ dbt_date.get_base_dates(start_date, end_date) }}\n),\ndates_with_prior_year_dates as (\n\n select\n cast(d.date_day as date) as date_day,\n cast({{ dbt.dateadd('year', -1 , 'd.date_day') }} as date) as prior_year_date_day,\n cast({{ dbt.dateadd('day', -364 , 'd.date_day') }} as date) as prior_year_over_year_date_day\n from\n \tbase_dates d\n\n)\nselect\n d.date_day,\n {{ dbt_date.yesterday('d.date_day') }} as prior_date_day,\n {{ dbt_date.tomorrow('d.date_day') }} as next_date_day,\n d.prior_year_date_day as prior_year_date_day,\n d.prior_year_over_year_date_day,\n 
{{ dbt_date.day_of_week('d.date_day', isoweek=true) }} as day_of_week,\n\n {{ dbt_date.day_name('d.date_day', short=false) }} as day_of_week_name,\n {{ dbt_date.day_name('d.date_day', short=true) }} as day_of_week_name_short,\n {{ dbt_date.day_of_month('d.date_day') }} as day_of_month,\n {{ dbt_date.day_of_year('d.date_day') }} as day_of_year,\n\n {{ dbt_date.week_start('d.date_day') }} as week_start_date,\n {{ dbt_date.week_end('d.date_day') }} as week_end_date,\n {{ dbt_date.week_start('d.prior_year_over_year_date_day') }} as prior_year_week_start_date,\n {{ dbt_date.week_end('d.prior_year_over_year_date_day') }} as prior_year_week_end_date,\n {{ dbt_date.week_of_year('d.date_day') }} as week_of_year,\n\n {{ dbt_date.iso_week_start('d.date_day') }} as iso_week_start_date,\n {{ dbt_date.iso_week_end('d.date_day') }} as iso_week_end_date,\n {{ dbt_date.iso_week_start('d.prior_year_over_year_date_day') }} as prior_year_iso_week_start_date,\n {{ dbt_date.iso_week_end('d.prior_year_over_year_date_day') }} as prior_year_iso_week_end_date,\n {{ dbt_date.iso_week_of_year('d.date_day') }} as iso_week_of_year,\n\n {{ dbt_date.week_of_year('d.prior_year_over_year_date_day') }} as prior_year_week_of_year,\n {{ dbt_date.iso_week_of_year('d.prior_year_over_year_date_day') }} as prior_year_iso_week_of_year,\n\n cast({{ dbt_date.date_part('month', 'd.date_day') }} as {{ dbt.type_int() }}) as month_of_year,\n {{ dbt_date.month_name('d.date_day', short=false) }} as month_name,\n {{ dbt_date.month_name('d.date_day', short=true) }} as month_name_short,\n\n cast({{ dbt.date_trunc('month', 'd.date_day') }} as date) as month_start_date,\n cast({{ last_day('d.date_day', 'month') }} as date) as month_end_date,\n\n cast({{ dbt.date_trunc('month', 'd.prior_year_date_day') }} as date) as prior_year_month_start_date,\n cast({{ last_day('d.prior_year_date_day', 'month') }} as date) as prior_year_month_end_date,\n\n cast({{ dbt_date.date_part('quarter', 'd.date_day') }} as {{ dbt.type_int() }}) as quarter_of_year,\n cast({{ dbt.date_trunc('quarter', 'd.date_day') }} as date) as quarter_start_date,\n {# last_day does not support quarter because postgresql does not support quarter interval. 
#}\n cast({{dbt.dateadd('day', '-1', dbt.dateadd('month', '3', dbt.date_trunc('quarter', 'd.date_day')))}} as date) as quarter_end_date,\n\n cast({{ dbt_date.date_part('year', 'd.date_day') }} as {{ dbt.type_int() }}) as year_number,\n cast({{ dbt.date_trunc('year', 'd.date_day') }} as date) as year_start_date,\n cast({{ last_day('d.date_day', 'year') }} as date) as year_end_date\nfrom\n dates_with_prior_year_dates d\norder by 1\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.get_base_dates", "macro.dbt.dateadd", "macro.dbt_date.yesterday", "macro.dbt_date.tomorrow", "macro.dbt_date.day_of_week", "macro.dbt_date.day_name", "macro.dbt_date.day_of_month", "macro.dbt_date.day_of_year", "macro.dbt_date.week_start", "macro.dbt_date.week_end", "macro.dbt_date.week_of_year", "macro.dbt_date.iso_week_start", "macro.dbt_date.iso_week_end", "macro.dbt_date.iso_week_of_year", "macro.dbt_date.date_part", "macro.dbt.type_int", "macro.dbt_date.month_name", "macro.dbt.date_trunc", "macro.dbt.last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.828852, "supported_languages": null}, "macro.dbt_date.get_base_dates": {"name": "get_base_dates", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_base_dates.sql", "original_file_path": "macros/get_base_dates.sql", "unique_id": "macro.dbt_date.get_base_dates", "macro_sql": "{% macro get_base_dates(start_date=None, end_date=None, n_dateparts=None, datepart=\"day\") %}\n {{ adapter.dispatch('get_base_dates', 'dbt_date') (start_date, end_date, n_dateparts, datepart) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__get_base_dates"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.830416, "supported_languages": null}, "macro.dbt_date.default__get_base_dates": {"name": "default__get_base_dates", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_base_dates.sql", "original_file_path": "macros/get_base_dates.sql", "unique_id": "macro.dbt_date.default__get_base_dates", "macro_sql": "{% macro default__get_base_dates(start_date, end_date, n_dateparts, datepart) %}\n\n{%- if start_date and end_date -%}\n{%- set start_date=\"cast('\" ~ start_date ~ \"' as \" ~ dbt.type_timestamp() ~ \")\" -%}\n{%- set end_date=\"cast('\" ~ end_date ~ \"' as \" ~ dbt.type_timestamp() ~ \")\" -%}\n\n{%- elif n_dateparts and datepart -%}\n\n{%- set start_date = dbt.dateadd(datepart, -1 * n_dateparts, dbt_date.today()) -%}\n{%- set end_date = dbt_date.tomorrow() -%}\n{%- endif -%}\n\nwith date_spine as\n(\n\n {{ dbt_date.date_spine(\n datepart=datepart,\n start_date=start_date,\n end_date=end_date,\n )\n }}\n\n)\nselect\n cast(d.date_{{ datepart }} as {{ dbt.type_timestamp() }}) as date_{{ datepart }}\nfrom\n date_spine d\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.dateadd", "macro.dbt_date.today", "macro.dbt_date.tomorrow", "macro.dbt_date.date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.831121, "supported_languages": null}, "macro.dbt_date.bigquery__get_base_dates": {"name": "bigquery__get_base_dates", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_base_dates.sql", "original_file_path": "macros/get_base_dates.sql", "unique_id": 
"macro.dbt_date.bigquery__get_base_dates", "macro_sql": "{% macro bigquery__get_base_dates(start_date, end_date, n_dateparts, datepart) %}\n\n{%- if start_date and end_date -%}\n{%- set start_date=\"cast('\" ~ start_date ~ \"' as datetime )\" -%}\n{%- set end_date=\"cast('\" ~ end_date ~ \"' as datetime )\" -%}\n\n{%- elif n_dateparts and datepart -%}\n\n{%- set start_date = dbt.dateadd(datepart, -1 * n_dateparts, dbt_date.today()) -%}\n{%- set end_date = dbt_date.tomorrow() -%}\n{%- endif -%}\n\nwith date_spine as\n(\n\n {{ dbt_date.date_spine(\n datepart=datepart,\n start_date=start_date,\n end_date=end_date,\n )\n }}\n\n)\nselect\n cast(d.date_{{ datepart }} as {{ dbt.type_timestamp() }}) as date_{{ datepart }}\nfrom\n date_spine d\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt_date.today", "macro.dbt_date.tomorrow", "macro.dbt_date.date_spine", "macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.831799, "supported_languages": null}, "macro.dbt_date.trino__get_base_dates": {"name": "trino__get_base_dates", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/get_base_dates.sql", "original_file_path": "macros/get_base_dates.sql", "unique_id": "macro.dbt_date.trino__get_base_dates", "macro_sql": "{% macro trino__get_base_dates(start_date, end_date, n_dateparts, datepart) %}\n\n{%- if start_date and end_date -%}\n{%- set start_date=\"cast('\" ~ start_date ~ \"' as \" ~ dbt.type_timestamp() ~ \")\" -%}\n{%- set end_date=\"cast('\" ~ end_date ~ \"' as \" ~ dbt.type_timestamp() ~ \")\" -%}\n\n{%- elif n_dateparts and datepart -%}\n\n{%- set start_date = dbt.dateadd(datepart, -1 * n_dateparts, dbt_date.now()) -%}\n{%- set end_date = dbt_date.tomorrow() -%}\n{%- endif -%}\n\nwith date_spine as\n(\n\n {{ dbt_date.date_spine(\n datepart=datepart,\n start_date=start_date,\n end_date=end_date,\n )\n }}\n\n)\nselect\n cast(d.date_{{ datepart }} as {{ dbt.type_timestamp() }}) as date_{{ datepart }}\nfrom\n date_spine d\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.dateadd", "macro.dbt_date.now", "macro.dbt_date.tomorrow", "macro.dbt_date.date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.832499, "supported_languages": null}, "macro.dbt_date.get_intervals_between": {"name": "get_intervals_between", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/date_spine.sql", "original_file_path": "macros/_utils/date_spine.sql", "unique_id": "macro.dbt_date.get_intervals_between", "macro_sql": "{% macro get_intervals_between(start_date, end_date, datepart) -%}\n {{ return(adapter.dispatch('get_intervals_between', 'dbt_date')(start_date, end_date, datepart)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__get_intervals_between"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.833175, "supported_languages": null}, "macro.dbt_date.default__get_intervals_between": {"name": "default__get_intervals_between", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/date_spine.sql", "original_file_path": "macros/_utils/date_spine.sql", "unique_id": "macro.dbt_date.default__get_intervals_between", "macro_sql": "{% macro default__get_intervals_between(start_date, 
end_date, datepart) -%}\n {%- call statement('get_intervals_between', fetch_result=True) %}\n\n select {{ dbt.datediff(start_date, end_date, datepart) }}\n\n {%- endcall -%}\n\n {%- set value_list = load_result('get_intervals_between') -%}\n\n {%- if value_list and value_list['data'] -%}\n {%- set values = value_list['data'] | map(attribute=0) | list %}\n {{ return(values[0]) }}\n {%- else -%}\n {{ return(1) }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.833751, "supported_languages": null}, "macro.dbt_date.date_spine": {"name": "date_spine", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/date_spine.sql", "original_file_path": "macros/_utils/date_spine.sql", "unique_id": "macro.dbt_date.date_spine", "macro_sql": "{% macro date_spine(datepart, start_date, end_date) %}\n {{ return(adapter.dispatch('date_spine', 'dbt_date')(datepart, start_date, end_date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.833977, "supported_languages": null}, "macro.dbt_date.default__date_spine": {"name": "default__date_spine", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/date_spine.sql", "original_file_path": "macros/_utils/date_spine.sql", "unique_id": "macro.dbt_date.default__date_spine", "macro_sql": "{% macro default__date_spine(datepart, start_date, end_date) %}\n\n\n{# call as follows:\n\ndate_spine(\n \"day\",\n \"to_date('01/01/2016', 'mm/dd/yyyy')\",\n \"dbt.dateadd(week, 1, current_date)\"\n) #}\n\n\nwith rawdata as (\n\n {{\n dbt_date.generate_series(\n dbt_date.get_intervals_between(start_date, end_date, datepart)\n )\n }}\n\n),\n\nall_periods as (\n\n select (\n {{\n dbt.dateadd(\n datepart,\n \"(row_number() over (order by 1) - 1)\",\n start_date\n )\n }}\n ) as date_{{datepart}}\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_{{datepart}} <= {{ end_date }}\n\n)\n\nselect * from filtered\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.generate_series", "macro.dbt_date.get_intervals_between", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8343198, "supported_languages": null}, "macro.dbt_date.get_powers_of_two": {"name": "get_powers_of_two", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/generate_series.sql", "original_file_path": "macros/_utils/generate_series.sql", "unique_id": "macro.dbt_date.get_powers_of_two", "macro_sql": "{% macro get_powers_of_two(upper_bound) %}\n {{ return(adapter.dispatch('get_powers_of_two', 'dbt_date')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8350878, "supported_languages": null}, "macro.dbt_date.default__get_powers_of_two": {"name": "default__get_powers_of_two", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/generate_series.sql", "original_file_path": "macros/_utils/generate_series.sql", "unique_id": 
"macro.dbt_date.default__get_powers_of_two", "macro_sql": "{% macro default__get_powers_of_two(upper_bound) %}\n\n {% if upper_bound <= 0 %}\n {{ exceptions.raise_compiler_error(\"upper bound must be positive\") }}\n {% endif %}\n\n {% for _ in range(1, 100) %}\n {% if upper_bound <= 2 ** loop.index %}{{ return(loop.index) }}{% endif %}\n {% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.835487, "supported_languages": null}, "macro.dbt_date.generate_series": {"name": "generate_series", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/generate_series.sql", "original_file_path": "macros/_utils/generate_series.sql", "unique_id": "macro.dbt_date.generate_series", "macro_sql": "{% macro generate_series(upper_bound) %}\n {{ return(adapter.dispatch('generate_series', 'dbt_date')(upper_bound)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__generate_series"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8356721, "supported_languages": null}, "macro.dbt_date.default__generate_series": {"name": "default__generate_series", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/generate_series.sql", "original_file_path": "macros/_utils/generate_series.sql", "unique_id": "macro.dbt_date.default__generate_series", "macro_sql": "{% macro default__generate_series(upper_bound) %}\n\n {% set n = dbt_date.get_powers_of_two(upper_bound) %}\n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n {% for i in range(n) %}\n p{{i}}.generated_number * power(2, {{i}})\n {% if not loop.last %} + {% endif %}\n {% endfor %}\n + 1\n as generated_number\n\n from\n\n {% for i in range(n) %}\n p as p{{i}}\n {% if not loop.last %} cross join {% endif %}\n {% endfor %}\n\n )\n\n select *\n from unioned\n where generated_number <= {{upper_bound}}\n order by generated_number\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.get_powers_of_two"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.836155, "supported_languages": null}, "macro.dbt_date.date": {"name": "date", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/modules_datetime.sql", "original_file_path": "macros/_utils/modules_datetime.sql", "unique_id": "macro.dbt_date.date", "macro_sql": "{% macro date(year, month, day) %}\n {{ return(modules.datetime.date(year, month, day)) }}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.836462, "supported_languages": null}, "macro.dbt_date.datetime": {"name": "datetime", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/_utils/modules_datetime.sql", "original_file_path": "macros/_utils/modules_datetime.sql", "unique_id": "macro.dbt_date.datetime", "macro_sql": "{% macro datetime(year, month, day, hour=0, minute=0, second=0, microsecond=0, tz=None) %}\n {% set tz = tz if tz else var(\"dbt_date:time_zone\") %}\n {{ return(\n modules.datetime.datetime(\n year=year, month=month, day=day, hour=hour,\n minute=minute, second=second, microsecond=microsecond,\n tzinfo=modules.pytz.timezone(tz)\n )\n ) 
}}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.836971, "supported_languages": null}, "macro.dbt_date.get_fiscal_year_dates": {"name": "get_fiscal_year_dates", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/fiscal_date/get_fiscal_year_dates.sql", "original_file_path": "macros/fiscal_date/get_fiscal_year_dates.sql", "unique_id": "macro.dbt_date.get_fiscal_year_dates", "macro_sql": "{% macro get_fiscal_year_dates(dates, year_end_month=12, week_start_day=1, shift_year=1) %}\n{{ adapter.dispatch('get_fiscal_year_dates', 'dbt_date') (dates, year_end_month, week_start_day, shift_year) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__get_fiscal_year_dates"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.838569, "supported_languages": null}, "macro.dbt_date.default__get_fiscal_year_dates": {"name": "default__get_fiscal_year_dates", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/fiscal_date/get_fiscal_year_dates.sql", "original_file_path": "macros/fiscal_date/get_fiscal_year_dates.sql", "unique_id": "macro.dbt_date.default__get_fiscal_year_dates", "macro_sql": "{% macro default__get_fiscal_year_dates(dates, year_end_month, week_start_day, shift_year) %}\n-- this gets all the dates within a fiscal year\n-- determined by the given year-end-month\n-- ending on the saturday closest to that month's end date\nwith fsc_date_dimension as (\n select * from {{ dates }}\n),\nyear_month_end as (\n\n select\n d.year_number - {{ shift_year }} as fiscal_year_number,\n d.month_end_date\n from\n fsc_date_dimension d\n where\n d.month_of_year = {{ year_end_month }}\n group by 1,2\n\n),\nweeks as (\n\n select\n d.year_number,\n d.month_of_year,\n d.date_day as week_start_date,\n cast({{ dbt.dateadd('day', 6, 'd.date_day') }} as date) as week_end_date\n from\n fsc_date_dimension d\n where\n d.day_of_week = {{ week_start_day }}\n\n),\n-- get all the weeks that start in the month the year ends\nyear_week_ends as (\n\n select\n d.year_number - {{ shift_year }} as fiscal_year_number,\n d.week_end_date\n from\n weeks d\n where\n d.month_of_year = {{ year_end_month }}\n group by\n 1,2\n\n),\n-- then calculate which Saturday is closest to month end\nweeks_at_month_end as (\n\n select\n d.fiscal_year_number,\n d.week_end_date,\n m.month_end_date,\n rank() over\n (partition by d.fiscal_year_number\n order by\n abs({{ dbt.datediff('d.week_end_date', 'm.month_end_date', 'day') }})\n\n ) as closest_to_month_end\n from\n year_week_ends d\n join\n year_month_end m on d.fiscal_year_number = m.fiscal_year_number\n),\nfiscal_year_range as (\n\n select\n w.fiscal_year_number,\n cast(\n {{ dbt.dateadd('day', 1,\n 'lag(w.week_end_date) over(order by w.week_end_date)') }}\n as date) as fiscal_year_start_date,\n w.week_end_date as fiscal_year_end_date\n from\n weeks_at_month_end w\n where\n w.closest_to_month_end = 1\n\n),\nfiscal_year_dates as (\n\n select\n d.date_day,\n m.fiscal_year_number,\n m.fiscal_year_start_date,\n m.fiscal_year_end_date,\n w.week_start_date,\n w.week_end_date,\n -- we reset the weeks of the year starting with the merch year start date\n dense_rank()\n over(\n partition by m.fiscal_year_number\n order by w.week_start_date\n ) as fiscal_week_of_year\n from\n fsc_date_dimension d\n join\n fiscal_year_range m on 
d.date_day between m.fiscal_year_start_date and m.fiscal_year_end_date\n join\n weeks w on d.date_day between w.week_start_date and w.week_end_date\n\n)\nselect * from fiscal_year_dates order by 1\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8391209, "supported_languages": null}, "macro.dbt_date.get_fiscal_periods": {"name": "get_fiscal_periods", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/fiscal_date/get_fiscal_periods.sql", "original_file_path": "macros/fiscal_date/get_fiscal_periods.sql", "unique_id": "macro.dbt_date.get_fiscal_periods", "macro_sql": "{% macro get_fiscal_periods(dates, year_end_month, week_start_day, shift_year=1) %}\n{#\nThis macro requires you to pass in a ref to a date dimension, created via\ndbt_date.get_date_dimension()s\n#}\nwith fscl_year_dates_for_periods as (\n {{ dbt_date.get_fiscal_year_dates(dates, year_end_month, week_start_day, shift_year) }}\n),\nfscl_year_w13 as (\n\n select\n f.*,\n -- We count the weeks in a 13 week period\n -- and separate the 4-5-4 week sequences\n mod(cast(\n (f.fiscal_week_of_year-1) as {{ dbt.type_int() }}\n ), 13) as w13_number,\n -- Chop weeks into 13 week merch quarters\n cast(\n least(\n floor((f.fiscal_week_of_year-1)/13.0)\n , 3)\n as {{ dbt.type_int() }}) as quarter_number\n from\n fscl_year_dates_for_periods f\n\n),\nfscl_periods as (\n\n select\n f.date_day,\n f.fiscal_year_number,\n f.week_start_date,\n f.week_end_date,\n f.fiscal_week_of_year,\n case\n -- we move week 53 into the 3rd period of the quarter\n when f.fiscal_week_of_year = 53 then 3\n when f.w13_number between 0 and 3 then 1\n when f.w13_number between 4 and 8 then 2\n when f.w13_number between 9 and 12 then 3\n end as period_of_quarter,\n f.quarter_number\n from\n fscl_year_w13 f\n\n),\nfscl_periods_quarters as (\n\n select\n f.*,\n cast((\n (f.quarter_number * 3) + f.period_of_quarter\n ) as {{ dbt.type_int() }}) as fiscal_period_number\n from\n fscl_periods f\n\n)\nselect\n date_day,\n fiscal_year_number,\n week_start_date,\n week_end_date,\n fiscal_week_of_year,\n dense_rank() over(partition by fiscal_period_number order by fiscal_week_of_year) as fiscal_week_of_period,\n fiscal_period_number,\n quarter_number+1 as fiscal_quarter_number,\n period_of_quarter as fiscal_period_of_quarter\nfrom\n fscl_periods_quarters\norder by 1,2\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_date.get_fiscal_year_dates", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.840008, "supported_languages": null}, "macro.dbt_date.tomorrow": {"name": "tomorrow", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/tomorrow.sql", "original_file_path": "macros/calendar_date/tomorrow.sql", "unique_id": "macro.dbt_date.tomorrow", "macro_sql": "{%- macro tomorrow(date=None, tz=None) -%}\n{{ dbt_date.n_days_away(1, date, tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.n_days_away"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.840214, "supported_languages": null}, "macro.dbt_date.next_week": {"name": "next_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/next_week.sql", 
"original_file_path": "macros/calendar_date/next_week.sql", "unique_id": "macro.dbt_date.next_week", "macro_sql": "{%- macro next_week(tz=None) -%}\n{{ dbt_date.n_weeks_away(1, tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.n_weeks_away"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.840377, "supported_languages": null}, "macro.dbt_date.next_month_name": {"name": "next_month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/next_month_name.sql", "original_file_path": "macros/calendar_date/next_month_name.sql", "unique_id": "macro.dbt_date.next_month_name", "macro_sql": "{%- macro next_month_name(short=True, tz=None) -%}\n{{ dbt_date.month_name(dbt_date.next_month(tz), short=short) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.month_name", "macro.dbt_date.next_month"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.84095, "supported_languages": null}, "macro.dbt_date.next_month": {"name": "next_month", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/next_month.sql", "original_file_path": "macros/calendar_date/next_month.sql", "unique_id": "macro.dbt_date.next_month", "macro_sql": "{%- macro next_month(tz=None) -%}\n{{ dbt_date.n_months_away(1, tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.n_months_away"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8411222, "supported_languages": null}, "macro.dbt_date.day_name": {"name": "day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.day_name", "macro_sql": "{%- macro day_name(date, short=True) -%}\n {{ adapter.dispatch('day_name', 'dbt_date') (date, short) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__day_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.841888, "supported_languages": null}, "macro.dbt_date.default__day_name": {"name": "default__day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.default__day_name", "macro_sql": "\n\n{%- macro default__day_name(date, short) -%}\n{%- set f = 'Dy' if short else 'Day' -%}\n to_char({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8420858, "supported_languages": null}, "macro.dbt_date.snowflake__day_name": {"name": "snowflake__day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.snowflake__day_name", "macro_sql": "\n\n{%- macro snowflake__day_name(date, short) -%}\n {%- if short -%}\n dayname({{ date }})\n {%- else -%}\n -- long version not implemented on Snowflake so we're doing it manually :/\n case dayname({{ date }})\n when 'Mon' then 'Monday'\n when 'Tue' then 'Tuesday'\n when 
'Wed' then 'Wednesday'\n when 'Thu' then 'Thursday'\n when 'Fri' then 'Friday'\n when 'Sat' then 'Saturday'\n when 'Sun' then 'Sunday'\n end\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.842284, "supported_languages": null}, "macro.dbt_date.bigquery__day_name": {"name": "bigquery__day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.bigquery__day_name", "macro_sql": "\n\n{%- macro bigquery__day_name(date, short) -%}\n{%- set f = '%a' if short else '%A' -%}\n format_date('{{ f }}', cast({{ date }} as date))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8424692, "supported_languages": null}, "macro.dbt_date.postgres__day_name": {"name": "postgres__day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.postgres__day_name", "macro_sql": "\n\n{%- macro postgres__day_name(date, short) -%}\n{# FM = Fill mode, which suppresses padding blanks #}\n{%- set f = 'FMDy' if short else 'FMDay' -%}\n to_char({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.842656, "supported_languages": null}, "macro.dbt_date.duckdb__day_name": {"name": "duckdb__day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.duckdb__day_name", "macro_sql": "\n\n{%- macro duckdb__day_name(date, short) -%}\n {%- if short -%}\n substr(dayname({{ date }}), 1, 3)\n {%- else -%}\n dayname({{ date }})\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.842829, "supported_languages": null}, "macro.dbt_date.spark__day_name": {"name": "spark__day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.spark__day_name", "macro_sql": "\n\n{%- macro spark__day_name(date, short) -%}\n{%- set f = 'E' if short else 'EEEE' -%}\n date_format({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.843012, "supported_languages": null}, "macro.dbt_date.trino__day_name": {"name": "trino__day_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_name.sql", "original_file_path": "macros/calendar_date/day_name.sql", "unique_id": "macro.dbt_date.trino__day_name", "macro_sql": "\n\n{%- macro trino__day_name(date, short) -%}\n{%- set f = 'a' if short else 'W' -%}\n date_format({{ date }}, '%{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": 
null}, "patch_path": null, "arguments": [], "created_at": 1724705296.843198, "supported_languages": null}, "macro.dbt_date.to_unixtimestamp": {"name": "to_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/to_unixtimestamp.sql", "original_file_path": "macros/calendar_date/to_unixtimestamp.sql", "unique_id": "macro.dbt_date.to_unixtimestamp", "macro_sql": "{%- macro to_unixtimestamp(timestamp) -%}\n {{ adapter.dispatch('to_unixtimestamp', 'dbt_date') (timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__to_unixtimestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8435688, "supported_languages": null}, "macro.dbt_date.default__to_unixtimestamp": {"name": "default__to_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/to_unixtimestamp.sql", "original_file_path": "macros/calendar_date/to_unixtimestamp.sql", "unique_id": "macro.dbt_date.default__to_unixtimestamp", "macro_sql": "\n\n{%- macro default__to_unixtimestamp(timestamp) -%}\n {{ dbt_date.date_part('epoch', timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8436968, "supported_languages": null}, "macro.dbt_date.snowflake__to_unixtimestamp": {"name": "snowflake__to_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/to_unixtimestamp.sql", "original_file_path": "macros/calendar_date/to_unixtimestamp.sql", "unique_id": "macro.dbt_date.snowflake__to_unixtimestamp", "macro_sql": "\n\n{%- macro snowflake__to_unixtimestamp(timestamp) -%}\n {{ dbt_date.date_part('epoch_seconds', timestamp) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.843827, "supported_languages": null}, "macro.dbt_date.bigquery__to_unixtimestamp": {"name": "bigquery__to_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/to_unixtimestamp.sql", "original_file_path": "macros/calendar_date/to_unixtimestamp.sql", "unique_id": "macro.dbt_date.bigquery__to_unixtimestamp", "macro_sql": "\n\n{%- macro bigquery__to_unixtimestamp(timestamp) -%}\n unix_seconds({{ timestamp }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8439271, "supported_languages": null}, "macro.dbt_date.spark__to_unixtimestamp": {"name": "spark__to_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/to_unixtimestamp.sql", "original_file_path": "macros/calendar_date/to_unixtimestamp.sql", "unique_id": "macro.dbt_date.spark__to_unixtimestamp", "macro_sql": "\n\n{%- macro spark__to_unixtimestamp(timestamp) -%}\n unix_timestamp({{ timestamp }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.844023, "supported_languages": null}, "macro.dbt_date.trino__to_unixtimestamp": {"name": "trino__to_unixtimestamp", "resource_type": "macro", "package_name": 
"dbt_date", "path": "macros/calendar_date/to_unixtimestamp.sql", "original_file_path": "macros/calendar_date/to_unixtimestamp.sql", "unique_id": "macro.dbt_date.trino__to_unixtimestamp", "macro_sql": "\n\n{%- macro trino__to_unixtimestamp(timestamp) -%}\n to_unixtime({{ timestamp }} AT TIME ZONE 'UTC')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.844115, "supported_languages": null}, "macro.dbt_date.n_days_away": {"name": "n_days_away", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/n_days_away.sql", "original_file_path": "macros/calendar_date/n_days_away.sql", "unique_id": "macro.dbt_date.n_days_away", "macro_sql": "{%- macro n_days_away(n, date=None, tz=None) -%}\n{{ dbt_date.n_days_ago(-1 * n, date, tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.n_days_ago"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.844338, "supported_languages": null}, "macro.dbt_date.week_start": {"name": "week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_start.sql", "original_file_path": "macros/calendar_date/week_start.sql", "unique_id": "macro.dbt_date.week_start", "macro_sql": "{%- macro week_start(date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{{ adapter.dispatch('week_start', 'dbt_date') (dt) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt_date.postgres__week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.844803, "supported_languages": null}, "macro.dbt_date.default__week_start": {"name": "default__week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_start.sql", "original_file_path": "macros/calendar_date/week_start.sql", "unique_id": "macro.dbt_date.default__week_start", "macro_sql": "{%- macro default__week_start(date) -%}\ncast({{ dbt.date_trunc('week', date) }} as date)\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.845012, "supported_languages": null}, "macro.dbt_date.snowflake__week_start": {"name": "snowflake__week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_start.sql", "original_file_path": "macros/calendar_date/week_start.sql", "unique_id": "macro.dbt_date.snowflake__week_start", "macro_sql": "\n\n{%- macro snowflake__week_start(date) -%}\n {#\n Get the day of week offset: e.g. 
if the date is a Sunday,\n dbt_date.day_of_week returns 1, so we subtract 1 to get a 0 offset\n #}\n {% set off_set = dbt_date.day_of_week(date, isoweek=False) ~ \" - 1\" %}\n cast({{ dbt.dateadd(\"day\", \"-1 * (\" ~ off_set ~ \")\", date) }} as date)\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.day_of_week", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.84529, "supported_languages": null}, "macro.dbt_date.postgres__week_start": {"name": "postgres__week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_start.sql", "original_file_path": "macros/calendar_date/week_start.sql", "unique_id": "macro.dbt_date.postgres__week_start", "macro_sql": "\n\n{%- macro postgres__week_start(date) -%}\n-- Sunday as week start date\ncast({{ dbt.dateadd('day', -1, dbt.date_trunc('week', dbt.dateadd('day', 1, date))) }} as date)\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd", "macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.845532, "supported_languages": null}, "macro.dbt_date.duckdb__week_start": {"name": "duckdb__week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_start.sql", "original_file_path": "macros/calendar_date/week_start.sql", "unique_id": "macro.dbt_date.duckdb__week_start", "macro_sql": "\n\n{%- macro duckdb__week_start(date) -%}\n{{ return(dbt_date.postgres__week_start(date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.845663, "supported_languages": null}, "macro.dbt_date.iso_week_start": {"name": "iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", "unique_id": "macro.dbt_date.iso_week_start", "macro_sql": "{%- macro iso_week_start(date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{{ adapter.dispatch('iso_week_start', 'dbt_date') (dt) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt_date.postgres__iso_week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.846242, "supported_languages": null}, "macro.dbt_date._iso_week_start": {"name": "_iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", "unique_id": "macro.dbt_date._iso_week_start", "macro_sql": "{%- macro _iso_week_start(date, week_type) -%}\ncast({{ dbt.date_trunc(week_type, date) }} as date)\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.date_trunc"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.846383, "supported_languages": null}, "macro.dbt_date.default__iso_week_start": {"name": "default__iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", 
"unique_id": "macro.dbt_date.default__iso_week_start", "macro_sql": "\n\n{%- macro default__iso_week_start(date) -%}\n{{ dbt_date._iso_week_start(date, 'isoweek') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8465128, "supported_languages": null}, "macro.dbt_date.snowflake__iso_week_start": {"name": "snowflake__iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", "unique_id": "macro.dbt_date.snowflake__iso_week_start", "macro_sql": "\n\n{%- macro snowflake__iso_week_start(date) -%}\n{{ dbt_date._iso_week_start(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8466449, "supported_languages": null}, "macro.dbt_date.postgres__iso_week_start": {"name": "postgres__iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", "unique_id": "macro.dbt_date.postgres__iso_week_start", "macro_sql": "\n\n{%- macro postgres__iso_week_start(date) -%}\n{{ dbt_date._iso_week_start(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.846776, "supported_languages": null}, "macro.dbt_date.duckdb__iso_week_start": {"name": "duckdb__iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", "unique_id": "macro.dbt_date.duckdb__iso_week_start", "macro_sql": "\n\n{%- macro duckdb__iso_week_start(date) -%}\n{{ return(dbt_date.postgres__iso_week_start(date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__iso_week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8469062, "supported_languages": null}, "macro.dbt_date.spark__iso_week_start": {"name": "spark__iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", "unique_id": "macro.dbt_date.spark__iso_week_start", "macro_sql": "\n\n{%- macro spark__iso_week_start(date) -%}\n{{ dbt_date._iso_week_start(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.847031, "supported_languages": null}, "macro.dbt_date.trino__iso_week_start": {"name": "trino__iso_week_start", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_start.sql", "original_file_path": "macros/calendar_date/iso_week_start.sql", "unique_id": "macro.dbt_date.trino__iso_week_start", "macro_sql": "\n\n{%- macro trino__iso_week_start(date) -%}\n{{ dbt_date._iso_week_start(date, 'week') }}\n{%- endmacro %}", "depends_on": 
{"macros": ["macro.dbt_date._iso_week_start"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.84715, "supported_languages": null}, "macro.dbt_date.n_days_ago": {"name": "n_days_ago", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/n_days_ago.sql", "original_file_path": "macros/calendar_date/n_days_ago.sql", "unique_id": "macro.dbt_date.n_days_ago", "macro_sql": "{%- macro n_days_ago(n, date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{%- set n = n|int -%}\ncast({{ dbt.dateadd('day', -1 * n, dt) }} as date)\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8475132, "supported_languages": null}, "macro.dbt_date.last_week": {"name": "last_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/last_week.sql", "original_file_path": "macros/calendar_date/last_week.sql", "unique_id": "macro.dbt_date.last_week", "macro_sql": "{%- macro last_week(tz=None) -%}\n{{ dbt_date.n_weeks_ago(1, tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.n_weeks_ago"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.847682, "supported_languages": null}, "macro.dbt_date.now": {"name": "now", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/now.sql", "original_file_path": "macros/calendar_date/now.sql", "unique_id": "macro.dbt_date.now", "macro_sql": "{%- macro now(tz=None) -%}\n{{ dbt_date.convert_timezone(dbt.current_timestamp(), tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.convert_timezone", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.847863, "supported_languages": null}, "macro.dbt_date.periods_since": {"name": "periods_since", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/periods_since.sql", "original_file_path": "macros/calendar_date/periods_since.sql", "unique_id": "macro.dbt_date.periods_since", "macro_sql": "{%- macro periods_since(date_col, period_name='day', tz=None) -%}\n{{ dbt.datediff(date_col, dbt_date.now(tz), period_name) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt_date.now"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8481002, "supported_languages": null}, "macro.dbt_date.today": {"name": "today", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/today.sql", "original_file_path": "macros/calendar_date/today.sql", "unique_id": "macro.dbt_date.today", "macro_sql": "{%- macro today(tz=None) -%}\ncast({{ dbt_date.now(tz) }} as date)\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.now"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.848264, "supported_languages": null}, "macro.dbt_date.last_month": {"name": "last_month", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/last_month.sql", "original_file_path": 
"macros/calendar_date/last_month.sql", "unique_id": "macro.dbt_date.last_month", "macro_sql": "{%- macro last_month(tz=None) -%}\n{{ dbt_date.n_months_ago(1, tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.n_months_ago"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.848432, "supported_languages": null}, "macro.dbt_date.day_of_year": {"name": "day_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_year.sql", "original_file_path": "macros/calendar_date/day_of_year.sql", "unique_id": "macro.dbt_date.day_of_year", "macro_sql": "{%- macro day_of_year(date) -%}\n{{ adapter.dispatch('day_of_year', 'dbt_date') (date) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__day_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.84882, "supported_languages": null}, "macro.dbt_date.default__day_of_year": {"name": "default__day_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_year.sql", "original_file_path": "macros/calendar_date/day_of_year.sql", "unique_id": "macro.dbt_date.default__day_of_year", "macro_sql": "\n\n{%- macro default__day_of_year(date) -%}\n {{ dbt_date.date_part('dayofyear', date) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.848948, "supported_languages": null}, "macro.dbt_date.postgres__day_of_year": {"name": "postgres__day_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_year.sql", "original_file_path": "macros/calendar_date/day_of_year.sql", "unique_id": "macro.dbt_date.postgres__day_of_year", "macro_sql": "\n\n{%- macro postgres__day_of_year(date) -%}\n {{ dbt_date.date_part('doy', date) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8491478, "supported_languages": null}, "macro.dbt_date.redshift__day_of_year": {"name": "redshift__day_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_year.sql", "original_file_path": "macros/calendar_date/day_of_year.sql", "unique_id": "macro.dbt_date.redshift__day_of_year", "macro_sql": "\n\n{%- macro redshift__day_of_year(date) -%}\n cast({{ dbt_date.date_part('dayofyear', date) }} as {{ dbt.type_bigint() }})\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt.type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.849317, "supported_languages": null}, "macro.dbt_date.spark__day_of_year": {"name": "spark__day_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_year.sql", "original_file_path": "macros/calendar_date/day_of_year.sql", "unique_id": "macro.dbt_date.spark__day_of_year", "macro_sql": "\n\n{%- macro spark__day_of_year(date) -%}\n dayofyear({{ date }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": 
[], "created_at": 1724705296.8494139, "supported_languages": null}, "macro.dbt_date.trino__day_of_year": {"name": "trino__day_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_year.sql", "original_file_path": "macros/calendar_date/day_of_year.sql", "unique_id": "macro.dbt_date.trino__day_of_year", "macro_sql": "\n\n{%- macro trino__day_of_year(date) -%}\n {{ dbt_date.date_part('day_of_year', date) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.849549, "supported_languages": null}, "macro.dbt_date.round_timestamp": {"name": "round_timestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/round_timestamp.sql", "original_file_path": "macros/calendar_date/round_timestamp.sql", "unique_id": "macro.dbt_date.round_timestamp", "macro_sql": "{% macro round_timestamp(timestamp) %}\n {{ dbt.date_trunc(\"day\", dbt.dateadd(\"hour\", 12, timestamp)) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.date_trunc", "macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8497741, "supported_languages": null}, "macro.dbt_date.from_unixtimestamp": {"name": "from_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/from_unixtimestamp.sql", "original_file_path": "macros/calendar_date/from_unixtimestamp.sql", "unique_id": "macro.dbt_date.from_unixtimestamp", "macro_sql": "{%- macro from_unixtimestamp(epochs, format=\"seconds\") -%}\n {{ adapter.dispatch('from_unixtimestamp', 'dbt_date') (epochs, format) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__from_unixtimestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.852078, "supported_languages": null}, "macro.dbt_date.default__from_unixtimestamp": {"name": "default__from_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/from_unixtimestamp.sql", "original_file_path": "macros/calendar_date/from_unixtimestamp.sql", "unique_id": "macro.dbt_date.default__from_unixtimestamp", "macro_sql": "\n\n{%- macro default__from_unixtimestamp(epochs, format=\"seconds\") -%}\n {%- if format != \"seconds\" -%}\n {{ exceptions.raise_compiler_error(\n \"value \" ~ format ~ \" for `format` for from_unixtimestamp is not supported.\"\n )\n }}\n {% endif -%}\n to_timestamp({{ epochs }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.85233, "supported_languages": null}, "macro.dbt_date.postgres__from_unixtimestamp": {"name": "postgres__from_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/from_unixtimestamp.sql", "original_file_path": "macros/calendar_date/from_unixtimestamp.sql", "unique_id": "macro.dbt_date.postgres__from_unixtimestamp", "macro_sql": "\n\n{%- macro postgres__from_unixtimestamp(epochs, format=\"seconds\") -%}\n {%- if format != \"seconds\" -%}\n {{ exceptions.raise_compiler_error(\n \"value \" ~ format ~ \" for `format` for from_unixtimestamp is not supported.\"\n )\n }}\n {% endif -%}\n cast(to_timestamp({{ 
epochs }}) at time zone 'UTC' as timestamp)\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.852575, "supported_languages": null}, "macro.dbt_date.snowflake__from_unixtimestamp": {"name": "snowflake__from_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/from_unixtimestamp.sql", "original_file_path": "macros/calendar_date/from_unixtimestamp.sql", "unique_id": "macro.dbt_date.snowflake__from_unixtimestamp", "macro_sql": "\n\n{%- macro snowflake__from_unixtimestamp(epochs, format) -%}\n {%- if format == \"seconds\" -%}\n {%- set scale = 0 -%}\n {%- elif format == \"milliseconds\" -%}\n {%- set scale = 3 -%}\n {%- elif format == \"microseconds\" -%}\n {%- set scale = 6 -%}\n {%- else -%}\n {{ exceptions.raise_compiler_error(\n \"value \" ~ format ~ \" for `format` for from_unixtimestamp is not supported.\"\n )\n }}\n {% endif -%}\n to_timestamp_ntz({{ epochs }}, {{ scale }})\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.853009, "supported_languages": null}, "macro.dbt_date.bigquery__from_unixtimestamp": {"name": "bigquery__from_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/from_unixtimestamp.sql", "original_file_path": "macros/calendar_date/from_unixtimestamp.sql", "unique_id": "macro.dbt_date.bigquery__from_unixtimestamp", "macro_sql": "\n\n{%- macro bigquery__from_unixtimestamp(epochs, format) -%}\n {%- if format == \"seconds\" -%}\n timestamp_seconds({{ epochs }})\n {%- elif format == \"milliseconds\" -%}\n timestamp_millis({{ epochs }})\n {%- elif format == \"microseconds\" -%}\n timestamp_micros({{ epochs }})\n {%- else -%}\n {{ exceptions.raise_compiler_error(\n \"value \" ~ format ~ \" for `format` for from_unixtimestamp is not supported.\"\n )\n }}\n {% endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.853368, "supported_languages": null}, "macro.dbt_date.trino__from_unixtimestamp": {"name": "trino__from_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/from_unixtimestamp.sql", "original_file_path": "macros/calendar_date/from_unixtimestamp.sql", "unique_id": "macro.dbt_date.trino__from_unixtimestamp", "macro_sql": "\n\n{%- macro trino__from_unixtimestamp(epochs, format) -%}\n {%- if format == \"seconds\" -%}\n cast(from_unixtime({{ epochs }}) AT TIME ZONE 'UTC' as {{ dbt.type_timestamp() }})\n {%- elif format == \"milliseconds\" -%}\n cast(from_unixtime_nanos({{ epochs }} * pow(10, 6)) AT TIME ZONE 'UTC' as {{ dbt.type_timestamp() }})\n {%- elif format == \"microseconds\" -%}\n cast(from_unixtime_nanos({{ epochs }} * pow(10, 3)) AT TIME ZONE 'UTC' as {{ dbt.type_timestamp() }})\n {%- elif format == \"nanoseconds\" -%}\n cast(from_unixtime_nanos({{ epochs }}) AT TIME ZONE 'UTC' as {{ dbt.type_timestamp() }})\n {%- else -%}\n {{ exceptions.raise_compiler_error(\n \"value \" ~ format ~ \" for `format` for from_unixtimestamp is not supported.\"\n )\n }}\n {% endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, 
"patch_path": null, "arguments": [], "created_at": 1724705296.85392, "supported_languages": null}, "macro.dbt_date.duckdb__from_unixtimestamp": {"name": "duckdb__from_unixtimestamp", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/from_unixtimestamp.sql", "original_file_path": "macros/calendar_date/from_unixtimestamp.sql", "unique_id": "macro.dbt_date.duckdb__from_unixtimestamp", "macro_sql": "\n\n\n{%- macro duckdb__from_unixtimestamp(epochs, format=\"seconds\") -%}\n {%- if format != \"seconds\" -%}\n {{ exceptions.raise_compiler_error(\n \"value \" ~ format ~ \" for `format` for from_unixtimestamp is not supported.\"\n )\n }}\n {% endif -%}\n cast(to_timestamp({{ epochs }}) at time zone 'UTC' as timestamp)\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8542342, "supported_languages": null}, "macro.dbt_date.n_months_ago": {"name": "n_months_ago", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/n_months_ago.sql", "original_file_path": "macros/calendar_date/n_months_ago.sql", "unique_id": "macro.dbt_date.n_months_ago", "macro_sql": "{%- macro n_months_ago(n, tz=None) -%}\n{%- set n = n|int -%}\n{{ dbt.date_trunc('month',\n dbt.dateadd('month', -1 * n,\n dbt_date.today(tz)\n )\n ) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.date_trunc", "macro.dbt.dateadd", "macro.dbt_date.today"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.854651, "supported_languages": null}, "macro.dbt_date.date_part": {"name": "date_part", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/date_part.sql", "original_file_path": "macros/calendar_date/date_part.sql", "unique_id": "macro.dbt_date.date_part", "macro_sql": "{% macro date_part(datepart, date) -%}\n {{ adapter.dispatch('date_part', 'dbt_date') (datepart, date) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.default__date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.854986, "supported_languages": null}, "macro.dbt_date.default__date_part": {"name": "default__date_part", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/date_part.sql", "original_file_path": "macros/calendar_date/date_part.sql", "unique_id": "macro.dbt_date.default__date_part", "macro_sql": "{% macro default__date_part(datepart, date) -%}\n date_part('{{ datepart }}', {{ date }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8551118, "supported_languages": null}, "macro.dbt_date.bigquery__date_part": {"name": "bigquery__date_part", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/date_part.sql", "original_file_path": "macros/calendar_date/date_part.sql", "unique_id": "macro.dbt_date.bigquery__date_part", "macro_sql": "{% macro bigquery__date_part(datepart, date) -%}\n extract({{ datepart }} from {{ date }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.855229, "supported_languages": null}, 
"macro.dbt_date.trino__date_part": {"name": "trino__date_part", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/date_part.sql", "original_file_path": "macros/calendar_date/date_part.sql", "unique_id": "macro.dbt_date.trino__date_part", "macro_sql": "{% macro trino__date_part(datepart, date) -%}\n extract({{ datepart }} from {{ date }})\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8553479, "supported_languages": null}, "macro.dbt_date.n_weeks_away": {"name": "n_weeks_away", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/n_weeks_away.sql", "original_file_path": "macros/calendar_date/n_weeks_away.sql", "unique_id": "macro.dbt_date.n_weeks_away", "macro_sql": "{%- macro n_weeks_away(n, tz=None) -%}\n{%- set n = n|int -%}\n{{ dbt.date_trunc('week',\n dbt.dateadd('week', n,\n dbt_date.today(tz)\n )\n ) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.date_trunc", "macro.dbt.dateadd", "macro.dbt_date.today"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.855662, "supported_languages": null}, "macro.dbt_date.day_of_month": {"name": "day_of_month", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_month.sql", "original_file_path": "macros/calendar_date/day_of_month.sql", "unique_id": "macro.dbt_date.day_of_month", "macro_sql": "{%- macro day_of_month(date) -%}\n{{ dbt_date.date_part('day', date) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.855857, "supported_languages": null}, "macro.dbt_date.redshift__day_of_month": {"name": "redshift__day_of_month", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_month.sql", "original_file_path": "macros/calendar_date/day_of_month.sql", "unique_id": "macro.dbt_date.redshift__day_of_month", "macro_sql": "\n\n{%- macro redshift__day_of_month(date) -%}\ncast({{ dbt_date.date_part('day', date) }} as {{ dbt.type_bigint() }})\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt.type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8560221, "supported_languages": null}, "macro.dbt_date.yesterday": {"name": "yesterday", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/yesterday.sql", "original_file_path": "macros/calendar_date/yesterday.sql", "unique_id": "macro.dbt_date.yesterday", "macro_sql": "{%- macro yesterday(date=None, tz=None) -%}\n{{ dbt_date.n_days_ago(1, date, tz) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.n_days_ago"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.856211, "supported_languages": null}, "macro.dbt_date.day_of_week": {"name": "day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.day_of_week", "macro_sql": "{%- macro day_of_week(date, isoweek=true) -%}\n{{ 
adapter.dispatch('day_of_week', 'dbt_date') (date, isoweek) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__day_of_week"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.85855, "supported_languages": null}, "macro.dbt_date.default__day_of_week": {"name": "default__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.default__day_of_week", "macro_sql": "\n\n{%- macro default__day_of_week(date, isoweek) -%}\n\n {%- set dow = dbt_date.date_part('dayofweek', date) -%}\n\n {%- if isoweek -%}\n case\n -- Shift start of week from Sunday (0) to Monday (1)\n when {{ dow }} = 0 then 7\n else {{ dow }}\n end\n {%- else -%}\n {{ dow }} + 1\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.858811, "supported_languages": null}, "macro.dbt_date.snowflake__day_of_week": {"name": "snowflake__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.snowflake__day_of_week", "macro_sql": "\n\n{%- macro snowflake__day_of_week(date, isoweek) -%}\n\n {%- if isoweek -%}\n {%- set dow_part = 'dayofweekiso' -%}\n {{ dbt_date.date_part(dow_part, date) }}\n {%- else -%}\n {%- set dow_part = 'dayofweek' -%}\n case\n when {{ dbt_date.date_part(dow_part, date) }} = 7 then 1\n else {{ dbt_date.date_part(dow_part, date) }} + 1\n end\n {%- endif -%}\n\n\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8591669, "supported_languages": null}, "macro.dbt_date.bigquery__day_of_week": {"name": "bigquery__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.bigquery__day_of_week", "macro_sql": "\n\n{%- macro bigquery__day_of_week(date, isoweek) -%}\n\n {%- set dow = dbt_date.date_part('dayofweek', date) -%}\n\n {%- if isoweek -%}\n case\n -- Shift start of week from Sunday (1) to Monday (2)\n when {{ dow }} = 1 then 7\n else {{ dow }} - 1\n end\n {%- else -%}\n {{ dow }}\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8594189, "supported_languages": null}, "macro.dbt_date.postgres__day_of_week": {"name": "postgres__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.postgres__day_of_week", "macro_sql": "\n\n\n{%- macro postgres__day_of_week(date, isoweek) -%}\n\n {%- if isoweek -%}\n {%- set dow_part = 'isodow' -%}\n -- Monday(1) to Sunday (7)\n cast({{ dbt_date.date_part(dow_part, date) }} as {{ dbt.type_int() }})\n {%- else -%}\n {%- set dow_part = 'dow' -%}\n -- Sunday(1) to Saturday (7)\n 
cast({{ dbt_date.date_part(dow_part, date) }} + 1 as {{ dbt.type_int() }})\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.85979, "supported_languages": null}, "macro.dbt_date.redshift__day_of_week": {"name": "redshift__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.redshift__day_of_week", "macro_sql": "\n\n\n{%- macro redshift__day_of_week(date, isoweek) -%}\n\n {%- set dow = dbt_date.date_part('dayofweek', date) -%}\n\n {%- if isoweek -%}\n case\n -- Shift start of week from Sunday (0) to Monday (1)\n when {{ dow }} = 0 then 7\n else cast({{ dow }} as {{ dbt.type_bigint() }})\n end\n {%- else -%}\n cast({{ dow }} + 1 as {{ dbt.type_bigint() }})\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt.type_bigint"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.860115, "supported_languages": null}, "macro.dbt_date.duckdb__day_of_week": {"name": "duckdb__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.duckdb__day_of_week", "macro_sql": "\n\n{%- macro duckdb__day_of_week(date, isoweek) -%}\n{{ return(dbt_date.postgres__day_of_week(date, isoweek)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__day_of_week"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8602629, "supported_languages": null}, "macro.dbt_date.spark__day_of_week": {"name": "spark__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.spark__day_of_week", "macro_sql": "\n\n\n{%- macro spark__day_of_week(date, isoweek) -%}\n\n {%- set dow = \"dayofweek_iso\" if isoweek else \"dayofweek\" -%}\n\n {{ dbt_date.date_part(dow, date) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.860456, "supported_languages": null}, "macro.dbt_date.trino__day_of_week": {"name": "trino__day_of_week", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/day_of_week.sql", "original_file_path": "macros/calendar_date/day_of_week.sql", "unique_id": "macro.dbt_date.trino__day_of_week", "macro_sql": "\n\n\n{%- macro trino__day_of_week(date, isoweek) -%}\n\n {%- set dow = dbt_date.date_part('day_of_week', date) -%}\n\n {%- if isoweek -%}\n {{ dow }}\n {%- else -%}\n case\n when {{ dow }} = 7 then 1\n else {{ dow }} + 1\n end\n {%- endif -%}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.860703, "supported_languages": null}, "macro.dbt_date.iso_week_end": {"name": "iso_week_end", 
"resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_end.sql", "original_file_path": "macros/calendar_date/iso_week_end.sql", "unique_id": "macro.dbt_date.iso_week_end", "macro_sql": "{%- macro iso_week_end(date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{{ adapter.dispatch('iso_week_end', 'dbt_date') (dt) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt_date.default__iso_week_end"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.861106, "supported_languages": null}, "macro.dbt_date._iso_week_end": {"name": "_iso_week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_end.sql", "original_file_path": "macros/calendar_date/iso_week_end.sql", "unique_id": "macro.dbt_date._iso_week_end", "macro_sql": "{%- macro _iso_week_end(date, week_type) -%}\n{%- set dt = dbt_date.iso_week_start(date) -%}\n{{ dbt_date.n_days_away(6, dt) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.iso_week_start", "macro.dbt_date.n_days_away"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.861294, "supported_languages": null}, "macro.dbt_date.default__iso_week_end": {"name": "default__iso_week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_end.sql", "original_file_path": "macros/calendar_date/iso_week_end.sql", "unique_id": "macro.dbt_date.default__iso_week_end", "macro_sql": "\n\n{%- macro default__iso_week_end(date) -%}\n{{ dbt_date._iso_week_end(date, 'isoweek') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_end"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8614159, "supported_languages": null}, "macro.dbt_date.snowflake__iso_week_end": {"name": "snowflake__iso_week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_end.sql", "original_file_path": "macros/calendar_date/iso_week_end.sql", "unique_id": "macro.dbt_date.snowflake__iso_week_end", "macro_sql": "\n\n{%- macro snowflake__iso_week_end(date) -%}\n{{ dbt_date._iso_week_end(date, 'weekiso') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_end"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.861538, "supported_languages": null}, "macro.dbt_date.n_weeks_ago": {"name": "n_weeks_ago", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/n_weeks_ago.sql", "original_file_path": "macros/calendar_date/n_weeks_ago.sql", "unique_id": "macro.dbt_date.n_weeks_ago", "macro_sql": "{%- macro n_weeks_ago(n, tz=None) -%}\n{%- set n = n|int -%}\n{{ dbt.date_trunc('week',\n dbt.dateadd('week', -1 * n,\n dbt_date.today(tz)\n )\n ) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.date_trunc", "macro.dbt.dateadd", "macro.dbt_date.today"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.861855, "supported_languages": null}, "macro.dbt_date.month_name": {"name": "month_name", "resource_type": "macro", "package_name": "dbt_date", "path": 
"macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.month_name", "macro_sql": "{%- macro month_name(date, short=True) -%}\n {{ adapter.dispatch('month_name', 'dbt_date') (date, short) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__month_name"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.86245, "supported_languages": null}, "macro.dbt_date.default__month_name": {"name": "default__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.default__month_name", "macro_sql": "\n\n{%- macro default__month_name(date, short) -%}\n{%- set f = 'MON' if short else 'MONTH' -%}\n to_char({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.862624, "supported_languages": null}, "macro.dbt_date.bigquery__month_name": {"name": "bigquery__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.bigquery__month_name", "macro_sql": "\n\n{%- macro bigquery__month_name(date, short) -%}\n{%- set f = '%b' if short else '%B' -%}\n format_date('{{ f }}', cast({{ date }} as date))\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8628, "supported_languages": null}, "macro.dbt_date.snowflake__month_name": {"name": "snowflake__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.snowflake__month_name", "macro_sql": "\n\n{%- macro snowflake__month_name(date, short) -%}\n{%- set f = 'MON' if short else 'MMMM' -%}\n to_char({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.862972, "supported_languages": null}, "macro.dbt_date.postgres__month_name": {"name": "postgres__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.postgres__month_name", "macro_sql": "\n\n{%- macro postgres__month_name(date, short) -%}\n{# FM = Fill mode, which suppresses padding blanks #}\n{%- set f = 'FMMon' if short else 'FMMonth' -%}\n to_char({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.863151, "supported_languages": null}, "macro.dbt_date.duckdb__month_name": {"name": "duckdb__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.duckdb__month_name", "macro_sql": "\n\n\n{%- macro duckdb__month_name(date, short) 
-%}\n {%- if short -%}\n substr(monthname({{ date }}), 1, 3)\n {%- else -%}\n monthname({{ date }})\n {%- endif -%}\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8633878, "supported_languages": null}, "macro.dbt_date.spark__month_name": {"name": "spark__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.spark__month_name", "macro_sql": "\n\n{%- macro spark__month_name(date, short) -%}\n{%- set f = 'MMM' if short else 'MMMM' -%}\n date_format({{ date }}, '{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8635688, "supported_languages": null}, "macro.dbt_date.trino__month_name": {"name": "trino__month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/month_name.sql", "original_file_path": "macros/calendar_date/month_name.sql", "unique_id": "macro.dbt_date.trino__month_name", "macro_sql": "\n\n{%- macro trino__month_name(date, short) -%}\n{%- set f = 'b' if short else 'M' -%}\n date_format({{ date }}, '%{{ f }}')\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.863744, "supported_languages": null}, "macro.dbt_date.last_month_name": {"name": "last_month_name", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/last_month_name.sql", "original_file_path": "macros/calendar_date/last_month_name.sql", "unique_id": "macro.dbt_date.last_month_name", "macro_sql": "{%- macro last_month_name(short=True, tz=None) -%}\n{{ dbt_date.month_name(dbt_date.last_month(tz), short=short) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.month_name", "macro.dbt_date.last_month"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.863959, "supported_languages": null}, "macro.dbt_date.week_of_year": {"name": "week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_of_year.sql", "original_file_path": "macros/calendar_date/week_of_year.sql", "unique_id": "macro.dbt_date.week_of_year", "macro_sql": "{%- macro week_of_year(date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{{ adapter.dispatch('week_of_year', 'dbt_date') (dt) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt_date.postgres__week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.864521, "supported_languages": null}, "macro.dbt_date.default__week_of_year": {"name": "default__week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_of_year.sql", "original_file_path": "macros/calendar_date/week_of_year.sql", "unique_id": "macro.dbt_date.default__week_of_year", "macro_sql": "{%- macro default__week_of_year(date) -%}\ncast({{ dbt_date.date_part('week', date) }} as {{ dbt.type_int() }})\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part", 
"macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.864738, "supported_languages": null}, "macro.dbt_date.postgres__week_of_year": {"name": "postgres__week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_of_year.sql", "original_file_path": "macros/calendar_date/week_of_year.sql", "unique_id": "macro.dbt_date.postgres__week_of_year", "macro_sql": "\n\n{%- macro postgres__week_of_year(date) -%}\n{# postgresql 'week' returns isoweek. Use to_char instead.\n WW = the first week starts on the first day of the year #}\ncast(to_char({{ date }}, 'WW') as {{ dbt.type_int() }})\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.864892, "supported_languages": null}, "macro.dbt_date.duckdb__week_of_year": {"name": "duckdb__week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_of_year.sql", "original_file_path": "macros/calendar_date/week_of_year.sql", "unique_id": "macro.dbt_date.duckdb__week_of_year", "macro_sql": "\n\n{%- macro duckdb__week_of_year(date) -%}\ncast(ceil(dayofyear({{ date }}) / 7) as int)\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.865, "supported_languages": null}, "macro.dbt_date.convert_timezone": {"name": "convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.convert_timezone", "macro_sql": "{%- macro convert_timezone(column, target_tz=None, source_tz=None) -%}\n{%- set source_tz = \"UTC\" if not source_tz else source_tz -%}\n{%- set target_tz = var(\"dbt_date:time_zone\") if not target_tz else target_tz -%}\n{{ adapter.dispatch('convert_timezone', 'dbt_date') (column, target_tz, source_tz) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.postgres__convert_timezone"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.86622, "supported_languages": null}, "macro.dbt_date.default__convert_timezone": {"name": "default__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.default__convert_timezone", "macro_sql": "{% macro default__convert_timezone(column, target_tz, source_tz) -%}\nconvert_timezone('{{ source_tz }}', '{{ target_tz }}',\n cast({{ column }} as {{ dbt.type_timestamp() }})\n)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.866421, "supported_languages": null}, "macro.dbt_date.bigquery__convert_timezone": {"name": "bigquery__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.bigquery__convert_timezone", 
"macro_sql": "{%- macro bigquery__convert_timezone(column, target_tz, source_tz=None) -%}\ntimestamp(datetime({{ column }}, '{{ target_tz}}'))\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.866563, "supported_languages": null}, "macro.dbt_date.postgres__convert_timezone": {"name": "postgres__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.postgres__convert_timezone", "macro_sql": "{% macro postgres__convert_timezone(column, target_tz, source_tz) -%}\ncast(\n cast({{ column }} as {{ dbt.type_timestamp() }})\n at time zone '{{ source_tz }}' at time zone '{{ target_tz }}' as {{ dbt.type_timestamp() }}\n)\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8667898, "supported_languages": null}, "macro.dbt_date.redshift__convert_timezone": {"name": "redshift__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.redshift__convert_timezone", "macro_sql": "{%- macro redshift__convert_timezone(column, target_tz, source_tz) -%}\n{{ return(dbt_date.default__convert_timezone(column, target_tz, source_tz)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.default__convert_timezone"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.866976, "supported_languages": null}, "macro.dbt_date.duckdb__convert_timezone": {"name": "duckdb__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.duckdb__convert_timezone", "macro_sql": "{% macro duckdb__convert_timezone(column, target_tz, source_tz) -%}\n{{ return(dbt_date.postgres__convert_timezone(column, target_tz, source_tz)) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.postgres__convert_timezone"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.867147, "supported_languages": null}, "macro.dbt_date.spark__convert_timezone": {"name": "spark__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.spark__convert_timezone", "macro_sql": "{%- macro spark__convert_timezone(column, target_tz, source_tz) -%}\nfrom_utc_timestamp(\n to_utc_timestamp({{ column }}, '{{ source_tz }}'),\n '{{ target_tz }}'\n )\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8673139, "supported_languages": null}, "macro.dbt_date.trino__convert_timezone": {"name": "trino__convert_timezone", "resource_type": "macro", "package_name": "dbt_date", "path": 
"macros/calendar_date/convert_timezone.sql", "original_file_path": "macros/calendar_date/convert_timezone.sql", "unique_id": "macro.dbt_date.trino__convert_timezone", "macro_sql": "{%- macro trino__convert_timezone(column, target_tz, source_tz) -%}\n cast((at_timezone(with_timezone(cast({{ column }} as {{ dbt.type_timestamp() }}), '{{ source_tz }}'), '{{ target_tz }}')) as {{ dbt.type_timestamp() }})\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8675258, "supported_languages": null}, "macro.dbt_date.n_months_away": {"name": "n_months_away", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/n_months_away.sql", "original_file_path": "macros/calendar_date/n_months_away.sql", "unique_id": "macro.dbt_date.n_months_away", "macro_sql": "{%- macro n_months_away(n, tz=None) -%}\n{%- set n = n|int -%}\n{{ dbt.date_trunc('month',\n dbt.dateadd('month', n,\n dbt_date.today(tz)\n )\n ) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt.date_trunc", "macro.dbt.dateadd", "macro.dbt_date.today"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.867857, "supported_languages": null}, "macro.dbt_date.iso_week_of_year": {"name": "iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.iso_week_of_year", "macro_sql": "{%- macro iso_week_of_year(date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{{ adapter.dispatch('iso_week_of_year', 'dbt_date') (dt) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt_date.postgres__iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8685179, "supported_languages": null}, "macro.dbt_date._iso_week_of_year": {"name": "_iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date._iso_week_of_year", "macro_sql": "{%- macro _iso_week_of_year(date, week_type) -%}\ncast({{ dbt_date.date_part(week_type, date) }} as {{ dbt.type_int() }})\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.868705, "supported_languages": null}, "macro.dbt_date.default__iso_week_of_year": {"name": "default__iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.default__iso_week_of_year", "macro_sql": "\n\n{%- macro default__iso_week_of_year(date) -%}\n{{ dbt_date._iso_week_of_year(date, 'isoweek') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.868895, "supported_languages": null}, 
"macro.dbt_date.snowflake__iso_week_of_year": {"name": "snowflake__iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.snowflake__iso_week_of_year", "macro_sql": "\n\n{%- macro snowflake__iso_week_of_year(date) -%}\n{{ dbt_date._iso_week_of_year(date, 'weekiso') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.869141, "supported_languages": null}, "macro.dbt_date.postgres__iso_week_of_year": {"name": "postgres__iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.postgres__iso_week_of_year", "macro_sql": "\n\n{%- macro postgres__iso_week_of_year(date) -%}\n-- postgresql week is isoweek, the first week of a year containing January 4 of that year.\n{{ dbt_date._iso_week_of_year(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8692842, "supported_languages": null}, "macro.dbt_date.duckdb__iso_week_of_year": {"name": "duckdb__iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.duckdb__iso_week_of_year", "macro_sql": "\n\n{%- macro duckdb__iso_week_of_year(date) -%}\n{{ return(dbt_date.postgres__iso_week_of_year(date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8694222, "supported_languages": null}, "macro.dbt_date.spark__iso_week_of_year": {"name": "spark__iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.spark__iso_week_of_year", "macro_sql": "\n\n{%- macro spark__iso_week_of_year(date) -%}\n{{ dbt_date._iso_week_of_year(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.869556, "supported_languages": null}, "macro.dbt_date.trino__iso_week_of_year": {"name": "trino__iso_week_of_year", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/iso_week_of_year.sql", "original_file_path": "macros/calendar_date/iso_week_of_year.sql", "unique_id": "macro.dbt_date.trino__iso_week_of_year", "macro_sql": "\n\n{%- macro trino__iso_week_of_year(date) -%}\n{{ dbt_date._iso_week_of_year(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date._iso_week_of_year"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.869684, "supported_languages": null}, "macro.dbt_date.week_end": 
{"name": "week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_end.sql", "original_file_path": "macros/calendar_date/week_end.sql", "unique_id": "macro.dbt_date.week_end", "macro_sql": "{%- macro week_end(date=None, tz=None) -%}\n{%-set dt = date if date else dbt_date.today(tz) -%}\n{{ adapter.dispatch('week_end', 'dbt_date') (dt) }}\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_date.today", "macro.dbt_date.postgres__week_end"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.870166, "supported_languages": null}, "macro.dbt_date.default__week_end": {"name": "default__week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_end.sql", "original_file_path": "macros/calendar_date/week_end.sql", "unique_id": "macro.dbt_date.default__week_end", "macro_sql": "{%- macro default__week_end(date) -%}\n{{ last_day(date, 'week') }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt.last_day"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.870293, "supported_languages": null}, "macro.dbt_date.snowflake__week_end": {"name": "snowflake__week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_end.sql", "original_file_path": "macros/calendar_date/week_end.sql", "unique_id": "macro.dbt_date.snowflake__week_end", "macro_sql": "\n\n{%- macro snowflake__week_end(date) -%}\n{%- set dt = dbt_date.week_start(date) -%}\n{{ dbt_date.n_days_away(6, dt) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.week_start", "macro.dbt_date.n_days_away"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8704698, "supported_languages": null}, "macro.dbt_date.postgres__week_end": {"name": "postgres__week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_end.sql", "original_file_path": "macros/calendar_date/week_end.sql", "unique_id": "macro.dbt_date.postgres__week_end", "macro_sql": "\n\n{%- macro postgres__week_end(date) -%}\n{%- set dt = dbt_date.week_start(date) -%}\n{{ dbt_date.n_days_away(6, dt) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.week_start", "macro.dbt_date.n_days_away"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.870655, "supported_languages": null}, "macro.dbt_date.duckdb__week_end": {"name": "duckdb__week_end", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/week_end.sql", "original_file_path": "macros/calendar_date/week_end.sql", "unique_id": "macro.dbt_date.duckdb__week_end", "macro_sql": "\n\n{%- macro duckdb__week_end(date) -%}\n{{ return(dbt_date.postgres__week_end(date)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_date.postgres__week_end"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.870789, "supported_languages": null}, "macro.dbt_date.next_month_number": {"name": "next_month_number", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/next_month_number.sql", "original_file_path": "macros/calendar_date/next_month_number.sql", "unique_id": 
"macro.dbt_date.next_month_number", "macro_sql": "{%- macro next_month_number(tz=None) -%}\n{{ dbt_date.date_part('month', dbt_date.next_month(tz)) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt_date.next_month"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.870989, "supported_languages": null}, "macro.dbt_date.last_month_number": {"name": "last_month_number", "resource_type": "macro", "package_name": "dbt_date", "path": "macros/calendar_date/last_month_number.sql", "original_file_path": "macros/calendar_date/last_month_number.sql", "unique_id": "macro.dbt_date.last_month_number", "macro_sql": "{%- macro last_month_number(tz=None) -%}\n{{ dbt_date.date_part('month', dbt_date.last_month(tz)) }}\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_date.date_part", "macro.dbt_date.last_month"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.871181, "supported_languages": null}, "macro.fivetran_utils.enabled_vars": {"name": "enabled_vars", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/enabled_vars.sql", "original_file_path": "macros/enabled_vars.sql", "unique_id": "macro.fivetran_utils.enabled_vars", "macro_sql": "{% macro enabled_vars(vars) %}\n\n{% for v in vars %}\n \n {% if var(v, True) == False %}\n {{ return(False) }}\n {% endif %}\n\n{% endfor %}\n\n{{ return(True) }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.871549, "supported_languages": null}, "macro.fivetran_utils.percentile": {"name": "percentile", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/percentile.sql", "original_file_path": "macros/percentile.sql", "unique_id": "macro.fivetran_utils.percentile", "macro_sql": "{% macro percentile(percentile_field, partition_field, percent) -%}\n\n{{ adapter.dispatch('percentile', 'fivetran_utils') (percentile_field, partition_field, percent) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.postgres__percentile"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.872406, "supported_languages": null}, "macro.fivetran_utils.default__percentile": {"name": "default__percentile", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/percentile.sql", "original_file_path": "macros/percentile.sql", "unique_id": "macro.fivetran_utils.default__percentile", "macro_sql": "{% macro default__percentile(percentile_field, partition_field, percent) %}\n\n percentile_cont( \n {{ percent }} )\n within group ( order by {{ percentile_field }} )\n over ( partition by {{ partition_field }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.872562, "supported_languages": null}, "macro.fivetran_utils.redshift__percentile": {"name": "redshift__percentile", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/percentile.sql", "original_file_path": "macros/percentile.sql", "unique_id": "macro.fivetran_utils.redshift__percentile", "macro_sql": "{% macro redshift__percentile(percentile_field, 
partition_field, percent) %}\n\n percentile_cont( \n {{ percent }} )\n within group ( order by {{ percentile_field }} )\n over ( partition by {{ partition_field }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.872719, "supported_languages": null}, "macro.fivetran_utils.bigquery__percentile": {"name": "bigquery__percentile", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/percentile.sql", "original_file_path": "macros/percentile.sql", "unique_id": "macro.fivetran_utils.bigquery__percentile", "macro_sql": "{% macro bigquery__percentile(percentile_field, partition_field, percent) %}\n\n percentile_cont( \n {{ percentile_field }}, \n {{ percent }}) \n over (partition by {{ partition_field }} \n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.872868, "supported_languages": null}, "macro.fivetran_utils.postgres__percentile": {"name": "postgres__percentile", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/percentile.sql", "original_file_path": "macros/percentile.sql", "unique_id": "macro.fivetran_utils.postgres__percentile", "macro_sql": "{% macro postgres__percentile(percentile_field, partition_field, percent) %}\n\n percentile_cont( \n {{ percent }} )\n within group ( order by {{ percentile_field }} )\n /* have to group by partition field */\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.873003, "supported_languages": null}, "macro.fivetran_utils.spark__percentile": {"name": "spark__percentile", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/percentile.sql", "original_file_path": "macros/percentile.sql", "unique_id": "macro.fivetran_utils.spark__percentile", "macro_sql": "{% macro spark__percentile(percentile_field, partition_field, percent) %}\n\n percentile( \n {{ percentile_field }}, \n {{ percent }}) \n over (partition by {{ partition_field }} \n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8731542, "supported_languages": null}, "macro.fivetran_utils.pivot_json_extract": {"name": "pivot_json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/pivot_json_extract.sql", "original_file_path": "macros/pivot_json_extract.sql", "unique_id": "macro.fivetran_utils.pivot_json_extract", "macro_sql": "{% macro pivot_json_extract(string, list_of_properties) %}\n\n{%- for property in list_of_properties -%}\n{%- if property is mapping -%}\nreplace( {{ fivetran_utils.json_extract(string, property.name) }}, '\"', '') as {{ property.alias if property.alias else property.name | replace(' ', '_') | replace('.', '_') | lower }}\n\n{%- else -%}\nreplace( {{ fivetran_utils.json_extract(string, property) }}, '\"', '') as {{ property | replace(' ', '_') | lower }}\n\n{%- endif -%}\n{%- if not loop.last -%},{%- endif %}\n{% endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.json_extract"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 
1724705296.8739681, "supported_languages": null}, "macro.fivetran_utils.persist_pass_through_columns": {"name": "persist_pass_through_columns", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/persist_pass_through_columns.sql", "original_file_path": "macros/persist_pass_through_columns.sql", "unique_id": "macro.fivetran_utils.persist_pass_through_columns", "macro_sql": "{% macro persist_pass_through_columns(pass_through_variable, identifier=none, transform='') %}\n\n{% if var(pass_through_variable, none) %}\n {% for field in var(pass_through_variable) %}\n , {{ transform ~ '(' ~ (identifier ~ '.' if identifier else '') ~ (field.alias if field.alias else field.name) ~ ')' }} as {{ field.alias if field.alias else field.name }}\n {% endfor %}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.874706, "supported_languages": null}, "macro.fivetran_utils.json_parse": {"name": "json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.json_parse", "macro_sql": "{% macro json_parse(string, string_path) -%}\n\n{{ adapter.dispatch('json_parse', 'fivetran_utils') (string, string_path) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.postgres__json_parse"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.876043, "supported_languages": null}, "macro.fivetran_utils.default__json_parse": {"name": "default__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.default__json_parse", "macro_sql": "{% macro default__json_parse(string, string_path) %}\n\n json_extract_path_text({{string}}, {%- for s in string_path -%}'{{ s }}'{%- if not loop.last -%},{%- endif -%}{%- endfor -%} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.876339, "supported_languages": null}, "macro.fivetran_utils.redshift__json_parse": {"name": "redshift__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.redshift__json_parse", "macro_sql": "{% macro redshift__json_parse(string, string_path) %}\n\n json_extract_path_text({{string}}, {%- for s in string_path -%}'{{ s }}'{%- if not loop.last -%},{%- endif -%}{%- endfor -%} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.876597, "supported_languages": null}, "macro.fivetran_utils.bigquery__json_parse": {"name": "bigquery__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.bigquery__json_parse", "macro_sql": "{% macro bigquery__json_parse(string, string_path) %}\n\n \n json_extract_scalar({{string}}, '$.{%- for s in string_path -%}{{ s }}{%- if not loop.last -%}.{%- endif -%}{%- endfor -%} ')\n\n{% 
endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8768709, "supported_languages": null}, "macro.fivetran_utils.postgres__json_parse": {"name": "postgres__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.postgres__json_parse", "macro_sql": "{% macro postgres__json_parse(string, string_path) %}\n\n {{string}}::json #>> '{ {%- for s in string_path -%}{{ s }}{%- if not loop.last -%},{%- endif -%}{%- endfor -%} }'\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8771122, "supported_languages": null}, "macro.fivetran_utils.snowflake__json_parse": {"name": "snowflake__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.snowflake__json_parse", "macro_sql": "{% macro snowflake__json_parse(string, string_path) %}\n\n parse_json( {{string}} ) {%- for s in string_path -%}{% if s is number %}[{{ s }}]{% else %}['{{ s }}']{% endif %}{%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.877366, "supported_languages": null}, "macro.fivetran_utils.spark__json_parse": {"name": "spark__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.spark__json_parse", "macro_sql": "{% macro spark__json_parse(string, string_path) %}\n\n {{string}} : {%- for s in string_path -%}{% if s is number %}[{{ s }}]{% else %}['{{ s }}']{% endif %}{%- endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8776221, "supported_languages": null}, "macro.fivetran_utils.sqlserver__json_parse": {"name": "sqlserver__json_parse", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_parse.sql", "original_file_path": "macros/json_parse.sql", "unique_id": "macro.fivetran_utils.sqlserver__json_parse", "macro_sql": "{% macro sqlserver__json_parse(string, string_path) %}\n\n json_value({{string}}, '$.{%- for s in string_path -%}{{ s }}{%- if not loop.last -%}.{%- endif -%}{%- endfor -%} ')\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.877856, "supported_languages": null}, "macro.fivetran_utils.max_bool": {"name": "max_bool", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/max_bool.sql", "original_file_path": "macros/max_bool.sql", "unique_id": "macro.fivetran_utils.max_bool", "macro_sql": "{% macro max_bool(boolean_field) -%}\n\n{{ adapter.dispatch('max_bool', 'fivetran_utils') (boolean_field) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__max_bool"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], 
"created_at": 1724705296.878196, "supported_languages": null}, "macro.fivetran_utils.default__max_bool": {"name": "default__max_bool", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/max_bool.sql", "original_file_path": "macros/max_bool.sql", "unique_id": "macro.fivetran_utils.default__max_bool", "macro_sql": "{% macro default__max_bool(boolean_field) %}\n\n bool_or( {{ boolean_field }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8783, "supported_languages": null}, "macro.fivetran_utils.snowflake__max_bool": {"name": "snowflake__max_bool", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/max_bool.sql", "original_file_path": "macros/max_bool.sql", "unique_id": "macro.fivetran_utils.snowflake__max_bool", "macro_sql": "{% macro snowflake__max_bool(boolean_field) %}\n\n max( {{ boolean_field }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8784, "supported_languages": null}, "macro.fivetran_utils.bigquery__max_bool": {"name": "bigquery__max_bool", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/max_bool.sql", "original_file_path": "macros/max_bool.sql", "unique_id": "macro.fivetran_utils.bigquery__max_bool", "macro_sql": "{% macro bigquery__max_bool(boolean_field) %}\n\n max( {{ boolean_field }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.878498, "supported_languages": null}, "macro.fivetran_utils.calculated_fields": {"name": "calculated_fields", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/calculated_fields.sql", "original_file_path": "macros/calculated_fields.sql", "unique_id": "macro.fivetran_utils.calculated_fields", "macro_sql": "{% macro calculated_fields(variable) -%}\n\n{% if var(variable, none) %}\n {% for field in var(variable) %}\n , {{ field.transform_sql }} as {{ field.name }} \n {% endfor %}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.878865, "supported_languages": null}, "macro.fivetran_utils.drop_schemas_automation": {"name": "drop_schemas_automation", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/drop_schemas_automation.sql", "original_file_path": "macros/drop_schemas_automation.sql", "unique_id": "macro.fivetran_utils.drop_schemas_automation", "macro_sql": "{% macro drop_schemas_automation(drop_target_schema=true) %}\n {{ return(adapter.dispatch('drop_schemas_automation', 'fivetran_utils')(drop_target_schema)) }}\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__drop_schemas_automation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8795211, "supported_languages": null}, "macro.fivetran_utils.default__drop_schemas_automation": {"name": "default__drop_schemas_automation", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/drop_schemas_automation.sql", "original_file_path": "macros/drop_schemas_automation.sql", 
"unique_id": "macro.fivetran_utils.default__drop_schemas_automation", "macro_sql": "{% macro default__drop_schemas_automation(drop_target_schema=true) %}\n\n{% set fetch_list_sql %}\n {% if target.type not in ('databricks', 'spark') %}\n select schema_name\n from \n {{ wrap_in_quotes(target.database) }}.INFORMATION_SCHEMA.SCHEMATA\n where lower(schema_name) like '{{ target.schema | lower }}{%- if not drop_target_schema -%}_{%- endif -%}%'\n {% else %}\n SHOW SCHEMAS LIKE '{{ target.schema }}{%- if not drop_target_schema -%}_{%- endif -%}*'\n {% endif %}\n{% endset %}\n\n{% set results = run_query(fetch_list_sql) %}\n\n{% if execute %}\n {% set results_list = results.columns[0].values() %}\n{% else %}\n {% set results_list = [] %}\n{% endif %}\n\n{% for schema_to_drop in results_list %}\n {% do adapter.drop_schema(api.Relation.create(database=target.database, schema=schema_to_drop)) %}\n {{ print('Schema ' ~ schema_to_drop ~ ' successfully dropped from the ' ~ target.database ~ ' database.\\n')}}\n{% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.wrap_in_quotes", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8806489, "supported_languages": null}, "macro.fivetran_utils.seed_data_helper": {"name": "seed_data_helper", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/seed_data_helper.sql", "original_file_path": "macros/seed_data_helper.sql", "unique_id": "macro.fivetran_utils.seed_data_helper", "macro_sql": "{% macro seed_data_helper(seed_name, warehouses) %}\n\n{% if target.type in warehouses %}\n {% for w in warehouses %}\n {% if target.type == w %}\n {{ return(ref(seed_name ~ \"_\" ~ w ~ \"\")) }}\n {% endif %}\n {% endfor %}\n{% else %}\n{{ return(ref(seed_name)) }}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8811939, "supported_languages": null}, "macro.fivetran_utils.fill_pass_through_columns": {"name": "fill_pass_through_columns", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/fill_pass_through_columns.sql", "original_file_path": "macros/fill_pass_through_columns.sql", "unique_id": "macro.fivetran_utils.fill_pass_through_columns", "macro_sql": "{% macro fill_pass_through_columns(pass_through_variable) %}\n\n{% if var(pass_through_variable) %}\n {% for field in var(pass_through_variable) %}\n {% if field is mapping %}\n {% if field.transform_sql %}\n , {{ field.transform_sql }} as {{ field.alias if field.alias else field.name }}\n {% else %}\n , {{ field.alias if field.alias else field.name }}\n {% endif %}\n {% else %}\n , {{ field }}\n {% endif %}\n {% endfor %}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8819041, "supported_languages": null}, "macro.fivetran_utils.string_agg": {"name": "string_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/string_agg.sql", "original_file_path": "macros/string_agg.sql", "unique_id": "macro.fivetran_utils.string_agg", "macro_sql": "{% macro string_agg(field_to_agg, delimiter) -%}\n\n{{ adapter.dispatch('string_agg', 'fivetran_utils') (field_to_agg, delimiter) }}\n\n{%- endmacro %}", "depends_on": {"macros": 
["macro.fivetran_utils.default__string_agg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8823302, "supported_languages": null}, "macro.fivetran_utils.default__string_agg": {"name": "default__string_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/string_agg.sql", "original_file_path": "macros/string_agg.sql", "unique_id": "macro.fivetran_utils.default__string_agg", "macro_sql": "{% macro default__string_agg(field_to_agg, delimiter) %}\n string_agg({{ field_to_agg }}, {{ delimiter }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.882448, "supported_languages": null}, "macro.fivetran_utils.snowflake__string_agg": {"name": "snowflake__string_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/string_agg.sql", "original_file_path": "macros/string_agg.sql", "unique_id": "macro.fivetran_utils.snowflake__string_agg", "macro_sql": "{% macro snowflake__string_agg(field_to_agg, delimiter) %}\n listagg({{ field_to_agg }}, {{ delimiter }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.882569, "supported_languages": null}, "macro.fivetran_utils.redshift__string_agg": {"name": "redshift__string_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/string_agg.sql", "original_file_path": "macros/string_agg.sql", "unique_id": "macro.fivetran_utils.redshift__string_agg", "macro_sql": "{% macro redshift__string_agg(field_to_agg, delimiter) %}\n listagg({{ field_to_agg }}, {{ delimiter }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.882689, "supported_languages": null}, "macro.fivetran_utils.spark__string_agg": {"name": "spark__string_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/string_agg.sql", "original_file_path": "macros/string_agg.sql", "unique_id": "macro.fivetran_utils.spark__string_agg", "macro_sql": "{% macro spark__string_agg(field_to_agg, delimiter) %}\n -- collect set will remove duplicates\n replace(replace(replace(cast( collect_set({{ field_to_agg }}) as string), '[', ''), ']', ''), ', ', {{ delimiter }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.882812, "supported_languages": null}, "macro.fivetran_utils.timestamp_diff": {"name": "timestamp_diff", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_diff.sql", "original_file_path": "macros/timestamp_diff.sql", "unique_id": "macro.fivetran_utils.timestamp_diff", "macro_sql": "{% macro timestamp_diff(first_date, second_date, datepart) %}\n {{ adapter.dispatch('timestamp_diff', 'fivetran_utils')(first_date, second_date, datepart) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.postgres__timestamp_diff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8855739, "supported_languages": null}, 
"macro.fivetran_utils.default__timestamp_diff": {"name": "default__timestamp_diff", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_diff.sql", "original_file_path": "macros/timestamp_diff.sql", "unique_id": "macro.fivetran_utils.default__timestamp_diff", "macro_sql": "{% macro default__timestamp_diff(first_date, second_date, datepart) %}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.885744, "supported_languages": null}, "macro.fivetran_utils.redshift__timestamp_diff": {"name": "redshift__timestamp_diff", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_diff.sql", "original_file_path": "macros/timestamp_diff.sql", "unique_id": "macro.fivetran_utils.redshift__timestamp_diff", "macro_sql": "{% macro redshift__timestamp_diff(first_date, second_date, datepart) %}\n\n datediff(\n {{ datepart }},\n {{ first_date }},\n {{ second_date }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8859022, "supported_languages": null}, "macro.fivetran_utils.bigquery__timestamp_diff": {"name": "bigquery__timestamp_diff", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_diff.sql", "original_file_path": "macros/timestamp_diff.sql", "unique_id": "macro.fivetran_utils.bigquery__timestamp_diff", "macro_sql": "{% macro bigquery__timestamp_diff(first_date, second_date, datepart) %}\n\n timestamp_diff(\n {{second_date}},\n {{first_date}},\n {{datepart}}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8860931, "supported_languages": null}, "macro.fivetran_utils.postgres__timestamp_diff": {"name": "postgres__timestamp_diff", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_diff.sql", "original_file_path": "macros/timestamp_diff.sql", "unique_id": "macro.fivetran_utils.postgres__timestamp_diff", "macro_sql": "{% macro postgres__timestamp_diff(first_date, second_date, datepart) %}\n\n {% if datepart == 'year' %}\n (date_part('year', ({{second_date}})::date) - date_part('year', ({{first_date}})::date))\n {% elif datepart == 'quarter' %}\n ({{ dbt.datediff(first_date, second_date, 'year') }} * 4 + date_part('quarter', ({{second_date}})::date) - date_part('quarter', ({{first_date}})::date))\n {% elif datepart == 'month' %}\n ({{ dbt.datediff(first_date, second_date, 'year') }} * 12 + date_part('month', ({{second_date}})::date) - date_part('month', ({{first_date}})::date))\n {% elif datepart == 'day' %}\n (({{second_date}})::date - ({{first_date}})::date)\n {% elif datepart == 'week' %}\n ({{ dbt.datediff(first_date, second_date, 'day') }} / 7 + case\n when date_part('dow', ({{first_date}})::timestamp) <= date_part('dow', ({{second_date}})::timestamp) then\n case when {{first_date}} <= {{second_date}} then 0 else -1 end\n else\n case when {{first_date}} <= {{second_date}} then 1 else 0 end\n end)\n {% elif datepart == 'hour' %}\n ({{ dbt.datediff(first_date, second_date, 'day') }} * 24 + date_part('hour', ({{second_date}})::timestamp) - date_part('hour', ({{first_date}})::timestamp))\n {% 
elif datepart == 'minute' %}\n ({{ dbt.datediff(first_date, second_date, 'hour') }} * 60 + date_part('minute', ({{second_date}})::timestamp) - date_part('minute', ({{first_date}})::timestamp))\n {% elif datepart == 'second' %}\n ({{ dbt.datediff(first_date, second_date, 'minute') }} * 60 + floor(date_part('second', ({{second_date}})::timestamp)) - floor(date_part('second', ({{first_date}})::timestamp)))\n {% elif datepart == 'millisecond' %}\n ({{ dbt.datediff(first_date, second_date, 'minute') }} * 60000 + floor(date_part('millisecond', ({{second_date}})::timestamp)) - floor(date_part('millisecond', ({{first_date}})::timestamp)))\n {% elif datepart == 'microsecond' %}\n ({{ dbt.datediff(first_date, second_date, 'minute') }} * 60000000 + floor(date_part('microsecond', ({{second_date}})::timestamp)) - floor(date_part('microsecond', ({{first_date}})::timestamp)))\n {% else %}\n {{ exceptions.raise_compiler_error(\"Unsupported datepart for macro datediff in postgres: {!r}\".format(datepart)) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.datediff"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.887913, "supported_languages": null}, "macro.fivetran_utils.try_cast": {"name": "try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.try_cast", "macro_sql": "{% macro try_cast(field, type) %}\n {{ adapter.dispatch('try_cast', 'fivetran_utils') (field, type) }}\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.postgres__try_cast"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.888839, "supported_languages": null}, "macro.fivetran_utils.default__try_cast": {"name": "default__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.default__try_cast", "macro_sql": "{% macro default__try_cast(field, type) %}\n {# most databases don't support this function yet\n so we just need to use cast #}\n cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8889668, "supported_languages": null}, "macro.fivetran_utils.redshift__try_cast": {"name": "redshift__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.redshift__try_cast", "macro_sql": "{% macro redshift__try_cast(field, type) %}\n{%- if type == 'numeric' -%}\n\n case\n when trim({{field}}) ~ '^(0|[1-9][0-9]*)$' then trim({{field}})\n else null\n end::{{type}}\n\n{% else %}\n {{ exceptions.raise_compiler_error(\n \"non-numeric datatypes are not currently supported\") }}\n\n{% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.889212, "supported_languages": null}, "macro.fivetran_utils.postgres__try_cast": {"name": "postgres__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": 
"macros/try_cast.sql", "unique_id": "macro.fivetran_utils.postgres__try_cast", "macro_sql": "{% macro postgres__try_cast(field, type) %}\n{%- if type == 'numeric' -%}\n\n case\n when replace(cast({{field}} as varchar),cast(' ' as varchar),cast('' as varchar)) ~ '^(0|[1-9][0-9]*)$' \n then replace(cast({{field}} as varchar),cast(' ' as varchar),cast('' as varchar))\n else null\n end::{{type}}\n\n{% else %}\n {{ exceptions.raise_compiler_error(\n \"non-numeric datatypes are not currently supported\") }}\n\n{% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.889462, "supported_languages": null}, "macro.fivetran_utils.snowflake__try_cast": {"name": "snowflake__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.snowflake__try_cast", "macro_sql": "{% macro snowflake__try_cast(field, type) %}\n try_cast(cast({{field}} as varchar) as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8895779, "supported_languages": null}, "macro.fivetran_utils.bigquery__try_cast": {"name": "bigquery__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.bigquery__try_cast", "macro_sql": "{% macro bigquery__try_cast(field, type) %}\n safe_cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.889688, "supported_languages": null}, "macro.fivetran_utils.spark__try_cast": {"name": "spark__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.spark__try_cast", "macro_sql": "{% macro spark__try_cast(field, type) %}\n try_cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.889796, "supported_languages": null}, "macro.fivetran_utils.sqlserver__try_cast": {"name": "sqlserver__try_cast", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/try_cast.sql", "original_file_path": "macros/try_cast.sql", "unique_id": "macro.fivetran_utils.sqlserver__try_cast", "macro_sql": "{% macro sqlserver__try_cast(field, type) %}\n try_cast({{field}} as {{type}})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.889904, "supported_languages": null}, "macro.fivetran_utils.source_relation": {"name": "source_relation", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/source_relation.sql", "original_file_path": "macros/source_relation.sql", "unique_id": "macro.fivetran_utils.source_relation", "macro_sql": "{% macro source_relation(union_schema_variable='union_schemas', union_database_variable='union_databases') -%}\n\n{{ adapter.dispatch('source_relation', 'fivetran_utils') 
(union_schema_variable, union_database_variable) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__source_relation"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8903308, "supported_languages": null}, "macro.fivetran_utils.default__source_relation": {"name": "default__source_relation", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/source_relation.sql", "original_file_path": "macros/source_relation.sql", "unique_id": "macro.fivetran_utils.default__source_relation", "macro_sql": "{% macro default__source_relation(union_schema_variable, union_database_variable) %}\n\n{% if var(union_schema_variable, none) %}\n, case\n {% for schema in var(union_schema_variable) %}\n when lower(replace(replace(_dbt_source_relation,'\"',''),'`','')) like '%.{{ schema|lower }}.%' then '{{ schema|lower }}'\n {% endfor %}\n end as source_relation\n{% elif var(union_database_variable, none) %}\n, case\n {% for database in var(union_database_variable) %}\n when lower(replace(replace(_dbt_source_relation,'\"',''),'`','')) like '%{{ database|lower }}.%' then '{{ database|lower }}'\n {% endfor %}\n end as source_relation\n{% else %}\n, cast('' as {{ dbt.type_string() }}) as source_relation\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.890841, "supported_languages": null}, "macro.fivetran_utils.first_value": {"name": "first_value", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/first_value.sql", "original_file_path": "macros/first_value.sql", "unique_id": "macro.fivetran_utils.first_value", "macro_sql": "{% macro first_value(first_value_field, partition_field, order_by_field, order=\"asc\") -%}\n\n{{ adapter.dispatch('first_value', 'fivetran_utils') (first_value_field, partition_field, order_by_field, order) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__first_value"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.891283, "supported_languages": null}, "macro.fivetran_utils.default__first_value": {"name": "default__first_value", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/first_value.sql", "original_file_path": "macros/first_value.sql", "unique_id": "macro.fivetran_utils.default__first_value", "macro_sql": "{% macro default__first_value(first_value_field, partition_field, order_by_field, order=\"asc\") %}\n\n first_value( {{ first_value_field }} ignore nulls ) over (partition by {{ partition_field }} order by {{ order_by_field }} {{ order }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.891465, "supported_languages": null}, "macro.fivetran_utils.redshift__first_value": {"name": "redshift__first_value", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/first_value.sql", "original_file_path": "macros/first_value.sql", "unique_id": "macro.fivetran_utils.redshift__first_value", "macro_sql": "{% macro redshift__first_value(first_value_field, partition_field, order_by_field, order=\"asc\") %}\n\n first_value( {{ first_value_field }} ignore nulls ) over 
(partition by {{ partition_field }} order by {{ order_by_field }} {{ order }} , {{ partition_field }} rows unbounded preceding )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.8916612, "supported_languages": null}, "macro.fivetran_utils.add_dbt_source_relation": {"name": "add_dbt_source_relation", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/add_dbt_source_relation.sql", "original_file_path": "macros/add_dbt_source_relation.sql", "unique_id": "macro.fivetran_utils.add_dbt_source_relation", "macro_sql": "{% macro add_dbt_source_relation() %}\n\n{% if var('union_schemas', none) or var('union_databases', none) %}\n, _dbt_source_relation\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.891891, "supported_languages": null}, "macro.fivetran_utils.add_pass_through_columns": {"name": "add_pass_through_columns", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/add_pass_through_columns.sql", "original_file_path": "macros/add_pass_through_columns.sql", "unique_id": "macro.fivetran_utils.add_pass_through_columns", "macro_sql": "{% macro add_pass_through_columns(base_columns, pass_through_var) %}\n\n {% if pass_through_var %}\n\n {% for column in pass_through_var %}\n\n {% if column is mapping %}\n\n {% if column.alias %}\n\n {% do base_columns.append({ \"name\": column.name, \"alias\": column.alias, \"datatype\": column.datatype if column.datatype else dbt.type_string()}) %}\n\n {% else %}\n\n {% do base_columns.append({ \"name\": column.name, \"datatype\": column.datatype if column.datatype else dbt.type_string()}) %}\n \n {% endif %}\n\n {% else %}\n\n {% do base_columns.append({ \"name\": column, \"datatype\": dbt.type_string()}) %}\n\n {% endif %}\n\n {% endfor %}\n\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.89291, "supported_languages": null}, "macro.fivetran_utils.union_relations": {"name": "union_relations", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/union_relations.sql", "original_file_path": "macros/union_relations.sql", "unique_id": "macro.fivetran_utils.union_relations", "macro_sql": "{%- macro union_relations(relations, aliases=none, column_override=none, include=[], exclude=[], source_column_name=none) -%}\n\n {%- if exclude and include -%}\n {{ exceptions.raise_compiler_error(\"Both an exclude and include list were provided to the `union` macro. Only one is allowed\") }}\n {%- endif -%}\n\n {#-- Prevent querying of db in parsing mode. This works because this macro does not create any new refs. 
-#}\n {%- if not execute %}\n {{ return('') }}\n {% endif -%}\n\n {%- set column_override = column_override if column_override is not none else {} -%}\n {%- set source_column_name = source_column_name if source_column_name is not none else '_dbt_source_relation' -%}\n\n {%- set relation_columns = {} -%}\n {%- set column_superset = {} -%}\n\n {%- for relation in relations -%}\n\n {%- do relation_columns.update({relation: []}) -%}\n\n {%- do dbt_utils._is_relation(relation, 'union_relations') -%}\n {%- set cols = adapter.get_columns_in_relation(relation) -%}\n {%- for col in cols -%}\n\n {#- If an exclude list was provided and the column is in the list, do nothing -#}\n {%- if exclude and col.column in exclude -%}\n\n {#- If an include list was provided and the column is not in the list, do nothing -#}\n {%- elif include and col.column not in include -%}\n\n {#- Otherwise add the column to the column superset -#}\n {%- else -%}\n\n {#- update the list of columns in this relation -#}\n {%- do relation_columns[relation].append(col.column) -%}\n\n {%- if col.column in column_superset -%}\n\n {%- set stored = column_superset[col.column] -%}\n {%- if col.is_string() and stored.is_string() and col.string_size() > stored.string_size() -%}\n\n {%- do column_superset.update({col.column: col}) -%}\n\n {%- endif %}\n\n {%- else -%}\n\n {%- do column_superset.update({col.column: col}) -%}\n\n {%- endif -%}\n\n {%- endif -%}\n\n {%- endfor -%}\n {%- endfor -%}\n\n {%- set ordered_column_names = column_superset.keys() -%}\n\n {%- for relation in relations %}\n\n (\n select\n\n cast({{ dbt.string_literal(relation) }} as {{ dbt.type_string() }}) as {{ source_column_name }},\n {% for col_name in ordered_column_names -%}\n\n {%- set col = column_superset[col_name] %}\n {%- set col_type = column_override.get(col.column, col.data_type) %}\n {%- set col_name = adapter.quote(col_name) if col_name in relation_columns[relation] else 'null' %}\n cast({{ col_name }} as {{ col_type }}) as {{ col.quoted }} {% if not loop.last %},{% endif -%}\n\n {%- endfor %}\n\n from {{ aliases[loop.index0] if aliases else relation }}\n )\n\n {% if not loop.last -%}\n union all\n {% endif -%}\n\n {%- endfor -%}\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.dbt_utils._is_relation", "macro.dbt.string_literal", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.89676, "supported_languages": null}, "macro.fivetran_utils.union_tables": {"name": "union_tables", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/union_relations.sql", "original_file_path": "macros/union_relations.sql", "unique_id": "macro.fivetran_utils.union_tables", "macro_sql": "{%- macro union_tables(tables, column_override=none, include=[], exclude=[], source_column_name='_dbt_source_table') -%}\n\n {%- do exceptions.warn(\"Warning: the `union_tables` macro is no longer supported and will be deprecated in a future release of dbt-utils. 
Use the `union_relations` macro instead\") -%}\n\n {{ return(dbt_utils.union_relations(tables, column_override, include, exclude, source_column_name)) }}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_utils.union_relations"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.897093, "supported_languages": null}, "macro.fivetran_utils.snowflake_seed_data": {"name": "snowflake_seed_data", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/snowflake_seed_data.sql", "original_file_path": "macros/snowflake_seed_data.sql", "unique_id": "macro.fivetran_utils.snowflake_seed_data", "macro_sql": "{% macro snowflake_seed_data(seed_name) %}\n\n{% if target.type == 'snowflake' %}\n{{ return(ref(seed_name ~ '_snowflake')) }}\n{% else %}\n{{ return(ref(seed_name)) }}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.897424, "supported_languages": null}, "macro.fivetran_utils.fill_staging_columns": {"name": "fill_staging_columns", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/fill_staging_columns.sql", "original_file_path": "macros/fill_staging_columns.sql", "unique_id": "macro.fivetran_utils.fill_staging_columns", "macro_sql": "{% macro fill_staging_columns(source_columns, staging_columns) -%}\n\n{%- set source_column_names = source_columns|map(attribute='name')|map('lower')|list -%}\n\n{%- for column in staging_columns %}\n {% if column.name|lower in source_column_names -%}\n {{ fivetran_utils.quote_column(column) }} as \n {%- if 'alias' in column %} {{ column.alias }} {% else %} {{ fivetran_utils.quote_column(column) }} {%- endif -%}\n {%- else -%}\n cast(null as {{ column.datatype }})\n {%- if 'alias' in column %} as {{ column.alias }} {% else %} as {{ fivetran_utils.quote_column(column) }} {% endif -%}\n {%- endif -%}\n {%- if not loop.last -%} , {% endif -%}\n{% endfor %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.quote_column"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.89875, "supported_languages": null}, "macro.fivetran_utils.quote_column": {"name": "quote_column", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/fill_staging_columns.sql", "original_file_path": "macros/fill_staging_columns.sql", "unique_id": "macro.fivetran_utils.quote_column", "macro_sql": "{% macro quote_column(column) %}\n {% if 'quote' in column %}\n {% if column.quote %}\n {% if target.type in ('bigquery', 'spark', 'databricks') %}\n `{{ column.name }}`\n {% elif target.type == 'snowflake' %}\n \"{{ column.name | upper }}\"\n {% else %}\n \"{{ column.name }}\"\n {% endif %}\n {% else %}\n {{ column.name }}\n {% endif %}\n {% else %}\n {{ column.name }}\n {% endif %}\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.899242, "supported_languages": null}, "macro.fivetran_utils.json_extract": {"name": "json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_extract.sql", "original_file_path": "macros/json_extract.sql", "unique_id": "macro.fivetran_utils.json_extract", "macro_sql": "{% macro json_extract(string, 
string_path) -%}\n\n{{ adapter.dispatch('json_extract', 'fivetran_utils') (string, string_path) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.postgres__json_extract"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.89974, "supported_languages": null}, "macro.fivetran_utils.default__json_extract": {"name": "default__json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_extract.sql", "original_file_path": "macros/json_extract.sql", "unique_id": "macro.fivetran_utils.default__json_extract", "macro_sql": "{% macro default__json_extract(string, string_path) %}\n\n json_extract_path_text({{string}}, {{ \"'\" ~ string_path ~ \"'\" }} )\n \n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.89988, "supported_languages": null}, "macro.fivetran_utils.snowflake__json_extract": {"name": "snowflake__json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_extract.sql", "original_file_path": "macros/json_extract.sql", "unique_id": "macro.fivetran_utils.snowflake__json_extract", "macro_sql": "{% macro snowflake__json_extract(string, string_path) %}\n\n json_extract_path_text(try_parse_json( {{string}} ), {{ \"'\" ~ string_path ~ \"'\" }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.900016, "supported_languages": null}, "macro.fivetran_utils.redshift__json_extract": {"name": "redshift__json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_extract.sql", "original_file_path": "macros/json_extract.sql", "unique_id": "macro.fivetran_utils.redshift__json_extract", "macro_sql": "{% macro redshift__json_extract(string, string_path) %}\n\n case when is_valid_json( {{string}} ) then json_extract_path_text({{string}}, {{ \"'\" ~ string_path ~ \"'\" }} ) else null end\n \n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.900165, "supported_languages": null}, "macro.fivetran_utils.bigquery__json_extract": {"name": "bigquery__json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_extract.sql", "original_file_path": "macros/json_extract.sql", "unique_id": "macro.fivetran_utils.bigquery__json_extract", "macro_sql": "{% macro bigquery__json_extract(string, string_path) %}\n\n json_extract_scalar({{string}}, {{ \"'$.\" ~ string_path ~ \"'\" }} )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.900301, "supported_languages": null}, "macro.fivetran_utils.postgres__json_extract": {"name": "postgres__json_extract", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/json_extract.sql", "original_file_path": "macros/json_extract.sql", "unique_id": "macro.fivetran_utils.postgres__json_extract", "macro_sql": "{% macro postgres__json_extract(string, string_path) %}\n\n {{string}}::json->>{{\"'\" ~ string_path ~ \"'\" }}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", 
"meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.9004328, "supported_languages": null}, "macro.fivetran_utils.collect_freshness": {"name": "collect_freshness", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/collect_freshness.sql", "original_file_path": "macros/collect_freshness.sql", "unique_id": "macro.fivetran_utils.collect_freshness", "macro_sql": "{% macro collect_freshness(source, loaded_at_field, filter) %}\n {{ return(adapter.dispatch('collect_freshness')(source, loaded_at_field, filter))}}\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__collect_freshness"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.90116, "supported_languages": null}, "macro.fivetran_utils.default__collect_freshness": {"name": "default__collect_freshness", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/collect_freshness.sql", "original_file_path": "macros/collect_freshness.sql", "unique_id": "macro.fivetran_utils.default__collect_freshness", "macro_sql": "{% macro default__collect_freshness(source, loaded_at_field, filter) %}\n {% call statement('collect_freshness', fetch_result=True, auto_begin=False) -%}\n\n {%- set enabled_array = [] -%}\n {% for node in graph.sources.values() %}\n {% if node.identifier == source.identifier %}\n {% if (node.meta['is_enabled'] | default(true)) %}\n {%- do enabled_array.append(1) -%}\n {% endif %}\n {% endif %}\n {% endfor %}\n {% set is_enabled = (enabled_array != []) %}\n\n select\n {% if is_enabled %}\n max({{ loaded_at_field }})\n {% else %} \n {{ current_timestamp() }} {% endif %} as max_loaded_at,\n {{ current_timestamp() }} as snapshotted_at\n\n {% if is_enabled %}\n from {{ source }}\n {% if filter %}\n where {{ filter }}\n {% endif %}\n {% endif %}\n\n {% endcall %}\n\n {% if dbt_version.split('.') | map('int') | list >= [1, 5, 0] %}\n {{ return(load_result('collect_freshness')) }}\n {% else %}\n {{ return(load_result('collect_freshness').table) }}\n {% endif %}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.statement", "macro.dbt.current_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.902285, "supported_languages": null}, "macro.fivetran_utils.timestamp_add": {"name": "timestamp_add", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_add.sql", "original_file_path": "macros/timestamp_add.sql", "unique_id": "macro.fivetran_utils.timestamp_add", "macro_sql": "{% macro timestamp_add(datepart, interval, from_timestamp) -%}\n\n{{ adapter.dispatch('timestamp_add', 'fivetran_utils') (datepart, interval, from_timestamp) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.postgres__timestamp_add"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.9029021, "supported_languages": null}, "macro.fivetran_utils.default__timestamp_add": {"name": "default__timestamp_add", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_add.sql", "original_file_path": "macros/timestamp_add.sql", "unique_id": "macro.fivetran_utils.default__timestamp_add", "macro_sql": "{% macro default__timestamp_add(datepart, interval, from_timestamp) %}\n\n timestampadd(\n {{ datepart 
}},\n {{ interval }},\n {{ from_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.903053, "supported_languages": null}, "macro.fivetran_utils.bigquery__timestamp_add": {"name": "bigquery__timestamp_add", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_add.sql", "original_file_path": "macros/timestamp_add.sql", "unique_id": "macro.fivetran_utils.bigquery__timestamp_add", "macro_sql": "{% macro bigquery__timestamp_add(datepart, interval, from_timestamp) %}\n\n timestamp_add({{ from_timestamp }}, interval {{ interval }} {{ datepart }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.9031942, "supported_languages": null}, "macro.fivetran_utils.redshift__timestamp_add": {"name": "redshift__timestamp_add", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_add.sql", "original_file_path": "macros/timestamp_add.sql", "unique_id": "macro.fivetran_utils.redshift__timestamp_add", "macro_sql": "{% macro redshift__timestamp_add(datepart, interval, from_timestamp) %}\n\n dateadd(\n {{ datepart }},\n {{ interval }},\n {{ from_timestamp }}\n )\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.90334, "supported_languages": null}, "macro.fivetran_utils.postgres__timestamp_add": {"name": "postgres__timestamp_add", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_add.sql", "original_file_path": "macros/timestamp_add.sql", "unique_id": "macro.fivetran_utils.postgres__timestamp_add", "macro_sql": "{% macro postgres__timestamp_add(datepart, interval, from_timestamp) %}\n\n {{ from_timestamp }} + ((interval '1 {{ datepart }}') * ({{ interval }}))\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.903476, "supported_languages": null}, "macro.fivetran_utils.spark__timestamp_add": {"name": "spark__timestamp_add", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/timestamp_add.sql", "original_file_path": "macros/timestamp_add.sql", "unique_id": "macro.fivetran_utils.spark__timestamp_add", "macro_sql": "{% macro spark__timestamp_add(datepart, interval, from_timestamp) %}\n\n {{ dbt.dateadd(datepart, interval, from_timestamp) }}\n \n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.dateadd"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.9036338, "supported_languages": null}, "macro.fivetran_utils.ceiling": {"name": "ceiling", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/ceiling.sql", "original_file_path": "macros/ceiling.sql", "unique_id": "macro.fivetran_utils.ceiling", "macro_sql": "{% macro ceiling(num) -%}\n\n{{ adapter.dispatch('ceiling', 'fivetran_utils') (num) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__ceiling"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.903855, 
"supported_languages": null}, "macro.fivetran_utils.default__ceiling": {"name": "default__ceiling", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/ceiling.sql", "original_file_path": "macros/ceiling.sql", "unique_id": "macro.fivetran_utils.default__ceiling", "macro_sql": "{% macro default__ceiling(num) %}\n ceiling({{ num }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.903946, "supported_languages": null}, "macro.fivetran_utils.snowflake__ceiling": {"name": "snowflake__ceiling", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/ceiling.sql", "original_file_path": "macros/ceiling.sql", "unique_id": "macro.fivetran_utils.snowflake__ceiling", "macro_sql": "{% macro snowflake__ceiling(num) %}\n ceil({{ num }})\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.904036, "supported_languages": null}, "macro.fivetran_utils.remove_prefix_from_columns": {"name": "remove_prefix_from_columns", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/remove_prefix_from_columns.sql", "original_file_path": "macros/remove_prefix_from_columns.sql", "unique_id": "macro.fivetran_utils.remove_prefix_from_columns", "macro_sql": "{% macro remove_prefix_from_columns(columns, prefix='', exclude=[]) %}\n\n {%- for col in columns if col.name not in exclude -%}\n {%- if col.name[:prefix|length]|lower == prefix -%}\n {{ col.name }} as {{ col.name[prefix|length:] }}\n {%- else -%}\n {{ col.name }}\n {%- endif -%}\n {%- if not loop.last -%},{%- endif %}\n {% endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.904756, "supported_languages": null}, "macro.fivetran_utils.fivetran_date_spine": {"name": "fivetran_date_spine", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/fivetran_date_spine.sql", "original_file_path": "macros/fivetran_date_spine.sql", "unique_id": "macro.fivetran_utils.fivetran_date_spine", "macro_sql": "{% macro fivetran_date_spine(datepart, start_date, end_date) -%}\n\n{{ return(adapter.dispatch('fivetran_date_spine', 'fivetran_utils') (datepart, start_date, end_date)) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__fivetran_date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.90606, "supported_languages": null}, "macro.fivetran_utils.default__fivetran_date_spine": {"name": "default__fivetran_date_spine", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/fivetran_date_spine.sql", "original_file_path": "macros/fivetran_date_spine.sql", "unique_id": "macro.fivetran_utils.default__fivetran_date_spine", "macro_sql": "{% macro default__fivetran_date_spine(datepart, start_date, end_date) %}\n\n {{ dbt_utils.date_spine(datepart, start_date, end_date) }}\n \n{% endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.date_spine"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.906241, "supported_languages": null}, 
"macro.fivetran_utils.sqlserver__fivetran_date_spine": {"name": "sqlserver__fivetran_date_spine", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/fivetran_date_spine.sql", "original_file_path": "macros/fivetran_date_spine.sql", "unique_id": "macro.fivetran_utils.sqlserver__fivetran_date_spine", "macro_sql": "{% macro sqlserver__fivetran_date_spine(datepart, start_date, end_date) -%}\n\n {% set date_spine_query %}\n with\n\n l0 as (\n\n select c\n from (select 1 union all select 1) as d(c)\n\n ),\n l1 as (\n\n select\n 1 as c\n from l0 as a\n cross join l0 as b\n\n ),\n\n l2 as (\n\n select 1 as c\n from l1 as a\n cross join l1 as b\n ),\n\n l3 as (\n\n select 1 as c\n from l2 as a\n cross join l2 as b\n ),\n\n l4 as (\n\n select 1 as c\n from l3 as a\n cross join l3 as b\n ),\n\n l5 as (\n\n select 1 as c\n from l4 as a\n cross join l4 as b\n ),\n\n nums as (\n\n select row_number() over (order by (select null)) as rownum\n from l5\n ),\n\n rawdata as (\n\n select top ({{dbt.datediff(start_date, end_date, datepart)}}) rownum -1 as n\n from nums\n order by rownum\n ),\n\n all_periods as (\n\n select (\n {{\n dbt.dateadd(\n datepart,\n 'n',\n start_date\n )\n }}\n ) as date_{{datepart}}\n from rawdata\n ),\n\n filtered as (\n\n select *\n from all_periods\n where date_{{datepart}} <= {{ end_date }}\n\n )\n\n select * from filtered\n order by 1\n\n {% endset %}\n\n {% set results = run_query(date_spine_query) %}\n\n {% if execute %}\n\n {% set results_list = results.columns[0].values() %}\n \n {% else %}\n\n {% set results_list = [] %}\n\n {% endif %}\n\n {%- for date_field in results_list %}\n select cast('{{ date_field }}' as date) as date_{{datepart}} {{ 'union all ' if not loop.last else '' }}\n {% endfor -%}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.datediff", "macro.dbt.dateadd", "macro.dbt.run_query"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.90704, "supported_languages": null}, "macro.fivetran_utils.union_data": {"name": "union_data", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/union_data.sql", "original_file_path": "macros/union_data.sql", "unique_id": "macro.fivetran_utils.union_data", "macro_sql": "{%- macro union_data(table_identifier, database_variable, schema_variable, default_database, default_schema, default_variable, union_schema_variable='union_schemas', union_database_variable='union_databases') -%}\n\n{{ adapter.dispatch('union_data', 'fivetran_utils') (\n table_identifier, \n database_variable, \n schema_variable, \n default_database, \n default_schema, \n default_variable,\n union_schema_variable,\n union_database_variable\n ) }}\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.fivetran_utils.default__union_data"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.9103758, "supported_languages": null}, "macro.fivetran_utils.default__union_data": {"name": "default__union_data", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/union_data.sql", "original_file_path": "macros/union_data.sql", "unique_id": "macro.fivetran_utils.default__union_data", "macro_sql": "{%- macro default__union_data(\n table_identifier, \n database_variable, \n schema_variable, \n default_database, \n default_schema, \n default_variable,\n union_schema_variable,\n union_database_variable\n ) -%}\n\n{%- if 
var(union_schema_variable, none) -%}\n\n {%- set relations = [] -%}\n \n {%- if var(union_schema_variable) is string -%}\n {%- set trimmed = var(union_schema_variable)|trim('[')|trim(']') -%}\n {%- set schemas = trimmed.split(',')|map('trim',\" \")|map('trim','\"')|map('trim',\"'\") -%}\n {%- else -%}\n {%- set schemas = var(union_schema_variable) -%}\n {%- endif -%}\n\n {%- for schema in var(union_schema_variable) -%}\n {%- set relation=adapter.get_relation(\n database=source(schema, table_identifier).database if var('has_defined_sources', false) else var(database_variable, default_database),\n schema=source(schema, table_identifier).schema if var('has_defined_sources', false) else schema,\n identifier=source(schema, table_identifier).identifier if var('has_defined_sources', false) else table_identifier\n ) -%}\n \n {%- set relation_exists=relation is not none -%}\n\n {%- if relation_exists -%}\n {%- do relations.append(relation) -%}\n {%- endif -%}\n\n {%- endfor -%}\n \n {%- if relations != [] -%}\n {{ dbt_utils.union_relations(relations) }}\n {%- else -%}\n {% if execute and not var('fivetran__remove_empty_table_warnings', false) -%}\n {{ exceptions.warn(\"\\n\\nPlease be aware: The \" ~ table_identifier|upper ~ \" table was not found in your \" ~ default_schema|upper ~ \" schema(s). The Fivetran dbt package will create a completely empty \" ~ table_identifier|upper ~ \" staging model as to not break downstream transformations. To turn off these warnings, set the `fivetran__remove_empty_table_warnings` variable to TRUE (see https://github.com/fivetran/dbt_fivetran_utils/tree/releases/v0.4.latest#union_data-source for details).\\n\") }}\n {% endif -%}\n select \n cast(null as {{ dbt.type_string() }}) as _dbt_source_relation\n limit 0\n {%- endif -%}\n\n{%- elif var(union_database_variable, none) -%}\n\n {%- set relations = [] -%}\n\n {%- for database in var(union_database_variable) -%}\n {%- set relation=adapter.get_relation(\n database=source(schema, table_identifier).database if var('has_defined_sources', false) else database,\n schema=source(schema, table_identifier).schema if var('has_defined_sources', false) else var(schema_variable, default_schema),\n identifier=source(schema, table_identifier).identifier if var('has_defined_sources', false) else table_identifier\n ) -%}\n\n {%- set relation_exists=relation is not none -%}\n\n {%- if relation_exists -%}\n {%- do relations.append(relation) -%}\n {%- endif -%}\n\n {%- endfor -%}\n\n {%- if relations != [] -%}\n {{ dbt_utils.union_relations(relations) }}\n {%- else -%}\n {% if execute and not var('fivetran__remove_empty_table_warnings', false) -%}\n {{ exceptions.warn(\"\\n\\nPlease be aware: The \" ~ table_identifier|upper ~ \" table was not found in your \" ~ default_schema|upper ~ \" schema(s). The Fivetran dbt package will create a completely empty \" ~ table_identifier|upper ~ \" staging model as to not break downstream transformations. 
To turn off these warnings, set the `fivetran__remove_empty_table_warnings` variable to TRUE (see https://github.com/fivetran/dbt_fivetran_utils/tree/releases/v0.4.latest#union_data-source for details).\\n\") }}\n {% endif -%}\n select \n cast(null as {{ dbt.type_string() }}) as _dbt_source_relation\n limit 0\n {%- endif -%}\n\n{%- else -%}\n {% set exception_schemas = {\"linkedin_company_pages\": \"linkedin_pages\", \"instagram_business_pages\": \"instagram_business\"} %}\n {% set relation = namespace(value=\"\") %}\n {% if default_schema in exception_schemas.keys() %}\n {% for corrected_schema_name in exception_schemas.items() %} \n {% if default_schema in corrected_schema_name %}\n {# In order for this macro to effectively work within upstream integration tests (mainly used by the Fivetran dbt package maintainers), this identifier variable selection is required to use the macro with different identifier names. #}\n {% set identifier_var = corrected_schema_name[1] + \"_\" + table_identifier + \"_identifier\" %}\n {%- set relation.value=adapter.get_relation(\n database=source(corrected_schema_name[1], table_identifier).database,\n schema=source(corrected_schema_name[1], table_identifier).schema,\n identifier=var(identifier_var, table_identifier)\n ) -%}\n {% endif %}\n {% endfor %}\n {% else %}\n {# In order for this macro to effectively work within upstream integration tests (mainly used by the Fivetran dbt package maintainers), this identifier variable selection is required to use the macro with different identifier names. #}\n {% set identifier_var = default_schema + \"_\" + table_identifier + \"_identifier\" %}\n {# Unfortunately the Twitter Organic identifiers were misspelled. As such, we will need to account for this in the model. This will be adjusted in the Twitter Organic package, but to ensure backwards compatibility, this needs to be included. #}\n {% if var(identifier_var, none) is none %} \n {% set identifier_var = default_schema + \"_\" + table_identifier + \"_identifer\" %}\n {% endif %}\n {%- set relation.value=adapter.get_relation(\n database=source(default_schema, table_identifier).database,\n schema=source(default_schema, table_identifier).schema,\n identifier=var(identifier_var, table_identifier)\n ) -%}\n {% endif %}\n{%- set table_exists=relation.value is not none -%}\n\n{%- if table_exists -%}\n select * \n from {{ relation.value }}\n{%- else -%}\n {% if execute and not var('fivetran__remove_empty_table_warnings', false) -%}\n {{ exceptions.warn(\"\\n\\nPlease be aware: The \" ~ table_identifier|upper ~ \" table was not found in your \" ~ default_schema|upper ~ \" schema(s). The Fivetran dbt package will create a completely empty \" ~ table_identifier|upper ~ \" staging model as to not break downstream transformations. 
To turn off these warnings, set the `fivetran__remove_empty_table_warnings` variable to TRUE (see https://github.com/fivetran/dbt_fivetran_utils/tree/releases/v0.4.latest#union_data-source for details).\\n\") }}\n {% endif -%}\n select \n cast(null as {{ dbt.type_string() }}) as _dbt_source_relation\n limit 0\n{%- endif -%}\n{%- endif -%}\n\n{%- endmacro -%}", "depends_on": {"macros": ["macro.dbt_utils.union_relations", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.915116, "supported_languages": null}, "macro.fivetran_utils.dummy_coalesce_value": {"name": "dummy_coalesce_value", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/dummy_coalesce_value.sql", "original_file_path": "macros/dummy_coalesce_value.sql", "unique_id": "macro.fivetran_utils.dummy_coalesce_value", "macro_sql": "{% macro dummy_coalesce_value(column) %}\n\n{% set coalesce_value = {\n 'STRING': \"'DUMMY_STRING'\",\n 'BOOLEAN': 'null',\n 'INT': 999999999,\n 'FLOAT': 999999999.99,\n 'TIMESTAMP': 'cast(\"2099-12-31\" as timestamp)',\n 'DATE': 'cast(\"2099-12-31\" as date)',\n} %}\n\n{% if column.is_float() %}\n{{ return(coalesce_value['FLOAT']) }}\n\n{% elif column.is_numeric() %}\n{{ return(coalesce_value['INT']) }}\n\n{% elif column.is_string() %}\n{{ return(coalesce_value['STRING']) }}\n\n{% elif column.data_type|lower == 'boolean' %}\n{{ return(coalesce_value['BOOLEAN']) }}\n\n{% elif 'timestamp' in column.data_type|lower %}\n{{ return(coalesce_value['TIMESTAMP']) }}\n\n{% elif 'date' in column.data_type|lower %}\n{{ return(coalesce_value['DATE']) }}\n\n{% elif 'int' in column.data_type|lower %}\n{{ return(coalesce_value['INT']) }}\n\n{% endif %}\n\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.916785, "supported_languages": null}, "macro.fivetran_utils.extract_url_parameter": {"name": "extract_url_parameter", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/extract_url_parameter.sql", "original_file_path": "macros/extract_url_parameter.sql", "unique_id": "macro.fivetran_utils.extract_url_parameter", "macro_sql": "{% macro extract_url_parameter(field, url_parameter) -%}\n\n{{ adapter.dispatch('extract_url_parameter', 'fivetran_utils') (field, url_parameter) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__extract_url_parameter"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.917155, "supported_languages": null}, "macro.fivetran_utils.default__extract_url_parameter": {"name": "default__extract_url_parameter", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/extract_url_parameter.sql", "original_file_path": "macros/extract_url_parameter.sql", "unique_id": "macro.fivetran_utils.default__extract_url_parameter", "macro_sql": "{% macro default__extract_url_parameter(field, url_parameter) -%}\n\n{{ dbt_utils.get_url_parameter(field, url_parameter) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.dbt_utils.get_url_parameter"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.9173021, "supported_languages": null}, "macro.fivetran_utils.spark__extract_url_parameter": {"name": 
"spark__extract_url_parameter", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/extract_url_parameter.sql", "original_file_path": "macros/extract_url_parameter.sql", "unique_id": "macro.fivetran_utils.spark__extract_url_parameter", "macro_sql": "{% macro spark__extract_url_parameter(field, url_parameter) -%}\n\n{%- set formatted_url_parameter = \"'\" + url_parameter + \"=([^&]+)'\" -%}\nnullif(regexp_extract({{ field }}, {{ formatted_url_parameter }}, 1), '')\n\n{%- endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.917494, "supported_languages": null}, "macro.fivetran_utils.wrap_in_quotes": {"name": "wrap_in_quotes", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/wrap_in_quotes.sql", "original_file_path": "macros/wrap_in_quotes.sql", "unique_id": "macro.fivetran_utils.wrap_in_quotes", "macro_sql": "{%- macro wrap_in_quotes(object_to_quote) -%}\n\n{{ return(adapter.dispatch('wrap_in_quotes', 'fivetran_utils')(object_to_quote)) }}\n\n{%- endmacro -%}\n\n", "depends_on": {"macros": ["macro.fivetran_utils.postgres__wrap_in_quotes"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.9178188, "supported_languages": null}, "macro.fivetran_utils.default__wrap_in_quotes": {"name": "default__wrap_in_quotes", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/wrap_in_quotes.sql", "original_file_path": "macros/wrap_in_quotes.sql", "unique_id": "macro.fivetran_utils.default__wrap_in_quotes", "macro_sql": "{%- macro default__wrap_in_quotes(object_to_quote) -%}\n{# bigquery, spark, databricks #}\n `{{ object_to_quote }}`\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.917922, "supported_languages": null}, "macro.fivetran_utils.snowflake__wrap_in_quotes": {"name": "snowflake__wrap_in_quotes", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/wrap_in_quotes.sql", "original_file_path": "macros/wrap_in_quotes.sql", "unique_id": "macro.fivetran_utils.snowflake__wrap_in_quotes", "macro_sql": "{%- macro snowflake__wrap_in_quotes(object_to_quote) -%}\n \"{{ object_to_quote | upper }}\"\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.9180238, "supported_languages": null}, "macro.fivetran_utils.redshift__wrap_in_quotes": {"name": "redshift__wrap_in_quotes", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/wrap_in_quotes.sql", "original_file_path": "macros/wrap_in_quotes.sql", "unique_id": "macro.fivetran_utils.redshift__wrap_in_quotes", "macro_sql": "{%- macro redshift__wrap_in_quotes(object_to_quote) -%}\n \"{{ object_to_quote }}\"\n{%- endmacro -%}\n\n", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.918117, "supported_languages": null}, "macro.fivetran_utils.postgres__wrap_in_quotes": {"name": "postgres__wrap_in_quotes", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/wrap_in_quotes.sql", "original_file_path": "macros/wrap_in_quotes.sql", 
"unique_id": "macro.fivetran_utils.postgres__wrap_in_quotes", "macro_sql": "{%- macro postgres__wrap_in_quotes(object_to_quote) -%}\n \"{{ object_to_quote }}\"\n{%- endmacro -%}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.918208, "supported_languages": null}, "macro.fivetran_utils.array_agg": {"name": "array_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/array_agg.sql", "original_file_path": "macros/array_agg.sql", "unique_id": "macro.fivetran_utils.array_agg", "macro_sql": "{% macro array_agg(field_to_agg) -%}\n\n{{ adapter.dispatch('array_agg', 'fivetran_utils') (field_to_agg) }}\n\n{%- endmacro %}", "depends_on": {"macros": ["macro.fivetran_utils.default__array_agg"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.918446, "supported_languages": null}, "macro.fivetran_utils.default__array_agg": {"name": "default__array_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/array_agg.sql", "original_file_path": "macros/array_agg.sql", "unique_id": "macro.fivetran_utils.default__array_agg", "macro_sql": "{% macro default__array_agg(field_to_agg) %}\n array_agg({{ field_to_agg }})\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.918541, "supported_languages": null}, "macro.fivetran_utils.redshift__array_agg": {"name": "redshift__array_agg", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/array_agg.sql", "original_file_path": "macros/array_agg.sql", "unique_id": "macro.fivetran_utils.redshift__array_agg", "macro_sql": "{% macro redshift__array_agg(field_to_agg) %}\n listagg({{ field_to_agg }}, ',')\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.918632, "supported_languages": null}, "macro.fivetran_utils.empty_variable_warning": {"name": "empty_variable_warning", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/empty_variable_warning.sql", "original_file_path": "macros/empty_variable_warning.sql", "unique_id": "macro.fivetran_utils.empty_variable_warning", "macro_sql": "{% macro empty_variable_warning(variable, downstream_model) %}\n\n{% if not var(variable) %}\n{{ log(\n \"\"\"\n Warning: You have passed an empty list to the \"\"\" ~ variable ~ \"\"\".\n As a result, you won't see the history of any columns in the \"\"\" ~ downstream_model ~ \"\"\" model.\n \"\"\",\n info=True\n) }}\n{% endif %}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.919002, "supported_languages": null}, "macro.fivetran_utils.enabled_vars_one_true": {"name": "enabled_vars_one_true", "resource_type": "macro", "package_name": "fivetran_utils", "path": "macros/enabled_vars_one_true.sql", "original_file_path": "macros/enabled_vars_one_true.sql", "unique_id": "macro.fivetran_utils.enabled_vars_one_true", "macro_sql": "{% macro enabled_vars_one_true(vars) %}\n\n{% for v in vars %}\n \n {% if var(v, False) == True %}\n {{ return(True) }}\n {% endif %}\n\n{% endfor %}\n\n{{ return(False) 
}}\n\n{% endmacro %}", "depends_on": {"macros": []}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.919348, "supported_languages": null}, "macro.zendesk_source.get_domain_name_columns": {"name": "get_domain_name_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_domain_name_columns.sql", "original_file_path": "macros/get_domain_name_columns.sql", "unique_id": "macro.zendesk_source.get_domain_name_columns", "macro_sql": "{% macro get_domain_name_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"domain_name\", \"datatype\": dbt.type_string()},\n {\"name\": \"index\", \"datatype\": dbt.type_int()},\n {\"name\": \"organization_id\", \"datatype\": dbt.type_int()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.919874, "supported_languages": null}, "macro.zendesk_source.get_user_tag_columns": {"name": "get_user_tag_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_user_tag_columns.sql", "original_file_path": "macros/get_user_tag_columns.sql", "unique_id": "macro.zendesk_source.get_user_tag_columns", "macro_sql": "{% macro get_user_tag_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"user_id\", \"datatype\": dbt.type_int()}\n] %}\n\n{% if target.type == 'redshift' %}\n {{ columns.append( {\"name\": \"tag\", \"datatype\": dbt.type_string(), \"quote\": True } ) }}\n\n{% elif target.type == 'snowflake' %}\n {{ columns.append( {\"name\": \"TAG\", \"datatype\": dbt.type_string(), \"quote\": True } ) }}\n\n{% else %}\n {{ columns.append( {\"name\": \"tag\", \"datatype\": dbt.type_string()} ) }}\n\n{% endif %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.920911, "supported_languages": null}, "macro.zendesk_source.get_ticket_form_history_columns": {"name": "get_ticket_form_history_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_ticket_form_history_columns.sql", "original_file_path": "macros/get_ticket_form_history_columns.sql", "unique_id": "macro.zendesk_source.get_ticket_form_history_columns", "macro_sql": "{% macro get_ticket_form_history_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_deleted\", \"datatype\": \"boolean\"},\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"active\", \"datatype\": \"boolean\"},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"display_name\", \"datatype\": dbt.type_string()},\n {\"name\": \"end_user_visible\", \"datatype\": \"boolean\"},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"updated_at\", \"datatype\": dbt.type_timestamp()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string", "macro.dbt.type_int"]}, 
"description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.921831, "supported_languages": null}, "macro.zendesk_source.get_schedule_columns": {"name": "get_schedule_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_schedule_columns.sql", "original_file_path": "macros/get_schedule_columns.sql", "unique_id": "macro.zendesk_source.get_schedule_columns", "macro_sql": "{% macro get_schedule_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_deleted\", \"datatype\": \"boolean\"},\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"end_time\", \"datatype\": dbt.type_int()},\n {\"name\": \"end_time_utc\", \"datatype\": dbt.type_int()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"start_time\", \"datatype\": dbt.type_int()},\n {\"name\": \"start_time_utc\", \"datatype\": dbt.type_int()},\n {\"name\": \"time_zone\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.9228532, "supported_languages": null}, "macro.zendesk_source.get_daylight_time_columns": {"name": "get_daylight_time_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_daylight_time_columns.sql", "original_file_path": "macros/get_daylight_time_columns.sql", "unique_id": "macro.zendesk_source.get_daylight_time_columns", "macro_sql": "{% macro get_daylight_time_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"daylight_end_utc\", \"datatype\": \"datetime\"},\n {\"name\": \"daylight_offset\", \"datatype\": dbt.type_int()},\n {\"name\": \"daylight_start_utc\", \"datatype\": \"datetime\"},\n {\"name\": \"time_zone\", \"datatype\": dbt.type_string()},\n {\"name\": \"year\", \"datatype\": dbt.type_int()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.9235132, "supported_languages": null}, "macro.zendesk_source.get_time_zone_columns": {"name": "get_time_zone_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_time_zone_columns.sql", "original_file_path": "macros/get_time_zone_columns.sql", "unique_id": "macro.zendesk_source.get_time_zone_columns", "macro_sql": "{% macro get_time_zone_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"standard_offset\", \"datatype\": dbt.type_string()},\n {\"name\": \"time_zone\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.9239411, "supported_languages": null}, "macro.zendesk_source.get_ticket_tag_columns": {"name": 
"get_ticket_tag_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_ticket_tag_columns.sql", "original_file_path": "macros/get_ticket_tag_columns.sql", "unique_id": "macro.zendesk_source.get_ticket_tag_columns", "macro_sql": "{% macro get_ticket_tag_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"ticket_id\", \"datatype\": dbt.type_int()}\n] %}\n\n{% if target.type == 'redshift' %}\n {{ columns.append( {\"name\": \"tag\", \"datatype\": dbt.type_string(), \"quote\": True } ) }}\n\n{% elif target.type == 'snowflake' %}\n {{ columns.append( {\"name\": \"TAG\", \"datatype\": dbt.type_string(), \"quote\": True } ) }}\n\n{% else %}\n {{ columns.append( {\"name\": \"tag\", \"datatype\": dbt.type_string()} ) }}\n\n{% endif %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.925077, "supported_languages": null}, "macro.zendesk_source.get_organization_tag_columns": {"name": "get_organization_tag_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_organization_tag_columns.sql", "original_file_path": "macros/get_organization_tag_columns.sql", "unique_id": "macro.zendesk_source.get_organization_tag_columns", "macro_sql": "{% macro get_organization_tag_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"organization_id\", \"datatype\": dbt.type_int()}\n] %}\n\n{% if target.type == 'redshift' %}\n {{ columns.append( {\"name\": \"tag\", \"datatype\": dbt.type_string(), \"quote\": True } ) }}\n\n{% elif target.type == 'snowflake' %}\n {{ columns.append( {\"name\": \"TAG\", \"datatype\": dbt.type_string(), \"quote\": True } ) }}\n\n{% else %}\n {{ columns.append( {\"name\": \"tag\", \"datatype\": dbt.type_string()} ) }}\n\n{% endif %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.926163, "supported_languages": null}, "macro.zendesk_source.get_schedule_holiday_columns": {"name": "get_schedule_holiday_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_schedule_holiday_columns.sql", "original_file_path": "macros/get_schedule_holiday_columns.sql", "unique_id": "macro.zendesk_source.get_schedule_holiday_columns", "macro_sql": "{% macro get_schedule_holiday_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_deleted\", \"datatype\": \"boolean\"},\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"end_date\", \"datatype\": dbt.type_string()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"schedule_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"start_date\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 
1724705296.9269671, "supported_languages": null}, "macro.zendesk_source.get_group_columns": {"name": "get_group_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_group_columns.sql", "original_file_path": "macros/get_group_columns.sql", "unique_id": "macro.zendesk_source.get_group_columns", "macro_sql": "{% macro get_group_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_deleted\", \"datatype\": \"boolean\"},\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"updated_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"url\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.927766, "supported_languages": null}, "macro.zendesk_source.get_user_columns": {"name": "get_user_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_user_columns.sql", "original_file_path": "macros/get_user_columns.sql", "unique_id": "macro.zendesk_source.get_user_columns", "macro_sql": "{% macro get_user_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"active\", \"datatype\": \"boolean\"},\n {\"name\": \"alias\", \"datatype\": dbt.type_string()},\n {\"name\": \"authenticity_token\", \"datatype\": dbt.type_int()},\n {\"name\": \"chat_only\", \"datatype\": \"boolean\"},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"details\", \"datatype\": dbt.type_int()},\n {\"name\": \"email\", \"datatype\": dbt.type_string()},\n {\"name\": \"external_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"last_login_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"locale\", \"datatype\": dbt.type_string()},\n {\"name\": \"locale_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"moderator\", \"datatype\": \"boolean\"},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"notes\", \"datatype\": dbt.type_int()},\n {\"name\": \"only_private_comments\", \"datatype\": \"boolean\"},\n {\"name\": \"organization_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"phone\", \"datatype\": dbt.type_string()},\n {\"name\": \"remote_photo_url\", \"datatype\": dbt.type_int()},\n {\"name\": \"restricted_agent\", \"datatype\": \"boolean\"},\n {\"name\": \"role\", \"datatype\": dbt.type_string()},\n {\"name\": \"shared\", \"datatype\": \"boolean\"},\n {\"name\": \"shared_agent\", \"datatype\": \"boolean\"},\n {\"name\": \"signature\", \"datatype\": dbt.type_int()},\n {\"name\": \"suspended\", \"datatype\": \"boolean\"},\n {\"name\": \"ticket_restriction\", \"datatype\": dbt.type_string()},\n {\"name\": \"time_zone\", \"datatype\": dbt.type_string()},\n {\"name\": \"two_factor_auth_enabled\", \"datatype\": \"boolean\"},\n {\"name\": \"updated_at\", \"datatype\": dbt.type_string()},\n {\"name\": \"url\", \"datatype\": dbt.type_string()},\n {\"name\": \"verified\", \"datatype\": \"boolean\"}\n] %}\n\n{{ fivetran_utils.add_pass_through_columns(columns, var('zendesk__user_passthrough_columns')) }}\n\n{{ 
return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string", "macro.dbt.type_int", "macro.fivetran_utils.add_pass_through_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.9313622, "supported_languages": null}, "macro.zendesk_source.get_ticket_columns": {"name": "get_ticket_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_ticket_columns.sql", "original_file_path": "macros/get_ticket_columns.sql", "unique_id": "macro.zendesk_source.get_ticket_columns", "macro_sql": "{% macro get_ticket_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"allow_channelback\", \"datatype\": \"boolean\"},\n {\"name\": \"assignee_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"brand_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"description\", \"datatype\": dbt.type_string()},\n {\"name\": \"due_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"external_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"forum_topic_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"group_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"has_incidents\", \"datatype\": \"boolean\"},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"is_public\", \"datatype\": \"boolean\"},\n {\"name\": \"merged_ticket_ids\", \"datatype\": dbt.type_string()},\n {\"name\": \"organization_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"priority\", \"datatype\": dbt.type_string()},\n {\"name\": \"problem_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"recipient\", \"datatype\": dbt.type_int()},\n {\"name\": \"requester_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"status\", \"datatype\": dbt.type_string()},\n {\"name\": \"subject\", \"datatype\": dbt.type_string()},\n {\"name\": \"submitter_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"system_ccs\", \"datatype\": dbt.type_int()},\n {\"name\": \"system_client\", \"datatype\": dbt.type_string()},\n {\"name\": \"system_ip_address\", \"datatype\": dbt.type_string()},\n {\"name\": \"system_json_email_identifier\", \"datatype\": dbt.type_int()},\n {\"name\": \"system_latitude\", \"datatype\": dbt.type_float()},\n {\"name\": \"system_location\", \"datatype\": dbt.type_string()},\n {\"name\": \"system_longitude\", \"datatype\": dbt.type_float()},\n {\"name\": \"system_machine_generated\", \"datatype\": dbt.type_int()},\n {\"name\": \"system_message_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"system_raw_email_identifier\", \"datatype\": dbt.type_int()},\n {\"name\": \"ticket_form_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"type\", \"datatype\": dbt.type_string()},\n {\"name\": \"updated_at\", \"datatype\": dbt.type_string()},\n {\"name\": \"url\", \"datatype\": dbt.type_string()},\n {\"name\": \"via_channel\", \"datatype\": dbt.type_string()},\n {\"name\": \"via_source_from_address\", \"datatype\": dbt.type_int()},\n {\"name\": \"via_source_from_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"via_source_from_title\", \"datatype\": dbt.type_int()},\n {\"name\": \"via_source_rel\", \"datatype\": dbt.type_int()},\n {\"name\": \"via_source_to_address\", \"datatype\": dbt.type_int()},\n {\"name\": \"via_source_to_name\", \"datatype\": dbt.type_int()}\n] %}\n\n{{ fivetran_utils.add_pass_through_columns(columns, 
var('zendesk__ticket_passthrough_columns')) }}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string", "macro.dbt.type_float", "macro.fivetran_utils.add_pass_through_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.9364212, "supported_languages": null}, "macro.zendesk_source.get_ticket_field_history_columns": {"name": "get_ticket_field_history_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_ticket_field_history_columns.sql", "original_file_path": "macros/get_ticket_field_history_columns.sql", "unique_id": "macro.zendesk_source.get_ticket_field_history_columns", "macro_sql": "{% macro get_ticket_field_history_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"field_name\", \"datatype\": dbt.type_string()},\n {\"name\": \"ticket_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"updated\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"user_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"value\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.937286, "supported_languages": null}, "macro.zendesk_source.get_ticket_schedule_columns": {"name": "get_ticket_schedule_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_ticket_schedule_columns.sql", "original_file_path": "macros/get_ticket_schedule_columns.sql", "unique_id": "macro.zendesk_source.get_ticket_schedule_columns", "macro_sql": "{% macro get_ticket_schedule_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"schedule_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"ticket_id\", \"datatype\": dbt.type_int()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.937846, "supported_languages": null}, "macro.zendesk_source.get_organization_columns": {"name": "get_organization_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_organization_columns.sql", "original_file_path": "macros/get_organization_columns.sql", "unique_id": "macro.zendesk_source.get_organization_columns", "macro_sql": "{% macro get_organization_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"created_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"details\", \"datatype\": dbt.type_int()},\n {\"name\": \"external_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"group_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"notes\", \"datatype\": dbt.type_int()},\n {\"name\": \"shared_comments\", \"datatype\": \"boolean\"},\n {\"name\": \"shared_tickets\", \"datatype\": \"boolean\"},\n 
{\"name\": \"updated_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"url\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ fivetran_utils.add_pass_through_columns(columns, var('zendesk__organization_passthrough_columns')) }}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_int", "macro.dbt.type_string", "macro.fivetran_utils.add_pass_through_columns"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.939245, "supported_languages": null}, "macro.zendesk_source.get_ticket_comment_columns": {"name": "get_ticket_comment_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_ticket_comment_columns.sql", "original_file_path": "macros/get_ticket_comment_columns.sql", "unique_id": "macro.zendesk_source.get_ticket_comment_columns", "macro_sql": "{% macro get_ticket_comment_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_string()},\n {\"name\": \"body\", \"datatype\": dbt.type_string()},\n {\"name\": \"call_duration\", \"datatype\": dbt.type_int()},\n {\"name\": \"call_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"created\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"facebook_comment\", \"datatype\": \"boolean\"},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"location\", \"datatype\": dbt.type_int()},\n {\"name\": \"public\", \"datatype\": \"boolean\"},\n {\"name\": \"recording_url\", \"datatype\": dbt.type_int()},\n {\"name\": \"started_at\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"ticket_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"transcription_status\", \"datatype\": dbt.type_int()},\n {\"name\": \"transcription_text\", \"datatype\": dbt.type_int()},\n {\"name\": \"trusted\", \"datatype\": dbt.type_int()},\n {\"name\": \"tweet\", \"datatype\": \"boolean\"},\n {\"name\": \"user_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"voice_comment\", \"datatype\": \"boolean\"},\n {\"name\": \"voice_comment_transcription_visible\", \"datatype\": dbt.type_int()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_string", "macro.dbt.type_int", "macro.dbt.type_timestamp"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.941198, "supported_languages": null}, "macro.zendesk_source.get_brand_columns": {"name": "get_brand_columns", "resource_type": "macro", "package_name": "zendesk_source", "path": "macros/get_brand_columns.sql", "original_file_path": "macros/get_brand_columns.sql", "unique_id": "macro.zendesk_source.get_brand_columns", "macro_sql": "{% macro get_brand_columns() %}\n\n{% set columns = [\n {\"name\": \"_fivetran_deleted\", \"datatype\": \"boolean\"},\n {\"name\": \"_fivetran_synced\", \"datatype\": dbt.type_timestamp()},\n {\"name\": \"active\", \"datatype\": \"boolean\"},\n {\"name\": \"brand_url\", \"datatype\": dbt.type_string()},\n {\"name\": \"has_help_center\", \"datatype\": \"boolean\"},\n {\"name\": \"help_center_state\", \"datatype\": dbt.type_string()},\n {\"name\": \"id\", \"datatype\": dbt.type_int()},\n {\"name\": \"logo_content_type\", \"datatype\": dbt.type_string()},\n {\"name\": \"logo_content_url\", \"datatype\": dbt.type_string()},\n {\"name\": \"logo_deleted\", \"datatype\": \"boolean\"},\n {\"name\": \"logo_file_name\", \"datatype\": 
dbt.type_string()},\n {\"name\": \"logo_height\", \"datatype\": dbt.type_int()},\n {\"name\": \"logo_id\", \"datatype\": dbt.type_int()},\n {\"name\": \"logo_inline\", \"datatype\": \"boolean\"},\n {\"name\": \"logo_mapped_content_url\", \"datatype\": dbt.type_string()},\n {\"name\": \"logo_size\", \"datatype\": dbt.type_int()},\n {\"name\": \"logo_url\", \"datatype\": dbt.type_string()},\n {\"name\": \"logo_width\", \"datatype\": dbt.type_int()},\n {\"name\": \"name\", \"datatype\": dbt.type_string()},\n {\"name\": \"subdomain\", \"datatype\": dbt.type_string()},\n {\"name\": \"url\", \"datatype\": dbt.type_string()}\n] %}\n\n{{ return(columns) }}\n\n{% endmacro %}", "depends_on": {"macros": ["macro.dbt.type_timestamp", "macro.dbt.type_string", "macro.dbt.type_int"]}, "description": "", "meta": {}, "docs": {"show": true, "node_color": null}, "patch_path": null, "arguments": [], "created_at": 1724705296.94326, "supported_languages": null}}, "docs": {"doc.dbt.__overview__": {"name": "__overview__", "resource_type": "doc", "package_name": "dbt", "path": "overview.md", "original_file_path": "docs/overview.md", "unique_id": "doc.dbt.__overview__", "block_contents": "### Welcome!\n\nWelcome to the auto-generated documentation for your dbt project!\n\n### Navigation\n\nYou can use the `Project` and `Database` navigation tabs on the left side of the window to explore the models\nin your project.\n\n#### Project Tab\nThe `Project` tab mirrors the directory structure of your dbt project. In this tab, you can see all of the\nmodels defined in your dbt project, as well as models imported from dbt packages.\n\n#### Database Tab\nThe `Database` tab also exposes your models, but in a format that looks more like a database explorer. This view\nshows relations (tables and views) grouped into database schemas. Note that ephemeral models are _not_ shown\nin this interface, as they do not exist in the database.\n\n### Graph Exploration\nYou can click the blue icon on the bottom-right corner of the page to view the lineage graph of your models.\n\nOn model pages, you'll see the immediate parents and children of the model you're exploring. By clicking the `Expand`\nbutton at the top-right of this lineage pane, you'll be able to see all of the models that are used to build,\nor are built from, the model you're exploring.\n\nOnce expanded, you'll be able to use the `--select` and `--exclude` model selection syntax to filter the\nmodels in the graph. 
For more information on model selection, check out the [dbt docs](https://docs.getdbt.com/docs/model-selection-syntax).\n\nNote that you can also right-click on models to interactively filter and explore the graph.\n\n---\n\n### More information\n\n- [What is dbt](https://docs.getdbt.com/docs/introduction)?\n- Read the [dbt viewpoint](https://docs.getdbt.com/docs/viewpoint)\n- [Installation](https://docs.getdbt.com/docs/installation)\n- Join the [dbt Community](https://www.getdbt.com/community/) for questions and discussion"}}, "exposures": {}, "metrics": {}, "groups": {}, "selectors": {}, "disabled": {"test.zendesk_integration_tests.consistency_ticket_metrics": [{"database": "postgres", "schema": "zendesk_integration_tests_55_dbt_test__audit", "name": "consistency_ticket_metrics", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "consistency/consistency_ticket_metrics.sql", "original_file_path": "tests/consistency/consistency_ticket_metrics.sql", "unique_id": "test.zendesk_integration_tests.consistency_ticket_metrics", "fqn": ["zendesk_integration_tests", "consistency", "consistency_ticket_metrics"], "alias": "consistency_ticket_metrics", "checksum": {"name": "sha256", "checksum": "e630be25d326f99cdad0ebc1d29e71dcd7514aa3e56c999e56d1ed15bc6c10e0"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1724705297.266169, "config_call_dict": {"tags": ["fivetran_validations"], "enabled": false}, "relation_name": null, "raw_code": "{{ config(\n tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\nwith prod as (\n select\n ticket_id,\n first_reply_time_business_minutes, \n first_reply_time_calendar_minutes\n from {{ target.schema }}_zendesk_prod.zendesk__ticket_metrics\n),\n\ndev as (\n select\n ticket_id,\n first_reply_time_business_minutes, \n first_reply_time_calendar_minutes\n from {{ target.schema }}_zendesk_dev.zendesk__ticket_metrics\n),\n\nfinal as (\n select \n prod.ticket_id,\n prod.first_reply_time_business_minutes as prod_first_reply_time_business_minutes,\n dev.first_reply_time_business_minutes as dev_first_reply_time_business_minutes,\n prod.first_reply_time_calendar_minutes as prod_first_reply_time_calendar_minutes,\n dev.first_reply_time_calendar_minutes as dev_first_reply_time_calendar_minutes\n from prod\n full outer join dev \n on dev.ticket_id = prod.ticket_id\n)\n\nselect *\nfrom final\nwhere (abs(prod_first_reply_time_business_minutes - dev_first_reply_time_business_minutes) >= 5\n or abs(prod_first_reply_time_calendar_minutes - dev_first_reply_time_calendar_minutes) >= 5)\n {{ \"and ticket_id not in \" ~ var('fivetran_consistency_ticket_metrics_exclusion_tickets',[]) ~ \"\" if var('fivetran_consistency_ticket_metrics_exclusion_tickets',[]) }}", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": 
null}}], "test.zendesk_integration_tests.consistency_sla_policy_count": [{"database": "postgres", "schema": "zendesk_integration_tests_55_dbt_test__audit", "name": "consistency_sla_policy_count", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "consistency/consistency_sla_policy_count.sql", "original_file_path": "tests/consistency/consistency_sla_policy_count.sql", "unique_id": "test.zendesk_integration_tests.consistency_sla_policy_count", "fqn": ["zendesk_integration_tests", "consistency", "consistency_sla_policy_count"], "alias": "consistency_sla_policy_count", "checksum": {"name": "sha256", "checksum": "5921d9bd92d9e16e0164f7283fa93f9f533111f8276c85afda3b8f54905c9694"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1724705297.275284, "config_call_dict": {"tags": ["fivetran_validations"], "enabled": false}, "relation_name": null, "raw_code": "{{ config(\n tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\nwith prod as (\n select\n ticket_id,\n count(*) as total_slas\n from {{ target.schema }}_zendesk_prod.zendesk__sla_policies\n group by 1\n),\n\ndev as (\n select\n ticket_id,\n count(*) as total_slas\n from {{ target.schema }}_zendesk_dev.zendesk__sla_policies\n group by 1\n),\n\nfinal as (\n select \n prod.ticket_id,\n dev.ticket_id,\n prod.total_slas as prod_sla_total,\n dev.total_slas as dev_sla_total\n from prod\n full outer join dev \n on dev.ticket_id = prod.ticket_id\n)\n\nselect *\nfrom final\nwhere prod_sla_total != dev_sla_total", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.zendesk_integration_tests.consistency_sla_policies": [{"database": "postgres", "schema": "zendesk_integration_tests_55_dbt_test__audit", "name": "consistency_sla_policies", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "consistency/consistency_sla_policies.sql", "original_file_path": "tests/consistency/consistency_sla_policies.sql", "unique_id": "test.zendesk_integration_tests.consistency_sla_policies", "fqn": ["zendesk_integration_tests", "consistency", "consistency_sla_policies"], "alias": "consistency_sla_policies", "checksum": {"name": "sha256", "checksum": "23d8dea3ba888005d225ac9c8f5a8882c1369d8a3039c4c63c8602d0a049ee37"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, 
"unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1724705297.278259, "config_call_dict": {"tags": ["fivetran_validations"], "enabled": false}, "relation_name": null, "raw_code": "{{ config(\n tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\nwith prod as (\n select\n ticket_id,\n metric, \n sla_applied_at,\n sla_elapsed_time,\n is_sla_breach\n from {{ target.schema }}_zendesk_prod.zendesk__sla_policies\n),\n\ndev as (\n select\n ticket_id,\n metric, \n sla_applied_at,\n sla_elapsed_time,\n is_sla_breach\n from {{ target.schema }}_zendesk_dev.zendesk__sla_policies\n),\n\nfinal as (\n select \n prod.ticket_id,\n prod.metric,\n prod.sla_applied_at,\n prod.sla_elapsed_time as prod_sla_elapsed_time,\n dev.sla_elapsed_time as dev_sla_elapsed_time,\n prod.is_sla_breach as prod_is_sla_breach,\n dev.is_sla_breach as dev_is_sla_breach\n from prod\n full outer join dev \n on dev.ticket_id = prod.ticket_id\n and dev.metric = prod.metric\n and dev.sla_applied_at = prod.sla_applied_at\n)\n\nselect *\nfrom final\nwhere (abs(prod_sla_elapsed_time - dev_sla_elapsed_time) >= 5\n or prod_is_sla_breach != dev_is_sla_breach)\n {{ \"and prod.ticket_id not in \" ~ var('fivetran_consistency_sla_policies_exclusion_tickets',[]) ~ \"\" if var('fivetran_consistency_sla_policies_exclusion_tickets',[]) }}", "language": "sql", "refs": [], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.zendesk_integration_tests.sla_first_reply_time_match": [{"database": "postgres", "schema": "zendesk_integration_tests_55_dbt_test__audit", "name": "sla_first_reply_time_match", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "integrity/sla_first_reply_time_match.sql", "original_file_path": "tests/integrity/sla_first_reply_time_match.sql", "unique_id": "test.zendesk_integration_tests.sla_first_reply_time_match", "fqn": ["zendesk_integration_tests", "integrity", "sla_first_reply_time_match"], "alias": "sla_first_reply_time_match", "checksum": {"name": "sha256", "checksum": "a94e41e1bdbc5f4cb6268590d22f37692a708dd7471344b09e2d29a4edf4ccea"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1724705297.281225, "config_call_dict": {"tags": ["fivetran_validations"], "enabled": false}, "relation_name": null, "raw_code": "{{ config(\n tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\nwith ticket_metrics as (\n select\n ticket_id,\n first_reply_time_business_minutes\n from {{ ref('zendesk__ticket_metrics') }}\n),\n\nsla_policies as (\n select\n ticket_id,\n sla_elapsed_time\n from {{ ref('zendesk__sla_policies') }}\n where metric = 'first_reply_time'\n and in_business_hours\n),\n\nmatch_check as (\n select \n ticket_metrics.ticket_id,\n ticket_metrics.first_reply_time_business_minutes,\n 
sla_policies.sla_elapsed_time\n from ticket_metrics\n full outer join sla_policies \n on ticket_metrics.ticket_id = sla_policies.ticket_id\n)\n\nselect *\nfrom match_check\nwhere abs(round(first_reply_time_business_minutes,0) - round(sla_elapsed_time,0)) >= 2\n {{ \"and ticket_id not in \" ~ var('fivetran_integrity_sla_first_reply_time_exclusion_tickets',[]) ~ \"\" if var('fivetran_integrity_sla_first_reply_time_exclusion_tickets',[]) }}", "language": "sql", "refs": [{"name": "zendesk__ticket_metrics", "package": null, "version": null}, {"name": "zendesk__sla_policies", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "test.zendesk_integration_tests.sla_count_match": [{"database": "postgres", "schema": "zendesk_integration_tests_55_dbt_test__audit", "name": "sla_count_match", "resource_type": "test", "package_name": "zendesk_integration_tests", "path": "integrity/sla_count_match.sql", "original_file_path": "tests/integrity/sla_count_match.sql", "unique_id": "test.zendesk_integration_tests.sla_count_match", "fqn": ["zendesk_integration_tests", "integrity", "sla_count_match"], "alias": "sla_count_match", "checksum": {"name": "sha256", "checksum": "b1f23baf0d04729d4855197e4e5f6e76bf72502c3739371ebee1a6d626a6d8b8"}, "config": {"enabled": false, "alias": null, "schema": "dbt_test__audit", "database": null, "tags": ["fivetran_validations"], "meta": {}, "group": null, "materialized": "test", "severity": "ERROR", "store_failures": null, "store_failures_as": null, "where": null, "limit": null, "fail_calc": "count(*)", "warn_if": "!= 0", "error_if": "!= 0"}, "tags": ["fivetran_validations"], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"tags": ["fivetran_validations"], "enabled": false}, "created_at": 1724705297.2844281, "config_call_dict": {"tags": ["fivetran_validations"], "enabled": false}, "relation_name": null, "raw_code": "{{ config(\n tags=\"fivetran_validations\",\n enabled=var('fivetran_validation_tests_enabled', false)\n) }}\n\n-- The necessary source and source_filter adjustments used below originate from the int_zendesk__sla_policy_applied model\nwith source as (\n select\n *,\n case when field_name = 'first_reply_time' then row_number() over (partition by ticket_id, field_name order by valid_starting_at desc) else 1 end as latest_sla\n from {{ ref('stg_zendesk__ticket_field_history') }}\n),\n\nsource_filter as (\n select\n ticket_id,\n count(*) as source_row_count\n from source\n where field_name in ('next_reply_time', 'first_reply_time', 'agent_work_time', 'requester_wait_time')\n and value is not null\n and latest_sla = 1\n group by 1\n),\n\nsla_policies as (\n select\n ticket_id,\n count(*) as end_model_row_count\n from {{ ref('zendesk__sla_policies') }}\n group by 1\n),\n\nmatch_check as (\n select \n sla_policies.ticket_id,\n end_model_row_count,\n source_row_count\n from sla_policies\n full outer join source_filter\n on source_filter.ticket_id = sla_policies.ticket_id\n)\n\nselect *\nfrom match_check\nwhere end_model_row_count != source_row_count\n{{ \"and ticket_id not in \" ~ var('fivetran_integrity_sla_count_match_tickets',[]) ~ \"\" if var('fivetran_integrity_sla_count_match_tickets',[]) }}", "language": "sql", "refs": [{"name": "stg_zendesk__ticket_field_history", "package": null, 
"version": null}, {"name": "zendesk__sla_policies", "package": null, "version": null}], "sources": [], "metrics": [], "depends_on": {"macros": [], "nodes": []}, "compiled_path": null, "contract": {"enforced": false, "alias_types": true, "checksum": null}}], "seed.zendesk_integration_tests.organization_tag_data_snowflake": [{"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "organization_tag_data_snowflake", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "organization_tag_data_snowflake.csv", "original_file_path": "seeds/organization_tag_data_snowflake.csv", "unique_id": "seed.zendesk_integration_tests.organization_tag_data_snowflake", "fqn": ["zendesk_integration_tests", "organization_tag_data_snowflake"], "alias": "organization_tag_data", "checksum": {"name": "sha256", "checksum": "d9219b78d44b8b4620100b064a3af350fb5fa2046bdb0c376a09bade7a99f6f7"}, "config": {"enabled": false, "alias": "organization_tag_data", "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"_fivetran_synced": "timestamp"}, "alias": "organization_tag_data", "enabled": "{{ true if target.type == 'snowflake' else false }}"}, "created_at": 1724705297.3632052, "config_call_dict": {}, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"organization_tag_data\"", "raw_code": "", "root_path": "/Users/joseph.markiewicz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}, "defer_relation": null}], "seed.zendesk_integration_tests.brand_data": [{"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "brand_data", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "brand_data.csv", "original_file_path": "seeds/brand_data.csv", "unique_id": "seed.zendesk_integration_tests.brand_data", "fqn": ["zendesk_integration_tests", "brand_data"], "alias": "brand_data", "checksum": {"name": "sha256", "checksum": "203980ef5845715ee0758982a85b96a30c8e4b06fbda7f104705bd4cdd586aa9"}, "config": {"enabled": false, "alias": null, "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": 
{"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "_fivetran_synced": "timestamp"}, "enabled": "{{ true if target.type != 'postgres' else false }}"}, "created_at": 1724705297.36896, "config_call_dict": {}, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"brand_data\"", "raw_code": "", "root_path": "/Users/joseph.markiewicz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}, "defer_relation": null}], "seed.zendesk_integration_tests.user_data_snowflake": [{"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "user_data_snowflake", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "user_data_snowflake.csv", "original_file_path": "seeds/user_data_snowflake.csv", "unique_id": "seed.zendesk_integration_tests.user_data_snowflake", "fqn": ["zendesk_integration_tests", "user_data_snowflake"], "alias": "user_data", "checksum": {"name": "sha256", "checksum": "1d7712839e43bb49c4fb8a2bba60a98e8c3ea558c91a3d4fb4f4db6e1425f178"}, "config": {"enabled": false, "alias": "user_data", "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp", "id": "bigint", "external_id": "bigint", "locale_id": "bigint", "organization_id": "bigint", "created_at": "timestamp", "last_login_at": "timestamp", "updated_at": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "external_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "locale_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "organization_id": "{{ 'int64' if target.type == 'bigquery' else 'bigint' }}", "created_at": "timestamp", "last_login_at": "timestamp", "updated_at": "timestamp"}, "alias": "user_data", "enabled": "{{ true if target.type == 'snowflake' else false }}"}, "created_at": 1724705297.371267, "config_call_dict": {}, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"user_data\"", "raw_code": "", "root_path": "/Users/joseph.markiewicz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}, "defer_relation": null}], "seed.zendesk_integration_tests.user_tag_data_snowflake": [{"database": "postgres", "schema": "zendesk_integration_tests_55", "name": "user_tag_data_snowflake", "resource_type": "seed", "package_name": "zendesk_integration_tests", "path": "user_tag_data_snowflake.csv", "original_file_path": "seeds/user_tag_data_snowflake.csv", "unique_id": "seed.zendesk_integration_tests.user_tag_data_snowflake", "fqn": ["zendesk_integration_tests", "user_tag_data_snowflake"], "alias": "user_tag_data", "checksum": {"name": "sha256", "checksum": 
"7c2274e05f81c1f9906a6a4a217c4493bf003a151402391069f49c64cf9ec5fb"}, "config": {"enabled": false, "alias": "user_tag_data", "schema": null, "database": null, "tags": [], "meta": {}, "group": null, "materialized": "seed", "incremental_strategy": null, "persist_docs": {}, "post-hook": [], "pre-hook": [], "quoting": {}, "column_types": {"_fivetran_synced": "timestamp"}, "full_refresh": null, "unique_key": null, "on_schema_change": "ignore", "on_configuration_change": "apply", "grants": {}, "packages": [], "docs": {"show": true, "node_color": null}, "contract": {"enforced": false, "alias_types": true}, "delimiter": ",", "quote_columns": false}, "tags": [], "description": "", "columns": {}, "meta": {}, "group": null, "docs": {"show": true, "node_color": null}, "patch_path": null, "build_path": null, "deferred": false, "unrendered_config": {"quote_columns": "{{ true if target.type == 'redshift' else false }}", "column_types": {"_fivetran_synced": "timestamp"}, "alias": "user_tag_data", "enabled": "{{ true if target.type == 'snowflake' else false }}"}, "created_at": 1724705297.3739069, "config_call_dict": {}, "relation_name": "\"postgres\".\"zendesk_integration_tests_55\".\"user_tag_data\"", "raw_code": "", "root_path": "/Users/joseph.markiewicz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "depends_on": {"macros": []}, "defer_relation": null}]}, "parent_map": {"seed.zendesk_integration_tests.organization_tag_data": [], "seed.zendesk_integration_tests.ticket_comment_data": [], "seed.zendesk_integration_tests.schedule_holiday_data": [], "seed.zendesk_integration_tests.domain_name_data": [], "seed.zendesk_integration_tests.ticket_field_history_data": [], "seed.zendesk_integration_tests.ticket_data": [], "seed.zendesk_integration_tests.brand_data_postgres": [], "seed.zendesk_integration_tests.time_zone_data": [], "seed.zendesk_integration_tests.ticket_schedule_data": [], "seed.zendesk_integration_tests.daylight_time_data": [], "seed.zendesk_integration_tests.user_data": [], "seed.zendesk_integration_tests.schedule_data": [], "seed.zendesk_integration_tests.ticket_tag_data": [], "seed.zendesk_integration_tests.organization_data": [], "seed.zendesk_integration_tests.ticket_form_history_data": [], "seed.zendesk_integration_tests.group_data": [], "seed.zendesk_integration_tests.user_tag_data": [], "model.zendesk.zendesk__ticket_enriched": ["model.zendesk.int_zendesk__assignee_updates", "model.zendesk.int_zendesk__latest_ticket_form", "model.zendesk.int_zendesk__organization_aggregates", "model.zendesk.int_zendesk__requester_updates", "model.zendesk.int_zendesk__ticket_aggregates", "model.zendesk.int_zendesk__ticket_historical_satisfaction", "model.zendesk.int_zendesk__user_aggregates", "model.zendesk_source.stg_zendesk__group"], "model.zendesk.zendesk__ticket_metrics": ["model.zendesk.int_zendesk__comment_metrics", "model.zendesk.int_zendesk__ticket_first_reply_time_business", "model.zendesk.int_zendesk__ticket_first_resolution_time_business", "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "model.zendesk.int_zendesk__ticket_reply_times_calendar", "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_work_time_business", "model.zendesk.int_zendesk__ticket_work_time_calendar", "model.zendesk.zendesk__ticket_enriched"], "model.zendesk.zendesk__ticket_summary": ["model.zendesk.zendesk__ticket_metrics", "model.zendesk_source.stg_zendesk__user"], "model.zendesk.zendesk__ticket_field_history": 
["model.zendesk.int_zendesk__field_calendar_spine", "model.zendesk.int_zendesk__field_history_scd"], "model.zendesk.zendesk__sla_policies": ["model.zendesk.int_zendesk__agent_work_time_business_hours", "model.zendesk.int_zendesk__agent_work_time_calendar_hours", "model.zendesk.int_zendesk__reply_time_combined", "model.zendesk.int_zendesk__requester_wait_time_business_hours", "model.zendesk.int_zendesk__requester_wait_time_calendar_hours"], "model.zendesk.zendesk__ticket_backlog": ["model.zendesk.zendesk__ticket_field_history", "model.zendesk_source.stg_zendesk__brand", "model.zendesk_source.stg_zendesk__group", "model.zendesk_source.stg_zendesk__organization", "model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__user"], "model.zendesk.int_zendesk__sla_policy_applied": ["model.zendesk.int_zendesk__ticket_aggregates", "model.zendesk.int_zendesk__updates"], "model.zendesk.int_zendesk__agent_work_time_business_hours": ["model.zendesk.int_zendesk__agent_work_time_filtered_statuses", "model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk.int_zendesk__agent_work_time_calendar_hours": ["model.zendesk.int_zendesk__agent_work_time_filtered_statuses"], "model.zendesk.int_zendesk__agent_work_time_filtered_statuses": ["model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__ticket_historical_status"], "model.zendesk.int_zendesk__reply_time_business_hours": ["model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__updates", "model.zendesk.int_zendesk__user_aggregates", "model.zendesk_source.stg_zendesk__schedule"], "model.zendesk.int_zendesk__reply_time_calendar_hours": ["model.zendesk.int_zendesk__sla_policy_applied"], "model.zendesk.int_zendesk__reply_time_combined": ["model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__reply_time_calendar_hours", "model.zendesk.int_zendesk__updates", "model.zendesk.int_zendesk__user_aggregates"], "model.zendesk.int_zendesk__requester_wait_time_calendar_hours": ["model.zendesk.int_zendesk__requester_wait_time_filtered_statuses"], "model.zendesk.int_zendesk__requester_wait_time_business_hours": ["model.zendesk.int_zendesk__requester_wait_time_filtered_statuses", "model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses": ["model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__ticket_historical_status"], "model.zendesk.int_zendesk__ticket_reply_times": ["model.zendesk.int_zendesk__comments_enriched"], "model.zendesk.int_zendesk__ticket_reply_times_calendar": ["model.zendesk.int_zendesk__ticket_reply_times", "model.zendesk_source.stg_zendesk__ticket"], "model.zendesk.int_zendesk__comments_enriched": ["model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__user"], "model.zendesk.int_zendesk__ticket_first_reply_time_business": ["model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_reply_times", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk.int_zendesk__field_history_enriched": ["model.zendesk.int_zendesk__updater_information", "model.zendesk_source.stg_zendesk__ticket_field_history"], "model.zendesk.int_zendesk__field_history_pivot": ["model.zendesk.int_zendesk__field_history_enriched", "source.zendesk_source.zendesk.ticket_field_history"], 
"model.zendesk.int_zendesk__updater_information": ["model.zendesk.int_zendesk__organization_aggregates", "model.zendesk.int_zendesk__user_aggregates"], "model.zendesk.int_zendesk__field_history_scd": ["model.zendesk.int_zendesk__field_history_pivot"], "model.zendesk.int_zendesk__field_calendar_spine": ["model.zendesk.int_zendesk__calendar_spine", "model.zendesk_source.stg_zendesk__ticket"], "model.zendesk.int_zendesk__ticket_work_time_calendar": ["model.zendesk.int_zendesk__ticket_historical_status"], "model.zendesk.int_zendesk__ticket_work_time_business": ["model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_historical_status", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk.int_zendesk__ticket_resolution_times_calendar": ["model.zendesk.int_zendesk__ticket_historical_assignee", "model.zendesk.int_zendesk__ticket_historical_group", "model.zendesk.int_zendesk__ticket_historical_status", "model.zendesk_source.stg_zendesk__ticket"], "model.zendesk.int_zendesk__ticket_first_resolution_time_business": ["model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk.int_zendesk__ticket_full_resolution_time_business": ["model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk.int_zendesk__updates": ["model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__ticket_comment", "model.zendesk_source.stg_zendesk__ticket_field_history"], "model.zendesk.int_zendesk__ticket_historical_assignee": ["model.zendesk.int_zendesk__updates"], "model.zendesk.int_zendesk__ticket_historical_status": ["model.zendesk.int_zendesk__updates"], "model.zendesk.int_zendesk__user_aggregates": ["model.zendesk_source.stg_zendesk__user", "model.zendesk_source.stg_zendesk__user_tag"], "model.zendesk.int_zendesk__schedule_spine": ["model.zendesk.int_zendesk__calendar_spine", "model.zendesk_source.stg_zendesk__daylight_time", "model.zendesk_source.stg_zendesk__schedule", "model.zendesk_source.stg_zendesk__schedule_holiday", "model.zendesk_source.stg_zendesk__time_zone"], "model.zendesk.int_zendesk__ticket_schedules": ["model.zendesk_source.stg_zendesk__schedule", "model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__ticket_schedule"], "model.zendesk.int_zendesk__assignee_updates": ["model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__ticket"], "model.zendesk.int_zendesk__comment_metrics": ["model.zendesk.int_zendesk__comments_enriched"], "model.zendesk.int_zendesk__ticket_historical_group": ["model.zendesk.int_zendesk__updates"], "model.zendesk.int_zendesk__requester_updates": ["model.zendesk.int_zendesk__updates", "model.zendesk_source.stg_zendesk__ticket"], "model.zendesk.int_zendesk__ticket_historical_satisfaction": ["model.zendesk.int_zendesk__updates"], "model.zendesk.int_zendesk__latest_ticket_form": ["model.zendesk_source.stg_zendesk__ticket_form_history"], "model.zendesk.int_zendesk__ticket_aggregates": ["model.zendesk_source.stg_zendesk__brand", "model.zendesk_source.stg_zendesk__ticket", "model.zendesk_source.stg_zendesk__ticket_tag"], "model.zendesk.int_zendesk__organization_aggregates": ["model.zendesk_source.stg_zendesk__domain_name", "model.zendesk_source.stg_zendesk__organization", "model.zendesk_source.stg_zendesk__organization_tag"], "operation.zendesk.zendesk-on-run-start-0": [], 
"model.zendesk_source.stg_zendesk__user_tag": ["model.zendesk_source.stg_zendesk__user_tag_tmp"], "model.zendesk_source.stg_zendesk__ticket_tag": ["model.zendesk_source.stg_zendesk__ticket_tag_tmp"], "model.zendesk_source.stg_zendesk__ticket_field_history": ["model.zendesk_source.stg_zendesk__ticket_field_history_tmp"], "model.zendesk_source.stg_zendesk__schedule_holiday": ["model.zendesk_source.stg_zendesk__schedule_holiday_tmp"], "model.zendesk_source.stg_zendesk__daylight_time": ["model.zendesk_source.stg_zendesk__daylight_time_tmp"], "model.zendesk_source.stg_zendesk__organization": ["model.zendesk_source.stg_zendesk__organization_tmp"], "model.zendesk_source.stg_zendesk__time_zone": ["model.zendesk_source.stg_zendesk__time_zone_tmp"], "model.zendesk_source.stg_zendesk__group": ["model.zendesk_source.stg_zendesk__group_tmp"], "model.zendesk_source.stg_zendesk__ticket_comment": ["model.zendesk_source.stg_zendesk__ticket_comment_tmp"], "model.zendesk_source.stg_zendesk__ticket_schedule": ["model.zendesk_source.stg_zendesk__ticket_schedule_tmp"], "model.zendesk_source.stg_zendesk__schedule": ["model.zendesk_source.stg_zendesk__schedule_tmp"], "model.zendesk_source.stg_zendesk__user": ["model.zendesk_source.stg_zendesk__user_tmp"], "model.zendesk_source.stg_zendesk__brand": ["model.zendesk_source.stg_zendesk__brand_tmp"], "model.zendesk_source.stg_zendesk__ticket_form_history": ["model.zendesk_source.stg_zendesk__ticket_form_history_tmp"], "model.zendesk_source.stg_zendesk__domain_name": ["model.zendesk_source.stg_zendesk__domain_name_tmp"], "model.zendesk_source.stg_zendesk__organization_tag": ["model.zendesk_source.stg_zendesk__organization_tag_tmp"], "model.zendesk_source.stg_zendesk__ticket": ["model.zendesk_source.stg_zendesk__ticket_tmp"], "model.zendesk_source.stg_zendesk__daylight_time_tmp": ["source.zendesk_source.zendesk.daylight_time"], "model.zendesk_source.stg_zendesk__user_tmp": ["source.zendesk_source.zendesk.user"], "model.zendesk_source.stg_zendesk__group_tmp": ["source.zendesk_source.zendesk.group"], "model.zendesk_source.stg_zendesk__ticket_tmp": ["source.zendesk_source.zendesk.ticket"], "model.zendesk_source.stg_zendesk__brand_tmp": ["source.zendesk_source.zendesk.brand"], "model.zendesk_source.stg_zendesk__ticket_tag_tmp": ["source.zendesk_source.zendesk.ticket_tag"], "model.zendesk_source.stg_zendesk__schedule_holiday_tmp": ["source.zendesk_source.zendesk.schedule_holiday"], "model.zendesk_source.stg_zendesk__user_tag_tmp": ["source.zendesk_source.zendesk.user_tag"], "model.zendesk_source.stg_zendesk__ticket_field_history_tmp": ["source.zendesk_source.zendesk.ticket_field_history"], "model.zendesk_source.stg_zendesk__ticket_form_history_tmp": ["source.zendesk_source.zendesk.ticket_form_history"], "model.zendesk_source.stg_zendesk__ticket_comment_tmp": ["source.zendesk_source.zendesk.ticket_comment"], "model.zendesk_source.stg_zendesk__organization_tag_tmp": ["source.zendesk_source.zendesk.organization_tag"], "model.zendesk_source.stg_zendesk__schedule_tmp": ["source.zendesk_source.zendesk.schedule"], "model.zendesk_source.stg_zendesk__organization_tmp": ["source.zendesk_source.zendesk.organization"], "model.zendesk_source.stg_zendesk__ticket_schedule_tmp": ["source.zendesk_source.zendesk.ticket_schedule"], "model.zendesk_source.stg_zendesk__domain_name_tmp": ["source.zendesk_source.zendesk.domain_name"], "model.zendesk_source.stg_zendesk__time_zone_tmp": ["source.zendesk_source.zendesk.time_zone"], "test.zendesk.unique_zendesk__ticket_enriched_ticket_id.7c3c6ca9ef": 
["model.zendesk.zendesk__ticket_enriched"], "test.zendesk.not_null_zendesk__ticket_enriched_ticket_id.e3efc5bf0a": ["model.zendesk.zendesk__ticket_enriched"], "test.zendesk.unique_zendesk__sla_policies_sla_event_id.5daff4d2bd": ["model.zendesk.zendesk__sla_policies"], "test.zendesk.unique_zendesk__ticket_metrics_ticket_id.f3dc8eba5c": ["model.zendesk.zendesk__ticket_metrics"], "test.zendesk.not_null_zendesk__ticket_metrics_ticket_id.3466b76bbd": ["model.zendesk.zendesk__ticket_metrics"], "test.zendesk_source.unique_stg_zendesk__ticket_ticket_id.4be7124521": ["model.zendesk_source.stg_zendesk__ticket"], "test.zendesk_source.not_null_stg_zendesk__ticket_ticket_id.a8229e6981": ["model.zendesk_source.stg_zendesk__ticket"], "test.zendesk_source.unique_stg_zendesk__brand_brand_id.fdf8e23c9e": ["model.zendesk_source.stg_zendesk__brand"], "test.zendesk_source.not_null_stg_zendesk__brand_brand_id.a2419e1741": ["model.zendesk_source.stg_zendesk__brand"], "test.zendesk_source.not_null_stg_zendesk__domain_name_organization_id.a2b5ff8fd3": ["model.zendesk_source.stg_zendesk__domain_name"], "test.zendesk_source.unique_stg_zendesk__group_group_id.f0658dabcd": ["model.zendesk_source.stg_zendesk__group"], "test.zendesk_source.not_null_stg_zendesk__group_group_id.7659ed83ec": ["model.zendesk_source.stg_zendesk__group"], "test.zendesk_source.unique_stg_zendesk__organization_organization_id.152be1ab31": ["model.zendesk_source.stg_zendesk__organization"], "test.zendesk_source.not_null_stg_zendesk__organization_organization_id.de7b98c06a": ["model.zendesk_source.stg_zendesk__organization"], "test.zendesk_source.unique_stg_zendesk__ticket_comment_ticket_comment_id.ba353330cd": ["model.zendesk_source.stg_zendesk__ticket_comment"], "test.zendesk_source.not_null_stg_zendesk__ticket_comment_ticket_comment_id.b821f4a606": ["model.zendesk_source.stg_zendesk__ticket_comment"], "test.zendesk_source.unique_stg_zendesk__user_user_id.3d3e346b11": ["model.zendesk_source.stg_zendesk__user"], "test.zendesk_source.not_null_stg_zendesk__user_user_id.102d572926": ["model.zendesk_source.stg_zendesk__user"], "test.zendesk_source.not_null_stg_zendesk__ticket_form_history_ticket_form_id.1afe781a17": ["model.zendesk_source.stg_zendesk__ticket_form_history"], "test.zendesk_source.dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year.88227aef3d": ["model.zendesk_source.stg_zendesk__daylight_time"], "test.zendesk_source.unique_stg_zendesk__time_zone_time_zone.67995adbaf": ["model.zendesk_source.stg_zendesk__time_zone"], "test.zendesk_source.not_null_stg_zendesk__time_zone_time_zone.b25b3452b1": ["model.zendesk_source.stg_zendesk__time_zone"], "test.zendesk_source.unique_stg_zendesk__schedule_holiday_holiday_id.0341d5635a": ["model.zendesk_source.stg_zendesk__schedule_holiday"], "test.zendesk_source.not_null_stg_zendesk__schedule_holiday_holiday_id.52eb08f782": ["model.zendesk_source.stg_zendesk__schedule_holiday"], "model.zendesk.int_zendesk__calendar_spine": ["source.zendesk_source.zendesk.ticket"], "source.zendesk_source.zendesk.ticket": [], "source.zendesk_source.zendesk.brand": [], "source.zendesk_source.zendesk.domain_name": [], "source.zendesk_source.zendesk.group": [], "source.zendesk_source.zendesk.organization_tag": [], "source.zendesk_source.zendesk.organization": [], "source.zendesk_source.zendesk.ticket_comment": [], "source.zendesk_source.zendesk.user_tag": [], "source.zendesk_source.zendesk.user": [], "source.zendesk_source.zendesk.schedule": [], "source.zendesk_source.zendesk.ticket_schedule": 
[], "source.zendesk_source.zendesk.ticket_form_history": [], "source.zendesk_source.zendesk.ticket_tag": [], "source.zendesk_source.zendesk.ticket_field_history": [], "source.zendesk_source.zendesk.daylight_time": [], "source.zendesk_source.zendesk.time_zone": [], "source.zendesk_source.zendesk.schedule_holiday": []}, "child_map": {"seed.zendesk_integration_tests.organization_tag_data": [], "seed.zendesk_integration_tests.ticket_comment_data": [], "seed.zendesk_integration_tests.schedule_holiday_data": [], "seed.zendesk_integration_tests.domain_name_data": [], "seed.zendesk_integration_tests.ticket_field_history_data": [], "seed.zendesk_integration_tests.ticket_data": [], "seed.zendesk_integration_tests.brand_data_postgres": [], "seed.zendesk_integration_tests.time_zone_data": [], "seed.zendesk_integration_tests.ticket_schedule_data": [], "seed.zendesk_integration_tests.daylight_time_data": [], "seed.zendesk_integration_tests.user_data": [], "seed.zendesk_integration_tests.schedule_data": [], "seed.zendesk_integration_tests.ticket_tag_data": [], "seed.zendesk_integration_tests.organization_data": [], "seed.zendesk_integration_tests.ticket_form_history_data": [], "seed.zendesk_integration_tests.group_data": [], "seed.zendesk_integration_tests.user_tag_data": [], "model.zendesk.zendesk__ticket_enriched": ["model.zendesk.zendesk__ticket_metrics", "test.zendesk.not_null_zendesk__ticket_enriched_ticket_id.e3efc5bf0a", "test.zendesk.unique_zendesk__ticket_enriched_ticket_id.7c3c6ca9ef"], "model.zendesk.zendesk__ticket_metrics": ["model.zendesk.zendesk__ticket_summary", "test.zendesk.not_null_zendesk__ticket_metrics_ticket_id.3466b76bbd", "test.zendesk.unique_zendesk__ticket_metrics_ticket_id.f3dc8eba5c"], "model.zendesk.zendesk__ticket_summary": [], "model.zendesk.zendesk__ticket_field_history": ["model.zendesk.zendesk__ticket_backlog"], "model.zendesk.zendesk__sla_policies": ["test.zendesk.unique_zendesk__sla_policies_sla_event_id.5daff4d2bd"], "model.zendesk.zendesk__ticket_backlog": [], "model.zendesk.int_zendesk__sla_policy_applied": ["model.zendesk.int_zendesk__agent_work_time_filtered_statuses", "model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__reply_time_calendar_hours", "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses"], "model.zendesk.int_zendesk__agent_work_time_business_hours": ["model.zendesk.zendesk__sla_policies"], "model.zendesk.int_zendesk__agent_work_time_calendar_hours": ["model.zendesk.zendesk__sla_policies"], "model.zendesk.int_zendesk__agent_work_time_filtered_statuses": ["model.zendesk.int_zendesk__agent_work_time_business_hours", "model.zendesk.int_zendesk__agent_work_time_calendar_hours"], "model.zendesk.int_zendesk__reply_time_business_hours": ["model.zendesk.int_zendesk__reply_time_combined"], "model.zendesk.int_zendesk__reply_time_calendar_hours": ["model.zendesk.int_zendesk__reply_time_combined"], "model.zendesk.int_zendesk__reply_time_combined": ["model.zendesk.zendesk__sla_policies"], "model.zendesk.int_zendesk__requester_wait_time_calendar_hours": ["model.zendesk.zendesk__sla_policies"], "model.zendesk.int_zendesk__requester_wait_time_business_hours": ["model.zendesk.zendesk__sla_policies"], "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses": ["model.zendesk.int_zendesk__requester_wait_time_business_hours", "model.zendesk.int_zendesk__requester_wait_time_calendar_hours"], "model.zendesk.int_zendesk__ticket_reply_times": ["model.zendesk.int_zendesk__ticket_first_reply_time_business", 
"model.zendesk.int_zendesk__ticket_reply_times_calendar"], "model.zendesk.int_zendesk__ticket_reply_times_calendar": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__comments_enriched": ["model.zendesk.int_zendesk__comment_metrics", "model.zendesk.int_zendesk__ticket_reply_times"], "model.zendesk.int_zendesk__ticket_first_reply_time_business": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__field_history_enriched": ["model.zendesk.int_zendesk__field_history_pivot"], "model.zendesk.int_zendesk__field_history_pivot": ["model.zendesk.int_zendesk__field_history_scd"], "model.zendesk.int_zendesk__updater_information": ["model.zendesk.int_zendesk__field_history_enriched"], "model.zendesk.int_zendesk__field_history_scd": ["model.zendesk.zendesk__ticket_field_history"], "model.zendesk.int_zendesk__field_calendar_spine": ["model.zendesk.zendesk__ticket_field_history"], "model.zendesk.int_zendesk__ticket_work_time_calendar": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__ticket_work_time_business": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__ticket_resolution_times_calendar": ["model.zendesk.int_zendesk__ticket_first_resolution_time_business", "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__ticket_first_resolution_time_business": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__ticket_full_resolution_time_business": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__updates": ["model.zendesk.int_zendesk__assignee_updates", "model.zendesk.int_zendesk__comments_enriched", "model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__reply_time_combined", "model.zendesk.int_zendesk__requester_updates", "model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.int_zendesk__ticket_historical_assignee", "model.zendesk.int_zendesk__ticket_historical_group", "model.zendesk.int_zendesk__ticket_historical_satisfaction", "model.zendesk.int_zendesk__ticket_historical_status"], "model.zendesk.int_zendesk__ticket_historical_assignee": ["model.zendesk.int_zendesk__ticket_resolution_times_calendar"], "model.zendesk.int_zendesk__ticket_historical_status": ["model.zendesk.int_zendesk__agent_work_time_filtered_statuses", "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses", "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_work_time_business", "model.zendesk.int_zendesk__ticket_work_time_calendar"], "model.zendesk.int_zendesk__user_aggregates": ["model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__reply_time_combined", "model.zendesk.int_zendesk__updater_information", "model.zendesk.zendesk__ticket_enriched"], "model.zendesk.int_zendesk__schedule_spine": ["model.zendesk.int_zendesk__agent_work_time_business_hours", "model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__requester_wait_time_business_hours", "model.zendesk.int_zendesk__ticket_first_reply_time_business", "model.zendesk.int_zendesk__ticket_first_resolution_time_business", "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "model.zendesk.int_zendesk__ticket_work_time_business"], "model.zendesk.int_zendesk__ticket_schedules": ["model.zendesk.int_zendesk__agent_work_time_business_hours", "model.zendesk.int_zendesk__reply_time_business_hours", 
"model.zendesk.int_zendesk__requester_wait_time_business_hours", "model.zendesk.int_zendesk__ticket_first_reply_time_business", "model.zendesk.int_zendesk__ticket_first_resolution_time_business", "model.zendesk.int_zendesk__ticket_full_resolution_time_business", "model.zendesk.int_zendesk__ticket_work_time_business"], "model.zendesk.int_zendesk__assignee_updates": ["model.zendesk.zendesk__ticket_enriched"], "model.zendesk.int_zendesk__comment_metrics": ["model.zendesk.zendesk__ticket_metrics"], "model.zendesk.int_zendesk__ticket_historical_group": ["model.zendesk.int_zendesk__ticket_resolution_times_calendar"], "model.zendesk.int_zendesk__requester_updates": ["model.zendesk.zendesk__ticket_enriched"], "model.zendesk.int_zendesk__ticket_historical_satisfaction": ["model.zendesk.zendesk__ticket_enriched"], "model.zendesk.int_zendesk__latest_ticket_form": ["model.zendesk.zendesk__ticket_enriched"], "model.zendesk.int_zendesk__ticket_aggregates": ["model.zendesk.int_zendesk__sla_policy_applied", "model.zendesk.zendesk__ticket_enriched"], "model.zendesk.int_zendesk__organization_aggregates": ["model.zendesk.int_zendesk__updater_information", "model.zendesk.zendesk__ticket_enriched"], "operation.zendesk.zendesk-on-run-start-0": [], "model.zendesk_source.stg_zendesk__user_tag": ["model.zendesk.int_zendesk__user_aggregates"], "model.zendesk_source.stg_zendesk__ticket_tag": ["model.zendesk.int_zendesk__ticket_aggregates"], "model.zendesk_source.stg_zendesk__ticket_field_history": ["model.zendesk.int_zendesk__field_history_enriched", "model.zendesk.int_zendesk__updates"], "model.zendesk_source.stg_zendesk__schedule_holiday": ["model.zendesk.int_zendesk__schedule_spine", "test.zendesk_source.not_null_stg_zendesk__schedule_holiday_holiday_id.52eb08f782", "test.zendesk_source.unique_stg_zendesk__schedule_holiday_holiday_id.0341d5635a"], "model.zendesk_source.stg_zendesk__daylight_time": ["model.zendesk.int_zendesk__schedule_spine", "test.zendesk_source.dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year.88227aef3d"], "model.zendesk_source.stg_zendesk__organization": ["model.zendesk.int_zendesk__organization_aggregates", "model.zendesk.zendesk__ticket_backlog", "test.zendesk_source.not_null_stg_zendesk__organization_organization_id.de7b98c06a", "test.zendesk_source.unique_stg_zendesk__organization_organization_id.152be1ab31"], "model.zendesk_source.stg_zendesk__time_zone": ["model.zendesk.int_zendesk__schedule_spine", "test.zendesk_source.not_null_stg_zendesk__time_zone_time_zone.b25b3452b1", "test.zendesk_source.unique_stg_zendesk__time_zone_time_zone.67995adbaf"], "model.zendesk_source.stg_zendesk__group": ["model.zendesk.zendesk__ticket_backlog", "model.zendesk.zendesk__ticket_enriched", "test.zendesk_source.not_null_stg_zendesk__group_group_id.7659ed83ec", "test.zendesk_source.unique_stg_zendesk__group_group_id.f0658dabcd"], "model.zendesk_source.stg_zendesk__ticket_comment": ["model.zendesk.int_zendesk__updates", "test.zendesk_source.not_null_stg_zendesk__ticket_comment_ticket_comment_id.b821f4a606", "test.zendesk_source.unique_stg_zendesk__ticket_comment_ticket_comment_id.ba353330cd"], "model.zendesk_source.stg_zendesk__ticket_schedule": ["model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk_source.stg_zendesk__schedule": ["model.zendesk.int_zendesk__reply_time_business_hours", "model.zendesk.int_zendesk__schedule_spine", "model.zendesk.int_zendesk__ticket_schedules"], "model.zendesk_source.stg_zendesk__user": ["model.zendesk.int_zendesk__comments_enriched", 
"model.zendesk.int_zendesk__user_aggregates", "model.zendesk.zendesk__ticket_backlog", "model.zendesk.zendesk__ticket_summary", "test.zendesk_source.not_null_stg_zendesk__user_user_id.102d572926", "test.zendesk_source.unique_stg_zendesk__user_user_id.3d3e346b11"], "model.zendesk_source.stg_zendesk__brand": ["model.zendesk.int_zendesk__ticket_aggregates", "model.zendesk.zendesk__ticket_backlog", "test.zendesk_source.not_null_stg_zendesk__brand_brand_id.a2419e1741", "test.zendesk_source.unique_stg_zendesk__brand_brand_id.fdf8e23c9e"], "model.zendesk_source.stg_zendesk__ticket_form_history": ["model.zendesk.int_zendesk__latest_ticket_form", "test.zendesk_source.not_null_stg_zendesk__ticket_form_history_ticket_form_id.1afe781a17"], "model.zendesk_source.stg_zendesk__domain_name": ["model.zendesk.int_zendesk__organization_aggregates", "test.zendesk_source.not_null_stg_zendesk__domain_name_organization_id.a2b5ff8fd3"], "model.zendesk_source.stg_zendesk__organization_tag": ["model.zendesk.int_zendesk__organization_aggregates"], "model.zendesk_source.stg_zendesk__ticket": ["model.zendesk.int_zendesk__assignee_updates", "model.zendesk.int_zendesk__field_calendar_spine", "model.zendesk.int_zendesk__requester_updates", "model.zendesk.int_zendesk__ticket_aggregates", "model.zendesk.int_zendesk__ticket_reply_times_calendar", "model.zendesk.int_zendesk__ticket_resolution_times_calendar", "model.zendesk.int_zendesk__ticket_schedules", "model.zendesk.int_zendesk__updates", "model.zendesk.zendesk__ticket_backlog", "test.zendesk_source.not_null_stg_zendesk__ticket_ticket_id.a8229e6981", "test.zendesk_source.unique_stg_zendesk__ticket_ticket_id.4be7124521"], "model.zendesk_source.stg_zendesk__daylight_time_tmp": ["model.zendesk_source.stg_zendesk__daylight_time"], "model.zendesk_source.stg_zendesk__user_tmp": ["model.zendesk_source.stg_zendesk__user"], "model.zendesk_source.stg_zendesk__group_tmp": ["model.zendesk_source.stg_zendesk__group"], "model.zendesk_source.stg_zendesk__ticket_tmp": ["model.zendesk_source.stg_zendesk__ticket"], "model.zendesk_source.stg_zendesk__brand_tmp": ["model.zendesk_source.stg_zendesk__brand"], "model.zendesk_source.stg_zendesk__ticket_tag_tmp": ["model.zendesk_source.stg_zendesk__ticket_tag"], "model.zendesk_source.stg_zendesk__schedule_holiday_tmp": ["model.zendesk_source.stg_zendesk__schedule_holiday"], "model.zendesk_source.stg_zendesk__user_tag_tmp": ["model.zendesk_source.stg_zendesk__user_tag"], "model.zendesk_source.stg_zendesk__ticket_field_history_tmp": ["model.zendesk_source.stg_zendesk__ticket_field_history"], "model.zendesk_source.stg_zendesk__ticket_form_history_tmp": ["model.zendesk_source.stg_zendesk__ticket_form_history"], "model.zendesk_source.stg_zendesk__ticket_comment_tmp": ["model.zendesk_source.stg_zendesk__ticket_comment"], "model.zendesk_source.stg_zendesk__organization_tag_tmp": ["model.zendesk_source.stg_zendesk__organization_tag"], "model.zendesk_source.stg_zendesk__schedule_tmp": ["model.zendesk_source.stg_zendesk__schedule"], "model.zendesk_source.stg_zendesk__organization_tmp": ["model.zendesk_source.stg_zendesk__organization"], "model.zendesk_source.stg_zendesk__ticket_schedule_tmp": ["model.zendesk_source.stg_zendesk__ticket_schedule"], "model.zendesk_source.stg_zendesk__domain_name_tmp": ["model.zendesk_source.stg_zendesk__domain_name"], "model.zendesk_source.stg_zendesk__time_zone_tmp": ["model.zendesk_source.stg_zendesk__time_zone"], "test.zendesk.unique_zendesk__ticket_enriched_ticket_id.7c3c6ca9ef": [], 
"test.zendesk.not_null_zendesk__ticket_enriched_ticket_id.e3efc5bf0a": [], "test.zendesk.unique_zendesk__sla_policies_sla_event_id.5daff4d2bd": [], "test.zendesk.unique_zendesk__ticket_metrics_ticket_id.f3dc8eba5c": [], "test.zendesk.not_null_zendesk__ticket_metrics_ticket_id.3466b76bbd": [], "test.zendesk_source.unique_stg_zendesk__ticket_ticket_id.4be7124521": [], "test.zendesk_source.not_null_stg_zendesk__ticket_ticket_id.a8229e6981": [], "test.zendesk_source.unique_stg_zendesk__brand_brand_id.fdf8e23c9e": [], "test.zendesk_source.not_null_stg_zendesk__brand_brand_id.a2419e1741": [], "test.zendesk_source.not_null_stg_zendesk__domain_name_organization_id.a2b5ff8fd3": [], "test.zendesk_source.unique_stg_zendesk__group_group_id.f0658dabcd": [], "test.zendesk_source.not_null_stg_zendesk__group_group_id.7659ed83ec": [], "test.zendesk_source.unique_stg_zendesk__organization_organization_id.152be1ab31": [], "test.zendesk_source.not_null_stg_zendesk__organization_organization_id.de7b98c06a": [], "test.zendesk_source.unique_stg_zendesk__ticket_comment_ticket_comment_id.ba353330cd": [], "test.zendesk_source.not_null_stg_zendesk__ticket_comment_ticket_comment_id.b821f4a606": [], "test.zendesk_source.unique_stg_zendesk__user_user_id.3d3e346b11": [], "test.zendesk_source.not_null_stg_zendesk__user_user_id.102d572926": [], "test.zendesk_source.not_null_stg_zendesk__ticket_form_history_ticket_form_id.1afe781a17": [], "test.zendesk_source.dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year.88227aef3d": [], "test.zendesk_source.unique_stg_zendesk__time_zone_time_zone.67995adbaf": [], "test.zendesk_source.not_null_stg_zendesk__time_zone_time_zone.b25b3452b1": [], "test.zendesk_source.unique_stg_zendesk__schedule_holiday_holiday_id.0341d5635a": [], "test.zendesk_source.not_null_stg_zendesk__schedule_holiday_holiday_id.52eb08f782": [], "model.zendesk.int_zendesk__calendar_spine": ["model.zendesk.int_zendesk__field_calendar_spine", "model.zendesk.int_zendesk__schedule_spine"], "source.zendesk_source.zendesk.ticket": ["model.zendesk.int_zendesk__calendar_spine", "model.zendesk_source.stg_zendesk__ticket_tmp"], "source.zendesk_source.zendesk.brand": ["model.zendesk_source.stg_zendesk__brand_tmp"], "source.zendesk_source.zendesk.domain_name": ["model.zendesk_source.stg_zendesk__domain_name_tmp"], "source.zendesk_source.zendesk.group": ["model.zendesk_source.stg_zendesk__group_tmp"], "source.zendesk_source.zendesk.organization_tag": ["model.zendesk_source.stg_zendesk__organization_tag_tmp"], "source.zendesk_source.zendesk.organization": ["model.zendesk_source.stg_zendesk__organization_tmp"], "source.zendesk_source.zendesk.ticket_comment": ["model.zendesk_source.stg_zendesk__ticket_comment_tmp"], "source.zendesk_source.zendesk.user_tag": ["model.zendesk_source.stg_zendesk__user_tag_tmp"], "source.zendesk_source.zendesk.user": ["model.zendesk_source.stg_zendesk__user_tmp"], "source.zendesk_source.zendesk.schedule": ["model.zendesk_source.stg_zendesk__schedule_tmp"], "source.zendesk_source.zendesk.ticket_schedule": ["model.zendesk_source.stg_zendesk__ticket_schedule_tmp"], "source.zendesk_source.zendesk.ticket_form_history": ["model.zendesk_source.stg_zendesk__ticket_form_history_tmp"], "source.zendesk_source.zendesk.ticket_tag": ["model.zendesk_source.stg_zendesk__ticket_tag_tmp"], "source.zendesk_source.zendesk.ticket_field_history": ["model.zendesk.int_zendesk__field_history_pivot", "model.zendesk_source.stg_zendesk__ticket_field_history_tmp"], 
"source.zendesk_source.zendesk.daylight_time": ["model.zendesk_source.stg_zendesk__daylight_time_tmp"], "source.zendesk_source.zendesk.time_zone": ["model.zendesk_source.stg_zendesk__time_zone_tmp"], "source.zendesk_source.zendesk.schedule_holiday": ["model.zendesk_source.stg_zendesk__schedule_holiday_tmp"]}, "group_map": {}, "saved_queries": {}, "semantic_models": {}} \ No newline at end of file diff --git a/docs/run_results.json b/docs/run_results.json index cacc470c..0d5fcf86 100644 --- a/docs/run_results.json +++ b/docs/run_results.json @@ -1 +1 @@ -{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/run-results/v5.json", "dbt_version": "1.7.9", "generated_at": "2024-05-14T15:31:20.668843Z", "invocation_id": "a6607f0c-5bee-4c0f-9bfc-3034194b1b1f", "env": {}}, "results": [{"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:14.449609Z", "completed_at": "2024-05-14T15:31:14.881298Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:14.883174Z", "completed_at": "2024-05-14T15:31:14.883191Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.46412014961242676, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__domain_name_tmp", "compiled": true, "compiled_code": "--To disable this model, set the using_domain_names variable within your dbt_project.yml file to False.\n\n\nselect `index`,\n `organization_id`,\n `_fivetran_synced`,\n `domain_name` \nfrom `dbt-package-testing`.`zendesk_integration_tests_50`.`domain_name_data` as domain_name_table", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__domain_name_tmp`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:14.446689Z", "completed_at": "2024-05-14T15:31:14.883596Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:14.885933Z", "completed_at": "2024-05-14T15:31:14.885944Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.4675099849700928, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__daylight_time_tmp", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nselect `time_zone`,\n `year`,\n `_fivetran_synced`,\n `daylight_end_utc`,\n `daylight_offset`,\n `daylight_start_utc`\nfrom `dbt-package-testing`.`zendesk_integration_tests_50`.`daylight_time_data` as daylight_time_table", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__daylight_time_tmp`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:14.435126Z", "completed_at": "2024-05-14T15:31:14.920114Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:14.920964Z", "completed_at": "2024-05-14T15:31:14.920973Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.5023617744445801, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__brand_tmp", "compiled": true, "compiled_code": "select `id`,\n `_fivetran_deleted`,\n `_fivetran_synced`,\n `active`,\n `brand_url`,\n `default`,\n `has_help_center`,\n `help_center_state`,\n `logo_content_type`,\n `logo_content_url`,\n `logo_deleted`,\n `logo_file_name`,\n `logo_height`,\n `logo_id`,\n `logo_inline`,\n `logo_mapped_content_url`,\n `logo_size`,\n `logo_url`,\n `logo_width`,\n `name`,\n `subdomain`,\n `url` \nfrom 
`dbt-package-testing`.`zendesk_integration_tests_50`.`brand_data` as brand_table", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__brand_tmp`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:14.888693Z", "completed_at": "2024-05-14T15:31:15.195481Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:15.196093Z", "completed_at": "2024-05-14T15:31:15.196099Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.3100910186767578, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__group_tmp", "compiled": true, "compiled_code": "select `id`,\n `_fivetran_deleted`,\n `_fivetran_synced`,\n `created_at`,\n `name`,\n `updated_at`,\n `url` \nfrom `dbt-package-testing`.`zendesk_integration_tests_50`.`group_data` as group_table", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__group_tmp`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:14.923697Z", "completed_at": "2024-05-14T15:31:15.226540Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:15.226948Z", "completed_at": "2024-05-14T15:31:15.226953Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.3045532703399658, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__organization_tmp", "compiled": true, "compiled_code": "select `id`,\n `_fivetran_synced`,\n `created_at`,\n `details`,\n `external_id`,\n `group_id`,\n `name`,\n `notes`,\n `shared_comments`,\n `shared_tickets`,\n `updated_at`,\n `url`\nfrom `dbt-package-testing`.`zendesk_integration_tests_50`.`organization_data` as organization_table", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__organization_tmp`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:14.895398Z", "completed_at": "2024-05-14T15:31:15.256584Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:15.256966Z", "completed_at": "2024-05-14T15:31:15.256971Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.364332914352417, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__organization_tag_tmp", "compiled": true, "compiled_code": "--To disable this model, set the using_organization_tags variable within your dbt_project.yml file to False.\n\n\nselect `organization_id`,\n `tag`,\n `_fivetran_synced` \nfrom `dbt-package-testing`.`zendesk_integration_tests_50`.`organization_tag_data` as organization_tag_table", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__organization_tag_tmp`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:15.197471Z", "completed_at": "2024-05-14T15:31:15.651021Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:15.652651Z", "completed_at": "2024-05-14T15:31:15.652671Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.4571099281311035, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__schedule_holiday_tmp", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nselect `id`,\n `schedule_id`,\n `_fivetran_deleted`,\n `_fivetran_synced`,\n `end_date`,\n `name`,\n `start_date`\nfrom 
`dbt-package-testing`.`zendesk_integration_tests_50`.`schedule_holiday_data` as schedule_holiday_table", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__schedule_holiday_tmp`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:15.228365Z", "completed_at": "2024-05-14T15:31:15.680862Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:15.682067Z", "completed_at": "2024-05-14T15:31:15.682079Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.45522475242614746, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__schedule_tmp", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nselect `end_time`,\n `id`,\n `start_time`,\n `_fivetran_deleted`,\n `_fivetran_synced`,\n `end_time_utc`,\n `name`,\n `start_time_utc`,\n `time_zone`,\n `created_at`\nfrom `dbt-package-testing`.`zendesk_integration_tests_50`.`schedule_data` as schedule_table", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__schedule_tmp`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:15.258430Z", "completed_at": "2024-05-14T15:31:15.693022Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:15.693787Z", "completed_at": "2024-05-14T15:31:15.693795Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.4365580081939697, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__ticket_comment_tmp", "compiled": true, "compiled_code": "select `id`,\n `_fivetran_synced`,\n `body`,\n `created`,\n `facebook_comment`,\n `public`,\n `ticket_id`,\n `tweet`,\n `user_id`,\n `voice_comment`\nfrom `dbt-package-testing`.`zendesk_integration_tests_50`.`ticket_comment_data` as ticket_comment_table", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_comment_tmp`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:15.657250Z", "completed_at": "2024-05-14T15:31:15.960056Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:15.961643Z", "completed_at": "2024-05-14T15:31:15.961663Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.3074929714202881, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__ticket_field_history_tmp", "compiled": true, "compiled_code": "select `field_name`,\n `ticket_id`,\n `updated`,\n `_fivetran_synced`,\n `user_id`,\n `value`\nfrom `dbt-package-testing`.`zendesk_integration_tests_50`.`ticket_field_history_data` as ticket_field_history_table", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_field_history_tmp`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:15.685404Z", "completed_at": "2024-05-14T15:31:16.025036Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:16.027093Z", "completed_at": "2024-05-14T15:31:16.027106Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.3449420928955078, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__ticket_form_history_tmp", "compiled": true, "compiled_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n\n\nselect `id`,\n 
`updated_at`,\n `_fivetran_deleted`,\n `_fivetran_synced`,\n `active`,\n `created_at`,\n `display_name`,\n `end_user_visible`,\n `name`\nfrom `dbt-package-testing`.`zendesk_integration_tests_50`.`ticket_form_history_data` as ticket_form_history_table", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_form_history_tmp`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:16.034226Z", "completed_at": "2024-05-14T15:31:16.352002Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:16.353226Z", "completed_at": "2024-05-14T15:31:16.353240Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.3229680061340332, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__ticket_tmp", "compiled": true, "compiled_code": "select `id`,\n `_fivetran_synced`,\n `allow_channelback`,\n `assignee_id`,\n `brand_id`,\n `created_at`,\n `description`,\n `due_at`,\n `external_id`,\n `forum_topic_id`,\n `group_id`,\n `has_incidents`,\n `is_public`,\n `organization_id`,\n `priority`,\n `problem_id`,\n `recipient`,\n `requester_id`,\n `status`,\n `subject`,\n `submitter_id`,\n `system_client`,\n `ticket_form_id`,\n `type`,\n `updated_at`,\n `url`,\n `via_channel`,\n `via_source_from_id`,\n `via_source_from_title`,\n `via_source_rel`,\n `via_source_to_address`,\n `via_source_to_name`,\n `merged_ticket_ids`,\n `via_source_from_address`,\n `followup_ids`,\n `via_followup_source_id`\nfrom `dbt-package-testing`.`zendesk_integration_tests_50`.`ticket_data` as ticket_table", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_tmp`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:15.696353Z", "completed_at": "2024-05-14T15:31:16.387358Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:16.389523Z", "completed_at": "2024-05-14T15:31:16.389544Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.695784330368042, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__ticket_schedule_tmp", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\n\n\nselect `created_at`,\n `ticket_id`,\n `_fivetran_synced`,\n `schedule_id`\nfrom `dbt-package-testing`.`zendesk_integration_tests_50`.`ticket_schedule_data` as ticket_schedule_table\n\n", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_schedule_tmp`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:15.966866Z", "completed_at": "2024-05-14T15:31:16.387973Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:16.390105Z", "completed_at": "2024-05-14T15:31:16.390112Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.42662692070007324, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__ticket_tag_tmp", "compiled": true, "compiled_code": "select `tag`,\n `ticket_id`,\n `_fivetran_synced`\nfrom `dbt-package-testing`.`zendesk_integration_tests_50`.`ticket_tag_data` as ticket_tag_table", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_tag_tmp`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:16.394497Z", "completed_at": "2024-05-14T15:31:16.698962Z"}, {"name": 
"execute", "started_at": "2024-05-14T15:31:16.710837Z", "completed_at": "2024-05-14T15:31:16.710857Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.41890597343444824, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__user_tag_tmp", "compiled": true, "compiled_code": "--To disable this model, set the using_user_tags variable within your dbt_project.yml file to False.\n\n\nselect `tag`,\n `user_id`,\n `_fivetran_synced` \nfrom `dbt-package-testing`.`zendesk_integration_tests_50`.`user_tag_data` as user_tag_table", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user_tag_tmp`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:16.357205Z", "completed_at": "2024-05-14T15:31:16.847579Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:16.877406Z", "completed_at": "2024-05-14T15:31:16.877423Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.5243661403656006, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__time_zone_tmp", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nselect `time_zone`,\n `_fivetran_synced`,\n `standard_offset` \nfrom `dbt-package-testing`.`zendesk_integration_tests_50`.`time_zone_data` as time_zone_table", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__time_zone_tmp`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:16.399049Z", "completed_at": "2024-05-14T15:31:16.868803Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:16.878790Z", "completed_at": "2024-05-14T15:31:16.878796Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.48792409896850586, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__user_tmp", "compiled": true, "compiled_code": "select `id`,\n `_fivetran_synced`,\n `active`,\n `alias`,\n `authenticity_token`,\n `chat_only`,\n `created_at`,\n `details`,\n `email`,\n `external_id`,\n `last_login_at`,\n `locale`,\n `locale_id`,\n `moderator`,\n `name`,\n `notes`,\n `only_private_comments`,\n `organization_id`,\n `phone`,\n `remote_photo_url`,\n `restricted_agent`,\n `role`,\n `shared`,\n `shared_agent`,\n `signature`,\n `suspended`,\n `ticket_restriction`,\n `time_zone`,\n `two_factor_auth_enabled`,\n `updated_at`,\n `url`,\n `verified` \nfrom `dbt-package-testing`.`zendesk_integration_tests_50`.`user_data` as user_table", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user_tmp`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:16.870758Z", "completed_at": "2024-05-14T15:31:16.879179Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:16.883950Z", "completed_at": "2024-05-14T15:31:16.883956Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.038192033767700195, "adapter_response": {}, "message": null, "failures": null, "unique_id": "operation.zendesk.zendesk-on-run-start-0", "compiled": true, "compiled_code": "\n\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:16.886218Z", "completed_at": "2024-05-14T15:31:16.887174Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:16.889335Z", "completed_at": "2024-05-14T15:31:16.889341Z"}], 
"thread_id": "Thread-2 (worker)", "execution_time": 0.0066680908203125, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.brand_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:16.887493Z", "completed_at": "2024-05-14T15:31:16.888178Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:16.889544Z", "completed_at": "2024-05-14T15:31:16.889547Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.010756731033325195, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.daylight_time_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:16.889742Z", "completed_at": "2024-05-14T15:31:16.890503Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:16.945336Z", "completed_at": "2024-05-14T15:31:16.945348Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.0581972599029541, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.domain_name_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:16.947944Z", "completed_at": "2024-05-14T15:31:16.949039Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:16.951622Z", "completed_at": "2024-05-14T15:31:16.951627Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.007614850997924805, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.group_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:16.950501Z", "completed_at": "2024-05-14T15:31:16.951254Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:16.953656Z", "completed_at": "2024-05-14T15:31:16.953660Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.007565021514892578, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.organization_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:16.951978Z", "completed_at": "2024-05-14T15:31:16.952887Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:16.955243Z", "completed_at": "2024-05-14T15:31:16.955254Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.007055997848510742, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.organization_tag_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:16.958250Z", "completed_at": "2024-05-14T15:31:16.959184Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:16.961244Z", "completed_at": "2024-05-14T15:31:16.961249Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.006986856460571289, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.schedule_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:16.960093Z", "completed_at": "2024-05-14T15:31:16.960829Z"}, {"name": "execute", 
"started_at": "2024-05-14T15:31:16.962967Z", "completed_at": "2024-05-14T15:31:16.962970Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.006455659866333008, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.schedule_holiday_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:16.961596Z", "completed_at": "2024-05-14T15:31:16.962281Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:16.964205Z", "completed_at": "2024-05-14T15:31:16.964209Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.005381107330322266, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.ticket_comment_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:16.965843Z", "completed_at": "2024-05-14T15:31:16.966575Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:16.968651Z", "completed_at": "2024-05-14T15:31:16.968655Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.006695985794067383, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.ticket_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:16.967505Z", "completed_at": "2024-05-14T15:31:16.968277Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:16.971342Z", "completed_at": "2024-05-14T15:31:16.971346Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.006653308868408203, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.ticket_field_history_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:16.969001Z", "completed_at": "2024-05-14T15:31:16.970680Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:16.972264Z", "completed_at": "2024-05-14T15:31:16.972267Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.006118297576904297, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.ticket_form_history_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:16.973901Z", "completed_at": "2024-05-14T15:31:16.974694Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:16.976608Z", "completed_at": "2024-05-14T15:31:16.976612Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.005547046661376953, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.ticket_schedule_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:16.975563Z", "completed_at": "2024-05-14T15:31:16.976251Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:16.978242Z", "completed_at": "2024-05-14T15:31:16.978246Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.005555152893066406, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.ticket_tag_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": 
"compile", "started_at": "2024-05-14T15:31:16.976939Z", "completed_at": "2024-05-14T15:31:16.977587Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:16.979226Z", "completed_at": "2024-05-14T15:31:16.979229Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.004964113235473633, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.time_zone_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:16.980887Z", "completed_at": "2024-05-14T15:31:16.981693Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:16.983602Z", "completed_at": "2024-05-14T15:31:16.983606Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.006732940673828125, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.user_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:16.982508Z", "completed_at": "2024-05-14T15:31:16.983240Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:16.986399Z", "completed_at": "2024-05-14T15:31:16.986403Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.0068759918212890625, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.user_tag_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:16.988666Z", "completed_at": "2024-05-14T15:31:17.366518Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:17.378600Z", "completed_at": "2024-05-14T15:31:17.378619Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.41273021697998047, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__daylight_time", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__daylight_time_tmp`\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n daylight_end_utc\n \n as \n \n daylight_end_utc\n \n, \n \n \n daylight_offset\n \n as \n \n daylight_offset\n \n, \n \n \n daylight_start_utc\n \n as \n \n daylight_start_utc\n \n, \n \n \n time_zone\n \n as \n \n time_zone\n \n, \n \n \n year\n \n as \n \n year\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n daylight_end_utc,\n daylight_offset,\n daylight_start_utc,\n time_zone,\n year,\n daylight_offset * 60 as daylight_offset_minutes\n \n from fields\n)\n\nselect * from final", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__daylight_time`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:16.983937Z", "completed_at": "2024-05-14T15:31:17.368646Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:17.382756Z", "completed_at": "2024-05-14T15:31:17.382774Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.42214512825012207, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__domain_name", "compiled": true, "compiled_code": "--To disable this model, set the using_domain_names variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__domain_name_tmp`\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n domain_name\n \n as \n \n domain_name\n \n, \n \n \n index\n \n as \n \n index\n \n, \n \n \n organization_id\n \n as \n \n organization_id\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n organization_id,\n domain_name,\n index\n from fields\n)\n\nselect * \nfrom final", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__domain_name`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:16.991173Z", "completed_at": "2024-05-14T15:31:17.371888Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:17.393221Z", "completed_at": "2024-05-14T15:31:17.393235Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.4168679714202881, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__brand", "compiled": true, "compiled_code": "with base as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__brand_tmp`\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n active\n \n as \n \n active\n \n, \n \n \n brand_url\n \n as \n \n brand_url\n \n, \n \n \n has_help_center\n \n as \n \n has_help_center\n \n, \n \n \n help_center_state\n \n as \n \n help_center_state\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n logo_content_type\n \n as \n \n logo_content_type\n \n, \n \n \n logo_content_url\n \n as \n \n logo_content_url\n \n, \n \n \n logo_deleted\n \n as \n \n logo_deleted\n \n, \n \n \n logo_file_name\n \n as \n \n logo_file_name\n \n, \n \n \n logo_height\n \n as \n \n logo_height\n \n, \n \n \n logo_id\n \n as \n \n logo_id\n \n, \n \n \n logo_inline\n \n as \n \n logo_inline\n \n, \n \n \n logo_mapped_content_url\n \n as \n \n logo_mapped_content_url\n \n, \n \n \n logo_size\n \n as \n \n logo_size\n \n, \n \n \n logo_url\n \n as \n \n logo_url\n \n, \n \n \n logo_width\n \n as \n \n logo_width\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n subdomain\n \n as \n \n subdomain\n \n, \n \n \n url\n \n as \n \n url\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as brand_id,\n brand_url,\n name,\n subdomain,\n active as is_active\n from fields\n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__brand`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:17.408818Z", "completed_at": "2024-05-14T15:31:17.767241Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:17.768207Z", "completed_at": "2024-05-14T15:31:17.768219Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.3633759021759033, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__organization", "compiled": true, "compiled_code": "with base as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__organization_tmp`\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n details\n \n as \n \n details\n \n, \n \n \n external_id\n \n as \n \n external_id\n \n, \n \n \n group_id\n \n as \n \n group_id\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n notes\n \n as \n \n notes\n \n, \n \n \n shared_comments\n \n as \n \n shared_comments\n \n, \n \n \n shared_tickets\n \n as \n \n shared_tickets\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n, \n \n \n url\n \n as \n \n url\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as organization_id,\n created_at,\n updated_at,\n details,\n name,\n external_id\n\n \n\n\n\n\n\n from fields\n)\n\nselect * \nfrom final", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__organization`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:17.417688Z", "completed_at": "2024-05-14T15:31:17.789503Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:17.790427Z", "completed_at": "2024-05-14T15:31:17.790434Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.38329100608825684, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__schedule_holiday", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__schedule_holiday_tmp`\n),\n\nfields as (\n\n select\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n end_date\n \n as \n \n end_date\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n schedule_id\n \n as \n \n schedule_id\n \n, \n \n \n start_date\n \n as \n \n start_date\n \n\n\n\n from base\n),\n\nfinal as (\n \n select\n _fivetran_deleted,\n cast(_fivetran_synced as timestamp ) as _fivetran_synced,\n cast(end_date as timestamp ) as holiday_end_date_at,\n cast(id as string ) as holiday_id,\n name as holiday_name,\n cast(schedule_id as string ) as schedule_id,\n cast(start_date as timestamp ) as holiday_start_date_at\n from fields\n)\n\nselect *\nfrom final", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__schedule_holiday`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:17.413725Z", "completed_at": "2024-05-14T15:31:17.791010Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:17.792788Z", "completed_at": "2024-05-14T15:31:17.792794Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.38618898391723633, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__organization_tag", "compiled": true, "compiled_code": "--To disable this model, set the using_organization_tags variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from 
`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__organization_tag_tmp`\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n organization_id\n \n as \n \n organization_id\n \n, \n \n \n tag\n \n as \n \n tag\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n organization_id,\n \n tag\n \n as tags\n from fields\n)\n\nselect * \nfrom final", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__organization_tag`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:17.088950Z", "completed_at": "2024-05-14T15:31:17.849624Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:17.850018Z", "completed_at": "2024-05-14T15:31:17.850024Z"}], "thread_id": "Thread-1 (worker)", "execution_time": 0.7619471549987793, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__group", "compiled": true, "compiled_code": "with base as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__group_tmp`\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n, \n \n \n url\n \n as \n \n url\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as group_id,\n name\n from fields\n \n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__group`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:17.779737Z", "completed_at": "2024-05-14T15:31:18.122502Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.123307Z", "completed_at": "2024-05-14T15:31:18.123319Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.3479158878326416, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__schedule", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__schedule_tmp`\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n end_time\n \n as \n \n end_time\n \n, \n \n \n end_time_utc\n \n as \n \n end_time_utc\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n start_time\n \n as \n \n start_time\n \n, \n \n \n start_time_utc\n \n as \n \n start_time_utc\n \n, \n \n \n time_zone\n \n as \n \n time_zone\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n cast(id as string) as schedule_id, --need to convert from numeric to string for downstream models to work properly\n end_time,\n start_time,\n name as schedule_name,\n created_at,\n time_zone\n \n from fields\n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__schedule`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:17.794809Z", "completed_at": "2024-05-14T15:31:18.123648Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.125627Z", "completed_at": "2024-05-14T15:31:18.125633Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.3337128162384033, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__ticket_comment", "compiled": true, "compiled_code": "with base as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_comment_tmp`\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n body\n \n as \n \n body\n \n, \n cast(null as INT64) as \n \n call_duration\n \n , \n cast(null as INT64) as \n \n call_id\n \n , \n \n \n created\n \n as \n \n created\n \n, \n \n \n facebook_comment\n \n as \n \n facebook_comment\n \n, \n \n \n id\n \n as \n \n id\n \n, \n cast(null as INT64) as \n \n location\n \n , \n \n \n public\n \n as \n \n public\n \n, \n cast(null as INT64) as \n \n recording_url\n \n , \n cast(null as timestamp) as \n \n started_at\n \n , \n \n \n ticket_id\n \n as \n \n ticket_id\n \n, \n cast(null as INT64) as \n \n transcription_status\n \n , \n cast(null as INT64) as \n \n transcription_text\n \n , \n cast(null as INT64) as \n \n trusted\n \n , \n \n \n tweet\n \n as \n \n tweet\n \n, \n \n \n user_id\n \n as \n \n user_id\n \n, \n \n \n voice_comment\n \n as \n \n voice_comment\n \n, \n cast(null as INT64) as \n \n voice_comment_transcription_visible\n \n \n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_comment_id,\n _fivetran_synced,\n body,\n cast(created as timestamp) as created_at,\n public as is_public,\n ticket_id,\n user_id,\n facebook_comment as is_facebook_comment,\n tweet as is_tweet,\n voice_comment as is_voice_comment\n from fields\n)\n\nselect * \nfrom final", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_comment`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:17.798819Z", "completed_at": "2024-05-14T15:31:18.144633Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.145219Z", "completed_at": "2024-05-14T15:31:18.145225Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.3479311466217041, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__ticket_field_history", "compiled": true, "compiled_code": "with base as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_field_history_tmp`\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n field_name\n \n as \n \n field_name\n \n, \n \n \n ticket_id\n \n as \n \n ticket_id\n \n, \n \n \n updated\n \n as \n \n updated\n \n, \n \n \n user_id\n \n as \n \n user_id\n \n, \n \n \n value\n \n as \n \n value\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n field_name,\n cast(updated as timestamp) as valid_starting_at,\n cast(lead(updated) over (partition by ticket_id, field_name order by updated) as timestamp) as valid_ending_at,\n value,\n user_id\n from fields\n)\n\nselect * \nfrom final", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_field_history`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:17.851404Z", "completed_at": "2024-05-14T15:31:18.294037Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.294596Z", "completed_at": "2024-05-14T15:31:18.294602Z"}], "thread_id": "Thread-1 (worker)", "execution_time": 0.4442148208618164, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__ticket_form_history", "compiled": true, "compiled_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_form_history_tmp`\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n active\n \n as \n \n active\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n display_name\n \n as \n \n display_name\n \n, \n \n \n end_user_visible\n \n as \n \n end_user_visible\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_form_id,\n cast(created_at as timestamp) as created_at,\n cast(updated_at as timestamp) as updated_at,\n display_name,\n active as is_active,\n name\n from fields\n where not coalesce(_fivetran_deleted, false)\n \n)\n\nselect * \nfrom final", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_form_history`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.133205Z", "completed_at": "2024-05-14T15:31:18.464292Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.464674Z", "completed_at": "2024-05-14T15:31:18.464680Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.3330512046813965, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__ticket_schedule", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_schedule_tmp`\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n schedule_id\n \n as \n \n schedule_id\n \n, \n \n \n ticket_id\n \n as \n \n ticket_id\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n cast(created_at as timestamp) as created_at,\n cast(schedule_id as string) as schedule_id --need to convert from numeric to string for downstream models to work properly\n from fields\n)\n\nselect * \nfrom final", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_schedule`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.127999Z", "completed_at": "2024-05-14T15:31:18.468481Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.469253Z", "completed_at": "2024-05-14T15:31:18.469257Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.344480037689209, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__ticket", "compiled": true, "compiled_code": "with base as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_tmp`\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n allow_channelback\n \n as \n \n allow_channelback\n \n, \n \n \n assignee_id\n \n as \n \n assignee_id\n \n, \n \n \n brand_id\n \n as \n \n brand_id\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n description\n \n as \n \n description\n \n, \n \n \n due_at\n \n as \n \n due_at\n \n, \n \n \n external_id\n \n as \n \n external_id\n \n, \n \n \n forum_topic_id\n \n as \n \n forum_topic_id\n \n, \n \n \n group_id\n \n as \n \n group_id\n \n, \n \n \n has_incidents\n \n as \n \n has_incidents\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n is_public\n \n as \n \n is_public\n \n, \n \n \n merged_ticket_ids\n \n as \n \n merged_ticket_ids\n \n, \n \n \n organization_id\n \n as \n \n organization_id\n \n, \n \n \n priority\n \n as \n \n priority\n \n, \n \n \n problem_id\n \n as \n \n problem_id\n \n, \n \n \n recipient\n \n as \n \n recipient\n \n, \n \n \n requester_id\n \n as \n \n requester_id\n \n, \n \n \n status\n \n as \n \n status\n \n, \n \n \n subject\n \n as \n \n subject\n \n, \n \n \n submitter_id\n \n as \n \n submitter_id\n \n, \n cast(null as INT64) as \n \n system_ccs\n \n , \n \n \n system_client\n \n as \n \n system_client\n \n, \n cast(null as string) as \n \n system_ip_address\n \n , \n cast(null as INT64) as \n \n system_json_email_identifier\n \n , \n cast(null as FLOAT64) as \n \n system_latitude\n \n , \n cast(null as string) as \n \n system_location\n \n , \n cast(null as FLOAT64) as \n \n system_longitude\n 
\n , \n cast(null as INT64) as \n \n system_machine_generated\n \n , \n cast(null as INT64) as \n \n system_message_id\n \n , \n cast(null as INT64) as \n \n system_raw_email_identifier\n \n , \n \n \n ticket_form_id\n \n as \n \n ticket_form_id\n \n, \n \n \n type\n \n as \n \n type\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n, \n \n \n url\n \n as \n \n url\n \n, \n \n \n via_channel\n \n as \n \n via_channel\n \n, \n \n \n via_source_from_address\n \n as \n \n via_source_from_address\n \n, \n \n \n via_source_from_id\n \n as \n \n via_source_from_id\n \n, \n \n \n via_source_from_title\n \n as \n \n via_source_from_title\n \n, \n \n \n via_source_rel\n \n as \n \n via_source_rel\n \n, \n \n \n via_source_to_address\n \n as \n \n via_source_to_address\n \n, \n \n \n via_source_to_name\n \n as \n \n via_source_to_name\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_id,\n _fivetran_synced,\n assignee_id,\n brand_id,\n cast(created_at as timestamp) as created_at,\n cast(updated_at as timestamp) as updated_at,\n description,\n due_at,\n group_id,\n external_id,\n is_public,\n organization_id,\n priority,\n recipient,\n requester_id,\n status,\n subject,\n problem_id,\n submitter_id,\n ticket_form_id,\n type,\n url,\n via_channel as created_channel,\n via_source_from_id as source_from_id,\n via_source_from_title as source_from_title,\n via_source_rel as source_rel,\n via_source_to_address as source_to_address,\n via_source_to_name as source_to_name\n\n \n\n\n\n\n\n from fields\n)\n\nselect * \nfrom final", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.147018Z", "completed_at": "2024-05-14T15:31:18.516604Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.517040Z", "completed_at": "2024-05-14T15:31:18.517046Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.37104296684265137, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__ticket_tag", "compiled": true, "compiled_code": "with base as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_tag_tmp`\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n ticket_id\n \n as \n \n ticket_id\n \n, \n \n \n tag\n \n as \n \n tag\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n \n tag as tags\n \n from fields\n)\n\nselect * \nfrom final", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_tag`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.518746Z", "completed_at": "2024-05-14T15:31:18.527542Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.528021Z", "completed_at": "2024-05-14T15:31:18.528025Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.010404109954833984, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year.88227aef3d", "compiled": true, "compiled_code": "\n\n\n\n\n\nwith validation_errors as (\n\n select\n time_zone, year\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__daylight_time`\n group by time_zone, year\n having count(*) > 1\n\n)\n\nselect *\nfrom validation_errors\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.529537Z", "completed_at": "2024-05-14T15:31:18.534210Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.534630Z", "completed_at": "2024-05-14T15:31:18.534634Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.006039857864379883, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.not_null_stg_zendesk__domain_name_organization_id.a2b5ff8fd3", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect organization_id\nfrom `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__domain_name`\nwhere organization_id is null\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.536047Z", "completed_at": "2024-05-14T15:31:18.539169Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.539561Z", "completed_at": "2024-05-14T15:31:18.539564Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.0043909549713134766, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.not_null_stg_zendesk__brand_brand_id.a2419e1741", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect brand_id\nfrom `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__brand`\nwhere brand_id is null\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.540873Z", "completed_at": "2024-05-14T15:31:18.544026Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.544376Z", "completed_at": "2024-05-14T15:31:18.544379Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.004315853118896484, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.unique_stg_zendesk__brand_brand_id.fdf8e23c9e", "compiled": true, "compiled_code": "\n \n \n\nwith dbt_test__target as (\n\n select 
brand_id as unique_field\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__brand`\n where brand_id is not null\n\n)\n\nselect\n unique_field,\n count(*) as n_records\n\nfrom dbt_test__target\ngroup by unique_field\nhaving count(*) > 1\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.545614Z", "completed_at": "2024-05-14T15:31:18.547606Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.547943Z", "completed_at": "2024-05-14T15:31:18.547946Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.003098011016845703, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.not_null_stg_zendesk__organization_organization_id.de7b98c06a", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect organization_id\nfrom `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__organization`\nwhere organization_id is null\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.549083Z", "completed_at": "2024-05-14T15:31:18.550956Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.551293Z", "completed_at": "2024-05-14T15:31:18.551296Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.002931833267211914, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.unique_stg_zendesk__organization_organization_id.152be1ab31", "compiled": true, "compiled_code": "\n \n \n\nwith dbt_test__target as (\n\n select organization_id as unique_field\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__organization`\n where organization_id is not null\n\n)\n\nselect\n unique_field,\n count(*) as n_records\n\nfrom dbt_test__target\ngroup by unique_field\nhaving count(*) > 1\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.552393Z", "completed_at": "2024-05-14T15:31:18.554242Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.554563Z", "completed_at": "2024-05-14T15:31:18.554566Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.0028810501098632812, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.not_null_stg_zendesk__schedule_holiday_holiday_id.52eb08f782", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect holiday_id\nfrom `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__schedule_holiday`\nwhere holiday_id is null\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.555646Z", "completed_at": "2024-05-14T15:31:18.557303Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.557613Z", "completed_at": "2024-05-14T15:31:18.557616Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.0026378631591796875, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.unique_stg_zendesk__schedule_holiday_holiday_id.0341d5635a", "compiled": true, "compiled_code": "\n \n \n\nwith dbt_test__target as (\n\n select holiday_id as unique_field\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__schedule_holiday`\n where holiday_id is not null\n\n)\n\nselect\n unique_field,\n count(*) as n_records\n\nfrom dbt_test__target\ngroup by unique_field\nhaving count(*) > 1\n\n\n", "relation_name": null}, {"status": 
"success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.558656Z", "completed_at": "2024-05-14T15:31:18.562123Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.562440Z", "completed_at": "2024-05-14T15:31:18.562443Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.004453182220458984, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__organization_aggregates", "compiled": true, "compiled_code": "with organizations as (\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__organization`\n\n--If you use organization tags this will be included, if not it will be ignored.\n\n), organization_tags as (\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__organization_tag`\n\n), tag_aggregates as (\n select\n organizations.organization_id,\n \n string_agg(organization_tags.tags, ', ')\n\n as organization_tags\n from organizations\n\n left join organization_tags\n using (organization_id)\n\n group by 1\n\n\n--If you use using_domain_names tags this will be included, if not it will be ignored.\n\n), domain_names as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__domain_name`\n\n), domain_aggregates as (\n select\n organizations.organization_id,\n \n string_agg(domain_names.domain_name, ', ')\n\n as domain_names\n from organizations\n\n left join domain_names\n using(organization_id)\n \n group by 1\n\n\n\n), final as (\n select\n organizations.*\n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,tag_aggregates.organization_tags\n \n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n ,domain_aggregates.domain_names\n \n\n from organizations\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n left join domain_aggregates\n using(organization_id)\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n left join tag_aggregates\n using(organization_id)\n \n)\n\nselect *\nfrom final", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__organization_aggregates`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.563518Z", "completed_at": "2024-05-14T15:31:18.565812Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.566102Z", "completed_at": "2024-05-14T15:31:18.566105Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.0032529830932617188, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.not_null_stg_zendesk__group_group_id.7659ed83ec", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect group_id\nfrom `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__group`\nwhere group_id is null\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.567073Z", "completed_at": "2024-05-14T15:31:18.568573Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.568862Z", "completed_at": "2024-05-14T15:31:18.568865Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.0024352073669433594, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.unique_stg_zendesk__group_group_id.f0658dabcd", "compiled": true, "compiled_code": "\n \n \n\nwith dbt_test__target 
as (\n\n select group_id as unique_field\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__group`\n where group_id is not null\n\n)\n\nselect\n unique_field,\n count(*) as n_records\n\nfrom dbt_test__target\ngroup by unique_field\nhaving count(*) > 1\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.570170Z", "completed_at": "2024-05-14T15:31:18.571781Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.572073Z", "completed_at": "2024-05-14T15:31:18.572075Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.002813100814819336, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.not_null_stg_zendesk__ticket_comment_ticket_comment_id.b821f4a606", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_comment_id\nfrom `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_comment`\nwhere ticket_comment_id is null\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.573034Z", "completed_at": "2024-05-14T15:31:18.574639Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.574965Z", "completed_at": "2024-05-14T15:31:18.574968Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.005233049392700195, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.unique_stg_zendesk__ticket_comment_ticket_comment_id.ba353330cd", "compiled": true, "compiled_code": "\n \n \n\nwith dbt_test__target as (\n\n select ticket_comment_id as unique_field\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_comment`\n where ticket_comment_id is not null\n\n)\n\nselect\n unique_field,\n count(*) as n_records\n\nfrom dbt_test__target\ngroup by unique_field\nhaving count(*) > 1\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.296484Z", "completed_at": "2024-05-14T15:31:18.578637Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.580435Z", "completed_at": "2024-05-14T15:31:18.580437Z"}], "thread_id": "Thread-1 (worker)", "execution_time": 0.28496408462524414, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__user_tag", "compiled": true, "compiled_code": "--To disable this model, set the using_user_tags variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user_tag_tmp`\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n user_id\n \n as \n \n user_id\n \n, \n \n \n tag\n \n as \n \n tag\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n user_id,\n \n tag\n \n as tags\n from fields\n)\n\nselect * \nfrom final", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user_tag`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.579162Z", "completed_at": "2024-05-14T15:31:18.581142Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.581964Z", "completed_at": "2024-05-14T15:31:18.581967Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.005614042282104492, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__latest_ticket_form", "compiled": true, "compiled_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n\n\nwith ticket_form_history as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_form_history`\n),\n\nlatest_ticket_form as (\n select\n *,\n row_number() over(partition by ticket_form_id order by updated_at desc) as latest_form_index\n from ticket_form_history\n),\n\nfinal as (\n select \n ticket_form_id,\n created_at,\n updated_at,\n display_name,\n is_active,\n name,\n latest_form_index\n from latest_ticket_form\n\n where latest_form_index = 1\n)\n\nselect *\nfrom final", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__latest_ticket_form`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.582139Z", "completed_at": "2024-05-14T15:31:18.584746Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.599075Z", "completed_at": "2024-05-14T15:31:18.599080Z"}], "thread_id": "Thread-1 (worker)", "execution_time": 0.018060922622680664, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.not_null_stg_zendesk__ticket_form_history_ticket_form_id.1afe781a17", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_form_id\nfrom `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_form_history`\nwhere ticket_form_id is null\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.585240Z", "completed_at": "2024-05-14T15:31:18.599839Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.600581Z", "completed_at": "2024-05-14T15:31:18.600584Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.018792152404785156, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__field_calendar_spine", "compiled": true, "compiled_code": "\n\nwith __dbt__cte__int_zendesk__calendar_spine as (\n-- depends_on: `dbt-package-testing`.`zendesk_integration_tests_50`.`ticket_data`\n\nwith spine as (\n\n \n \n \n\n \n \n \n \n\n \n\n \n\n\n\n\n\n\n\n\nwith rawdata as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n 
p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n + \n \n p8.generated_number * power(2, 8)\n + \n \n p9.generated_number * power(2, 9)\n + \n \n p10.generated_number * power(2, 10)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n cross join \n \n p as p8\n cross join \n \n p as p9\n cross join \n \n p as p10\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 1559\n order by generated_number\n\n\n\n),\n\nall_periods as (\n\n select (\n \n\n datetime_add(\n cast( '2020-02-13' as datetime),\n interval row_number() over (order by 1) - 1 day\n )\n\n\n ) as date_day\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_day <= \n\n datetime_add(\n cast( current_date as datetime),\n interval 1 week\n )\n\n\n\n)\n\nselect * from filtered\n\n\n\n), recast as (\n\n select cast(date_day as date) as date_day\n from spine\n\n)\n\nselect *\nfrom recast\n), calendar as (\n\n select *\n from __dbt__cte__int_zendesk__calendar_spine\n \n where date_day >= (select max(date_day) from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__field_calendar_spine`)\n \n\n), ticket as (\n\n select \n *,\n -- closed tickets cannot be re-opened or updated, and solved tickets are automatically closed after a pre-defined number of days configured in your Zendesk settings\n cast( timestamp_trunc(\n cast(case when status != 'closed' then current_timestamp else updated_at end as timestamp),\n day\n ) as date) as open_until\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket`\n \n), joined as (\n\n select \n calendar.date_day,\n ticket.ticket_id\n from calendar\n inner join ticket\n on calendar.date_day >= cast(ticket.created_at as date)\n -- use this variable to extend the ticket's history past its close date (for reporting/data viz purposes :-)\n and \n\n datetime_add(\n cast( ticket.open_until as datetime),\n interval 0 month\n )\n\n >= calendar.date_day\n\n), surrogate_key as (\n\n select\n *,\n to_hex(md5(cast(coalesce(cast(date_day as string), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(ticket_id as string), '_dbt_utils_surrogate_key_null_') as string))) as ticket_day_id\n from joined\n\n)\n\nselect *\nfrom surrogate_key", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__field_calendar_spine`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.604791Z", "completed_at": "2024-05-14T15:31:18.606601Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.606891Z", "completed_at": "2024-05-14T15:31:18.606894Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.00286102294921875, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__updates", "compiled": true, "compiled_code": "with ticket_history as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_field_history`\n\n), ticket_comment as (\n select *\n from 
`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_comment`\n\n), tickets as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket`\n\n), updates_union as (\n select \n ticket_id,\n field_name,\n value,\n null as is_public,\n user_id,\n valid_starting_at,\n valid_ending_at\n from ticket_history\n\n union all\n\n select\n ticket_id,\n cast('comment' as string) as field_name,\n body as value,\n is_public,\n user_id,\n created_at as valid_starting_at,\n lead(created_at) over (partition by ticket_id order by created_at) as valid_ending_at\n from ticket_comment\n\n), final as (\n select\n updates_union.*,\n tickets.created_at as ticket_created_date\n from updates_union\n\n left join tickets\n on tickets.ticket_id = updates_union.ticket_id\n)\n\nselect *\nfrom final", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__updates`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.608006Z", "completed_at": "2024-05-14T15:31:18.610937Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.611260Z", "completed_at": "2024-05-14T15:31:18.611263Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.003971099853515625, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.not_null_stg_zendesk__ticket_ticket_id.a8229e6981", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_id\nfrom `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket`\nwhere ticket_id is null\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.612300Z", "completed_at": "2024-05-14T15:31:18.614017Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.614335Z", "completed_at": "2024-05-14T15:31:18.614338Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.002706766128540039, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.unique_stg_zendesk__ticket_ticket_id.4be7124521", "compiled": true, "compiled_code": "\n \n \n\nwith dbt_test__target as (\n\n select ticket_id as unique_field\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket`\n where ticket_id is not null\n\n)\n\nselect\n unique_field,\n count(*) as n_records\n\nfrom dbt_test__target\ngroup by unique_field\nhaving count(*) > 1\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.615343Z", "completed_at": "2024-05-14T15:31:18.616919Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.617199Z", "completed_at": "2024-05-14T15:31:18.617201Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.0024950504302978516, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__ticket_aggregates", "compiled": true, "compiled_code": "with tickets as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket`\n\n), ticket_tags as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_tag`\n\n), brands as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__brand`\n \n), ticket_tag_aggregate as (\n select\n ticket_tags.ticket_id,\n \n string_agg(ticket_tags.tags, ', ')\n\n as ticket_tags\n from ticket_tags\n 
group by 1\n\n), final as (\n select \n tickets.*,\n case when lower(tickets.type) = 'incident'\n then true\n else false\n end as is_incident,\n brands.name as ticket_brand_name,\n ticket_tag_aggregate.ticket_tags\n from tickets\n\n left join ticket_tag_aggregate\n using(ticket_id)\n\n left join brands\n on brands.brand_id = tickets.brand_id\n)\n\nselect *\nfrom final", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_aggregates`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.618272Z", "completed_at": "2024-05-14T15:31:18.619742Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.620037Z", "completed_at": "2024-05-14T15:31:18.620040Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.002485036849975586, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__assignee_updates", "compiled": true, "compiled_code": "with ticket_updates as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__updates`\n\n), ticket as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket`\n\n), ticket_requester as (\n select\n ticket.ticket_id,\n ticket.assignee_id,\n ticket_updates.valid_starting_at\n\n from ticket\n\n left join ticket_updates\n on ticket_updates.ticket_id = ticket.ticket_id\n and ticket_updates.user_id = ticket.assignee_id\n\n), final as (\n select \n ticket_id,\n assignee_id,\n max(valid_starting_at) as last_updated,\n count(*) as total_updates\n from ticket_requester\n\n group by 1, 2\n)\n\nselect * \nfrom final", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__assignee_updates`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.621005Z", "completed_at": "2024-05-14T15:31:18.623122Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.623444Z", "completed_at": "2024-05-14T15:31:18.623447Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.003088235855102539, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__requester_updates", "compiled": true, "compiled_code": "with ticket_updates as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__updates`\n\n), ticket as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket`\n\n), ticket_requester as (\n select\n ticket.ticket_id,\n ticket.requester_id,\n ticket_updates.valid_starting_at\n\n from ticket\n\n left join ticket_updates\n on ticket_updates.ticket_id = ticket.ticket_id\n and ticket_updates.user_id = ticket.requester_id\n\n), final as (\n select \n ticket_id,\n requester_id,\n max(valid_starting_at) as last_updated,\n count(*) as total_updates\n from ticket_requester\n\n group by 1, 2\n)\n\nselect * \nfrom final", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__requester_updates`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.624499Z", "completed_at": "2024-05-14T15:31:18.627146Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.627442Z", "completed_at": "2024-05-14T15:31:18.627444Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.0035800933837890625, "adapter_response": {}, "message": null, "failures": null, "unique_id": 
"model.zendesk.int_zendesk__ticket_historical_assignee", "compiled": true, "compiled_code": "with assignee_updates as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__updates`\n where field_name = 'assignee_id'\n\n), calculate_metrics as (\n select\n ticket_id,\n field_name as assignee_id,\n value,\n ticket_created_date,\n valid_starting_at,\n lag(valid_starting_at) over (partition by ticket_id order by valid_starting_at) as previous_update,\n lag(value) over (partition by ticket_id order by valid_starting_at) as previous_assignee,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_agent_assignment_date,\n first_value(value) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_assignee_id,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_agent_assignment_date,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_assignee_id,\n count(value) over (partition by ticket_id) as assignee_stations_count\n from assignee_updates\n\n), unassigned_time as (\n select\n ticket_id,\n sum(case when assignee_id is not null and previous_assignee is null \n then \n\n datetime_diff(\n cast(valid_starting_at as datetime),\n cast(coalesce(previous_update, ticket_created_date) as datetime),\n second\n )\n\n / 60\n else 0\n end) as ticket_unassigned_duration_calendar_minutes,\n count(distinct value) as unique_assignee_count\n from calculate_metrics\n\n group by 1\n\n), window_group as (\n select\n calculate_metrics.ticket_id,\n calculate_metrics.first_agent_assignment_date,\n calculate_metrics.first_assignee_id,\n calculate_metrics.last_agent_assignment_date,\n calculate_metrics.last_assignee_id,\n calculate_metrics.assignee_stations_count\n from calculate_metrics\n\n group by 1,2,3,4,5,6\n\n), final as (\n select\n window_group.*,\n unassigned_time.unique_assignee_count,\n unassigned_time.ticket_unassigned_duration_calendar_minutes\n from window_group\n\n left join unassigned_time\n using(ticket_id)\n)\n\nselect *\nfrom final", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_assignee`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.628415Z", "completed_at": "2024-05-14T15:31:18.629569Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.629940Z", "completed_at": "2024-05-14T15:31:18.629943Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.002167224884033203, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__ticket_historical_group", "compiled": true, "compiled_code": "with ticket_group_history as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__updates`\n where field_name = 'group_id'\n\n), group_breakdown as (\n select\n \n ticket_id,\n valid_starting_at,\n valid_ending_at,\n value as group_id\n from ticket_group_history\n\n), final as (\n select\n ticket_id,\n count(group_id) as group_stations_count\n from group_breakdown\n\n group by 1\n)\n\nselect *\nfrom final", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_group`"}, {"status": "success", "timing": [{"name": "compile", 
"started_at": "2024-05-14T15:31:18.630988Z", "completed_at": "2024-05-14T15:31:18.632430Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.632748Z", "completed_at": "2024-05-14T15:31:18.632751Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.002407073974609375, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__ticket_historical_satisfaction", "compiled": true, "compiled_code": "with satisfaction_updates as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__updates`\n where field_name in ('satisfaction_score', 'satisfaction_comment', 'satisfaction_reason_code') \n\n), latest_reason as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_reason\n from satisfaction_updates\n\n where field_name = 'satisfaction_reason_code'\n\n), latest_comment as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_comment\n from satisfaction_updates\n\n where field_name = 'satisfaction_comment'\n\n), first_and_latest_score as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_satisfaction_score,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_score\n from satisfaction_updates\n\n where field_name = 'satisfaction_score' and value != 'offered'\n\n), satisfaction_scores as (\n select\n ticket_id,\n count(value) over (partition by ticket_id) as count_satisfaction_scores,\n case when lag(value) over (partition by ticket_id order by valid_starting_at desc) = 'good' and value = 'bad'\n then 1\n else 0\n end as good_to_bad_score,\n case when lag(value) over (partition by ticket_id order by valid_starting_at desc) = 'bad' and value = 'good'\n then 1\n else 0\n end as bad_to_good_score\n from satisfaction_updates\n where field_name = 'satisfaction_score'\n\n), score_group as (\n select\n ticket_id,\n count_satisfaction_scores,\n sum(good_to_bad_score) as total_good_to_bad_score,\n sum(bad_to_good_score) as total_bad_to_good_score\n from satisfaction_scores\n\n group by 1, 2\n\n), window_group as (\n select\n satisfaction_updates.ticket_id,\n latest_reason.latest_satisfaction_reason,\n latest_comment.latest_satisfaction_comment,\n first_and_latest_score.first_satisfaction_score,\n first_and_latest_score.latest_satisfaction_score,\n score_group.count_satisfaction_scores,\n score_group.total_good_to_bad_score,\n score_group.total_bad_to_good_score\n\n from satisfaction_updates\n\n left join latest_reason\n on satisfaction_updates.ticket_id = latest_reason.ticket_id\n\n left join latest_comment\n on satisfaction_updates.ticket_id = latest_comment.ticket_id\n\n left join first_and_latest_score\n on satisfaction_updates.ticket_id = first_and_latest_score.ticket_id\n\n left join score_group\n on satisfaction_updates.ticket_id = score_group.ticket_id\n\n group by 1, 2, 3, 4, 5, 6, 7, 8\n\n), final as (\n select\n ticket_id,\n latest_satisfaction_reason,\n latest_satisfaction_comment,\n first_satisfaction_score,\n latest_satisfaction_score,\n case when count_satisfaction_scores > 0\n then (count_satisfaction_scores - 1) --Subtracting one as the first score is always \"offered\".\n else 
count_satisfaction_scores\n end as count_satisfaction_scores,\n case when total_good_to_bad_score > 0\n then true\n else false\n end as is_good_to_bad_satisfaction_score,\n case when total_bad_to_good_score > 0\n then true\n else false\n end as is_bad_to_good_satisfaction_score\n from window_group\n)\n\nselect *\nfrom final", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_satisfaction`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.633767Z", "completed_at": "2024-05-14T15:31:18.635341Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.635656Z", "completed_at": "2024-05-14T15:31:18.635658Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.0025429725646972656, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__ticket_historical_status", "compiled": true, "compiled_code": "-- To do -- can we delete ticket_status_counter and unique_status_counter?\n\nwith ticket_status_history as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__updates`\n where field_name = 'status'\n\n)\n\n select\n \n ticket_id,\n valid_starting_at,\n valid_ending_at,\n \n\n datetime_diff(\n cast(coalesce(valid_ending_at, current_timestamp) as datetime),\n cast(valid_starting_at as datetime),\n minute\n )\n\n as status_duration_calendar_minutes,\n value as status,\n -- MIGHT BE ABLE TO DELETE ROWS BELOW\n row_number() over (partition by ticket_id order by valid_starting_at) as ticket_status_counter,\n row_number() over (partition by ticket_id, value order by valid_starting_at) as unique_status_counter\n\n from ticket_status_history", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_status`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.636703Z", "completed_at": "2024-05-14T15:31:18.639634Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.639964Z", "completed_at": "2024-05-14T15:31:18.639967Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.00391387939453125, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__sla_policy_applied", "compiled": true, "compiled_code": "-- step 1, figure out when sla was applied to tickets\n\n-- more on SLA policies here: https://support.zendesk.com/hc/en-us/articles/204770038-Defining-and-using-SLA-policies-Professional-and-Enterprise-\n-- SLA policies are calculated for next_reply_time, first_reply_time, agent_work_time, requester_wait_time. 
If your company uses other SLA metrics and would like this\n-- package to support those, please reach out to the Fivetran team on Slack.\n\nwith ticket_field_history as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__updates`\n\n), sla_policy_name as (\n\n select \n *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__updates`\n where field_name = ('sla_policy')\n\n), ticket as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_aggregates`\n\n), sla_policy_applied as (\n\n select\n ticket_field_history.ticket_id,\n ticket.created_at as ticket_created_at,\n ticket_field_history.valid_starting_at,\n ticket.status as ticket_current_status,\n ticket_field_history.field_name as metric,\n case when ticket_field_history.field_name = 'first_reply_time' then row_number() over (partition by ticket_field_history.ticket_id, ticket_field_history.field_name order by ticket_field_history.valid_starting_at desc) else 1 end as latest_sla,\n case when ticket_field_history.field_name = 'first_reply_time' then ticket.created_at else ticket_field_history.valid_starting_at end as sla_applied_at,\n cast(\n\n \n json_extract_scalar(ticket_field_history.value, '$.minutes')\n\n as INT64 ) as target,\n \n\n \n json_extract_scalar(ticket_field_history.value, '$.in_business_hours')\n\n = 'true' as in_business_hours\n from ticket_field_history\n join ticket\n on ticket.ticket_id = ticket_field_history.ticket_id\n where ticket_field_history.value is not null\n and ticket_field_history.field_name in ('next_reply_time', 'first_reply_time', 'agent_work_time', 'requester_wait_time')\n\n), final as (\n select\n sla_policy_applied.*,\n sla_policy_name.value as sla_policy_name\n from sla_policy_applied\n left join sla_policy_name\n on sla_policy_name.ticket_id = sla_policy_applied.ticket_id\n and sla_policy_applied.valid_starting_at >= sla_policy_name.valid_starting_at\n and sla_policy_applied.valid_starting_at < coalesce(sla_policy_name.valid_ending_at, current_timestamp) \n where sla_policy_applied.latest_sla = 1\n)\n\nselect *\nfrom final", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__sla_policy_applied`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.646603Z", "completed_at": "2024-05-14T15:31:18.649915Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.650219Z", "completed_at": "2024-05-14T15:31:18.650222Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.004261016845703125, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__agent_work_time_filtered_statuses", "compiled": true, "compiled_code": "with agent_work_time_sla as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__sla_policy_applied`\n where metric = 'agent_work_time'\n\n), ticket_historical_status as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_status`\n \n--This captures the statuses of the ticket while the agent work time sla was active for the ticket.\n), agent_work_time_filtered_statuses as (\n\n select \n ticket_historical_status.ticket_id,\n greatest(ticket_historical_status.valid_starting_at, agent_work_time_sla.sla_applied_at) as valid_starting_at,\n coalesce(\n ticket_historical_status.valid_ending_at, \n \n\n 
timestamp_add(current_timestamp, interval 30 day)\n\n ) as valid_ending_at, --assumes current status continues into the future. This is necessary to predict future SLA breaches (not just past).\n ticket_historical_status.status as ticket_status,\n agent_work_time_sla.sla_applied_at,\n agent_work_time_sla.target, \n agent_work_time_sla.sla_policy_name,\n agent_work_time_sla.ticket_created_at,\n agent_work_time_sla.in_business_hours\n from ticket_historical_status\n join agent_work_time_sla\n on ticket_historical_status.ticket_id = agent_work_time_sla.ticket_id\n where ticket_historical_status.status in ('new', 'open') -- these are the only statuses that count as \"agent work time\"\n and sla_applied_at < valid_ending_at\n\n)\nselect *\nfrom agent_work_time_filtered_statuses", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__agent_work_time_filtered_statuses`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.651312Z", "completed_at": "2024-05-14T15:31:18.653116Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.653415Z", "completed_at": "2024-05-14T15:31:18.653418Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.0028030872344970703, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__reply_time_calendar_hours", "compiled": true, "compiled_code": "--REPLY TIME SLA\n-- step 2, figure out when the sla will breach for sla's in calendar hours. The calculation is relatively straightforward.\n\nwith sla_policy_applied as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__sla_policy_applied`\n\n), final as (\n select\n *,\n \n\n timestamp_add(sla_applied_at, interval cast(target as INT64 ) minute)\n\n as sla_breach_at\n from sla_policy_applied\n where not in_business_hours\n and metric in ('next_reply_time', 'first_reply_time')\n\n)\n\nselect *\nfrom final", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__reply_time_calendar_hours`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.654402Z", "completed_at": "2024-05-14T15:31:18.656158Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.656454Z", "completed_at": "2024-05-14T15:31:18.656457Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.002671957015991211, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses", "compiled": true, "compiled_code": "with requester_wait_time_sla as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__sla_policy_applied`\n where metric = 'requester_wait_time'\n\n), ticket_historical_status as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_status`\n \n--This captures the statuses of the ticket while the requester wait time sla was active for the ticket.\n), requester_wait_time_filtered_statuses as (\n\n select \n ticket_historical_status.ticket_id,\n greatest(ticket_historical_status.valid_starting_at, requester_wait_time_sla.sla_applied_at) as valid_starting_at,\n coalesce(\n ticket_historical_status.valid_ending_at, \n \n\n timestamp_add(current_timestamp, interval 30 day)\n\n ) as valid_ending_at, --assumes current status continues into the future. 
This is necessary to predict future SLA breaches (not just past).\n ticket_historical_status.status as ticket_status,\n requester_wait_time_sla.sla_applied_at,\n requester_wait_time_sla.target,\n requester_wait_time_sla.sla_policy_name,\n requester_wait_time_sla.ticket_created_at,\n requester_wait_time_sla.in_business_hours\n from ticket_historical_status\n join requester_wait_time_sla\n on ticket_historical_status.ticket_id = requester_wait_time_sla.ticket_id\n where ticket_historical_status.status in ('new', 'open', 'on-hold', 'hold') -- these are the only statuses that count as \"requester wait time\"\n and sla_applied_at < valid_ending_at\n\n)\nselect *\nfrom requester_wait_time_filtered_statuses", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__requester_wait_time_filtered_statuses`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.657542Z", "completed_at": "2024-05-14T15:31:18.659399Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.659702Z", "completed_at": "2024-05-14T15:31:18.659704Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.002875089645385742, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__agent_work_time_calendar_hours", "compiled": true, "compiled_code": "-- Calculate breach time for agent work time, calendar hours\nwith agent_work_time_filtered_statuses as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__agent_work_time_filtered_statuses`\n where not in_business_hours\n\n), agent_work_time_calendar_minutes as (\n\n select \n *,\n \n\n datetime_diff(\n cast(valid_ending_at as datetime),\n cast(valid_starting_at as datetime),\n minute\n )\n\n as calendar_minutes,\n sum(\n\n datetime_diff(\n cast(valid_ending_at as datetime),\n cast(valid_starting_at as datetime),\n minute\n )\n\n ) \n over (partition by ticket_id, sla_applied_at order by valid_starting_at rows between unbounded preceding and current row) as running_total_calendar_minutes\n from agent_work_time_filtered_statuses\n\n), agent_work_time_calendar_minutes_flagged as (\n\nselect \n agent_work_time_calendar_minutes.*,\n target - running_total_calendar_minutes as remaining_target_minutes,\n case when (target - running_total_calendar_minutes) < 0 \n and \n (lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) >= 0 \n or \n lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) is null) \n then true else false end as is_breached_during_schedule\n \nfrom agent_work_time_calendar_minutes\n\n), final as (\n select\n *,\n (remaining_target_minutes + calendar_minutes) as breach_minutes,\n \n\n timestamp_add(valid_starting_at, interval (remaining_target_minutes + calendar_minutes) minute)\n\n as sla_breach_at\n from agent_work_time_calendar_minutes_flagged\n\n)\n\nselect *\nfrom final", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__agent_work_time_calendar_hours`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.660761Z", "completed_at": "2024-05-14T15:31:18.662868Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.663190Z", "completed_at": "2024-05-14T15:31:18.663193Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.0031201839447021484, "adapter_response": {}, "message": 
null, "failures": null, "unique_id": "model.zendesk.int_zendesk__requester_wait_time_calendar_hours", "compiled": true, "compiled_code": "-- Calculate breach time for requester wait time, calendar hours\nwith requester_wait_time_filtered_statuses as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__requester_wait_time_filtered_statuses`\n where not in_business_hours\n\n), requester_wait_time_calendar_minutes as (\n\n select \n *,\n \n\n datetime_diff(\n cast(valid_ending_at as datetime),\n cast(valid_starting_at as datetime),\n minute\n )\n\n as calendar_minutes,\n sum(\n\n datetime_diff(\n cast(valid_ending_at as datetime),\n cast(valid_starting_at as datetime),\n minute\n )\n\n ) \n over (partition by ticket_id, sla_applied_at order by valid_starting_at rows between unbounded preceding and current row) as running_total_calendar_minutes\n from requester_wait_time_filtered_statuses\n\n), requester_wait_time_calendar_minutes_flagged as (\n\nselect \n requester_wait_time_calendar_minutes.*,\n target - running_total_calendar_minutes as remaining_target_minutes,\n case when (target - running_total_calendar_minutes) < 0 \n and \n (lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) >= 0 \n or \n lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) is null) \n then true else false end as is_breached_during_schedule\n \nfrom requester_wait_time_calendar_minutes\n\n), final as (\n select\n *,\n (remaining_target_minutes + calendar_minutes) as breach_minutes,\n \n\n timestamp_add(valid_starting_at, interval (remaining_target_minutes + calendar_minutes) minute)\n\n as sla_breach_at\n from requester_wait_time_calendar_minutes_flagged\n\n)\n\nselect *\nfrom final", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__requester_wait_time_calendar_hours`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.465955Z", "completed_at": "2024-05-14T15:31:18.806272Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.807213Z", "completed_at": "2024-05-14T15:31:18.807219Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.34237217903137207, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__time_zone", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__time_zone_tmp`\n\n),\n\nfields as (\n\n select\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n standard_offset\n \n as \n \n standard_offset\n \n, \n \n \n time_zone\n \n as \n \n time_zone\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n standard_offset,\n time_zone,\n -- the standard_offset is a string written as [+/-]HH:MM\n -- let's convert it to an integer value of minutes\n cast( \n\n \n split(\n standard_offset,\n ':'\n )[safe_offset(0)]\n \n\n as INT64 ) * 60 +\n (cast( \n\n \n split(\n standard_offset,\n ':'\n )[safe_offset(1)]\n \n\n as INT64 ) *\n (case when standard_offset like '-%' then -1 else 1 end) ) as standard_offset_minutes\n \n from fields\n)\n\nselect * \nfrom final", "relation_name": 
"`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__time_zone`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.473211Z", "completed_at": "2024-05-14T15:31:18.806553Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.807466Z", "completed_at": "2024-05-14T15:31:18.807471Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.337360143661499, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__user", "compiled": true, "compiled_code": "with base as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user_tmp`\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n active\n \n as \n \n active\n \n, \n \n \n alias\n \n as \n \n alias\n \n, \n \n \n authenticity_token\n \n as \n \n authenticity_token\n \n, \n \n \n chat_only\n \n as \n \n chat_only\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n details\n \n as \n \n details\n \n, \n \n \n email\n \n as \n \n email\n \n, \n \n \n external_id\n \n as \n \n external_id\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n last_login_at\n \n as \n \n last_login_at\n \n, \n \n \n locale\n \n as \n \n locale\n \n, \n \n \n locale_id\n \n as \n \n locale_id\n \n, \n \n \n moderator\n \n as \n \n moderator\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n notes\n \n as \n \n notes\n \n, \n \n \n only_private_comments\n \n as \n \n only_private_comments\n \n, \n \n \n organization_id\n \n as \n \n organization_id\n \n, \n \n \n phone\n \n as \n \n phone\n \n, \n \n \n remote_photo_url\n \n as \n \n remote_photo_url\n \n, \n \n \n restricted_agent\n \n as \n \n restricted_agent\n \n, \n \n \n role\n \n as \n \n role\n \n, \n \n \n shared\n \n as \n \n shared\n \n, \n \n \n shared_agent\n \n as \n \n shared_agent\n \n, \n \n \n signature\n \n as \n \n signature\n \n, \n \n \n suspended\n \n as \n \n suspended\n \n, \n \n \n ticket_restriction\n \n as \n \n ticket_restriction\n \n, \n \n \n time_zone\n \n as \n \n time_zone\n \n, \n \n \n two_factor_auth_enabled\n \n as \n \n two_factor_auth_enabled\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n, \n \n \n url\n \n as \n \n url\n \n, \n \n \n verified\n \n as \n \n verified\n \n\n\n\n \n from base\n),\n\nfinal as ( \n \n select \n id as user_id,\n external_id,\n _fivetran_synced,\n cast(last_login_at as timestamp) as last_login_at,\n cast(created_at as timestamp) as created_at,\n cast(updated_at as timestamp) as updated_at,\n email,\n name,\n organization_id,\n phone,\n role,\n ticket_restriction,\n time_zone,\n locale,\n active as is_active,\n suspended as is_suspended\n\n \n\n\n\n\n\n from fields\n)\n\nselect * \nfrom final", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.811306Z", "completed_at": "2024-05-14T15:31:18.866627Z"}, {"name": "execute", "started_at": 
"2024-05-14T15:31:18.867617Z", "completed_at": "2024-05-14T15:31:18.867622Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.0586240291595459, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__schedule_spine", "compiled": true, "compiled_code": "\n\n/*\n The purpose of this model is to create a spine of appropriate timezone offsets to use for schedules, as offsets may change due to Daylight Savings.\n End result will include `valid_from` and `valid_until` columns which we will use downstream to determine which schedule-offset to associate with each ticket (ie standard time vs daylight time)\n*/\n\nwith __dbt__cte__int_zendesk__calendar_spine as (\n-- depends_on: `dbt-package-testing`.`zendesk_integration_tests_50`.`ticket_data`\n\nwith spine as (\n\n \n \n \n\n \n \n \n \n\n \n\n \n\n\n\n\n\n\n\n\nwith rawdata as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n + \n \n p8.generated_number * power(2, 8)\n + \n \n p9.generated_number * power(2, 9)\n + \n \n p10.generated_number * power(2, 10)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n cross join \n \n p as p8\n cross join \n \n p as p9\n cross join \n \n p as p10\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 1559\n order by generated_number\n\n\n\n),\n\nall_periods as (\n\n select (\n \n\n datetime_add(\n cast( '2020-02-13' as datetime),\n interval row_number() over (order by 1) - 1 day\n )\n\n\n ) as date_day\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_day <= \n\n datetime_add(\n cast( current_date as datetime),\n interval 1 week\n )\n\n\n\n)\n\nselect * from filtered\n\n\n\n), recast as (\n\n select cast(date_day as date) as date_day\n from spine\n\n)\n\nselect *\nfrom recast\n), timezone as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__time_zone`\n\n), daylight_time as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__daylight_time`\n\n), schedule as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__schedule` \n\n-- in the below CTE we want to explode out each holiday period into individual days, to prevent potential fanouts downstream in joins to schedules.\n), schedule_holiday as ( \n\n select\n _fivetran_synced,\n cast(date_day as timestamp ) as holiday_start_date_at, -- For each day within a holiday we want to give it its own record. In the later CTE holiday_start_end_times, we transform these timestamps into minutes-from-beginning-of-the-week.\n cast(date_day as timestamp ) as holiday_end_date_at, -- Since each day within a holiday now gets its own record, the end_date will then be the same day as the start_date. 
In the later CTE holiday_start_end_times, we transform these timestamps into minutes-from-beginning-of-the-week.\n holiday_id,\n holiday_name,\n schedule_id\n\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__schedule_holiday` \n inner join __dbt__cte__int_zendesk__calendar_spine \n on holiday_start_date_at <= cast(date_day as timestamp )\n and holiday_end_date_at >= cast(date_day as timestamp )\n\n), timezone_with_dt as (\n\n select \n timezone.*,\n daylight_time.daylight_start_utc,\n daylight_time.daylight_end_utc,\n daylight_time.daylight_offset_minutes\n\n from timezone \n left join daylight_time \n on timezone.time_zone = daylight_time.time_zone\n\n), order_timezone_dt as (\n\n select \n *,\n -- will be null for timezones without any daylight savings records (and the first entry)\n -- we will coalesce the first entry date with .... the X years ago\n lag(daylight_end_utc, 1) over (partition by time_zone order by daylight_end_utc asc) as last_daylight_end_utc,\n -- will be null for timezones without any daylight savings records (and the last entry)\n -- we will coalesce the last entry date with the current date \n lead(daylight_start_utc, 1) over (partition by time_zone order by daylight_start_utc asc) as next_daylight_start_utc\n\n from timezone_with_dt\n\n), split_timezones as (\n\n -- standard schedule (includes timezones without DT)\n -- starts: when the last Daylight Savings ended\n -- ends: when the next Daylight Savings starts\n select \n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- last_daylight_end_utc is null for the first record of the time_zone's daylight time, or if the TZ doesn't use DT\n coalesce(last_daylight_end_utc, cast('1970-01-01' as date)) as valid_from,\n\n -- daylight_start_utc is null for timezones that don't use DT\n coalesce(daylight_start_utc, cast( \n\n datetime_add(\n cast( current_timestamp as datetime),\n interval 1 year\n )\n\n as date)) as valid_until\n\n from order_timezone_dt\n\n union all \n\n -- DT schedule (excludes timezones without it)\n -- starts: when this Daylight Savings started\n -- ends: when this Daylight Savings ends\n select \n time_zone,\n -- Pacific Time is -8h during standard time and -7h during DT\n standard_offset_minutes + daylight_offset_minutes as offset_minutes,\n daylight_start_utc as valid_from,\n daylight_end_utc as valid_until\n\n from order_timezone_dt\n where daylight_offset_minutes is not null\n\n union all\n\n select\n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- Get the latest daylight_end_utc time and set that as the valid_from\n max(daylight_end_utc) as valid_from,\n\n -- If the latest_daylight_end_time_utc is less than today's timestamp, that means DST has ended. Therefore, we will make the valid_until in the future.\n cast( \n\n datetime_add(\n cast( current_timestamp as datetime),\n interval 1 year\n )\n\n as date) as valid_until\n\n from order_timezone_dt\n group by 1, 2\n -- We only want to apply this logic to time_zones that had daylight saving time that ended at some point. 
For example, Hong Kong ended DST in 1979.\n having cast(max(daylight_end_utc) as date) < cast(current_timestamp as date)\n\n), calculate_schedules as (\n\n select \n schedule.schedule_id,\n schedule.time_zone,\n schedule.start_time,\n schedule.end_time,\n schedule.created_at,\n schedule.schedule_name,\n schedule.start_time - coalesce(split_timezones.offset_minutes, 0) as start_time_utc,\n schedule.end_time - coalesce(split_timezones.offset_minutes, 0) as end_time_utc,\n coalesce(split_timezones.offset_minutes, 0) as offset_minutes_to_add,\n -- we'll use these to determine which schedule version to associate tickets with\n cast(split_timezones.valid_from as timestamp) as valid_from,\n cast(split_timezones.valid_until as timestamp) as valid_until\n\n from schedule\n left join split_timezones\n on split_timezones.time_zone = schedule.time_zone\n\n-- Now we need to take holidays into consideration and perform the following transformations to account for holidays in existing schedules\n), holiday_start_end_times as (\n\n select\n calculate_schedules.*,\n schedule_holiday.holiday_name,\n schedule_holiday.holiday_start_date_at,\n cast(\n\n datetime_add(\n cast( schedule_holiday.holiday_end_date_at as datetime),\n interval 86400 second\n )\n\n as timestamp) as holiday_end_date_at, -- add 24*60*60 seconds\n cast(cast(timestamp_trunc(\n cast(schedule_holiday.holiday_start_date_at as timestamp),\n week\n ) as date) as timestamp) as holiday_week_start,\n cast(cast(\n \n\n datetime_add(\n cast( \n\n datetime_add(\n cast( timestamp_trunc(\n cast(schedule_holiday.holiday_end_date_at as timestamp),\n week\n ) as datetime),\n interval 1 week\n )\n\n as datetime),\n interval -1 day\n )\n\n\n as date) as timestamp) as holiday_week_end\n from schedule_holiday\n inner join calculate_schedules\n on calculate_schedules.schedule_id = schedule_holiday.schedule_id\n and schedule_holiday.holiday_start_date_at >= calculate_schedules.valid_from \n and schedule_holiday.holiday_start_date_at < calculate_schedules.valid_until\n\n-- Let's calculate the start and end date of the Holiday in terms of minutes from Sunday (like other Zendesk schedules)\n), holiday_minutes as(\n\n select\n holiday_start_end_times.*,\n \n\n datetime_diff(\n cast(holiday_start_date_at as datetime),\n cast(holiday_week_start as datetime),\n minute\n )\n\n - coalesce(timezone.standard_offset_minutes, 0) as minutes_from_sunday_start,\n \n\n datetime_diff(\n cast(holiday_end_date_at as datetime),\n cast(holiday_week_start as datetime),\n minute\n )\n\n - coalesce(timezone.standard_offset_minutes, 0) as minutes_from_sunday_end\n from holiday_start_end_times\n left join timezone\n on timezone.time_zone = holiday_start_end_times.time_zone\n\n-- Determine which schedule days include a holiday\n), holiday_check as (\n\n select\n *,\n case when minutes_from_sunday_start < start_time_utc and minutes_from_sunday_end > end_time_utc \n then holiday_name \n end as holiday_name_check\n from holiday_minutes\n\n-- Consolidate the holiday records that were just created\n), holiday_consolidated as (\n\n select \n schedule_id, \n time_zone, \n schedule_name, \n valid_from, \n valid_until, \n start_time_utc, \n end_time_utc, \n holiday_week_start,\n cast(\n\n datetime_add(\n cast( holiday_week_end as datetime),\n interval 86400 second\n )\n\n as timestamp) as holiday_week_end,\n max(holiday_name_check) as holiday_name_check\n from holiday_check\n group by 1,2,3,4,5,6,7,8,9\n\n-- Since we have holiday schedules and normal schedules, we need to union them into a holistic 
schedule spine\n), spine_union as (\n\n select\n schedule_id, \n time_zone, \n schedule_name, \n valid_from, \n valid_until, \n start_time_utc, \n end_time_utc, \n holiday_week_start,\n holiday_week_end,\n holiday_name_check\n from holiday_consolidated\n\n union all\n\n select\n schedule_id, \n time_zone, \n schedule_name, \n valid_from, \n valid_until, \n start_time_utc, \n end_time_utc, \n null as holiday_week_start,\n null as holiday_week_end,\n null as holiday_name_check\n from calculate_schedules\n\n-- Now that we have an understanding of which weeks are holidays, let's consolidate them with non-holiday weeks\n), all_periods as (\n\n select distinct\n schedule_id,\n holiday_week_start as period_start,\n holiday_week_end as period_end,\n start_time_utc,\n end_time_utc,\n holiday_name_check,\n true as is_holiday_week\n from spine_union\n where holiday_week_start is not null\n and holiday_week_end is not null\n\n union all\n\n select distinct\n schedule_id,\n valid_from as period_start,\n valid_until as period_end,\n start_time_utc,\n end_time_utc,\n cast(null as string) as holiday_name_check,\n false as is_holiday_week\n from spine_union\n\n-- We have holiday and non-holiday schedules together, now let's sort them to understand the previous end and next start of neighboring schedules\n), sorted_periods as (\n\n select distinct\n *,\n lag(period_end) over (partition by schedule_id order by period_start, start_time_utc) as prev_end,\n lead(period_start) over (partition by schedule_id order by period_start, start_time_utc) as next_start\n from all_periods\n\n-- We need to adjust some non-holiday schedules in order to properly fill holiday gaps in the schedules later in the transformation\n), non_holiday_period_adjustments as (\n\n select\n schedule_id, \n period_start, \n period_end,\n prev_end,\n next_start,\n -- taking first_value/last_value because prev_end and next_start are inconsistent within the schedule partitions -- they all include a record that is outside the partition, so we need to ignore those erroneous records that slip in\n coalesce(greatest(case \n when not is_holiday_week and prev_end is not null then first_value(prev_end) over (partition by schedule_id, period_start order by start_time_utc rows between unbounded preceding and unbounded following)\n else period_start\n end, period_start), period_start) as valid_from,\n coalesce(case \n when not is_holiday_week and next_start is not null then last_value(next_start) over (partition by schedule_id, period_start order by start_time_utc rows between unbounded preceding and unbounded following)\n else period_end\n end, period_end) as valid_until,\n start_time_utc,\n end_time_utc,\n holiday_name_check,\n is_holiday_week\n from sorted_periods\n\n-- A few window function results will be leveraged downstream. 
Let's generate them now.\n), gap_starter as (\n select \n *,\n max(period_end) over (partition by schedule_id) as max_valid_until,\n last_value(next_start) over (partition by schedule_id, period_start order by valid_until rows between unbounded preceding and unbounded following) as lead_next_start,\n first_value(prev_end) over (partition by schedule_id, valid_from order by start_time_utc rows between unbounded preceding and unbounded following) as first_prev_end\n from non_holiday_period_adjustments\n\n-- There may be gaps in holiday and non-holiday schedules, so we need to identify where these gaps are\n), gap_adjustments as(\n\n select \n *,\n -- In order to identify the gaps we check to see if the valid_from and previous valid_until are right next to one another. If we add two hours to the previous valid_until it should always be greater than the current valid_from.\n -- However, if the valid_from is greater instead then we can identify that this period has a gap that needs to be filled.\n case \n when cast(\n\n datetime_add(\n cast( valid_until as datetime),\n interval 2 hour\n )\n\n as timestamp) < cast(lead_next_start as timestamp)\n then 'gap'\n when (lead_next_start is null and valid_from < max_valid_until and period_end != max_valid_until)\n then 'gap'\n else null\n end as is_schedule_gap\n\n from gap_starter\n\n-- We know where the gaps are, so now let's prime the data to fill those gaps\n), schedule_spine_primer as (\n\n select \n schedule_id, \n valid_from,\n valid_until,\n start_time_utc,\n end_time_utc,\n lead_next_start,\n max_valid_until,\n holiday_name_check,\n is_holiday_week,\n max(is_schedule_gap) over (partition by schedule_id, valid_until) as is_gap_period,\n lead(valid_from) over (partition by schedule_id order by valid_from, start_time_utc) as fill_primer\n from gap_adjustments\n\n-- We know the gaps and where they are, so let's fill them with the following union\n), final_union as (\n\n -- For all gap periods, let's properly create a schedule filled before the holiday.\n select \n schedule_id,\n valid_until as valid_from,\n coalesce(last_value(fill_primer) over (partition by schedule_id, valid_until order by start_time_utc rows between unbounded preceding and unbounded following), max_valid_until) as valid_until,\n start_time_utc, \n end_time_utc, \n cast(null as string) as holiday_name_check,\n false as is_holiday_week\n from schedule_spine_primer\n where is_gap_period is not null\n\n union all\n\n -- Fill all other normal schedules.\n select\n schedule_id, \n valid_from,\n valid_until,\n start_time_utc,\n end_time_utc,\n holiday_name_check,\n is_holiday_week\n from schedule_spine_primer\n\n-- We can finally filter out the holiday_name_check results as the gap filling properly filled in the gaps for holidays\n), final as(\n\n select\n schedule_id, \n valid_from,\n valid_until,\n start_time_utc,\n end_time_utc,\n is_holiday_week\n from final_union\n where holiday_name_check is null\n)\n\nselect *\nfrom final", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__schedule_spine`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.864907Z", "completed_at": "2024-05-14T15:31:18.866812Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.867786Z", "completed_at": "2024-05-14T15:31:18.867789Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.05823826789855957, "adapter_response": {}, "message": null, "failures": null, "unique_id": 
"test.zendesk_source.unique_stg_zendesk__time_zone_time_zone.67995adbaf", "compiled": true, "compiled_code": "\n \n \n\nwith dbt_test__target as (\n\n select time_zone as unique_field\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__time_zone`\n where time_zone is not null\n\n)\n\nselect\n unique_field,\n count(*) as n_records\n\nfrom dbt_test__target\ngroup by unique_field\nhaving count(*) > 1\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.836563Z", "completed_at": "2024-05-14T15:31:18.866990Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.867943Z", "completed_at": "2024-05-14T15:31:18.867945Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.058923959732055664, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.not_null_stg_zendesk__time_zone_time_zone.b25b3452b1", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect time_zone\nfrom `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__time_zone`\nwhere time_zone is null\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.871012Z", "completed_at": "2024-05-14T15:31:18.875989Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.876895Z", "completed_at": "2024-05-14T15:31:18.876899Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.007723093032836914, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__user_aggregates", "compiled": true, "compiled_code": "with users as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user`\n\n--If you use user tags this will be included, if not it will be ignored.\n\n), user_tags as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user_tag`\n \n), user_tag_aggregate as (\n select\n user_tags.user_id,\n \n string_agg(user_tags.tags, ', ')\n\n as user_tags\n from user_tags\n group by 1\n\n\n\n), final as (\n select \n users.*\n\n --If you use user tags this will be included, if not it will be ignored.\n \n ,user_tag_aggregate.user_tags\n \n from users\n\n --If you use user tags this will be included, if not it will be ignored.\n \n left join user_tag_aggregate\n using(user_id)\n \n)\n\nselect *\nfrom final", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__user_aggregates`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.872839Z", "completed_at": "2024-05-14T15:31:18.876165Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.877053Z", "completed_at": "2024-05-14T15:31:18.877055Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.0077362060546875, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.not_null_stg_zendesk__user_user_id.102d572926", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect user_id\nfrom `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user`\nwhere user_id is null\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.874359Z", "completed_at": "2024-05-14T15:31:18.876333Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.877206Z", "completed_at": "2024-05-14T15:31:18.877209Z"}], "thread_id": "Thread-4 (worker)", 
"execution_time": 0.007766008377075195, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.unique_stg_zendesk__user_user_id.3d3e346b11", "compiled": true, "compiled_code": "\n \n \n\nwith dbt_test__target as (\n\n select user_id as unique_field\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user`\n where user_id is not null\n\n)\n\nselect\n unique_field,\n count(*) as n_records\n\nfrom dbt_test__target\ngroup by unique_field\nhaving count(*) > 1\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.882884Z", "completed_at": "2024-05-14T15:31:18.886801Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.888104Z", "completed_at": "2024-05-14T15:31:18.888108Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.00946807861328125, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.zendesk__ticket_enriched", "compiled": true, "compiled_code": "-- this model enriches the ticket table with ticket-related dimensions. This table will not include any metrics.\n-- for metrics, see ticket_metrics!\n\nwith ticket as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_aggregates`\n\n--If you use using_ticket_form_history this will be included, if not it will be ignored.\n\n), latest_ticket_form as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__latest_ticket_form`\n\n\n), latest_satisfaction_ratings as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_satisfaction`\n\n), users as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__user_aggregates`\n\n), requester_updates as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__requester_updates`\n\n), assignee_updates as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__assignee_updates`\n\n), ticket_group as (\n \n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__group`\n\n), organization as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__organization_aggregates`\n\n), joined as (\n\n select \n\n ticket.*,\n\n --If you use using_ticket_form_history this will be included, if not it will be ignored.\n \n latest_ticket_form.name as ticket_form_name,\n \n\n latest_satisfaction_ratings.count_satisfaction_scores as ticket_total_satisfaction_scores,\n latest_satisfaction_ratings.first_satisfaction_score as ticket_first_satisfaction_score,\n latest_satisfaction_ratings.latest_satisfaction_score as ticket_satisfaction_score,\n latest_satisfaction_ratings.latest_satisfaction_comment as ticket_satisfaction_comment,\n latest_satisfaction_ratings.latest_satisfaction_reason as ticket_satisfaction_reason,\n latest_satisfaction_ratings.is_good_to_bad_satisfaction_score,\n latest_satisfaction_ratings.is_bad_to_good_satisfaction_score,\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n organization.domain_names as ticket_organization_domain_names,\n requester_org.domain_names as requester_organization_domain_names,\n \n\n requester.external_id as requester_external_id,\n requester.created_at as requester_created_at,\n 
requester.updated_at as requester_updated_at,\n requester.role as requester_role,\n requester.email as requester_email,\n requester.name as requester_name,\n requester.is_active as is_requester_active,\n requester.locale as requester_locale,\n requester.time_zone as requester_time_zone,\n coalesce(requester_updates.total_updates, 0) as requester_ticket_update_count,\n requester_updates.last_updated as requester_ticket_last_update_at,\n requester.last_login_at as requester_last_login_at,\n requester.organization_id as requester_organization_id,\n requester_org.name as requester_organization_name,\n\n --If you use organization tags this will be included, if not it will be ignored.\n \n requester_org.organization_tags as requester_organization_tags,\n \n requester_org.external_id as requester_organization_external_id,\n requester_org.created_at as requester_organization_created_at,\n requester_org.updated_at as requester_organization_updated_at,\n submitter.external_id as submitter_external_id,\n submitter.role as submitter_role,\n case when submitter.role in ('agent','admin') \n then true \n else false\n end as is_agent_submitted,\n submitter.email as submitter_email,\n submitter.name as submitter_name,\n submitter.is_active as is_submitter_active,\n submitter.locale as submitter_locale,\n submitter.time_zone as submitter_time_zone,\n assignee.external_id as assignee_external_id,\n assignee.role as assignee_role,\n assignee.email as assignee_email,\n assignee.name as assignee_name,\n assignee.is_active as is_assignee_active,\n assignee.locale as assignee_locale,\n assignee.time_zone as assignee_time_zone,\n coalesce(assignee_updates.total_updates, 0) as assignee_ticket_update_count,\n assignee_updates.last_updated as assignee_ticket_last_update_at,\n assignee.last_login_at as assignee_last_login_at,\n ticket_group.name as group_name,\n organization.name as organization_name\n\n --If you use using_user_tags this will be included, if not it will be ignored.\n \n ,requester.user_tags as requester_tag,\n submitter.user_tags as submitter_tag,\n assignee.user_tags as assignee_tag\n \n\n \n from ticket\n\n --Requester Joins\n join users as requester\n on requester.user_id = ticket.requester_id\n\n left join organization as requester_org\n on requester_org.organization_id = requester.organization_id\n\n left join requester_updates\n on requester_updates.ticket_id = ticket.ticket_id\n and requester_updates.requester_id = ticket.requester_id\n \n --Submitter Joins\n join users as submitter\n on submitter.user_id = ticket.submitter_id\n \n --Assignee Joins\n left join users as assignee\n on assignee.user_id = ticket.assignee_id\n\n left join assignee_updates\n on assignee_updates.ticket_id = ticket.ticket_id\n and assignee_updates.assignee_id = ticket.assignee_id\n\n --Ticket, Org, and Brand Joins\n left join ticket_group\n on ticket_group.group_id = ticket.group_id\n\n --If you use using_ticket_form_history this will be included, if not it will be ignored.\n \n left join latest_ticket_form\n on latest_ticket_form.ticket_form_id = ticket.ticket_form_id\n \n\n left join organization\n on organization.organization_id = ticket.organization_id\n\n left join latest_satisfaction_ratings\n on latest_satisfaction_ratings.ticket_id = ticket.ticket_id\n)\n\nselect *\nfrom joined", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`zendesk__ticket_enriched`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.889152Z", "completed_at": 
"2024-05-14T15:31:18.901109Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.905116Z", "completed_at": "2024-05-14T15:31:18.905119Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.01796889305114746, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__comment_metrics", "compiled": true, "compiled_code": "with __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__updates`\n where field_name = 'comment'\n\n), users as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user`\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n), ticket_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n),\n\ncomment_counts as (\n select\n ticket_id,\n last_comment_added_at,\n sum(case when commenter_role = 'internal_comment' and is_public = true\n then 1\n else 0\n end) as count_public_agent_comments,\n sum(case when commenter_role = 'internal_comment'\n then 1\n else 0\n end) as count_agent_comments,\n sum(case when commenter_role = 'external_comment'\n then 1\n else 0\n end) as count_end_user_comments,\n sum(case when is_public = true\n then 1\n else 0\n end) as count_public_comments,\n sum(case when is_public = false\n then 1\n else 0\n end) as count_internal_comments,\n count(*) as total_comments,\n count(distinct case when commenter_role = 'internal_comment'\n then user_id\n end) as count_ticket_handoffs,\n sum(case when commenter_role = 'internal_comment' and is_public = true and previous_commenter_role != 'first_comment'\n then 1\n else 0\n end) as count_agent_replies\n from ticket_comments\n\n group by 1, 2\n),\n\nfinal as (\n select\n *,\n count_public_agent_comments = 1 as is_one_touch_resolution,\n count_public_agent_comments = 2 as is_two_touch_resolution\n from comment_counts\n)\n\nselect * \nfrom final", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__comment_metrics`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.907602Z", "completed_at": "2024-05-14T15:31:18.914197Z"}, {"name": "execute", "started_at": 
"2024-05-14T15:31:18.914646Z", "completed_at": "2024-05-14T15:31:18.914650Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.00899505615234375, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk.not_null_zendesk__ticket_enriched_ticket_id.e3efc5bf0a", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_id\nfrom `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`zendesk__ticket_enriched`\nwhere ticket_id is null\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.909633Z", "completed_at": "2024-05-14T15:31:18.914994Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:18.916238Z", "completed_at": "2024-05-14T15:31:18.916241Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.00969696044921875, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk.unique_zendesk__ticket_enriched_ticket_id.7c3c6ca9ef", "compiled": true, "compiled_code": "\n \n \n\nwith dbt_test__target as (\n\n select ticket_id as unique_field\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`zendesk__ticket_enriched`\n where ticket_id is not null\n\n)\n\nselect\n unique_field,\n count(*) as n_records\n\nfrom dbt_test__target\ngroup by unique_field\nhaving count(*) > 1\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.600743Z", "completed_at": "2024-05-14T15:31:19.684157Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:19.686747Z", "completed_at": "2024-05-14T15:31:19.686771Z"}], "thread_id": "Thread-1 (worker)", "execution_time": 1.0886940956115723, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__ticket_schedules", "compiled": true, "compiled_code": "\n\nwith ticket as (\n \n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket`\n\n), ticket_schedule as (\n \n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_schedule`\n\n), schedule as (\n \n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__schedule`\n\n\n), default_schedule_events as (\n-- Goal: understand the working schedules applied to tickets, so that we can then determine the applicable business hours/schedule.\n-- Your default schedule is used for all tickets, unless you set up a trigger to apply a specific schedule to specific tickets.\n\n-- This portion of the query creates ticket_schedules for these \"default\" schedules, as the ticket_schedule table only includes\n-- trigger schedules\n\n\n\n \n\n \n\n \n\n select\n ticket.ticket_id,\n ticket.created_at as schedule_created_at,\n '360000310393' as schedule_id\n from ticket\n left join ticket_schedule as first_schedule\n on first_schedule.ticket_id = ticket.ticket_id\n and \n\n timestamp_add(first_schedule.created_at, interval -5 second)\n\n <= ticket.created_at\n and first_schedule.created_at >= ticket.created_at \n where first_schedule.ticket_id is null\n\n), schedule_events as (\n \n select\n *\n from default_schedule_events\n \n union all\n \n select \n ticket_id,\n created_at as schedule_created_at,\n schedule_id\n from ticket_schedule\n\n), ticket_schedules as (\n \n select \n ticket_id,\n schedule_id,\n schedule_created_at,\n coalesce(lead(schedule_created_at) over (partition by ticket_id order by schedule_created_at)\n , \n\n 
timestamp_add(current_timestamp, interval 1000 hour)\n\n ) as schedule_invalidated_at\n from schedule_events\n\n)\nselect\n *\nfrom ticket_schedules", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_schedules`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:19.737288Z", "completed_at": "2024-05-14T15:31:19.746699Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:19.748320Z", "completed_at": "2024-05-14T15:31:19.748325Z"}], "thread_id": "Thread-1 (worker)", "execution_time": 0.05343985557556152, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__agent_work_time_business_hours", "compiled": true, "compiled_code": "\n\n-- AGENT WORK TIME\n-- This is complicated, as SLAs minutes are only counted while the ticket is in 'new' or 'open' status.\n\n-- Additionally, for business hours, only 'new' or 'open' status hours are counted if they are also during business hours\nwith agent_work_time_filtered_statuses as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__agent_work_time_filtered_statuses`\n where in_business_hours\n\n), schedule as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__schedule_spine`\n\n), ticket_schedules as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_schedules`\n \n-- cross schedules with work time\n), ticket_status_crossed_with_schedule as (\n \n select\n agent_work_time_filtered_statuses.ticket_id,\n agent_work_time_filtered_statuses.sla_applied_at,\n agent_work_time_filtered_statuses.target, \n agent_work_time_filtered_statuses.sla_policy_name, \n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as valid_starting_at,\n least(valid_ending_at, schedule_invalidated_at) as valid_ending_at,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n valid_starting_at as status_valid_starting_at,\n valid_ending_at as status_valid_ending_at\n\n from agent_work_time_filtered_statuses\n left join ticket_schedules\n on agent_work_time_filtered_statuses.ticket_id = ticket_schedules.ticket_id\n where \n\n datetime_diff(\n cast(least(valid_ending_at, schedule_invalidated_at) as datetime),\n cast(greatest(valid_starting_at, schedule_created_at) as datetime),\n second\n )\n\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n sla_applied_at,\n target, \n sla_policy_name, \n schedule_id,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n (\n\n datetime_diff(\n cast(cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp) as datetime),\n cast(cast(cast(timestamp_trunc(\n cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp),\n week\n ) as date)as timestamp) as datetime),\n second\n )\n\n /60\n ) as valid_starting_at_in_minutes_from_week,\n (\n\n datetime_diff(\n cast(ticket_status_crossed_with_schedule.valid_ending_at as datetime),\n cast(ticket_status_crossed_with_schedule.valid_starting_at as datetime),\n second\n )\n\n /60\n ) as raw_delta_in_minutes,\n cast(timestamp_trunc(\n cast(ticket_status_crossed_with_schedule.valid_starting_at as 
timestamp),\n week\n ) as date) as start_week_date\n \n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7,8,9,10\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as INT64) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((valid_starting_at_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_period_agent_work_time as (\n\n select \n\n ticket_id,\n sla_applied_at,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n target,\n sla_policy_name,\n valid_starting_at_in_minutes_from_week,\n raw_delta_in_minutes,\n week_number,\n schedule_id,\n start_week_date,\n cast(greatest(0, valid_starting_at_in_minutes_from_week - week_number * (7*24*60)) as INT64) as ticket_week_start_time_minute,\n cast(least(valid_starting_at_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as INT64) as ticket_week_end_time_minute\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods_agent as (\n \n select \n weekly_period_agent_work_time.ticket_id,\n weekly_period_agent_work_time.sla_applied_at,\n weekly_period_agent_work_time.target,\n weekly_period_agent_work_time.sla_policy_name,\n weekly_period_agent_work_time.valid_starting_at,\n weekly_period_agent_work_time.valid_ending_at,\n weekly_period_agent_work_time.week_number,\n weekly_period_agent_work_time.ticket_week_start_time_minute,\n weekly_period_agent_work_time.ticket_week_end_time_minute,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time_minute, schedule.end_time_utc) - greatest(weekly_period_agent_work_time.ticket_week_start_time_minute, schedule.start_time_utc) as scheduled_minutes\n from weekly_period_agent_work_time\n join schedule on ticket_week_start_time_minute <= schedule.end_time_utc \n and ticket_week_end_time_minute >= schedule.start_time_utc\n and weekly_period_agent_work_time.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_end_time_minute minute\n )\n\n as timestamp) > cast(schedule.valid_from as 
timestamp)\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_start_time_minute minute\n )\n\n as timestamp) < cast(schedule.valid_until as timestamp)\n\n), intercepted_periods_with_running_total as (\n \n select \n *,\n sum(scheduled_minutes) over \n (partition by ticket_id, sla_applied_at \n order by valid_starting_at, week_number, schedule_end_time\n rows between unbounded preceding and current row)\n as running_total_scheduled_minutes\n\n from intercepted_periods_agent\n\n\n), intercepted_periods_agent_with_breach_flag as (\n select \n intercepted_periods_with_running_total.*,\n target - running_total_scheduled_minutes as remaining_target_minutes,\n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) as lag_check,\n case when (target - running_total_scheduled_minutes) = 0 then true\n when (target - running_total_scheduled_minutes) < 0 \n and \n (lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) > 0 \n or \n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) is null) \n then true else false end as is_breached_during_schedule\n \n from intercepted_periods_with_running_total\n\n), intercepted_periods_agent_filtered as (\n\n select\n *,\n (remaining_target_minutes + scheduled_minutes) as breach_minutes,\n greatest(ticket_week_start_time_minute, schedule_start_time) + (remaining_target_minutes + scheduled_minutes) as breach_minutes_from_week\n from intercepted_periods_agent_with_breach_flag\n \n), agent_work_business_breach as (\n \n select \n *,\n \n\n timestamp_add(timestamp_trunc(\n cast(valid_starting_at as timestamp),\n week\n ), interval cast(((7*24*60) * week_number) + breach_minutes_from_week as INT64 ) minute)\n\n as sla_breach_at\n from intercepted_periods_agent_filtered\n\n)\n\nselect * \nfrom agent_work_business_breach", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__agent_work_time_business_hours`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:19.702656Z", "completed_at": "2024-05-14T15:31:19.747764Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:19.749640Z", "completed_at": "2024-05-14T15:31:19.749644Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.05718517303466797, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__reply_time_business_hours", "compiled": true, "compiled_code": "\n\n-- step 3, determine when an SLA will breach for SLAs that are in business hours\n\nwith ticket_schedules as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_schedules`\n\n), schedule as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__schedule_spine`\n\n), sla_policy_applied as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__sla_policy_applied`\n\n), users as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__user_aggregates`\n\n), ticket_updates as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__updates`\n\n), 
ticket_solved_times as (\n select\n ticket_id,\n valid_starting_at as solved_at\n from ticket_updates\n where field_name = 'status'\n and value in ('solved','closed')\n\n), reply_time as (\n select \n ticket_comment.ticket_id,\n ticket_comment.valid_starting_at as reply_at,\n commenter.role\n from ticket_updates as ticket_comment\n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n where field_name = 'comment' \n and ticket_comment.is_public\n and commenter.role in ('agent','admin')\n\n), schedule_business_hours as (\n\n select \n schedule_id,\n sum(end_time - start_time) as total_schedule_weekly_business_minutes\n -- referring to stg_zendesk__schedule instead of int_zendesk__schedule_spine just to calculate total minutes\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__schedule`\n group by 1\n\n), ticket_sla_applied_with_schedules as (\n\n select \n sla_policy_applied.*,\n ticket_schedules.schedule_id,\n (\n\n datetime_diff(\n cast(cast(sla_policy_applied.sla_applied_at as timestamp) as datetime),\n cast(cast(cast(timestamp_trunc(\n cast(sla_policy_applied.sla_applied_at as timestamp),\n week\n ) as date)as timestamp) as datetime),\n second\n )\n\n /60\n ) as start_time_in_minutes_from_week,\n schedule_business_hours.total_schedule_weekly_business_minutes,\n cast(timestamp_trunc(\n cast(sla_policy_applied.sla_applied_at as timestamp),\n week\n ) as date) as start_week_date\n\n from sla_policy_applied\n left join ticket_schedules on sla_policy_applied.ticket_id = ticket_schedules.ticket_id\n and \n\n timestamp_add(ticket_schedules.schedule_created_at, interval -1 second)\n\n <= sla_policy_applied.sla_applied_at\n and \n\n timestamp_add(ticket_schedules.schedule_invalidated_at, interval -1 second)\n\n > sla_policy_applied.sla_applied_at\n left join schedule_business_hours \n on ticket_schedules.schedule_id = schedule_business_hours.schedule_id\n where sla_policy_applied.in_business_hours\n and metric in ('next_reply_time', 'first_reply_time')\n\n), first_reply_solve_times as (\n select\n ticket_sla_applied_with_schedules.ticket_id,\n ticket_sla_applied_with_schedules.ticket_created_at,\n ticket_sla_applied_with_schedules.valid_starting_at,\n ticket_sla_applied_with_schedules.ticket_current_status,\n ticket_sla_applied_with_schedules.metric,\n ticket_sla_applied_with_schedules.latest_sla,\n ticket_sla_applied_with_schedules.sla_applied_at,\n ticket_sla_applied_with_schedules.target,\n ticket_sla_applied_with_schedules.in_business_hours,\n ticket_sla_applied_with_schedules.sla_policy_name,\n ticket_sla_applied_with_schedules.schedule_id,\n ticket_sla_applied_with_schedules.start_time_in_minutes_from_week,\n ticket_sla_applied_with_schedules.total_schedule_weekly_business_minutes,\n ticket_sla_applied_with_schedules.start_week_date,\n min(reply_time.reply_at) as first_reply_time,\n min(ticket_solved_times.solved_at) as first_solved_time\n from ticket_sla_applied_with_schedules\n left join reply_time\n on reply_time.ticket_id = ticket_sla_applied_with_schedules.ticket_id\n and reply_time.reply_at > ticket_sla_applied_with_schedules.sla_applied_at\n left join ticket_solved_times\n on ticket_sla_applied_with_schedules.ticket_id = ticket_solved_times.ticket_id\n and ticket_solved_times.solved_at > ticket_sla_applied_with_schedules.sla_applied_at\n group by 1,2,3,4,5,6,7,8,9,10,11,12,13,14\n\n), week_index_calc as (\n select \n *,\n \n\n datetime_diff(\n cast(least(coalesce(first_reply_time, current_timestamp()), 
coalesce(first_solved_time, current_timestamp())) as datetime),\n cast(sla_applied_at as datetime),\n week\n )\n\n + 1 as week_index\n from first_reply_solve_times\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_sla_applied as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select\n week_index_calc.*,\n cast(weeks.generated_number - 1 as INT64) as week_number\n\n from week_index_calc\n cross join weeks\n where week_index >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_sla_applied.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as INT64) as ticket_week_start_time,\n cast((7*24*60) as INT64) as ticket_week_end_time\n from weeks_cross_ticket_sla_applied\n\n), intercepted_periods as (\n\n select \n weekly_periods.*,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n (schedule.end_time_utc - greatest(ticket_week_start_time,schedule.start_time_utc)) as lapsed_business_minutes,\n sum(schedule.end_time_utc - greatest(ticket_week_start_time,schedule.start_time_utc)) over \n (partition by ticket_id, metric, sla_applied_at \n order by week_number, schedule.start_time_utc\n rows between unbounded preceding and current row) as sum_lapsed_business_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast (\n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_end_time minute\n )\n\n as date) > cast(schedule.valid_from as date)\n and cast (\n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_start_time minute\n )\n\n as date) < cast(schedule.valid_until as date)\n\n), intercepted_periods_with_breach_flag as (\n \n select \n *,\n target - sum_lapsed_business_minutes as remaining_minutes,\n case when (target - sum_lapsed_business_minutes) < 0 \n and \n (lag(target - sum_lapsed_business_minutes) over\n (partition by ticket_id, metric, sla_applied_at order by week_number, schedule_start_time) >= 0 \n or \n lag(target - sum_lapsed_business_minutes) over\n (partition by ticket_id, metric, sla_applied_at order by week_number, schedule_start_time) is null) \n then true else false end as is_breached_during_schedule -- this flags the scheduled period on which the breach took place\n from intercepted_periods\n\n), 
intercepted_periods_with_breach_flag_calculated as (\n\n select\n *,\n schedule_end_time + remaining_minutes as breached_at_minutes,\n timestamp_trunc(\n cast(sla_applied_at as timestamp),\n week\n ) as starting_point,\n \n\n timestamp_add(cast(cast(timestamp_trunc(\n cast(sla_applied_at as timestamp),\n week\n ) as date) as timestamp), interval cast(((7*24*60) * week_number) + (schedule_end_time + remaining_minutes) as INT64 ) minute)\n\n as sla_breach_at,\n \n\n timestamp_add(cast(cast(timestamp_trunc(\n cast(sla_applied_at as timestamp),\n week\n ) as date) as timestamp), interval cast(((7*24*60) * week_number) + (schedule_start_time) as INT64 ) minute)\n\n as sla_schedule_start_at,\n \n\n timestamp_add(cast(cast(timestamp_trunc(\n cast(sla_applied_at as timestamp),\n week\n ) as date) as timestamp), interval cast(((7*24*60) * week_number) + (schedule_end_time) as INT64 ) minute)\n\n as sla_schedule_end_at,\n cast(\n \n\n datetime_add(\n cast( \n\n datetime_add(\n cast( timestamp_trunc(\n cast(sla_applied_at as timestamp),\n week\n ) as datetime),\n interval 1 week\n )\n\n as datetime),\n interval -1 day\n )\n\n\n as date) as week_end_date\n from intercepted_periods_with_breach_flag\n\n), reply_time_business_hours_sla as (\n\n select\n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n greatest(sla_applied_at,sla_schedule_start_at) as sla_schedule_start_at,\n sla_schedule_end_at,\n target,\n sum_lapsed_business_minutes,\n in_business_hours,\n sla_breach_at,\n is_breached_during_schedule,\n total_schedule_weekly_business_minutes,\n max(case when is_breached_during_schedule then sla_breach_at else null end) over (partition by ticket_id, metric, sla_applied_at, target) as sla_breach_exact_time,\n week_number\n from intercepted_periods_with_breach_flag_calculated\n\n) \n\nselect * \nfrom reply_time_business_hours_sla", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__reply_time_business_hours`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:19.750848Z", "completed_at": "2024-05-14T15:31:19.777738Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:19.800940Z", "completed_at": "2024-05-14T15:31:19.800946Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.052583932876586914, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__requester_wait_time_business_hours", "compiled": true, "compiled_code": "\n\n-- REQUESTER WAIT TIME\n-- This is complicated, as SLAs minutes are only counted while the ticket is in 'new', 'open', and 'on-hold' status.\n\n-- Additionally, for business hours, only 'new', 'open', and 'on-hold' status hours are counted if they are also during business hours\nwith requester_wait_time_filtered_statuses as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__requester_wait_time_filtered_statuses`\n where in_business_hours\n\n), schedule as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__schedule_spine`\n\n), ticket_schedules as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_schedules`\n \n-- cross schedules with work time\n), ticket_status_crossed_with_schedule as (\n \n select\n requester_wait_time_filtered_statuses.ticket_id,\n requester_wait_time_filtered_statuses.sla_applied_at,\n requester_wait_time_filtered_statuses.target,\n 
requester_wait_time_filtered_statuses.sla_policy_name,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as valid_starting_at,\n least(valid_ending_at, schedule_invalidated_at) as valid_ending_at,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n valid_starting_at as status_valid_starting_at,\n valid_ending_at as status_valid_ending_at\n\n from requester_wait_time_filtered_statuses\n left join ticket_schedules\n on requester_wait_time_filtered_statuses.ticket_id = ticket_schedules.ticket_id\n where \n\n datetime_diff(\n cast(least(valid_ending_at, schedule_invalidated_at) as datetime),\n cast(greatest(valid_starting_at, schedule_created_at) as datetime),\n second\n )\n\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n sla_applied_at,\n target,\n sla_policy_name,\n schedule_id,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n (\n\n datetime_diff(\n cast(cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp) as datetime),\n cast(cast(cast(timestamp_trunc(\n cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp),\n week\n ) as date)as timestamp) as datetime),\n second\n )\n\n /60\n ) as valid_starting_at_in_minutes_from_week,\n (\n\n datetime_diff(\n cast(ticket_status_crossed_with_schedule.valid_ending_at as datetime),\n cast(ticket_status_crossed_with_schedule.valid_starting_at as datetime),\n second\n )\n\n /60\n ) as raw_delta_in_minutes,\n cast(timestamp_trunc(\n cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp),\n week\n ) as date) as start_week_date\n\n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7,8,9,10\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as INT64) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((valid_starting_at_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_period_requester_wait_time as (\n\n select \n\n ticket_id,\n sla_applied_at,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n target,\n sla_policy_name,\n valid_starting_at_in_minutes_from_week,\n raw_delta_in_minutes,\n week_number,\n schedule_id,\n start_week_date,\n cast(greatest(0, 
valid_starting_at_in_minutes_from_week - week_number * (7*24*60)) as INT64) as ticket_week_start_time_minute,\n cast(least(valid_starting_at_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as INT64) as ticket_week_end_time_minute\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods_agent as (\n \n select \n weekly_period_requester_wait_time.ticket_id,\n weekly_period_requester_wait_time.sla_applied_at,\n weekly_period_requester_wait_time.target,\n weekly_period_requester_wait_time.sla_policy_name,\n weekly_period_requester_wait_time.valid_starting_at,\n weekly_period_requester_wait_time.valid_ending_at,\n weekly_period_requester_wait_time.week_number,\n weekly_period_requester_wait_time.ticket_week_start_time_minute,\n weekly_period_requester_wait_time.ticket_week_end_time_minute,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time_minute, schedule.end_time_utc) - greatest(weekly_period_requester_wait_time.ticket_week_start_time_minute, schedule.start_time_utc) as scheduled_minutes\n from weekly_period_requester_wait_time\n join schedule on ticket_week_start_time_minute <= schedule.end_time_utc \n and ticket_week_end_time_minute >= schedule.start_time_utc\n and weekly_period_requester_wait_time.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_end_time_minute minute\n )\n\n as timestamp) > cast(schedule.valid_from as timestamp)\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_start_time_minute minute\n )\n\n as timestamp) < cast(schedule.valid_until as timestamp)\n \n), intercepted_periods_with_running_total as (\n \n select \n *,\n sum(scheduled_minutes) over \n (partition by ticket_id, sla_applied_at \n order by valid_starting_at, week_number, schedule_end_time\n rows between unbounded preceding and current row)\n as running_total_scheduled_minutes\n\n from intercepted_periods_agent\n\n\n), intercepted_periods_agent_with_breach_flag as (\n select \n intercepted_periods_with_running_total.*,\n target - running_total_scheduled_minutes as remaining_target_minutes,\n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) as lag_check,\n case when (target - running_total_scheduled_minutes) = 0 then true\n when (target - running_total_scheduled_minutes) < 0 \n and \n (lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) > 0 \n or \n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) is null) \n then true else false end as is_breached_during_schedule\n \n from intercepted_periods_with_running_total\n\n), intercepted_periods_agent_filtered as (\n\n select\n *,\n (remaining_target_minutes + scheduled_minutes) as breach_minutes,\n greatest(ticket_week_start_time_minute, schedule_start_time) + (remaining_target_minutes + 
scheduled_minutes) as breach_minutes_from_week\n from intercepted_periods_agent_with_breach_flag\n\n), requester_wait_business_breach as (\n \n select \n *,\n \n\n timestamp_add(timestamp_trunc(\n cast(valid_starting_at as timestamp),\n week\n ), interval cast(((7*24*60) * week_number) + breach_minutes_from_week as INT64 ) minute)\n\n as sla_breach_at\n from intercepted_periods_agent_filtered\n\n)\n\nselect * \nfrom requester_wait_business_breach", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__requester_wait_time_business_hours`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:19.822393Z", "completed_at": "2024-05-14T15:31:19.827251Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:19.828031Z", "completed_at": "2024-05-14T15:31:19.828035Z"}], "thread_id": "Thread-1 (worker)", "execution_time": 0.006574153900146484, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__reply_time_combined", "compiled": true, "compiled_code": "with reply_time_calendar_hours_sla as (\n \n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__reply_time_calendar_hours`\n\n\n\n), reply_time_business_hours_sla as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__reply_time_business_hours`\n\n\n\n), ticket_updates as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__updates`\n\n), users as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__user_aggregates`\n\n), reply_time_breached_at as (\n\n select \n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n sla_applied_at as sla_schedule_start_at,\n cast(null as timestamp) as sla_schedule_end_at,\n cast(null as numeric) as sum_lapsed_business_minutes,\n target,\n in_business_hours,\n sla_breach_at,\n cast(null as numeric) as week_number,\n cast(null as numeric) as total_schedule_weekly_business_minutes\n from reply_time_calendar_hours_sla\n\n\n\n union all\n\n select \n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n sla_schedule_start_at,\n sla_schedule_end_at,\n sum_lapsed_business_minutes,\n target,\n in_business_hours,\n sla_breach_exact_time as sla_breach_at,\n week_number,\n total_schedule_weekly_business_minutes\n from reply_time_business_hours_sla\n\n\n-- Now that we have the breach time, see when the first reply after the sla policy was applied took place.\n), ticket_solved_times as (\n select\n ticket_id,\n valid_starting_at as solved_at\n from ticket_updates\n where field_name = 'status'\n and value in ('solved','closed')\n\n), reply_time as (\n select \n ticket_comment.ticket_id,\n ticket_comment.valid_starting_at as reply_at,\n commenter.role\n from ticket_updates as ticket_comment\n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n where field_name = 'comment' \n and ticket_comment.is_public\n and commenter.role in ('agent','admin')\n\n), reply_time_breached_at_with_next_reply_timestamp as (\n\n select \n reply_time_breached_at.ticket_id,\n reply_time_breached_at.sla_policy_name,\n reply_time_breached_at.metric,\n reply_time_breached_at.ticket_created_at,\n reply_time_breached_at.sla_applied_at,\n reply_time_breached_at.sum_lapsed_business_minutes,\n reply_time_breached_at.target,\n reply_time_breached_at.in_business_hours,\n 
reply_time_breached_at.sla_breach_at,\n reply_time_breached_at.week_number,\n min(reply_time_breached_at.sla_schedule_start_at) as sla_schedule_start_at,\n min(reply_time_breached_at.sla_schedule_end_at) as sla_schedule_end_at,\n min(reply_at) as agent_reply_at,\n min(solved_at) as next_solved_at\n from reply_time_breached_at\n left join reply_time\n on reply_time.ticket_id = reply_time_breached_at.ticket_id\n and reply_time.reply_at > reply_time_breached_at.sla_applied_at\n left join ticket_solved_times\n on reply_time_breached_at.ticket_id = ticket_solved_times.ticket_id\n and ticket_solved_times.solved_at > reply_time_breached_at.sla_applied_at\n group by 1,2,3,4,5,6,7,8,9,10\n\n), lagging_time_block as (\n select\n *,\n row_number() over (partition by ticket_id, metric, sla_applied_at order by sla_schedule_start_at) as day_index,\n lead(sla_schedule_start_at) over (partition by ticket_id, sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at) as next_schedule_start,\n min(sla_breach_at) over (partition by sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at rows unbounded preceding) as first_sla_breach_at,\n\t\tcoalesce(lag(sum_lapsed_business_minutes) over (partition by sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at), 0) as sum_lapsed_business_minutes_new,\n \n\n datetime_diff(\n cast(agent_reply_at as datetime),\n cast(sla_schedule_start_at as datetime),\n second\n )\n\n / 60 as total_runtime_minutes -- total minutes from sla_schedule_start_at and agent reply time, before taking into account SLA end time\n from reply_time_breached_at_with_next_reply_timestamp\n\n), filtered_reply_times as (\n select\n *\n from lagging_time_block\n where (\n in_business_hours\n and ((\n agent_reply_at >= sla_schedule_start_at and agent_reply_at <= sla_schedule_end_at) -- ticket is replied to between a schedule window\n or (agent_reply_at < sla_schedule_start_at and sum_lapsed_business_minutes_new = 0 and sla_breach_at = first_sla_breach_at and day_index = 1) -- ticket is replied to before any schedule begins and no business minutes have been spent on it\n or (agent_reply_at is null and next_solved_at >= sla_schedule_start_at and next_solved_at < next_schedule_start) -- There are no reply times, but the ticket is closed and we should capture the closed date as the first and/or next reply time if there is not one preceding.\n or (next_solved_at is null and agent_reply_at is null and current_timestamp() >= sla_schedule_start_at and (current_timestamp() < next_schedule_start or next_schedule_start is null)) -- ticket is not replied to and therefore active. 
But only bring through the active SLA record that is most recent (after the last SLA schedule starts but before the next, or if there does not exist a next SLA schedule start time) \n or (agent_reply_at > sla_schedule_end_at and (agent_reply_at < next_schedule_start or next_schedule_start is null)) -- ticket is replied to outside sla schedule hours\n ) and sla_schedule_start_at <= current_timestamp()) -- To help limit the data we do not want to bring through any schedule rows in the future.\n or not in_business_hours\n\n), reply_time_breached_at_remove_old_sla as (\n select\n *,\n current_timestamp() as current_time_check,\n lead(sla_applied_at) over (partition by ticket_id, metric, in_business_hours order by sla_applied_at) as updated_sla_policy_starts_at,\n case when \n lead(sla_applied_at) over (partition by ticket_id, metric, in_business_hours order by sla_applied_at) --updated sla policy start at time\n < sla_breach_at then true else false end as is_stale_sla_policy,\n case when (sla_breach_at < agent_reply_at and sla_breach_at < next_solved_at)\n or (sla_breach_at < agent_reply_at and next_solved_at is null)\n or (agent_reply_at is null and sla_breach_at < next_solved_at)\n or (agent_reply_at is null and next_solved_at is null)\n then true\n else false\n end as is_sla_breached,\n sum_lapsed_business_minutes_new + total_runtime_minutes as total_new_minutes -- add total runtime to sum_lapsed_business_minutes_new (the sum_lapsed_business_minutes from prior row)\n from filtered_reply_times\n\n), reply_time_breach as ( \n select \n *,\n case when is_sla_breached\n then sla_breach_at -- If the SLA was breached then record that time as the breach \n else coalesce(agent_reply_at, next_solved_at) -- If the SLA was not breached then record either the agent_reply_at or next_solve_at as the breach event time as it was achieved.\n end as sla_update_at,\n case when total_runtime_minutes < 0 -- agent has already replied to prior to this SLA schedule\n then 0 -- so don't add new minutes to the SLA\n when total_new_minutes > sum_lapsed_business_minutes -- if total runtime, regardless of when the SLA schedule ended, is more than the total lapsed business minutes, that means the agent replied after the SLA schedule\n then sum_lapsed_business_minutes -- the elapsed time after the SLA end time should not be calculated as part of the business minutes, therefore sla_elapsed_time should only be sum_lapsed_business_minutes\n else sum_lapsed_business_minutes_new + (\n\n datetime_diff(\n cast(coalesce(agent_reply_at, next_solved_at, current_time_check) as datetime),\n cast(sla_schedule_start_at as datetime),\n second\n )\n\n / 60) -- otherwise, the sla_elapsed_time will be sum_lapsed_business_minutes_new (the prior record's sum_lapsed_business_minutes) plus the minutes between SLA schedule start and agent_reply_time. 
If the agent hasn't replied yet, then the minute counter is still running, hence the coalesce of agent_reply_time and current_time_check.\n end as sla_elapsed_time\n from reply_time_breached_at_remove_old_sla \n)\n\nselect *\nfrom reply_time_breach", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__reply_time_combined`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:19.870909Z", "completed_at": "2024-05-14T15:31:19.878287Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:19.879780Z", "completed_at": "2024-05-14T15:31:19.879784Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.012192010879516602, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.zendesk__sla_policies", "compiled": true, "compiled_code": "--final step where we union together all of the reply time, agent work time, and requester wait time sla's\n\nwith reply_time_sla as (\n\n select * \n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__reply_time_combined`\n\n), agent_work_calendar_sla as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__agent_work_time_calendar_hours`\n\n), requester_wait_calendar_sla as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__requester_wait_time_calendar_hours`\n\n\n\n), agent_work_business_sla as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__agent_work_time_business_hours`\n\n), requester_wait_business_sla as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__requester_wait_time_business_hours`\n\n\n\n), all_slas_unioned as (\n select\n ticket_id,\n sla_policy_name,\n metric,\n sla_applied_at,\n target,\n in_business_hours,\n sla_update_at as sla_breach_at,\n sla_elapsed_time,\n is_sla_breached\n from reply_time_sla\n\nunion all\n\n select\n ticket_id,\n sla_policy_name,\n 'agent_work_time' as metric,\n sla_applied_at,\n target,\n false as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_calendar_minutes) as sla_elapsed_time,\n \n\n max( is_breached_during_schedule )\n\n\n from agent_work_calendar_sla\n\n group by 1, 2, 3, 4, 5, 6\n\nunion all\n\n select\n ticket_id,\n sla_policy_name,\n 'requester_wait_time' as metric,\n sla_applied_at,\n target,\n false as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_calendar_minutes) as sla_elapsed_time,\n \n\n max( is_breached_during_schedule )\n\n\n from requester_wait_calendar_sla\n\n group by 1, 2, 3, 4, 5, 6\n\n\n\n\nunion all \n\n select \n ticket_id,\n sla_policy_name,\n 'agent_work_time' as metric,\n sla_applied_at,\n target,\n true as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_scheduled_minutes) as sla_elapsed_time,\n \n\n max( is_breached_during_schedule )\n\n\n from agent_work_business_sla\n \n group by 1, 2, 3, 4, 5, 6\n\nunion all \n\n select \n ticket_id,\n sla_policy_name,\n 'requester_wait_time' as metric,\n sla_applied_at,\n target,\n true as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_scheduled_minutes) as sla_elapsed_time,\n \n\n max( is_breached_during_schedule )\n\n\n \n from requester_wait_business_sla\n \n group by 1, 2, 3, 4, 5, 6\n\n\n\n)\n\nselect \n to_hex(md5(cast(coalesce(cast(ticket_id as string), 
'_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(metric as string), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(sla_applied_at as string), '_dbt_utils_surrogate_key_null_') as string))) as sla_event_id,\n ticket_id,\n sla_policy_name,\n metric,\n sla_applied_at,\n target,\n in_business_hours,\n sla_breach_at,\n case when sla_elapsed_time is null\n then (\n\n datetime_diff(\n cast(current_timestamp as datetime),\n cast(sla_applied_at as datetime),\n second\n )\n\n / 60) --This will create an entry for active sla's\n else sla_elapsed_time\n end as sla_elapsed_time,\n sla_breach_at > current_timestamp as is_active_sla,\n case when (sla_breach_at > current_timestamp)\n then null\n else is_sla_breached\n end as is_sla_breach\nfrom all_slas_unioned", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`zendesk__sla_policies`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:19.885852Z", "completed_at": "2024-05-14T15:31:19.896401Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:19.927375Z", "completed_at": "2024-05-14T15:31:19.927383Z"}], "thread_id": "Thread-1 (worker)", "execution_time": 0.04361891746520996, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk.unique_zendesk__sla_policies_sla_event_id.5daff4d2bd", "compiled": true, "compiled_code": "\n \n \n\nwith dbt_test__target as (\n\n select sla_event_id as unique_field\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`zendesk__sla_policies`\n where sla_event_id is not null\n\n)\n\nselect\n unique_field,\n count(*) as n_records\n\nfrom dbt_test__target\ngroup by unique_field\nhaving count(*) > 1\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:19.828682Z", "completed_at": "2024-05-14T15:31:19.928170Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:19.928584Z", "completed_at": "2024-05-14T15:31:19.928587Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.10129523277282715, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.zendesk__ticket_metrics", "compiled": true, "compiled_code": "with __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_status`\n where status = 'solved'\n\n), ticket as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket`\n\n), ticket_historical_assignee as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_assignee`\n\n), ticket_historical_group as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_group`\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n 
ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n\n datetime_diff(\n cast(solved_times.last_solved_at as datetime),\n cast(ticket_historical_assignee.first_agent_assignment_date as datetime),\n minute\n )\n\n as first_assignment_to_resolution_calendar_minutes,\n \n\n datetime_diff(\n cast(solved_times.last_solved_at as datetime),\n cast(ticket_historical_assignee.last_agent_assignment_date as datetime),\n minute\n )\n\n as last_assignment_to_resolution_calendar_minutes,\n \n\n datetime_diff(\n cast(solved_times.first_solved_at as datetime),\n cast(ticket.created_at as datetime),\n minute\n )\n\n as first_resolution_calendar_minutes,\n \n\n datetime_diff(\n cast(solved_times.last_solved_at as datetime),\n cast(ticket.created_at as datetime),\n minute\n )\n\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n), __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__updates`\n where field_name = 'comment'\n\n), users as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user`\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n), __dbt__cte__int_zendesk__ticket_reply_times as (\nwith ticket_public_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we 
only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commentor was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want to end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n group by 1,2,3\n\n)\n\n select\n *,\n (\n\n datetime_diff(\n cast(agent_responded_at as datetime),\n cast(end_user_comment_created_at as datetime),\n second\n )\n\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2\n), __dbt__cte__int_zendesk__ticket_reply_times_calendar as (\nwith ticket as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket`\n\n), ticket_reply_times as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times\n\n)\n\nselect\n\n ticket.ticket_id,\n sum(case when is_first_comment then reply_time_calendar_minutes\n else null end) as first_reply_time_calendar_minutes,\n sum(reply_time_calendar_minutes) as total_reply_time_calendar_minutes --total combined time the customer waits for internal response\n \nfrom ticket\nleft join ticket_reply_times\n using (ticket_id)\n\ngroup by 1\n), __dbt__cte__int_zendesk__ticket_work_time_calendar as (\nwith ticket_historical_status as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_status`\n\n), calendar_minutes as (\n \n select \n ticket_id,\n status,\n case when status in ('pending') then status_duration_calendar_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold') then status_duration_calendar_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold', 'pending') then status_duration_calendar_minutes \n else 0 end as solve_time_in_minutes, \n case when status in ('new', 'open') then status_duration_calendar_minutes\n else 0 end as agent_work_time_in_minutes,\n case when status in ('hold') then status_duration_calendar_minutes\n else 0 end as on_hold_time_in_minutes,\n case when status = 'new' then status_duration_calendar_minutes\n else 0 end as new_status_duration_minutes,\n case when status = 'open' then status_duration_calendar_minutes\n else 0 end as open_status_duration_minutes,\n case when status = 'deleted' then 1\n else 0 end as ticket_deleted,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_status_assignment_date,\n case when lag(status) over (partition by ticket_id order by 
valid_starting_at) = 'deleted' and status != 'deleted'\n then 1\n else 0\n end as ticket_recoveries\n\n from ticket_historical_status\n\n)\n\nselect \n ticket_id,\n last_status_assignment_date,\n sum(ticket_deleted) as ticket_deleted_count,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_calendar_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_calendar_minutes,\n sum(solve_time_in_minutes) as solve_time_in_calendar_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_calendar_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_calendar_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_calendar_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_calendar_minutes,\n sum(ticket_recoveries) as total_ticket_recoveries\nfrom calendar_minutes\ngroup by 1, 2\n), __dbt__cte__int_zendesk__ticket_first_resolution_time_business as (\n\n\nwith ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_schedules`\n\n), schedule as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__schedule_spine`\n\n), ticket_first_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.first_solved_at) as first_solved_at,\n\n (\n\n datetime_diff(\n cast(cast(ticket_schedules.schedule_created_at as timestamp) as datetime),\n cast(cast(cast(timestamp_trunc(\n cast(ticket_schedules.schedule_created_at as timestamp),\n week\n ) as date)as timestamp) as datetime),\n second\n )\n\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n\n datetime_diff(\n cast(least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)) as datetime),\n cast(ticket_schedules.schedule_created_at as datetime),\n second\n )\n\n /60\n )) as raw_delta_in_minutes,\n cast(timestamp_trunc(\n cast(ticket_schedules.schedule_created_at as timestamp),\n week\n ) as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time 
spent on the ticket into calendar weeks\n select \n\n ticket_first_resolution_time.*,\n cast(generated_number - 1 as INT64) as week_number\n\n from ticket_first_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_first_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as INT64) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as INT64) as ticket_week_end_time\n \n from weeks_cross_ticket_first_resolution_time\n\n), intercepted_periods as (\n\n select ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_end_time minute\n )\n\n as timestamp) > cast(schedule.valid_from as timestamp)\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_start_time minute\n )\n\n as timestamp) < cast(schedule.valid_until as timestamp)\n\n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as first_resolution_business_minutes\n from intercepted_periods\n group by 1\n), __dbt__cte__int_zendesk__ticket_full_resolution_time_business as (\n\n\nwith ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_schedules`\n\n), schedule as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__schedule_spine`\n\n), ticket_full_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.last_solved_at) as last_solved_at,\n (\n\n datetime_diff(\n cast(cast(ticket_schedules.schedule_created_at as timestamp) as datetime),\n cast(cast(cast(timestamp_trunc(\n cast(ticket_schedules.schedule_created_at as timestamp),\n week\n ) as date)as timestamp) as datetime),\n second\n )\n\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n\n datetime_diff(\n cast(least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)) as datetime),\n cast(ticket_schedules.schedule_created_at as datetime),\n second\n )\n\n /60\n )) as raw_delta_in_minutes,\n cast(timestamp_trunc(\n 
cast(ticket_schedules.schedule_created_at as timestamp),\n week\n ) as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_full_resolution_time.*,\n cast(generated_number - 1 as INT64) as week_number\n\n from ticket_full_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_full_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as INT64) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as INT64) as ticket_week_end_time\n \n from weeks_cross_ticket_full_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_end_time minute\n )\n\n as timestamp) > cast(schedule.valid_from as timestamp)\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_start_time minute\n )\n\n as timestamp) < cast(schedule.valid_until as timestamp)\n \n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as full_resolution_business_minutes\n from intercepted_periods\n group by 1\n), __dbt__cte__int_zendesk__ticket_work_time_business as (\n\n\nwith ticket_historical_status as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_historical_status`\n\n), 
ticket_schedules as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_schedules`\n\n), schedule as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__schedule_spine`\n\n), ticket_status_crossed_with_schedule as (\n \n select\n ticket_historical_status.ticket_id,\n ticket_historical_status.status as ticket_status,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as status_schedule_start,\n least(valid_ending_at, schedule_invalidated_at) as status_schedule_end,\n\n -- bringing the following in to determine which schedule (Daylight Savings vs Standard time) to use\n ticket_historical_status.valid_starting_at as status_valid_starting_at,\n ticket_historical_status.valid_ending_at as status_valid_ending_at\n\n from ticket_historical_status\n left join ticket_schedules\n on ticket_historical_status.ticket_id = ticket_schedules.ticket_id\n -- making sure there is indeed real overlap\n where \n\n datetime_diff(\n cast(least(valid_ending_at, schedule_invalidated_at) as datetime),\n cast(greatest(valid_starting_at, schedule_created_at) as datetime),\n second\n )\n\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n ticket_status,\n schedule_id,\n status_schedule_start,\n status_schedule_end,\n status_valid_starting_at,\n status_valid_ending_at,\n (\n\n datetime_diff(\n cast(cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp) as datetime),\n cast(cast(cast(timestamp_trunc(\n cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp),\n week\n ) as date)as timestamp) as datetime),\n second\n )\n\n /60\n ) as start_time_in_minutes_from_week,\n (\n\n datetime_diff(\n cast(ticket_status_crossed_with_schedule.status_schedule_end as datetime),\n cast(ticket_status_crossed_with_schedule.status_schedule_start as datetime),\n second\n )\n\n /60\n ) as raw_delta_in_minutes,\n cast(timestamp_trunc(\n cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp),\n week\n ) as date) as start_week_date\n\n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as INT64) as week_number\n from ticket_full_solved_time\n cross join weeks\n where 
floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_periods as (\n\n select\n\n weeks_cross_ticket_full_solved_time.*,\n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as INT64) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as INT64) as ticket_week_end_time\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods as (\n \n select \n weekly_periods.ticket_id,\n weekly_periods.week_number,\n weekly_periods.schedule_id,\n weekly_periods.ticket_status,\n weekly_periods.ticket_week_start_time,\n weekly_periods.ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(weekly_periods.ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on \n ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_end_time minute\n )\n\n as timestamp) > cast(schedule.valid_from as timestamp)\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_start_time minute\n )\n\n as timestamp) < cast(schedule.valid_until as timestamp)\n \n), business_minutes as (\n \n select \n ticket_id,\n ticket_status,\n case when ticket_status in ('pending') then scheduled_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold') then scheduled_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold', 'pending') then scheduled_minutes\n else 0 end as solve_time_in_minutes,\n case when ticket_status in ('new', 'open') then scheduled_minutes\n else 0 end as agent_work_time_in_minutes,\n case when ticket_status in ('hold') then scheduled_minutes\n else 0 end as on_hold_time_in_minutes,\n case when ticket_status = 'new' then scheduled_minutes\n else 0 end as new_status_duration_minutes,\n case when ticket_status = 'open' then scheduled_minutes\n else 0 end as open_status_duration_minutes\n from intercepted_periods\n\n)\n \n select \n ticket_id,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_business_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_business_minutes,\n sum(solve_time_in_minutes) as solve_time_in_business_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_business_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_business_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_business_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_business_minutes\n from business_minutes\n group by 1\n), __dbt__cte__int_zendesk__ticket_first_reply_time_business as (\n\n\nwith ticket_reply_times as (\n\n select *\n from 
__dbt__cte__int_zendesk__ticket_reply_times\n\n), ticket_schedules as (\n\n select \n *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__ticket_schedules`\n\n), schedule as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__schedule_spine`\n\n), first_reply_time as (\n\n select\n ticket_id,\n end_user_comment_created_at,\n agent_responded_at\n\n from ticket_reply_times\n where is_first_comment\n\n), ticket_first_reply_time as (\n\n select \n first_reply_time.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in to determine which schedule (Daylight Savings vs Standard time) to use\n min(first_reply_time.agent_responded_at) as agent_responded_at,\n\n (\n\n datetime_diff(\n cast(cast(ticket_schedules.schedule_created_at as timestamp) as datetime),\n cast(cast(cast(timestamp_trunc(\n cast(ticket_schedules.schedule_created_at as timestamp),\n week\n ) as date)as timestamp) as datetime),\n second\n )\n\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n\n datetime_diff(\n cast(least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)) as datetime),\n cast(ticket_schedules.schedule_created_at as datetime),\n second\n )\n\n /60\n )) as raw_delta_in_minutes,\n cast(timestamp_trunc(\n cast(ticket_schedules.schedule_created_at as timestamp),\n week\n ) as date) as start_week_date\n \n from first_reply_time\n join ticket_schedules on first_reply_time.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_reply as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_reply_time.*,\n cast(generated_number - 1 as INT64) as week_number\n\n from ticket_first_reply_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_first_reply.*, \n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as INT64) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as INT64) as ticket_week_end_time\n from weeks_cross_ticket_first_reply\n\n), intercepted_periods as (\n\n select ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n 
schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_end_time minute\n )\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n datetime_add(\n cast( start_week_date as datetime),\n interval week_number * (7*24*60) + ticket_week_start_time minute\n )\n\n as date) < cast(schedule.valid_until as date)\n \n)\n\n select ticket_id,\n sum(scheduled_minutes) as first_reply_time_business_minutes\n from intercepted_periods\n group by 1\n), ticket_enriched as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`zendesk__ticket_enriched`\n\n), ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_reply_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times_calendar\n\n), ticket_comments as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__comment_metrics`\n\n), ticket_work_time_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_work_time_calendar\n\n-- business hour CTEs\n\n\n), ticket_first_resolution_time_business as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_first_resolution_time_business\n\n), ticket_full_resolution_time_business as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_full_resolution_time_business\n\n), ticket_work_time_business as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_work_time_business\n\n), ticket_first_reply_time_business as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_first_reply_time_business\n\n\n-- end business hour CTEs\n\n), calendar_hour_metrics as (\n\nselect\n ticket_enriched.*,\n case when coalesce(ticket_comments.count_public_agent_comments, 0) = 0\n then null\n else ticket_reply_times_calendar.first_reply_time_calendar_minutes\n end as first_reply_time_calendar_minutes,\n case when coalesce(ticket_comments.count_public_agent_comments, 0) = 0\n then null\n else ticket_reply_times_calendar.total_reply_time_calendar_minutes\n end as total_reply_time_calendar_minutes,\n coalesce(ticket_comments.count_agent_comments, 0) as count_agent_comments,\n coalesce(ticket_comments.count_public_agent_comments, 0) as count_public_agent_comments,\n coalesce(ticket_comments.count_end_user_comments, 0) as count_end_user_comments,\n coalesce(ticket_comments.count_public_comments, 0) as count_public_comments,\n coalesce(ticket_comments.count_internal_comments, 0) as count_internal_comments,\n coalesce(ticket_comments.total_comments, 0) as total_comments,\n coalesce(ticket_comments.count_ticket_handoffs, 0) as count_ticket_handoffs, -- the number of distinct internal users who commented on the ticket\n ticket_comments.last_comment_added_at as ticket_last_comment_date,\n 
ticket_resolution_times_calendar.unique_assignee_count,\n ticket_resolution_times_calendar.assignee_stations_count,\n ticket_resolution_times_calendar.group_stations_count,\n ticket_resolution_times_calendar.first_assignee_id,\n ticket_resolution_times_calendar.last_assignee_id,\n ticket_resolution_times_calendar.first_agent_assignment_date,\n ticket_resolution_times_calendar.last_agent_assignment_date,\n ticket_resolution_times_calendar.first_solved_at,\n ticket_resolution_times_calendar.last_solved_at,\n case when ticket_enriched.status in ('solved', 'closed')\n then ticket_resolution_times_calendar.first_assignment_to_resolution_calendar_minutes\n else null\n end as first_assignment_to_resolution_calendar_minutes,\n case when ticket_enriched.status in ('solved', 'closed')\n then ticket_resolution_times_calendar.last_assignment_to_resolution_calendar_minutes\n else null\n end as last_assignment_to_resolution_calendar_minutes,\n ticket_resolution_times_calendar.ticket_unassigned_duration_calendar_minutes,\n ticket_resolution_times_calendar.first_resolution_calendar_minutes,\n ticket_resolution_times_calendar.final_resolution_calendar_minutes,\n ticket_resolution_times_calendar.total_resolutions as count_resolutions,\n ticket_resolution_times_calendar.count_reopens,\n ticket_work_time_calendar.ticket_deleted_count,\n ticket_work_time_calendar.total_ticket_recoveries,\n ticket_work_time_calendar.last_status_assignment_date,\n ticket_work_time_calendar.new_status_duration_in_calendar_minutes,\n ticket_work_time_calendar.open_status_duration_in_calendar_minutes,\n ticket_work_time_calendar.agent_wait_time_in_calendar_minutes,\n ticket_work_time_calendar.requester_wait_time_in_calendar_minutes,\n ticket_work_time_calendar.solve_time_in_calendar_minutes,\n ticket_work_time_calendar.agent_work_time_in_calendar_minutes,\n ticket_work_time_calendar.on_hold_time_in_calendar_minutes,\n coalesce(ticket_comments.count_agent_replies, 0) as total_agent_replies,\n \n case when ticket_enriched.is_requester_active = true and ticket_enriched.requester_last_login_at is not null\n then (\n\n datetime_diff(\n cast(current_timestamp as datetime),\n cast(ticket_enriched.requester_last_login_at as datetime),\n second\n )\n\n /60)\n end as requester_last_login_age_minutes,\n case when ticket_enriched.is_assignee_active = true and ticket_enriched.assignee_last_login_at is not null\n then (\n\n datetime_diff(\n cast(current_timestamp as datetime),\n cast(ticket_enriched.assignee_last_login_at as datetime),\n second\n )\n\n /60)\n end as assignee_last_login_age_minutes,\n case when lower(ticket_enriched.status) not in ('solved','closed')\n then (\n\n datetime_diff(\n cast(current_timestamp as datetime),\n cast(ticket_enriched.created_at as datetime),\n second\n )\n\n /60)\n end as unsolved_ticket_age_minutes,\n case when lower(ticket_enriched.status) not in ('solved','closed')\n then (\n\n datetime_diff(\n cast(current_timestamp as datetime),\n cast(ticket_enriched.updated_at as datetime),\n second\n )\n\n /60)\n end as unsolved_ticket_age_since_update_minutes,\n case when lower(ticket_enriched.status) in ('solved','closed') and ticket_comments.is_one_touch_resolution \n then true\n else false\n end as is_one_touch_resolution,\n case when lower(ticket_enriched.status) in ('solved','closed') and ticket_comments.is_two_touch_resolution \n then true\n else false \n end as is_two_touch_resolution,\n case when lower(ticket_enriched.status) in ('solved','closed') and not ticket_comments.is_one_touch_resolution\n and not 
ticket_comments.is_two_touch_resolution \n then true\n else false \n end as is_multi_touch_resolution\n\n\nfrom ticket_enriched\n\nleft join ticket_reply_times_calendar\n using (ticket_id)\n\nleft join ticket_resolution_times_calendar\n using (ticket_id)\n\nleft join ticket_work_time_calendar\n using (ticket_id)\n\nleft join ticket_comments\n using(ticket_id)\n\n\n\n), business_hour_metrics as (\n\n select \n ticket_enriched.ticket_id,\n ticket_first_resolution_time_business.first_resolution_business_minutes,\n ticket_full_resolution_time_business.full_resolution_business_minutes,\n ticket_first_reply_time_business.first_reply_time_business_minutes,\n ticket_work_time_business.agent_wait_time_in_business_minutes,\n ticket_work_time_business.requester_wait_time_in_business_minutes,\n ticket_work_time_business.solve_time_in_business_minutes,\n ticket_work_time_business.agent_work_time_in_business_minutes,\n ticket_work_time_business.on_hold_time_in_business_minutes,\n ticket_work_time_business.new_status_duration_in_business_minutes,\n ticket_work_time_business.open_status_duration_in_business_minutes\n\n from ticket_enriched\n\n left join ticket_first_resolution_time_business\n using (ticket_id)\n\n left join ticket_full_resolution_time_business\n using (ticket_id)\n \n left join ticket_first_reply_time_business\n using (ticket_id) \n \n left join ticket_work_time_business\n using (ticket_id)\n\n)\n\nselect\n calendar_hour_metrics.*,\n case when calendar_hour_metrics.status in ('solved', 'closed')\n then coalesce(business_hour_metrics.first_resolution_business_minutes,0)\n else null\n end as first_resolution_business_minutes,\n case when calendar_hour_metrics.status in ('solved', 'closed')\n then coalesce(business_hour_metrics.full_resolution_business_minutes,0)\n else null\n end as full_resolution_business_minutes,\n case when coalesce(calendar_hour_metrics.count_public_agent_comments, 0) = 0\n then null\n else coalesce(business_hour_metrics.first_reply_time_business_minutes,0)\n end as first_reply_time_business_minutes,\n coalesce(business_hour_metrics.agent_wait_time_in_business_minutes,0) as agent_wait_time_in_business_minutes,\n coalesce(business_hour_metrics.requester_wait_time_in_business_minutes,0) as requester_wait_time_in_business_minutes,\n coalesce(business_hour_metrics.solve_time_in_business_minutes,0) as solve_time_in_business_minutes,\n coalesce(business_hour_metrics.agent_work_time_in_business_minutes,0) as agent_work_time_in_business_minutes,\n coalesce(business_hour_metrics.on_hold_time_in_business_minutes,0) as on_hold_time_in_business_minutes,\n coalesce(business_hour_metrics.new_status_duration_in_business_minutes,0) as new_status_duration_in_business_minutes,\n coalesce(business_hour_metrics.open_status_duration_in_business_minutes,0) as open_status_duration_in_business_minutes\n\nfrom calendar_hour_metrics\n\nleft join business_hour_metrics \n using (ticket_id)\n\n", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`zendesk__ticket_metrics`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:18.917204Z", "completed_at": "2024-05-14T15:31:19.928759Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:19.929570Z", "completed_at": "2024-05-14T15:31:19.929573Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 1.013624906539917, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__field_history_pivot", "compiled": true, "compiled_code": "-- 
depends_on: `dbt-package-testing`.`zendesk_integration_tests_50`.`ticket_field_history_data`\n\n\n\n\n \nwith __dbt__cte__int_zendesk__updater_information as (\nwith users as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__user_aggregates`\n\n), organizations as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__organization_aggregates`\n\n), final as (\n select\n users.user_id as updater_user_id\n ,users.name as updater_name\n ,users.role as updater_role\n ,users.email as updater_email\n ,users.external_id as updater_external_id\n ,users.locale as updater_locale\n ,users.is_active as updater_is_active\n\n --If you use user tags this will be included, if not it will be ignored.\n \n ,users.user_tags as updater_user_tags\n \n\n ,users.last_login_at as updater_last_login_at\n ,users.time_zone as updater_time_zone\n ,organizations.organization_id as updater_organization_id\n\n --If you use domain names this will be included, if not it will be ignored.\n \n ,organizations.domain_names as updater_organization_domain_names\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,organizations.organization_tags as updater_organization_organization_tags\n \n from users\n\n left join organizations\n using(organization_id)\n)\n\nselect * \nfrom final\n), __dbt__cte__int_zendesk__field_history_enriched as (\nwith ticket_field_history as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket_field_history`\n\n), updater_info as (\n select *\n from __dbt__cte__int_zendesk__updater_information\n\n), final as (\n select\n ticket_field_history.*\n\n \n\n from ticket_field_history\n\n left join updater_info\n on ticket_field_history.user_id = updater_info.updater_user_id\n)\nselect *\nfrom final\n), field_history as (\n\n select\n ticket_id,\n field_name,\n valid_ending_at,\n valid_starting_at\n\n --Only runs if the user passes updater fields through the final ticket field history model\n \n\n -- doing this to figure out what values are actually null and what needs to be backfilled in zendesk__ticket_field_history\n ,case when value is null then 'is_null' else value end as value\n\n from __dbt__cte__int_zendesk__field_history_enriched\n \n where cast( timestamp_trunc(\n cast(valid_starting_at as timestamp),\n day\n ) as date) >= (select max(date_day) from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__field_history_pivot`)\n \n\n), event_order as (\n\n select \n *,\n row_number() over (\n partition by cast(valid_starting_at as date), ticket_id, field_name\n order by valid_starting_at desc\n ) as row_num\n from field_history\n\n), filtered as (\n\n -- Find the last event that occurs on each day for each ticket\n\n select *\n from event_order\n where row_num = 1\n\n), pivots as (\n\n -- For each column that is in both the ticket_field_history_columns variable and the field_history table,\n -- pivot out the value into its own column. 
This will feed the daily slowly changing dimension model.\n\n select \n ticket_id,\n cast(timestamp_trunc(\n cast(valid_starting_at as timestamp),\n day\n ) as date) as date_day\n\n \n \n ,min(case when lower(field_name) = 'status' then filtered.value end) as status\n\n --Only runs if the user passes updater fields through the final ticket field history model\n \n \n \n ,min(case when lower(field_name) = 'assignee_id' then filtered.value end) as assignee_id\n\n --Only runs if the user passes updater fields through the final ticket field history model\n \n \n \n ,min(case when lower(field_name) = 'priority' then filtered.value end) as priority\n\n --Only runs if the user passes updater fields through the final ticket field history model\n \n \n \n from filtered\n group by 1,2\n\n), surrogate_key as (\n\n select \n *,\n to_hex(md5(cast(coalesce(cast(ticket_id as string), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(date_day as string), '_dbt_utils_surrogate_key_null_') as string))) as ticket_day_id\n from pivots\n\n)\n\nselect *\nfrom surrogate_key", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__field_history_pivot`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:19.934290Z", "completed_at": "2024-05-14T15:31:19.940540Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:19.942361Z", "completed_at": "2024-05-14T15:31:19.942367Z"}], "thread_id": "Thread-1 (worker)", "execution_time": 0.012187004089355469, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk.not_null_zendesk__ticket_metrics_ticket_id.3466b76bbd", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_id\nfrom `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`zendesk__ticket_metrics`\nwhere ticket_id is null\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:19.932321Z", "completed_at": "2024-05-14T15:31:19.940721Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:19.942738Z", "completed_at": "2024-05-14T15:31:19.942741Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.01308298110961914, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.zendesk__ticket_summary", "compiled": true, "compiled_code": "with ticket_metrics as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`zendesk__ticket_metrics`\n\n), user_table as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user`\n\n), user_sum as (\n select\n cast(1 as INT64) as summary_helper,\n sum(case when is_active = true\n then 1\n else 0\n end) as user_count,\n sum(case when lower(role) != 'end-user' and is_active = true\n then 1\n else 0\n end) as active_agent_count,\n sum(case when is_active = false\n then 1\n else 0\n end) as deleted_user_count,\n sum(case when lower(role) = 'end-user' and is_active = true\n then 1\n else 0\n end) as end_user_count,\n sum(case when is_suspended = true\n then 1\n else 0\n end) as suspended_user_count\n from user_table\n\n group by 1\n\n), ticket_metric_sum as (\n select \n cast(1 as INT64) as summary_helper,\n sum(case when lower(status) = 'new'\n then 1\n else 0\n end) as new_ticket_count,\n sum(case when lower(status) = 'hold'\n then 1\n else 0\n end) as on_hold_ticket_count,\n sum(case when lower(status) = 'open'\n then 1\n else 0\n end) as open_ticket_count,\n sum(case when 
lower(status) = 'pending'\n then 1\n else 0\n end) as pending_ticket_count,\n sum(case when lower(type) = 'problem'\n then 1\n else 0\n end) as problem_ticket_count,\n sum(case when first_assignee_id != last_assignee_id\n then 1\n else 0\n end) as reassigned_ticket_count,\n sum(case when count_reopens > 0\n then 1\n else 0\n end) as reopened_ticket_count,\n\n sum(case when lower(ticket_satisfaction_score) in ('offered', 'good', 'bad')\n then 1\n else 0\n end) as surveyed_satisfaction_ticket_count,\n\n sum(case when assignee_id is null and lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unassigned_unsolved_ticket_count,\n sum(case when total_agent_replies < 0\n then 1\n else 0\n end) as unreplied_ticket_count,\n sum(case when total_agent_replies < 0 and lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unreplied_unsolved_ticket_count,\n sum(case when lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unsolved_ticket_count,\n sum(case when lower(status) in ('solved', 'closed')\n then 1\n else 0\n end) as solved_ticket_count,\n sum(case when lower(status) in ('deleted')\n then 1\n else 0\n end) as deleted_ticket_count,\n sum(case when total_ticket_recoveries > 0\n then 1\n else 0\n end) as recovered_ticket_count,\n sum(case when assignee_stations_count > 0\n then 1\n else 0\n end) as assigned_ticket_count,\n count(count_internal_comments) as total_internal_comments,\n count(count_public_comments) as total_public_comments,\n count(total_comments)\n from ticket_metrics\n \n group by 1\n\n\n), final as (\n select\n user_sum.user_count,\n user_sum.active_agent_count,\n user_sum.deleted_user_count,\n user_sum.end_user_count,\n user_sum.suspended_user_count,\n ticket_metric_sum.new_ticket_count,\n ticket_metric_sum.on_hold_ticket_count,\n ticket_metric_sum.open_ticket_count,\n ticket_metric_sum.pending_ticket_count,\n ticket_metric_sum.solved_ticket_count,\n ticket_metric_sum.problem_ticket_count,\n ticket_metric_sum.assigned_ticket_count,\n ticket_metric_sum.reassigned_ticket_count,\n ticket_metric_sum.reopened_ticket_count,\n ticket_metric_sum.surveyed_satisfaction_ticket_count,\n ticket_metric_sum.unassigned_unsolved_ticket_count,\n ticket_metric_sum.unreplied_ticket_count,\n ticket_metric_sum.unreplied_unsolved_ticket_count,\n ticket_metric_sum.unsolved_ticket_count,\n ticket_metric_sum.recovered_ticket_count,\n ticket_metric_sum.deleted_ticket_count\n from user_sum\n\n left join ticket_metric_sum\n using(summary_helper)\n)\n\nselect *\nfrom final", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`zendesk__ticket_summary`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:19.936024Z", "completed_at": "2024-05-14T15:31:19.942560Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:19.943909Z", "completed_at": "2024-05-14T15:31:19.943912Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.01293325424194336, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk.unique_zendesk__ticket_metrics_ticket_id.f3dc8eba5c", "compiled": true, "compiled_code": "\n \n \n\nwith dbt_test__target as (\n\n select ticket_id as unique_field\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`zendesk__ticket_metrics`\n where ticket_id is not null\n\n)\n\nselect\n unique_field,\n count(*) as n_records\n\nfrom dbt_test__target\ngroup by unique_field\nhaving count(*) > 1\n\n\n", "relation_name": null}, {"status": "success", 
"timing": [{"name": "compile", "started_at": "2024-05-14T15:31:19.937805Z", "completed_at": "2024-05-14T15:31:20.224017Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:20.227175Z", "completed_at": "2024-05-14T15:31:20.227207Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.29810667037963867, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__field_history_scd", "compiled": true, "compiled_code": "-- model needs to materialize as a table to avoid erroneous null values\n \n\n\n\nwith change_data as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__field_history_pivot`\n\n), set_values as (\n\n-- each row of the pivoted table includes field values if that field was updated on that day\n-- we need to backfill to persist values that have been previously updated and are still valid \n select \n date_day as valid_from,\n ticket_id,\n ticket_day_id\n\n \n\n ,status\n ,sum(case when status is null \n then 0 \n else 1 \n end) over (order by ticket_id, date_day rows unbounded preceding) as status_field_partition\n \n\n ,assignee_id\n ,sum(case when assignee_id is null \n then 0 \n else 1 \n end) over (order by ticket_id, date_day rows unbounded preceding) as assignee_id_field_partition\n \n\n ,priority\n ,sum(case when priority is null \n then 0 \n else 1 \n end) over (order by ticket_id, date_day rows unbounded preceding) as priority_field_partition\n \n\n from change_data\n\n), fill_values as (\n select\n valid_from, \n ticket_id,\n ticket_day_id\n\n \n\n ,first_value( status ) over (partition by status_field_partition, ticket_id order by valid_from asc rows between unbounded preceding and current row) as status\n \n \n\n ,first_value( assignee_id ) over (partition by assignee_id_field_partition, ticket_id order by valid_from asc rows between unbounded preceding and current row) as assignee_id\n \n \n\n ,first_value( priority ) over (partition by priority_field_partition, ticket_id order by valid_from asc rows between unbounded preceding and current row) as priority\n \n \n from set_values\n) \n\nselect *\nfrom fill_values", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__field_history_scd`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:20.236327Z", "completed_at": "2024-05-14T15:31:20.639951Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:20.641768Z", "completed_at": "2024-05-14T15:31:20.641789Z"}], "thread_id": "Thread-1 (worker)", "execution_time": 0.4094998836517334, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.zendesk__ticket_field_history", "compiled": true, "compiled_code": "with change_data as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__field_history_scd`\n \n \n where valid_from >= (select max(date_day) from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`zendesk__ticket_field_history`)\n\n-- If no issue fields have been updated since the last incremental run, the pivoted_daily_history CTE will return no record/rows.\n-- When this is the case, we need to grab the most recent day's records from the previously built table so that we can persist \n-- those values into the future.\n\n), most_recent_data as ( \n\n select \n *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`zendesk__ticket_field_history`\n where date_day = (select max(date_day) 
from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`zendesk__ticket_field_history` )\n\n\n\n), calendar as (\n\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`int_zendesk__field_calendar_spine`\n where date_day <= current_date\n \n and date_day >= (select max(date_day) from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`zendesk__ticket_field_history`)\n \n\n), joined as (\n\n select \n calendar.date_day,\n calendar.ticket_id\n \n \n , coalesce(change_data.status, most_recent_data.status) as status\n \n , coalesce(change_data.assignee_id, most_recent_data.assignee_id) as assignee_id\n \n , coalesce(change_data.priority, most_recent_data.priority) as priority\n \n \n \n\n from calendar\n left join change_data\n on calendar.ticket_id = change_data.ticket_id\n and calendar.date_day = change_data.valid_from\n \n \n left join most_recent_data\n on calendar.ticket_id = most_recent_data.ticket_id\n and calendar.date_day = most_recent_data.date_day\n \n\n), set_values as (\n\n select\n date_day,\n ticket_id\n\n \n , status\n -- create a batch/partition once a new value is provided\n , sum( case when status is null then 0 else 1 end) over ( partition by ticket_id\n order by date_day rows unbounded preceding) as status_field_partition\n\n \n , assignee_id\n -- create a batch/partition once a new value is provided\n , sum( case when assignee_id is null then 0 else 1 end) over ( partition by ticket_id\n order by date_day rows unbounded preceding) as assignee_id_field_partition\n\n \n , priority\n -- create a batch/partition once a new value is provided\n , sum( case when priority is null then 0 else 1 end) over ( partition by ticket_id\n order by date_day rows unbounded preceding) as priority_field_partition\n\n \n\n from joined\n),\n\nfill_values as (\n\n select \n date_day,\n ticket_id\n\n \n -- grab the value that started this batch/partition\n , first_value( status ) over (\n partition by ticket_id, status_field_partition \n order by date_day asc rows between unbounded preceding and current row) as status\n \n -- grab the value that started this batch/partition\n , first_value( assignee_id ) over (\n partition by ticket_id, assignee_id_field_partition \n order by date_day asc rows between unbounded preceding and current row) as assignee_id\n \n -- grab the value that started this batch/partition\n , first_value( priority ) over (\n partition by ticket_id, priority_field_partition \n order by date_day asc rows between unbounded preceding and current row) as priority\n \n\n from set_values\n\n), fix_null_values as (\n\n select \n date_day,\n ticket_id\n \n\n -- we de-nulled the true null values earlier in order to differentiate them from nulls that just needed to be backfilled\n , case when cast( status as string ) = 'is_null' then null else status end as status\n \n\n -- we de-nulled the true null values earlier in order to differentiate them from nulls that just needed to be backfilled\n , case when cast( assignee_id as string ) = 'is_null' then null else assignee_id end as assignee_id\n \n\n -- we de-nulled the true null values earlier in order to differentiate them from nulls that just needed to be backfilled\n , case when cast( priority as string ) = 'is_null' then null else priority end as priority\n \n\n from fill_values\n\n), surrogate_key as (\n\n select\n to_hex(md5(cast(coalesce(cast(date_day as string), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(ticket_id as string), '_dbt_utils_surrogate_key_null_') 
as string))) as ticket_day_id,\n *\n\n from fix_null_values\n)\n\nselect *\nfrom surrogate_key", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`zendesk__ticket_field_history`"}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-05-14T15:31:20.646744Z", "completed_at": "2024-05-14T15:31:20.664600Z"}, {"name": "execute", "started_at": "2024-05-14T15:31:20.665404Z", "completed_at": "2024-05-14T15:31:20.665414Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.020750045776367188, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.zendesk__ticket_backlog", "compiled": true, "compiled_code": "--This model will only run if 'status' is included within the `ticket_field_history_columns` variable.\n\n\nwith ticket_field_history as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`zendesk__ticket_field_history`\n\n), tickets as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__ticket`\n\n), group_names as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__group`\n\n), users as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__user`\n\n), brands as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__brand`\n\n--The below model is excluded if the user does not include ticket_form_id in the variable as a low percentage of accounts use ticket forms.\n\n\n), organizations as (\n select *\n from `dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`stg_zendesk__organization`\n\n), backlog as (\n select\n ticket_field_history.date_day\n ,ticket_field_history.ticket_id\n ,ticket_field_history.status\n ,tickets.created_channel\n --Looking at all history fields the users passed through in their dbt_project.yml file\n --Standard ID field where the name can easily be joined from stg model.\n ,assignee.name as assignee_name\n\n \n --Looking at all history fields the users passed through in their dbt_project.yml file\n --All other fields are not ID's and can simply be included in the query.\n ,ticket_field_history.priority\n \n \n\n from ticket_field_history\n\n left join tickets\n on tickets.ticket_id = ticket_field_history.ticket_id\n\n \n\n \n\n --Join not needed if fields is not located in variable, otherwise it is included.\n left join users as assignee\n on assignee.user_id = cast(ticket_field_history.assignee_id as bigint)\n \n\n \n\n \n\n \n\n where ticket_field_history.status not in ('closed', 'solved', 'deleted')\n)\n\nselect *\nfrom backlog", "relation_name": "`dbt-package-testing`.`zendesk_integration_tests_50_zendesk_dev`.`zendesk__ticket_backlog`"}], "elapsed_time": 7.560626268386841, "args": {"exclude": [], "enable_legacy_logger": false, "printer_width": 80, "log_level": "info", "log_file_max_bytes": 10485760, "version_check": true, "quiet": false, "defer": false, "print": true, "show_resource_report": false, "send_anonymous_usage_stats": true, "indirect_selection": "eager", "partial_parse": true, "introspect": true, "populate_cache": true, "write_json": true, "static_parser": true, "which": "generate", "use_colors": true, "empty_catalog": false, "select": [], "cache_selected_only": false, "log_path": "/Users/renee/Documents/dbt/zendesk/dbt_zendesk/integration_tests/logs", "static": false, "favor_state": false, "macro_debugging": false, "strict_mode": false, 
"compile": true, "vars": {}, "log_format": "default", "log_format_file": "debug", "use_colors_file": true, "log_level_file": "debug", "invocation_command": "dbt docs generate", "warn_error_options": {"include": [], "exclude": []}, "project_dir": "/Users/renee/Documents/dbt/zendesk/dbt_zendesk/integration_tests", "partial_parse_file_diff": true, "profiles_dir": "/Users/renee/.dbt"}} \ No newline at end of file +{"metadata": {"dbt_schema_version": "https://schemas.getdbt.com/dbt/run-results/v5.json", "dbt_version": "1.7.11", "generated_at": "2024-08-26T20:48:51.798012Z", "invocation_id": "57bbaa30-28cc-4e7b-b76f-fe2920291c4e", "env": {}}, "results": [{"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:47.856357Z", "completed_at": "2024-08-26T20:48:48.115220Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:48.115756Z", "completed_at": "2024-08-26T20:48:48.115768Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.32162904739379883, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__domain_name_tmp", "compiled": true, "compiled_code": "--To disable this model, set the using_domain_names variable within your dbt_project.yml file to False.\n\n\nselect \"index\",\n \"organization_id\",\n \"_fivetran_synced\",\n \"domain_name\" \nfrom \"postgres\".\"zendesk_integration_tests_55\".\"domain_name_data\" as domain_name_table", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__domain_name_tmp\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:47.850986Z", "completed_at": "2024-08-26T20:48:48.156804Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:48.158503Z", "completed_at": "2024-08-26T20:48:48.158506Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.3719019889831543, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__daylight_time_tmp", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nselect \"time_zone\",\n \"year\",\n \"_fivetran_synced\",\n \"daylight_end_utc\",\n \"daylight_offset\",\n \"daylight_start_utc\"\nfrom \"postgres\".\"zendesk_integration_tests_55\".\"daylight_time_data\" as daylight_time_table", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__daylight_time_tmp\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:47.833803Z", "completed_at": "2024-08-26T20:48:48.156420Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:48.158221Z", "completed_at": "2024-08-26T20:48:48.158227Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.3727750778198242, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__brand_tmp", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_deleted\",\n \"_fivetran_synced\",\n \"active\",\n \"brand_url\",\n \"default\",\n \"has_help_center\",\n \"help_center_state\",\n \"logo_content_type\",\n \"logo_content_url\",\n \"logo_deleted\",\n \"logo_file_name\",\n \"logo_height\",\n \"logo_id\",\n \"logo_inline\",\n \"logo_mapped_content_url\",\n \"logo_size\",\n \"logo_url\",\n \"logo_width\",\n \"name\",\n \"subdomain\",\n \"url\" \nfrom \"postgres\".\"zendesk_integration_tests_55\".\"brand_data\" as brand_table", "relation_name": 
"\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__brand_tmp\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:48.185003Z", "completed_at": "2024-08-26T20:48:48.402723Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:48.403202Z", "completed_at": "2024-08-26T20:48:48.403206Z"}], "thread_id": "Thread-1 (worker)", "execution_time": 0.24235129356384277, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__organization_tag_tmp", "compiled": true, "compiled_code": "--To disable this model, set the using_organization_tags variable within your dbt_project.yml file to False.\n\n\nselect \"organization_id\",\n \"tag\",\n \"_fivetran_synced\" \nfrom \"postgres\".\"zendesk_integration_tests_55\".\"organization_tag_data\" as organization_tag_table", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__organization_tag_tmp\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:48.142705Z", "completed_at": "2024-08-26T20:48:48.399638Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:48.400213Z", "completed_at": "2024-08-26T20:48:48.400220Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.2915630340576172, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__group_tmp", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_deleted\",\n \"_fivetran_synced\",\n \"created_at\",\n \"name\",\n \"updated_at\",\n \"url\" \nfrom \"postgres\".\"zendesk_integration_tests_55\".\"group_data\" as group_table", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__group_tmp\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:48.193971Z", "completed_at": "2024-08-26T20:48:48.413687Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:48.414199Z", "completed_at": "2024-08-26T20:48:48.414204Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.24462604522705078, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__organization_tmp", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_synced\",\n \"created_at\",\n \"details\",\n \"external_id\",\n \"group_id\",\n \"name\",\n \"notes\",\n \"shared_comments\",\n \"shared_tickets\",\n \"updated_at\",\n \"url\"\nfrom \"postgres\".\"zendesk_integration_tests_55\".\"organization_data\" as organization_table", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__organization_tmp\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:48.198644Z", "completed_at": "2024-08-26T20:48:48.436369Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:48.441756Z", "completed_at": "2024-08-26T20:48:48.441762Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.28787803649902344, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__schedule_holiday_tmp", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nselect \"id\",\n \"schedule_id\",\n \"_fivetran_deleted\",\n \"_fivetran_synced\",\n \"end_date\",\n \"name\",\n \"start_date\"\nfrom \"postgres\".\"zendesk_integration_tests_55\".\"schedule_holiday_data\" as schedule_holiday_table", "relation_name": 
"\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__schedule_holiday_tmp\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:48.437617Z", "completed_at": "2024-08-26T20:48:48.684651Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:48.685212Z", "completed_at": "2024-08-26T20:48:48.685218Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.2730062007904053, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__ticket_comment_tmp", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_synced\",\n \"body\",\n \"created\",\n \"facebook_comment\",\n \"public\",\n \"ticket_id\",\n \"tweet\",\n \"user_id\",\n \"voice_comment\"\nfrom \"postgres\".\"zendesk_integration_tests_55\".\"ticket_comment_data\" as ticket_comment_table", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_comment_tmp\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:48.428014Z", "completed_at": "2024-08-26T20:48:48.685719Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:48.686229Z", "completed_at": "2024-08-26T20:48:48.686233Z"}], "thread_id": "Thread-1 (worker)", "execution_time": 0.28310227394104004, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__schedule_tmp", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nselect \"end_time\",\n \"id\",\n \"start_time\",\n \"_fivetran_deleted\",\n \"_fivetran_synced\",\n \"end_time_utc\",\n \"name\",\n \"start_time_utc\",\n \"time_zone\",\n \"created_at\"\nfrom \"postgres\".\"zendesk_integration_tests_55\".\"schedule_data\" as schedule_table", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__schedule_tmp\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:48.482237Z", "completed_at": "2024-08-26T20:48:48.706009Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:48.706534Z", "completed_at": "2024-08-26T20:48:48.706540Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.24778008460998535, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__ticket_form_history_tmp", "compiled": true, "compiled_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n\n\nselect \"id\",\n \"updated_at\",\n \"_fivetran_deleted\",\n \"_fivetran_synced\",\n \"active\",\n \"created_at\",\n \"display_name\",\n \"end_user_visible\",\n \"name\"\nfrom \"postgres\".\"zendesk_integration_tests_55\".\"ticket_form_history_data\" as ticket_form_history_table", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_form_history_tmp\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:48.443864Z", "completed_at": "2024-08-26T20:48:48.724888Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:48.725428Z", "completed_at": "2024-08-26T20:48:48.725433Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.30675816535949707, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__ticket_field_history_tmp", "compiled": true, "compiled_code": "select \"field_name\",\n \"ticket_id\",\n \"updated\",\n 
\"_fivetran_synced\",\n \"user_id\",\n \"value\"\nfrom \"postgres\".\"zendesk_integration_tests_55\".\"ticket_field_history_data\" as ticket_field_history_table", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_field_history_tmp\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:48.711434Z", "completed_at": "2024-08-26T20:48:48.941704Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:48.942264Z", "completed_at": "2024-08-26T20:48:48.942271Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.2643911838531494, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__ticket_schedule_tmp", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\n\n\nselect \"created_at\",\n \"ticket_id\",\n \"_fivetran_synced\",\n \"schedule_id\"\nfrom \"postgres\".\"zendesk_integration_tests_55\".\"ticket_schedule_data\" as ticket_schedule_table\n\n", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_schedule_tmp\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:48.719611Z", "completed_at": "2024-08-26T20:48:48.984521Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:48.985539Z", "completed_at": "2024-08-26T20:48:48.985542Z"}], "thread_id": "Thread-1 (worker)", "execution_time": 0.2946970462799072, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__ticket_tag_tmp", "compiled": true, "compiled_code": "select \"tag\",\n \"ticket_id\",\n \"_fivetran_synced\"\nfrom \"postgres\".\"zendesk_integration_tests_55\".\"ticket_tag_data\" as ticket_tag_table", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_tag_tmp\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:48.730756Z", "completed_at": "2024-08-26T20:48:48.984187Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:48.985266Z", "completed_at": "2024-08-26T20:48:48.985272Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.2825767993927002, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__ticket_tmp", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_synced\",\n \"allow_channelback\",\n \"assignee_id\",\n \"brand_id\",\n \"created_at\",\n \"description\",\n \"due_at\",\n \"external_id\",\n \"forum_topic_id\",\n \"group_id\",\n \"has_incidents\",\n \"is_public\",\n \"organization_id\",\n \"priority\",\n \"problem_id\",\n \"recipient\",\n \"requester_id\",\n \"status\",\n \"subject\",\n \"submitter_id\",\n \"system_client\",\n \"ticket_form_id\",\n \"type\",\n \"updated_at\",\n \"url\",\n \"via_channel\",\n \"via_source_from_id\",\n \"via_source_from_title\",\n \"via_source_rel\",\n \"via_source_to_address\",\n \"via_source_to_name\",\n \"merged_ticket_ids\",\n \"via_source_from_address\",\n \"followup_ids\",\n \"via_followup_source_id\"\nfrom \"postgres\".\"zendesk_integration_tests_55\".\"ticket_data\" as ticket_table", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_tmp\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:49.019747Z", "completed_at": "2024-08-26T20:48:49.023474Z"}, {"name": "execute", "started_at": 
"2024-08-26T20:48:49.024095Z", "completed_at": "2024-08-26T20:48:49.024101Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.01054692268371582, "adapter_response": {}, "message": null, "failures": null, "unique_id": "operation.zendesk.zendesk-on-run-start-0", "compiled": true, "compiled_code": "\n\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:49.026137Z", "completed_at": "2024-08-26T20:48:49.027309Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:49.027867Z", "completed_at": "2024-08-26T20:48:49.027872Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.002904653549194336, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.brand_data_postgres", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:49.029431Z", "completed_at": "2024-08-26T20:48:49.030515Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:49.031001Z", "completed_at": "2024-08-26T20:48:49.031005Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.002699613571166992, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.daylight_time_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:49.032981Z", "completed_at": "2024-08-26T20:48:49.034798Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:49.035635Z", "completed_at": "2024-08-26T20:48:49.035641Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.003895998001098633, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.domain_name_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:48.750974Z", "completed_at": "2024-08-26T20:48:49.008444Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:49.008998Z", "completed_at": "2024-08-26T20:48:49.009004Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.28633785247802734, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__time_zone_tmp", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nselect \"time_zone\",\n \"_fivetran_synced\",\n \"standard_offset\" \nfrom \"postgres\".\"zendesk_integration_tests_55\".\"time_zone_data\" as time_zone_table", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__time_zone_tmp\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:49.038859Z", "completed_at": "2024-08-26T20:48:49.040106Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:49.042406Z", "completed_at": "2024-08-26T20:48:49.042411Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.0055501461029052734, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.group_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:49.040439Z", "completed_at": "2024-08-26T20:48:49.041607Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:49.042656Z", "completed_at": 
"2024-08-26T20:48:49.042658Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.005635976791381836, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.organization_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:49.045780Z", "completed_at": "2024-08-26T20:48:49.047034Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:49.049412Z", "completed_at": "2024-08-26T20:48:49.049418Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.005632877349853516, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.organization_tag_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:49.047427Z", "completed_at": "2024-08-26T20:48:49.048491Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:49.049682Z", "completed_at": "2024-08-26T20:48:49.049685Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.005650997161865234, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.schedule_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:49.052646Z", "completed_at": "2024-08-26T20:48:49.054484Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:49.056745Z", "completed_at": "2024-08-26T20:48:49.056750Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.006268024444580078, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.schedule_holiday_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:49.054859Z", "completed_at": "2024-08-26T20:48:49.055923Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:49.057140Z", "completed_at": "2024-08-26T20:48:49.057148Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.0064160823822021484, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.ticket_comment_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:49.060785Z", "completed_at": "2024-08-26T20:48:49.061949Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:49.064412Z", "completed_at": "2024-08-26T20:48:49.064421Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.006070852279663086, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.ticket_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:49.062323Z", "completed_at": "2024-08-26T20:48:49.063468Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:49.064751Z", "completed_at": "2024-08-26T20:48:49.064756Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.006165981292724609, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.ticket_field_history_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:49.068100Z", 
"completed_at": "2024-08-26T20:48:49.069839Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:49.071965Z", "completed_at": "2024-08-26T20:48:49.071970Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.005828857421875, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.ticket_form_history_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:49.070153Z", "completed_at": "2024-08-26T20:48:49.071181Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:49.072231Z", "completed_at": "2024-08-26T20:48:49.072234Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.00593113899230957, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.ticket_schedule_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:49.075164Z", "completed_at": "2024-08-26T20:48:49.076297Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:49.078344Z", "completed_at": "2024-08-26T20:48:49.078349Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.004988908767700195, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.ticket_tag_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:49.076615Z", "completed_at": "2024-08-26T20:48:49.077641Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:49.078595Z", "completed_at": "2024-08-26T20:48:49.078598Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.005143165588378906, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.time_zone_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:49.081733Z", "completed_at": "2024-08-26T20:48:49.082884Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:49.085682Z", "completed_at": "2024-08-26T20:48:49.085687Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.005883932113647461, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.user_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:49.083231Z", "completed_at": "2024-08-26T20:48:49.084906Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:49.085932Z", "completed_at": "2024-08-26T20:48:49.085934Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.005975961685180664, "adapter_response": {}, "message": null, "failures": null, "unique_id": "seed.zendesk_integration_tests.user_tag_data", "compiled": null, "compiled_code": null, "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:48.975701Z", "completed_at": "2024-08-26T20:48:49.205718Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:49.206265Z", "completed_at": "2024-08-26T20:48:49.206272Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.2544209957122803, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__user_tag_tmp", "compiled": true, "compiled_code": "--To disable this model, set the 
using_user_tags variable within your dbt_project.yml file to False.\n\n\nselect \"tag\",\n \"user_id\",\n \"_fivetran_synced\" \nfrom \"postgres\".\"zendesk_integration_tests_55\".\"user_tag_data\" as user_tag_table", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user_tag_tmp\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:49.015047Z", "completed_at": "2024-08-26T20:48:49.260517Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:49.261251Z", "completed_at": "2024-08-26T20:48:49.261258Z"}], "thread_id": "Thread-1 (worker)", "execution_time": 0.2720072269439697, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__user_tmp", "compiled": true, "compiled_code": "select \"id\",\n \"_fivetran_synced\",\n \"active\",\n \"alias\",\n \"authenticity_token\",\n \"chat_only\",\n \"created_at\",\n \"details\",\n \"email\",\n \"external_id\",\n \"last_login_at\",\n \"locale\",\n \"locale_id\",\n \"moderator\",\n \"name\",\n \"notes\",\n \"only_private_comments\",\n \"organization_id\",\n \"phone\",\n \"remote_photo_url\",\n \"restricted_agent\",\n \"role\",\n \"shared\",\n \"shared_agent\",\n \"signature\",\n \"suspended\",\n \"ticket_restriction\",\n \"time_zone\",\n \"two_factor_auth_enabled\",\n \"updated_at\",\n \"url\",\n \"verified\" \nfrom \"postgres\".\"zendesk_integration_tests_55\".\"user_data\" as user_table", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user_tmp\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:49.088956Z", "completed_at": "2024-08-26T20:48:49.326228Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:49.326743Z", "completed_at": "2024-08-26T20:48:49.326749Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.2617301940917969, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__domain_name", "compiled": true, "compiled_code": "--To disable this model, set the using_domain_names variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__domain_name_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n domain_name\n \n as \n \n domain_name\n \n, \n \n \n index\n \n as \n \n index\n \n, \n \n \n organization_id\n \n as \n \n organization_id\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n organization_id,\n domain_name,\n index\n from fields\n)\n\nselect * \nfrom final", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__domain_name\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:49.093338Z", "completed_at": "2024-08-26T20:48:49.342915Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:49.343475Z", "completed_at": "2024-08-26T20:48:49.343481Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.28156495094299316, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__brand", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__brand_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n active\n \n as \n \n active\n \n, \n \n \n brand_url\n \n as \n \n brand_url\n \n, \n \n \n has_help_center\n \n as \n \n has_help_center\n \n, \n \n \n help_center_state\n \n as \n \n help_center_state\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n logo_content_type\n \n as \n \n logo_content_type\n \n, \n \n \n logo_content_url\n \n as \n \n logo_content_url\n \n, \n \n \n logo_deleted\n \n as \n \n logo_deleted\n \n, \n \n \n logo_file_name\n \n as \n \n logo_file_name\n \n, \n \n \n logo_height\n \n as \n \n logo_height\n \n, \n \n \n logo_id\n \n as \n \n logo_id\n \n, \n \n \n logo_inline\n \n as \n \n logo_inline\n \n, \n \n \n logo_mapped_content_url\n \n as \n \n logo_mapped_content_url\n \n, \n \n \n logo_size\n \n as \n \n logo_size\n \n, \n \n \n logo_url\n \n as \n \n logo_url\n \n, \n \n \n logo_width\n \n as \n \n logo_width\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n subdomain\n \n as \n \n subdomain\n \n, \n \n \n url\n \n as \n \n url\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as brand_id,\n brand_url,\n name,\n subdomain,\n active as is_active\n from fields\n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__brand\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:49.230901Z", "completed_at": "2024-08-26T20:48:49.467286Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:49.467814Z", "completed_at": "2024-08-26T20:48:49.467820Z"}], 
"thread_id": "Thread-4 (worker)", "execution_time": 0.26126694679260254, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__daylight_time", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__daylight_time_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n daylight_end_utc\n \n as \n \n daylight_end_utc\n \n, \n \n \n daylight_offset\n \n as \n \n daylight_offset\n \n, \n \n \n daylight_start_utc\n \n as \n \n daylight_start_utc\n \n, \n \n \n time_zone\n \n as \n \n time_zone\n \n, \n \n \n year\n \n as \n \n year\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n daylight_end_utc,\n daylight_offset,\n daylight_start_utc,\n time_zone,\n year,\n daylight_offset * 60 as daylight_offset_minutes\n \n from fields\n)\n\nselect * from final", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__daylight_time\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:49.287217Z", "completed_at": "2024-08-26T20:48:49.534673Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:49.535208Z", "completed_at": "2024-08-26T20:48:49.535216Z"}], "thread_id": "Thread-1 (worker)", "execution_time": 0.2745029926300049, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__organization_tag", "compiled": true, "compiled_code": "--To disable this model, set the using_organization_tags variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__organization_tag_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n organization_id\n \n as \n \n organization_id\n \n, \n \n \n tag\n \n as \n \n tag\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n organization_id,\n \n tag\n \n as tags\n from fields\n)\n\nselect * \nfrom final", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__organization_tag\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:49.351189Z", "completed_at": "2024-08-26T20:48:49.579317Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:49.579854Z", "completed_at": "2024-08-26T20:48:49.579860Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.28150010108947754, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__group", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__group_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n, \n \n \n url\n \n as \n \n url\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as group_id,\n name\n from fields\n \n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__group\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:49.370827Z", "completed_at": "2024-08-26T20:48:49.632807Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:49.637942Z", "completed_at": "2024-08-26T20:48:49.637948Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.2908198833465576, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__organization", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__organization_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n details\n \n as \n \n details\n \n, \n \n \n external_id\n \n as \n \n external_id\n \n, \n \n \n group_id\n \n as \n \n group_id\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n notes\n \n as \n \n notes\n \n, \n \n \n shared_comments\n \n as \n \n shared_comments\n \n, \n \n \n shared_tickets\n \n as \n \n shared_tickets\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n, \n \n \n url\n \n as \n \n url\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as organization_id,\n created_at,\n updated_at,\n details,\n name,\n external_id\n\n \n\n\n\n\n\n from fields\n)\n\nselect * \nfrom final", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__organization\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:49.493555Z", "completed_at": "2024-08-26T20:48:49.722099Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:49.722658Z", "completed_at": "2024-08-26T20:48:49.722664Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.2533721923828125, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__schedule_holiday", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__schedule_holiday_tmp\"\n),\n\nfields as (\n\n select\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n end_date\n \n as \n \n end_date\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n schedule_id\n \n as \n \n schedule_id\n \n, \n \n \n start_date\n \n as \n \n start_date\n \n\n\n\n from base\n),\n\nfinal as (\n \n select\n _fivetran_deleted,\n cast(_fivetran_synced as timestamp ) as _fivetran_synced,\n cast(end_date as timestamp ) as holiday_end_date_at,\n cast(id as TEXT ) as holiday_id,\n name as holiday_name,\n cast(schedule_id as TEXT ) as schedule_id,\n cast(start_date as timestamp ) as holiday_start_date_at\n from fields\n)\n\nselect *\nfrom final", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__schedule_holiday\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:49.562322Z", "completed_at": "2024-08-26T20:48:49.821582Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:49.822122Z", "completed_at": "2024-08-26T20:48:49.822130Z"}], "thread_id": "Thread-1 (worker)", "execution_time": 0.2846720218658447, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__ticket_comment", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_comment_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for 
package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n body\n \n as \n \n body\n \n, \n cast(null as integer) as \n \n call_duration\n \n , \n cast(null as integer) as \n \n call_id\n \n , \n \n \n created\n \n as \n \n created\n \n, \n \n \n facebook_comment\n \n as \n \n facebook_comment\n \n, \n \n \n id\n \n as \n \n id\n \n, \n cast(null as integer) as \n \n location\n \n , \n \n \n public\n \n as \n \n public\n \n, \n cast(null as integer) as \n \n recording_url\n \n , \n cast(null as timestamp) as \n \n started_at\n \n , \n \n \n ticket_id\n \n as \n \n ticket_id\n \n, \n cast(null as integer) as \n \n transcription_status\n \n , \n cast(null as integer) as \n \n transcription_text\n \n , \n cast(null as integer) as \n \n trusted\n \n , \n \n \n tweet\n \n as \n \n tweet\n \n, \n \n \n user_id\n \n as \n \n user_id\n \n, \n \n \n voice_comment\n \n as \n \n voice_comment\n \n, \n cast(null as integer) as \n \n voice_comment_transcription_visible\n \n \n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_comment_id,\n _fivetran_synced,\n body,\n cast(created as timestamp) as created_at,\n public as is_public,\n ticket_id,\n user_id,\n facebook_comment as is_facebook_comment,\n tweet as is_tweet,\n voice_comment as is_voice_comment\n from fields\n)\n\nselect * \nfrom final", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_comment\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:49.633777Z", "completed_at": "2024-08-26T20:48:49.858803Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:49.859327Z", "completed_at": "2024-08-26T20:48:49.859332Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.24948716163635254, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__schedule", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__schedule_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n end_time\n \n as \n \n end_time\n \n, \n \n \n end_time_utc\n \n as \n \n end_time_utc\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n start_time\n \n as \n \n start_time\n \n, \n \n \n start_time_utc\n \n as \n \n start_time_utc\n \n, \n \n \n time_zone\n \n as \n \n time_zone\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n cast(id as TEXT) as schedule_id, --need to convert from numeric to string for downstream models to work properly\n end_time,\n start_time,\n name as schedule_name,\n created_at,\n time_zone\n \n from fields\n where not coalesce(_fivetran_deleted, false)\n)\n\nselect * \nfrom final", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__schedule\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:49.662403Z", "completed_at": "2024-08-26T20:48:49.904828Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:49.905427Z", "completed_at": "2024-08-26T20:48:49.905434Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.2668118476867676, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__ticket_form_history", "compiled": true, "compiled_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_form_history_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_deleted\n \n as \n \n _fivetran_deleted\n \n, \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n active\n \n as \n \n active\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n display_name\n \n as \n \n display_name\n \n, \n \n \n end_user_visible\n \n as \n \n end_user_visible\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_form_id,\n cast(created_at as timestamp) as created_at,\n cast(updated_at as timestamp) as updated_at,\n display_name,\n active as is_active,\n name\n from fields\n where not coalesce(_fivetran_deleted, false)\n \n)\n\nselect * \nfrom final", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_form_history\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:49.747113Z", "completed_at": "2024-08-26T20:48:49.987437Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:49.988175Z", "completed_at": "2024-08-26T20:48:49.988182Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.26736998558044434, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__ticket_field_history", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_field_history_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n field_name\n \n as \n \n field_name\n \n, \n \n \n ticket_id\n \n as \n \n ticket_id\n \n, \n \n \n updated\n \n as \n \n updated\n \n, \n \n \n user_id\n \n as \n \n user_id\n \n, \n \n \n value\n \n as \n \n value\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n field_name,\n cast(updated as timestamp) as valid_starting_at,\n cast(lead(updated) over (partition by ticket_id, field_name order by updated) as timestamp) as valid_ending_at,\n value,\n user_id\n from fields\n)\n\nselect * \nfrom final", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_field_history\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:49.847579Z", "completed_at": "2024-08-26T20:48:50.100185Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.100749Z", "completed_at": "2024-08-26T20:48:50.100757Z"}], "thread_id": "Thread-1 (worker)", "execution_time": 0.27952003479003906, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__ticket_schedule", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_schedule_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n schedule_id\n \n as \n \n schedule_id\n \n, \n \n \n ticket_id\n \n as \n \n ticket_id\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n cast(created_at as timestamp) as created_at,\n cast(schedule_id as TEXT) as schedule_id --need to convert from numeric to string for downstream models to work properly\n from fields\n)\n\nselect * \nfrom final", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_schedule\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:49.883117Z", "completed_at": "2024-08-26T20:48:50.173723Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.174244Z", "completed_at": "2024-08-26T20:48:50.174251Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.31447410583496094, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__ticket_tag", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_tag_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n ticket_id\n \n as \n \n ticket_id\n \n, \n \n \n tag\n \n as \n \n tag\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n ticket_id,\n \n tag as tags\n \n from fields\n)\n\nselect * \nfrom final", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_tag\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:49.929768Z", "completed_at": "2024-08-26T20:48:50.226156Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.226674Z", "completed_at": "2024-08-26T20:48:50.226680Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.32375216484069824, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__ticket", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n allow_channelback\n \n as \n \n allow_channelback\n \n, \n \n \n assignee_id\n \n as \n \n assignee_id\n \n, \n \n \n brand_id\n \n as \n \n brand_id\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n description\n \n as \n \n description\n \n, \n \n \n due_at\n \n as \n \n due_at\n \n, \n \n \n external_id\n \n as \n \n external_id\n \n, \n \n \n forum_topic_id\n \n as \n \n forum_topic_id\n \n, \n \n \n group_id\n \n as \n \n group_id\n \n, \n \n \n has_incidents\n \n as \n \n has_incidents\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n is_public\n \n as \n \n is_public\n \n, \n \n \n merged_ticket_ids\n \n as \n \n merged_ticket_ids\n \n, \n \n \n organization_id\n \n as \n \n organization_id\n \n, \n \n \n priority\n \n as \n \n priority\n \n, \n \n \n problem_id\n \n as \n \n problem_id\n \n, \n \n \n recipient\n \n as \n \n recipient\n \n, \n \n \n requester_id\n \n as \n \n requester_id\n \n, \n \n \n status\n \n as \n \n status\n \n, \n \n \n subject\n \n as \n \n subject\n \n, \n \n \n submitter_id\n \n as \n \n submitter_id\n \n, \n cast(null as integer) as \n \n system_ccs\n \n , \n \n \n system_client\n \n as \n \n system_client\n \n, \n cast(null as TEXT) as \n \n system_ip_address\n \n , \n cast(null as integer) as \n \n system_json_email_identifier\n \n , \n cast(null as float) as \n \n system_latitude\n \n , \n cast(null as TEXT) as \n \n system_location\n \n , \n cast(null as float) as \n \n system_longitude\n \n , \n cast(null as integer) as \n \n system_machine_generated\n \n , \n cast(null as integer) as \n \n system_message_id\n \n , \n cast(null as integer) as \n \n system_raw_email_identifier\n \n , \n \n \n ticket_form_id\n \n as \n \n ticket_form_id\n \n, \n \n \n type\n \n as \n \n type\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n, \n \n \n url\n \n as \n \n url\n \n, \n \n \n via_channel\n \n as \n \n via_channel\n \n, \n \n \n via_source_from_address\n \n as \n \n via_source_from_address\n \n, \n \n \n via_source_from_id\n \n as \n \n via_source_from_id\n \n, \n \n \n via_source_from_title\n \n as \n \n via_source_from_title\n \n, \n \n \n via_source_rel\n \n as \n \n via_source_rel\n \n, \n \n \n via_source_to_address\n \n as \n \n via_source_to_address\n \n, \n \n \n via_source_to_name\n \n as \n \n via_source_to_name\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n id as ticket_id,\n _fivetran_synced,\n assignee_id,\n brand_id,\n cast(created_at as timestamp) as created_at,\n cast(updated_at as timestamp) as updated_at,\n description,\n due_at,\n group_id,\n external_id,\n is_public,\n organization_id,\n priority,\n recipient,\n requester_id,\n status,\n subject,\n problem_id,\n submitter_id,\n ticket_form_id,\n type,\n url,\n via_channel as created_channel,\n via_source_from_id as source_from_id,\n via_source_from_title as source_from_title,\n via_source_rel as source_rel,\n via_source_to_address as source_to_address,\n via_source_to_name as source_to_name\n\n \n\n\n\n\n\n from fields\n)\n\nselect * \nfrom final", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket\""}, 
{"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.254419Z", "completed_at": "2024-08-26T20:48:50.267581Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.268645Z", "completed_at": "2024-08-26T20:48:50.268648Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.015552043914794922, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.not_null_stg_zendesk__domain_name_organization_id.a2b5ff8fd3", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect organization_id\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__domain_name\"\nwhere organization_id is null\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.270542Z", "completed_at": "2024-08-26T20:48:50.273652Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.274173Z", "completed_at": "2024-08-26T20:48:50.274178Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.004628896713256836, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.not_null_stg_zendesk__brand_brand_id.a2419e1741", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect brand_id\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__brand\"\nwhere brand_id is null\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.275760Z", "completed_at": "2024-08-26T20:48:50.280373Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.280956Z", "completed_at": "2024-08-26T20:48:50.280961Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.006227970123291016, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.unique_stg_zendesk__brand_brand_id.fdf8e23c9e", "compiled": true, "compiled_code": "\n \n \n\nselect\n brand_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__brand\"\nwhere brand_id is not null\ngroup by brand_id\nhaving count(*) > 1\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.282568Z", "completed_at": "2024-08-26T20:48:50.288547Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.289084Z", "completed_at": "2024-08-26T20:48:50.289089Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.00752711296081543, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.dbt_utils_unique_combination_of_columns_stg_zendesk__daylight_time_time_zone__year.88227aef3d", "compiled": true, "compiled_code": "\n\n\n\n\n\nwith validation_errors as (\n\n select\n time_zone, year\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__daylight_time\"\n group by time_zone, year\n having count(*) > 1\n\n)\n\nselect *\nfrom validation_errors\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.015035Z", "completed_at": "2024-08-26T20:48:50.267257Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.268367Z", "completed_at": "2024-08-26T20:48:50.268373Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.2769589424133301, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__time_zone", "compiled": true, "compiled_code": "--To disable this model, set the using_schedules 
variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__time_zone_tmp\"\n\n),\n\nfields as (\n\n select\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n standard_offset\n \n as \n \n standard_offset\n \n, \n \n \n time_zone\n \n as \n \n time_zone\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n standard_offset,\n time_zone,\n -- the standard_offset is a string written as [+/-]HH:MM\n -- let's convert it to an integer value of minutes\n cast( \n\n \n \n\n split_part(\n standard_offset,\n ':',\n 1\n )\n\n\n \n\n as integer ) * 60 +\n (cast( \n\n \n \n\n split_part(\n standard_offset,\n ':',\n 2\n )\n\n\n \n\n as integer ) *\n (case when standard_offset like '-%' then -1 else 1 end) ) as standard_offset_minutes\n \n from fields\n)\n\nselect * \nfrom final", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__time_zone\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.291751Z", "completed_at": "2024-08-26T20:48:50.298381Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.298990Z", "completed_at": "2024-08-26T20:48:50.298994Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.008928060531616211, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.not_null_stg_zendesk__group_group_id.7659ed83ec", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect group_id\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__group\"\nwhere group_id is null\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.295740Z", "completed_at": "2024-08-26T20:48:50.299745Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.300996Z", "completed_at": "2024-08-26T20:48:50.301001Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.0066030025482177734, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.unique_stg_zendesk__group_group_id.f0658dabcd", "compiled": true, "compiled_code": "\n \n \n\nselect\n group_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__group\"\nwhere group_id is not null\ngroup by group_id\nhaving count(*) > 1\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.301994Z", "completed_at": "2024-08-26T20:48:50.311371Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.311951Z", "completed_at": "2024-08-26T20:48:50.311957Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.011796951293945312, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__organization_aggregates", "compiled": true, "compiled_code": "with organizations as (\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__organization\"\n\n--If you use organization tags this will be included, if not it will be ignored.\n\n), organization_tags as (\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__organization_tag\"\n\n), tag_aggregates as (\n select\n organizations.organization_id,\n \n string_agg(organization_tags.tags, ', ')\n\n as organization_tags\n from organizations\n\n left join organization_tags\n using (organization_id)\n\n group by 
1\n\n\n--If you use using_domain_names tags this will be included, if not it will be ignored.\n\n), domain_names as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__domain_name\"\n\n), domain_aggregates as (\n select\n organizations.organization_id,\n \n string_agg(domain_names.domain_name, ', ')\n\n as domain_names\n from organizations\n\n left join domain_names\n using(organization_id)\n \n group by 1\n\n\n\n), final as (\n select\n organizations.*\n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,tag_aggregates.organization_tags\n \n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n ,domain_aggregates.domain_names\n \n\n from organizations\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n left join domain_aggregates\n using(organization_id)\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n left join tag_aggregates\n using(organization_id)\n \n)\n\nselect *\nfrom final", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__organization_aggregates\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.307916Z", "completed_at": "2024-08-26T20:48:50.312699Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.313918Z", "completed_at": "2024-08-26T20:48:50.313923Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.007318019866943359, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.not_null_stg_zendesk__organization_organization_id.de7b98c06a", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect organization_id\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__organization\"\nwhere organization_id is null\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.314888Z", "completed_at": "2024-08-26T20:48:50.321269Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.321905Z", "completed_at": "2024-08-26T20:48:50.321911Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.008888721466064453, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.unique_stg_zendesk__organization_organization_id.152be1ab31", "compiled": true, "compiled_code": "\n \n \n\nselect\n organization_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__organization\"\nwhere organization_id is not null\ngroup by organization_id\nhaving count(*) > 1\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.318606Z", "completed_at": "2024-08-26T20:48:50.322693Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.323889Z", "completed_at": "2024-08-26T20:48:50.323893Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.006762981414794922, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.not_null_stg_zendesk__schedule_holiday_holiday_id.52eb08f782", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect holiday_id\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__schedule_holiday\"\nwhere holiday_id is null\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": 
"2024-08-26T20:48:50.325045Z", "completed_at": "2024-08-26T20:48:50.332146Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.332771Z", "completed_at": "2024-08-26T20:48:50.332776Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.009713888168334961, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.unique_stg_zendesk__schedule_holiday_holiday_id.0341d5635a", "compiled": true, "compiled_code": "\n \n \n\nselect\n holiday_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__schedule_holiday\"\nwhere holiday_id is not null\ngroup by holiday_id\nhaving count(*) > 1\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.328683Z", "completed_at": "2024-08-26T20:48:50.333519Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.334611Z", "completed_at": "2024-08-26T20:48:50.334615Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.007200002670288086, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.not_null_stg_zendesk__ticket_comment_ticket_comment_id.b821f4a606", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_comment_id\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_comment\"\nwhere ticket_comment_id is null\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.335533Z", "completed_at": "2024-08-26T20:48:50.341756Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.342348Z", "completed_at": "2024-08-26T20:48:50.342354Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.008540153503417969, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.unique_stg_zendesk__ticket_comment_ticket_comment_id.ba353330cd", "compiled": true, "compiled_code": "\n \n \n\nselect\n ticket_comment_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_comment\"\nwhere ticket_comment_id is not null\ngroup by ticket_comment_id\nhaving count(*) > 1\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.339447Z", "completed_at": "2024-08-26T20:48:50.343120Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.344351Z", "completed_at": "2024-08-26T20:48:50.344356Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.006310939788818359, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__latest_ticket_form", "compiled": true, "compiled_code": "--To disable this model, set the using_ticket_form_history variable within your dbt_project.yml file to False.\n\n\nwith ticket_form_history as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_form_history\"\n),\n\nlatest_ticket_form as (\n select\n *,\n row_number() over(partition by ticket_form_id order by updated_at desc) as latest_form_index\n from ticket_form_history\n),\n\nfinal as (\n select \n ticket_form_id,\n created_at,\n updated_at,\n display_name,\n is_active,\n name,\n latest_form_index\n from latest_ticket_form\n\n where latest_form_index = 1\n)\n\nselect *\nfrom final", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__latest_ticket_form\""}, {"status": 
"success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.345410Z", "completed_at": "2024-08-26T20:48:50.350018Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.367907Z", "completed_at": "2024-08-26T20:48:50.367918Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.024461746215820312, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.not_null_stg_zendesk__ticket_form_history_ticket_form_id.1afe781a17", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_form_id\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_form_history\"\nwhere ticket_form_id is null\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.350316Z", "completed_at": "2024-08-26T20:48:50.376212Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.376730Z", "completed_at": "2024-08-26T20:48:50.376736Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.02802109718322754, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__field_calendar_spine", "compiled": true, "compiled_code": "\n\nwith __dbt__cte__int_zendesk__calendar_spine as (\n-- depends_on: \"postgres\".\"zendesk_integration_tests_55\".\"ticket_data\"\n\nwith spine as (\n\n \n \n \n\n \n \n \n \n\n \n\n \n\n\n\n\n\n\n\n\nwith rawdata as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n + \n \n p8.generated_number * power(2, 8)\n + \n \n p9.generated_number * power(2, 9)\n + \n \n p10.generated_number * power(2, 10)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n cross join \n \n p as p8\n cross join \n \n p as p9\n cross join \n \n p as p10\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 1663\n order by generated_number\n\n\n\n),\n\nall_periods as (\n\n select (\n \n\n cast('2020-02-13' as date) + ((interval '1 day') * (row_number() over (order by 1) - 1))\n\n\n ) as date_day\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_day <= \n\n current_date + ((interval '1 week') * (1))\n\n\n\n)\n\nselect * from filtered\n\n\n\n), recast as (\n\n select cast(date_day as date) as date_day\n from spine\n\n)\n\nselect *\nfrom recast\n), calendar as (\n\n select *\n from __dbt__cte__int_zendesk__calendar_spine\n \n where date_day >= (select max(date_day) from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__field_calendar_spine\")\n \n\n), ticket as (\n\n select \n *,\n -- closed tickets cannot be re-opened or updated, and solved tickets are automatically closed after a pre-defined number of days configured in your Zendesk settings\n cast( date_trunc('day', case when status != 'closed' then \n current_timestamp::timestamp\n else updated_at end) as date) as open_until\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket\"\n 
\n), joined as (\n\n select \n calendar.date_day,\n ticket.ticket_id\n from calendar\n inner join ticket\n on calendar.date_day >= cast(ticket.created_at as date)\n -- use this variable to extend the ticket's history past its close date (for reporting/data viz purposes :-)\n and \n\n ticket.open_until + ((interval '1 month') * (0))\n\n >= calendar.date_day\n\n), surrogate_key as (\n\n select\n *,\n md5(cast(coalesce(cast(date_day as TEXT), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(ticket_id as TEXT), '_dbt_utils_surrogate_key_null_') as TEXT)) as ticket_day_id\n from joined\n\n)\n\nselect *\nfrom surrogate_key", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__field_calendar_spine\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.373302Z", "completed_at": "2024-08-26T20:48:50.382274Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.387583Z", "completed_at": "2024-08-26T20:48:50.387591Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.01577901840209961, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__ticket_aggregates", "compiled": true, "compiled_code": "with tickets as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_tags as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_tag\"\n\n), brands as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__brand\"\n \n), ticket_tag_aggregate as (\n select\n ticket_tags.ticket_id,\n \n string_agg(ticket_tags.tags, ', ')\n\n as ticket_tags\n from ticket_tags\n group by 1\n\n), final as (\n select \n tickets.*,\n case when lower(tickets.type) = 'incident'\n then true\n else false\n end as is_incident,\n brands.name as ticket_brand_name,\n ticket_tag_aggregate.ticket_tags\n from tickets\n\n left join ticket_tag_aggregate\n using(ticket_id)\n\n left join brands\n on brands.brand_id = tickets.brand_id\n)\n\nselect *\nfrom final", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_aggregates\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.390495Z", "completed_at": "2024-08-26T20:48:50.393554Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.394057Z", "completed_at": "2024-08-26T20:48:50.394062Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.005106925964355469, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__updates", "compiled": true, "compiled_code": "with ticket_history as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_field_history\"\n\n), ticket_comment as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_comment\"\n\n), tickets as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), updates_union as (\n select \n ticket_id,\n field_name,\n value,\n null as is_public,\n user_id,\n valid_starting_at,\n valid_ending_at\n from ticket_history\n\n union all\n\n select\n ticket_id,\n cast('comment' as TEXT) as field_name,\n body as value,\n is_public,\n user_id,\n created_at as valid_starting_at,\n lead(created_at) over (partition by ticket_id order by created_at) as valid_ending_at\n from ticket_comment\n\n), final 
as (\n select\n updates_union.*,\n tickets.created_at as ticket_created_date\n from updates_union\n\n left join tickets\n on tickets.ticket_id = updates_union.ticket_id\n)\n\nselect *\nfrom final", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__updates\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.395872Z", "completed_at": "2024-08-26T20:48:50.399717Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.400242Z", "completed_at": "2024-08-26T20:48:50.400250Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.005500078201293945, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.not_null_stg_zendesk__ticket_ticket_id.a8229e6981", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_id\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket\"\nwhere ticket_id is null\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.401871Z", "completed_at": "2024-08-26T20:48:50.405405Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.406000Z", "completed_at": "2024-08-26T20:48:50.406004Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.005134105682373047, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.unique_stg_zendesk__ticket_ticket_id.4be7124521", "compiled": true, "compiled_code": "\n \n \n\nselect\n ticket_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket\"\nwhere ticket_id is not null\ngroup by ticket_id\nhaving count(*) > 1\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.127736Z", "completed_at": "2024-08-26T20:48:50.388962Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.390228Z", "completed_at": "2024-08-26T20:48:50.390232Z"}], "thread_id": "Thread-1 (worker)", "execution_time": 0.36815834045410156, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk_source.stg_zendesk__user_tag", "compiled": true, "compiled_code": "--To disable this model, set the using_user_tags variable within your dbt_project.yml file to False.\n\n\nwith base as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user_tag_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. 
It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n user_id\n \n as \n \n user_id\n \n, \n \n \n tag\n \n as \n \n tag\n \n\n\n\n \n from base\n),\n\nfinal as (\n \n select \n user_id,\n \n tag\n \n as tags\n from fields\n)\n\nselect * \nfrom final", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user_tag\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.407781Z", "completed_at": "2024-08-26T20:48:50.539554Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.541063Z", "completed_at": "2024-08-26T20:48:50.541066Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.13493704795837402, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__schedule_spine", "compiled": true, "compiled_code": "\n\n/*\n The purpose of this model is to create a spine of appropriate timezone offsets to use for schedules, as offsets may change due to Daylight Savings.\n End result will include `valid_from` and `valid_until` columns which we will use downstream to determine which schedule-offset to associate with each ticket (ie standard time vs daylight time)\n*/\n\nwith __dbt__cte__int_zendesk__calendar_spine as (\n-- depends_on: \"postgres\".\"zendesk_integration_tests_55\".\"ticket_data\"\n\nwith spine as (\n\n \n \n \n\n \n \n \n \n\n \n\n \n\n\n\n\n\n\n\n\nwith rawdata as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n + \n \n p8.generated_number * power(2, 8)\n + \n \n p9.generated_number * power(2, 9)\n + \n \n p10.generated_number * power(2, 10)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n cross join \n \n p as p8\n cross join \n \n p as p9\n cross join \n \n p as p10\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 1663\n order by generated_number\n\n\n\n),\n\nall_periods as (\n\n select (\n \n\n cast('2020-02-13' as date) + ((interval '1 day') * (row_number() over (order by 1) - 1))\n\n\n ) as date_day\n from rawdata\n\n),\n\nfiltered as (\n\n select *\n from all_periods\n where date_day <= \n\n current_date + ((interval '1 week') * (1))\n\n\n\n)\n\nselect * from filtered\n\n\n\n), recast as (\n\n select cast(date_day as date) as date_day\n from spine\n\n)\n\nselect *\nfrom recast\n), timezone as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__time_zone\"\n\n), daylight_time as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__daylight_time\"\n\n), schedule as (\n\n select *\n from 
\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__schedule\" \n\n-- in the below CTE we want to explode out each holiday period into individual days, to prevent potential fanouts downstream in joins to schedules.\n), schedule_holiday as ( \n\n select\n _fivetran_synced,\n cast(date_day as timestamp ) as holiday_start_date_at, -- For each day within a holiday we want to give it its own record. In the later CTE holiday_start_end_times, we transform these timestamps into minutes-from-beginning-of-the-week.\n cast(date_day as timestamp ) as holiday_end_date_at, -- Since each day within a holiday now gets its own record, the end_date will then be the same day as the start_date. In the later CTE holiday_start_end_times, we transform these timestamps into minutes-from-beginning-of-the-week.\n holiday_id,\n holiday_name,\n schedule_id\n\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__schedule_holiday\" \n inner join __dbt__cte__int_zendesk__calendar_spine \n on holiday_start_date_at <= cast(date_day as timestamp )\n and holiday_end_date_at >= cast(date_day as timestamp )\n\n), timezone_with_dt as (\n\n select \n timezone.*,\n daylight_time.daylight_start_utc,\n daylight_time.daylight_end_utc,\n daylight_time.daylight_offset_minutes\n\n from timezone \n left join daylight_time \n on timezone.time_zone = daylight_time.time_zone\n\n), order_timezone_dt as (\n\n select \n *,\n -- will be null for timezones without any daylight savings records (and the first entry)\n -- we will coalesce the first entry date with .... the X years ago\n lag(daylight_end_utc, 1) over (partition by time_zone order by daylight_end_utc asc) as last_daylight_end_utc,\n -- will be null for timezones without any daylight savings records (and the last entry)\n -- we will coalesce the last entry date with the current date \n lead(daylight_start_utc, 1) over (partition by time_zone order by daylight_start_utc asc) as next_daylight_start_utc\n\n from timezone_with_dt\n\n), split_timezones as (\n\n -- standard schedule (includes timezones without DT)\n -- starts: when the last Daylight Savings ended\n -- ends: when the next Daylight Savings starts\n select \n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- last_daylight_end_utc is null for the first record of the time_zone's daylight time, or if the TZ doesn't use DT\n coalesce(last_daylight_end_utc, cast('1970-01-01' as date)) as valid_from,\n\n -- daylight_start_utc is null for timezones that don't use DT\n coalesce(daylight_start_utc, cast( \n\n \n current_timestamp::timestamp\n + ((interval '1 year') * (1))\n\n as date)) as valid_until\n\n from order_timezone_dt\n\n union all \n\n -- DT schedule (excludes timezones without it)\n -- starts: when this Daylight Savings started\n -- ends: when this Daylight Savings ends\n select \n time_zone,\n -- Pacific Time is -8h during standard time and -7h during DT\n standard_offset_minutes + daylight_offset_minutes as offset_minutes,\n daylight_start_utc as valid_from,\n daylight_end_utc as valid_until\n\n from order_timezone_dt\n where daylight_offset_minutes is not null\n\n union all\n\n select\n time_zone,\n standard_offset_minutes as offset_minutes,\n\n -- Get the latest daylight_end_utc time and set that as the valid_from\n max(daylight_end_utc) as valid_from,\n\n -- If the latest_daylight_end_time_utc is less than todays timestamp, that means DST has ended. 
Therefore, we will make the valid_until in the future.\n cast( \n\n \n current_timestamp::timestamp\n + ((interval '1 year') * (1))\n\n as date) as valid_until\n\n from order_timezone_dt\n group by 1, 2\n -- We only want to apply this logic to time_zones that had daylight saving time and it ended at a point. For example, Hong Kong ended DST in 1979.\n having cast(max(daylight_end_utc) as date) < cast(\n current_timestamp::timestamp\n as date)\n\n), calculate_schedules as (\n\n select \n schedule.schedule_id,\n schedule.time_zone,\n schedule.start_time,\n schedule.end_time,\n schedule.created_at,\n schedule.schedule_name,\n schedule.start_time - coalesce(split_timezones.offset_minutes, 0) as start_time_utc,\n schedule.end_time - coalesce(split_timezones.offset_minutes, 0) as end_time_utc,\n coalesce(split_timezones.offset_minutes, 0) as offset_minutes_to_add,\n -- we'll use these to determine which schedule version to associate tickets with\n cast(split_timezones.valid_from as timestamp) as valid_from,\n cast(split_timezones.valid_until as timestamp) as valid_until\n\n from schedule\n left join split_timezones\n on split_timezones.time_zone = schedule.time_zone\n\n-- Now we need to take holidays into consideration and perform the following transformations to account for holidays in existing schedules\n), holiday_start_end_times as (\n\n select\n calculate_schedules.*,\n schedule_holiday.holiday_name,\n schedule_holiday.holiday_start_date_at,\n cast(\n\n schedule_holiday.holiday_end_date_at + ((interval '1 second') * (86400))\n\n as timestamp) as holiday_end_date_at, -- add 24*60*60 seconds\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n schedule_holiday.holiday_start_date_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) as holiday_week_start,\n cast(cast(\n\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n schedule_holiday.holiday_end_date_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) + ((interval '1 day') * (6))\n\n as date) as timestamp) as holiday_week_end\n from schedule_holiday\n inner join calculate_schedules\n on calculate_schedules.schedule_id = schedule_holiday.schedule_id\n and schedule_holiday.holiday_start_date_at >= calculate_schedules.valid_from \n and schedule_holiday.holiday_start_date_at < calculate_schedules.valid_until\n\n-- Let's calculate the start and end date of the Holiday in terms of minutes from Sunday (like other Zendesk schedules)\n), holiday_minutes as(\n\n select\n holiday_start_end_times.*,\n \n (\n (\n ((holiday_start_date_at)::date - (holiday_week_start)::date)\n * 24 + date_part('hour', (holiday_start_date_at)::timestamp) - date_part('hour', (holiday_week_start)::timestamp))\n * 60 + date_part('minute', (holiday_start_date_at)::timestamp) - date_part('minute', (holiday_week_start)::timestamp))\n - coalesce(timezone.standard_offset_minutes, 0) as minutes_from_sunday_start,\n \n (\n (\n ((holiday_end_date_at)::date - (holiday_week_start)::date)\n * 24 + date_part('hour', (holiday_end_date_at)::timestamp) - date_part('hour', (holiday_week_start)::timestamp))\n * 60 + date_part('minute', (holiday_end_date_at)::timestamp) - date_part('minute', (holiday_week_start)::timestamp))\n - coalesce(timezone.standard_offset_minutes, 0) as minutes_from_sunday_end\n from holiday_start_end_times\n left join timezone\n on timezone.time_zone = holiday_start_end_times.time_zone\n\n-- Determine which schedule days include a holiday\n), holiday_check as (\n\n 
select\n *,\n case when minutes_from_sunday_start < start_time_utc and minutes_from_sunday_end > end_time_utc \n then holiday_name \n end as holiday_name_check\n from holiday_minutes\n\n-- Consolidate the holiday records that were just created\n), holiday_consolidated as (\n\n select \n schedule_id, \n time_zone, \n schedule_name, \n valid_from, \n valid_until, \n start_time_utc, \n end_time_utc, \n holiday_week_start,\n cast(\n\n holiday_week_end + ((interval '1 second') * (86400))\n\n as timestamp) as holiday_week_end,\n max(holiday_name_check) as holiday_name_check\n from holiday_check\n group by 1,2,3,4,5,6,7,8,9\n\n-- Since we have holiday schedules and normal schedules, we need to union them into a holistic schedule spine\n), spine_union as (\n\n select\n schedule_id, \n time_zone, \n schedule_name, \n valid_from, \n valid_until, \n start_time_utc, \n end_time_utc, \n holiday_week_start,\n holiday_week_end,\n holiday_name_check\n from holiday_consolidated\n\n union all\n\n select\n schedule_id, \n time_zone, \n schedule_name, \n valid_from, \n valid_until, \n start_time_utc, \n end_time_utc, \n null as holiday_week_start,\n null as holiday_week_end,\n null as holiday_name_check\n from calculate_schedules\n\n-- Now that we have an understanding of which weeks are holidays, let's consolidate them with non holiday weeks\n), all_periods as (\n\n select distinct\n schedule_id,\n holiday_week_start as period_start,\n holiday_week_end as period_end,\n start_time_utc,\n end_time_utc,\n holiday_name_check,\n true as is_holiday_week\n from spine_union\n where holiday_week_start is not null\n and holiday_week_end is not null\n\n union all\n\n select distinct\n schedule_id,\n valid_from as period_start,\n valid_until as period_end,\n start_time_utc,\n end_time_utc,\n cast(null as TEXT) as holiday_name_check,\n false as is_holiday_week\n from spine_union\n\n-- We have holiday and non holiday schedules together, now let's sort them to understand the previous end and next start of neighboring schedules\n), sorted_periods as (\n\n select distinct\n *,\n lag(period_end) over (partition by schedule_id order by period_start, start_time_utc) as prev_end,\n lead(period_start) over (partition by schedule_id order by period_start, start_time_utc) as next_start\n from all_periods\n\n-- We need to adjust some non holiday schedules in order to properly fill holiday gaps in the schedules later down the transformation\n), non_holiday_period_adjustments as (\n\n select\n schedule_id, \n period_start, \n period_end,\n prev_end,\n next_start,\n -- taking first_value/last_value because prev_end and next_start are inconsistent within the schedule partitions -- they all include a record that is outside the partition. So we need to ignore those erroneous records that slip in\n coalesce(greatest(case \n when not is_holiday_week and prev_end is not null then first_value(prev_end) over (partition by schedule_id, period_start order by start_time_utc rows between unbounded preceding and unbounded following)\n else period_start\n end, period_start), period_start) as valid_from,\n coalesce(case \n when not is_holiday_week and next_start is not null then last_value(next_start) over (partition by schedule_id, period_start order by start_time_utc rows between unbounded preceding and unbounded following)\n else period_end\n end, period_end) as valid_until,\n start_time_utc,\n end_time_utc,\n holiday_name_check,\n is_holiday_week\n from sorted_periods\n\n-- A few window function results will be leveraged downstream. 
Let's generate them now.\n), gap_starter as (\n select \n *,\n max(period_end) over (partition by schedule_id) as max_valid_until,\n last_value(next_start) over (partition by schedule_id, period_start order by valid_until rows between unbounded preceding and unbounded following) as lead_next_start,\n first_value(prev_end) over (partition by schedule_id, valid_from order by start_time_utc rows between unbounded preceding and unbounded following) as first_prev_end\n from non_holiday_period_adjustments\n\n-- There may be gaps in holiday and non holiday schedules, so we need to identify where these gaps are\n), gap_adjustments as(\n\n select \n *,\n -- In order to identify the gaps we check to see if the valid_from and previous valid_until are right next to one another. If we add two hours to the previous valid_until it should always be greater than the current valid_from.\n -- However, if the valid_from is greater instead then we can identify that this period has a gap that needs to be filled.\n case \n when cast(\n\n valid_until + ((interval '1 hour') * (2))\n\n as timestamp) < cast(lead_next_start as timestamp)\n then 'gap'\n when (lead_next_start is null and valid_from < max_valid_until and period_end != max_valid_until)\n then 'gap'\n else null\n end as is_schedule_gap\n\n from gap_starter\n\n-- We know where the gaps are, so now let's prime the data to fill those gaps\n), schedule_spine_primer as (\n\n select \n schedule_id, \n valid_from,\n valid_until,\n start_time_utc,\n end_time_utc,\n lead_next_start,\n max_valid_until,\n holiday_name_check,\n is_holiday_week,\n max(is_schedule_gap) over (partition by schedule_id, valid_until) as is_gap_period,\n lead(valid_from) over (partition by schedule_id order by valid_from, start_time_utc) as fill_primer\n from gap_adjustments\n\n-- We know the gaps and where they are, so let's fill them with the following union\n), final_union as (\n\n -- For all gap periods, let's properly create a schedule filled before the holiday.\n select \n schedule_id,\n valid_until as valid_from,\n coalesce(last_value(fill_primer) over (partition by schedule_id, valid_until order by start_time_utc rows between unbounded preceding and unbounded following), max_valid_until) as valid_until,\n start_time_utc, \n end_time_utc, \n cast(null as TEXT) as holiday_name_check,\n false as is_holiday_week\n from schedule_spine_primer\n where is_gap_period is not null\n\n union all\n\n -- Fill all other normal schedules.\n select\n schedule_id, \n valid_from,\n valid_until,\n start_time_utc,\n end_time_utc,\n holiday_name_check,\n is_holiday_week\n from schedule_spine_primer\n\n-- We can finally filter out the holiday_name_check results as the gap filling properly filled in the gaps for holidays\n), final as(\n\n select\n schedule_id, \n valid_from,\n valid_until,\n start_time_utc,\n end_time_utc,\n is_holiday_week\n from final_union\n where holiday_name_check is null\n)\n\nselect *\nfrom final", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__schedule_spine\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.535981Z", "completed_at": "2024-08-26T20:48:50.540096Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.541543Z", "completed_at": "2024-08-26T20:48:50.541546Z"}], "thread_id": "Thread-1 (worker)", "execution_time": 0.007249116897583008, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.not_null_stg_zendesk__time_zone_time_zone.b25b3452b1", "compiled": 
true, "compiled_code": "\n \n \n\n\n\nselect time_zone\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__time_zone\"\nwhere time_zone is null\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.544604Z", "completed_at": "2024-08-26T20:48:50.549744Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.550237Z", "completed_at": "2024-08-26T20:48:50.550242Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.0075299739837646484, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.unique_stg_zendesk__time_zone_time_zone.67995adbaf", "compiled": true, "compiled_code": "\n \n \n\nselect\n time_zone as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__time_zone\"\nwhere time_zone is not null\ngroup by time_zone\nhaving count(*) > 1\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.547530Z", "completed_at": "2024-08-26T20:48:50.550481Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.551935Z", "completed_at": "2024-08-26T20:48:50.551939Z"}], "thread_id": "Thread-1 (worker)", "execution_time": 0.009047985076904297, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__assignee_updates", "compiled": true, "compiled_code": "with ticket_updates as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__updates\"\n\n), ticket as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_requester as (\n select\n ticket.ticket_id,\n ticket.assignee_id,\n ticket_updates.valid_starting_at\n\n from ticket\n\n left join ticket_updates\n on ticket_updates.ticket_id = ticket.ticket_id\n and ticket_updates.user_id = ticket.assignee_id\n\n), final as (\n select \n ticket_id,\n assignee_id,\n max(valid_starting_at) as last_updated,\n count(*) as total_updates\n from ticket_requester\n\n group by 1, 2\n)\n\nselect * \nfrom final", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__assignee_updates\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.553737Z", "completed_at": "2024-08-26T20:48:50.561499Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.562313Z", "completed_at": "2024-08-26T20:48:50.562321Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.01099395751953125, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__requester_updates", "compiled": true, "compiled_code": "with ticket_updates as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__updates\"\n\n), ticket as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_requester as (\n select\n ticket.ticket_id,\n ticket.requester_id,\n ticket_updates.valid_starting_at\n\n from ticket\n\n left join ticket_updates\n on ticket_updates.ticket_id = ticket.ticket_id\n and ticket_updates.user_id = ticket.requester_id\n\n), final as (\n select \n ticket_id,\n requester_id,\n max(valid_starting_at) as last_updated,\n count(*) as total_updates\n from ticket_requester\n\n group by 1, 2\n)\n\nselect * \nfrom final", "relation_name": 
"\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__requester_updates\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.556921Z", "completed_at": "2024-08-26T20:48:50.562648Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.564397Z", "completed_at": "2024-08-26T20:48:50.564402Z"}], "thread_id": "Thread-1 (worker)", "execution_time": 0.009219169616699219, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__sla_policy_applied", "compiled": true, "compiled_code": "-- step 1, figure out when sla was applied to tickets\n\n-- more on SLA policies here: https://support.zendesk.com/hc/en-us/articles/204770038-Defining-and-using-SLA-policies-Professional-and-Enterprise-\n-- SLA policies are calculated for next_reply_time, first_reply_time, agent_work_time, requester_wait_time. If you're company uses other SLA metrics, and would like this\n-- package to support those, please reach out to the Fivetran team on Slack.\n\nwith ticket_field_history as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__updates\"\n\n), sla_policy_name as (\n\n select \n *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = ('sla_policy')\n\n), ticket as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_aggregates\"\n\n), sla_policy_applied as (\n\n select\n ticket_field_history.ticket_id,\n ticket.created_at as ticket_created_at,\n ticket_field_history.valid_starting_at,\n ticket.status as ticket_current_status,\n ticket_field_history.field_name as metric,\n case when ticket_field_history.field_name = 'first_reply_time' then row_number() over (partition by ticket_field_history.ticket_id, ticket_field_history.field_name order by ticket_field_history.valid_starting_at desc) else 1 end as latest_sla,\n case when ticket_field_history.field_name = 'first_reply_time' then ticket.created_at else ticket_field_history.valid_starting_at end as sla_applied_at,\n cast(\n\n ticket_field_history.value::json #>> '{minutes}'\n\n as integer ) as target,\n \n\n ticket_field_history.value::json #>> '{in_business_hours}'\n\n = 'true' as in_business_hours\n from ticket_field_history\n join ticket\n on ticket.ticket_id = ticket_field_history.ticket_id\n where ticket_field_history.value is not null\n and ticket_field_history.field_name in ('next_reply_time', 'first_reply_time', 'agent_work_time', 'requester_wait_time')\n\n), final as (\n select\n sla_policy_applied.*,\n sla_policy_name.value as sla_policy_name\n from sla_policy_applied\n left join sla_policy_name\n on sla_policy_name.ticket_id = sla_policy_applied.ticket_id\n and sla_policy_applied.valid_starting_at >= sla_policy_name.valid_starting_at\n and sla_policy_applied.valid_starting_at < coalesce(sla_policy_name.valid_ending_at, \n current_timestamp::timestamp\n) \n where sla_policy_applied.latest_sla = 1\n)\n\nselect *\nfrom final", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__sla_policy_applied\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.198280Z", "completed_at": "2024-08-26T20:48:50.539227Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.540579Z", "completed_at": "2024-08-26T20:48:50.540589Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.37294578552246094, "adapter_response": {}, "message": null, 
"failures": null, "unique_id": "model.zendesk_source.stg_zendesk__user", "compiled": true, "compiled_code": "with base as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user_tmp\"\n\n),\n\nfields as (\n\n select\n /*\n The below macro is used to generate the correct SQL for package staging models. It takes a list of columns \n that are expected/needed (staging_columns from dbt_zendesk_source/models/tmp/) and compares it with columns \n in the source (source_columns from dbt_zendesk_source/macros/).\n For more information refer to our dbt_fivetran_utils documentation (https://github.com/fivetran/dbt_fivetran_utils.git).\n */\n \n \n \n _fivetran_synced\n \n as \n \n _fivetran_synced\n \n, \n \n \n active\n \n as \n \n active\n \n, \n \n \n alias\n \n as \n \n alias\n \n, \n \n \n authenticity_token\n \n as \n \n authenticity_token\n \n, \n \n \n chat_only\n \n as \n \n chat_only\n \n, \n \n \n created_at\n \n as \n \n created_at\n \n, \n \n \n details\n \n as \n \n details\n \n, \n \n \n email\n \n as \n \n email\n \n, \n \n \n external_id\n \n as \n \n external_id\n \n, \n \n \n id\n \n as \n \n id\n \n, \n \n \n last_login_at\n \n as \n \n last_login_at\n \n, \n \n \n locale\n \n as \n \n locale\n \n, \n \n \n locale_id\n \n as \n \n locale_id\n \n, \n \n \n moderator\n \n as \n \n moderator\n \n, \n \n \n name\n \n as \n \n name\n \n, \n \n \n notes\n \n as \n \n notes\n \n, \n \n \n only_private_comments\n \n as \n \n only_private_comments\n \n, \n \n \n organization_id\n \n as \n \n organization_id\n \n, \n \n \n phone\n \n as \n \n phone\n \n, \n \n \n remote_photo_url\n \n as \n \n remote_photo_url\n \n, \n \n \n restricted_agent\n \n as \n \n restricted_agent\n \n, \n \n \n role\n \n as \n \n role\n \n, \n \n \n shared\n \n as \n \n shared\n \n, \n \n \n shared_agent\n \n as \n \n shared_agent\n \n, \n \n \n signature\n \n as \n \n signature\n \n, \n \n \n suspended\n \n as \n \n suspended\n \n, \n \n \n ticket_restriction\n \n as \n \n ticket_restriction\n \n, \n \n \n time_zone\n \n as \n \n time_zone\n \n, \n \n \n two_factor_auth_enabled\n \n as \n \n two_factor_auth_enabled\n \n, \n \n \n updated_at\n \n as \n \n updated_at\n \n, \n \n \n url\n \n as \n \n url\n \n, \n \n \n verified\n \n as \n \n verified\n \n\n\n\n \n from base\n),\n\nfinal as ( \n \n select \n id as user_id,\n external_id,\n _fivetran_synced,\n cast(last_login_at as timestamp) as last_login_at,\n cast(created_at as timestamp) as created_at,\n cast(updated_at as timestamp) as updated_at,\n email,\n name,\n organization_id,\n phone,\n role,\n ticket_restriction,\n time_zone,\n locale,\n active as is_active,\n suspended as is_suspended\n\n \n\n\n\n\n\n from fields\n)\n\nselect * \nfrom final", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.566471Z", "completed_at": "2024-08-26T20:48:50.572122Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.575309Z", "completed_at": "2024-08-26T20:48:50.575315Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.013344049453735352, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__ticket_historical_assignee", "compiled": true, "compiled_code": "with assignee_updates as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'assignee_id'\n\n), 
calculate_metrics as (\n select\n ticket_id,\n field_name as assignee_id,\n value,\n ticket_created_date,\n valid_starting_at,\n lag(valid_starting_at) over (partition by ticket_id order by valid_starting_at) as previous_update,\n lag(value) over (partition by ticket_id order by valid_starting_at) as previous_assignee,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_agent_assignment_date,\n first_value(value) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_assignee_id,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_agent_assignment_date,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_assignee_id,\n count(value) over (partition by ticket_id) as assignee_stations_count\n from assignee_updates\n\n), unassigned_time as (\n select\n ticket_id,\n sum(case when assignee_id is not null and previous_assignee is null \n then \n (\n (\n (\n ((valid_starting_at)::date - (coalesce(previous_update, ticket_created_date))::date)\n * 24 + date_part('hour', (valid_starting_at)::timestamp) - date_part('hour', (coalesce(previous_update, ticket_created_date))::timestamp))\n * 60 + date_part('minute', (valid_starting_at)::timestamp) - date_part('minute', (coalesce(previous_update, ticket_created_date))::timestamp))\n * 60 + floor(date_part('second', (valid_starting_at)::timestamp)) - floor(date_part('second', (coalesce(previous_update, ticket_created_date))::timestamp)))\n / 60\n else 0\n end) as ticket_unassigned_duration_calendar_minutes,\n count(distinct value) as unique_assignee_count\n from calculate_metrics\n\n group by 1\n\n), window_group as (\n select\n calculate_metrics.ticket_id,\n calculate_metrics.first_agent_assignment_date,\n calculate_metrics.first_assignee_id,\n calculate_metrics.last_agent_assignment_date,\n calculate_metrics.last_assignee_id,\n calculate_metrics.assignee_stations_count\n from calculate_metrics\n\n group by 1,2,3,4,5,6\n\n), final as (\n select\n window_group.*,\n unassigned_time.unique_assignee_count,\n unassigned_time.ticket_unassigned_duration_calendar_minutes\n from window_group\n\n left join unassigned_time\n using(ticket_id)\n)\n\nselect *\nfrom final", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.572823Z", "completed_at": "2024-08-26T20:48:50.578397Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.579995Z", "completed_at": "2024-08-26T20:48:50.580001Z"}], "thread_id": "Thread-1 (worker)", "execution_time": 0.010161161422729492, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__ticket_historical_group", "compiled": true, "compiled_code": "with ticket_group_history as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'group_id'\n\n), group_breakdown as (\n select\n \n ticket_id,\n valid_starting_at,\n valid_ending_at,\n value as group_id\n from ticket_group_history\n\n), final as (\n select\n ticket_id,\n count(group_id) as group_stations_count\n from group_breakdown\n\n group by 1\n)\n\nselect *\nfrom final", "relation_name": 
"\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_group\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.575609Z", "completed_at": "2024-08-26T20:48:50.579290Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.581513Z", "completed_at": "2024-08-26T20:48:50.581516Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.014973878860473633, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__ticket_historical_satisfaction", "compiled": true, "compiled_code": "with satisfaction_updates as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__updates\"\n where field_name in ('satisfaction_score', 'satisfaction_comment', 'satisfaction_reason_code') \n\n), latest_reason as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_reason\n from satisfaction_updates\n\n where field_name = 'satisfaction_reason_code'\n\n), latest_comment as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_comment\n from satisfaction_updates\n\n where field_name = 'satisfaction_comment'\n\n), first_and_latest_score as (\n select\n ticket_id,\n first_value(value) over (partition by ticket_id order by valid_starting_at, ticket_id rows unbounded preceding) as first_satisfaction_score,\n first_value(value) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as latest_satisfaction_score\n from satisfaction_updates\n\n where field_name = 'satisfaction_score' and value != 'offered'\n\n), satisfaction_scores as (\n select\n ticket_id,\n count(value) over (partition by ticket_id) as count_satisfaction_scores,\n case when lag(value) over (partition by ticket_id order by valid_starting_at desc) = 'good' and value = 'bad'\n then 1\n else 0\n end as good_to_bad_score,\n case when lag(value) over (partition by ticket_id order by valid_starting_at desc) = 'bad' and value = 'good'\n then 1\n else 0\n end as bad_to_good_score\n from satisfaction_updates\n where field_name = 'satisfaction_score'\n\n), score_group as (\n select\n ticket_id,\n count_satisfaction_scores,\n sum(good_to_bad_score) as total_good_to_bad_score,\n sum(bad_to_good_score) as total_bad_to_good_score\n from satisfaction_scores\n\n group by 1, 2\n\n), window_group as (\n select\n satisfaction_updates.ticket_id,\n latest_reason.latest_satisfaction_reason,\n latest_comment.latest_satisfaction_comment,\n first_and_latest_score.first_satisfaction_score,\n first_and_latest_score.latest_satisfaction_score,\n score_group.count_satisfaction_scores,\n score_group.total_good_to_bad_score,\n score_group.total_bad_to_good_score\n\n from satisfaction_updates\n\n left join latest_reason\n on satisfaction_updates.ticket_id = latest_reason.ticket_id\n\n left join latest_comment\n on satisfaction_updates.ticket_id = latest_comment.ticket_id\n\n left join first_and_latest_score\n on satisfaction_updates.ticket_id = first_and_latest_score.ticket_id\n\n left join score_group\n on satisfaction_updates.ticket_id = score_group.ticket_id\n\n group by 1, 2, 3, 4, 5, 6, 7, 8\n\n), final as (\n select\n ticket_id,\n latest_satisfaction_reason,\n latest_satisfaction_comment,\n first_satisfaction_score,\n latest_satisfaction_score,\n case when 
count_satisfaction_scores > 0\n then (count_satisfaction_scores - 1) --Subtracting one as the first score is always \"offered\".\n else count_satisfaction_scores\n end as count_satisfaction_scores,\n case when total_good_to_bad_score > 0\n then true\n else false\n end as is_good_to_bad_satisfaction_score,\n case when total_bad_to_good_score > 0\n then true\n else false\n end as is_bad_to_good_satisfaction_score\n from window_group\n)\n\nselect *\nfrom final", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_satisfaction\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.584083Z", "completed_at": "2024-08-26T20:48:50.588812Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.594074Z", "completed_at": "2024-08-26T20:48:50.594078Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.018635034561157227, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__ticket_historical_status", "compiled": true, "compiled_code": "-- To do -- can we delete ticket_status_counter and unique_status_counter?\n\nwith ticket_status_history as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'status'\n\n)\n\n select\n \n ticket_id,\n valid_starting_at,\n valid_ending_at,\n \n (\n (\n ((coalesce(valid_ending_at, \n current_timestamp::timestamp\n))::date - (valid_starting_at)::date)\n * 24 + date_part('hour', (coalesce(valid_ending_at, \n current_timestamp::timestamp\n))::timestamp) - date_part('hour', (valid_starting_at)::timestamp))\n * 60 + date_part('minute', (coalesce(valid_ending_at, \n current_timestamp::timestamp\n))::timestamp) - date_part('minute', (valid_starting_at)::timestamp))\n as status_duration_calendar_minutes,\n value as status,\n -- MIGHT BE ABLE TO DELETE ROWS BELOW\n row_number() over (partition by ticket_id order by valid_starting_at) as ticket_status_counter,\n row_number() over (partition by ticket_id, value order by valid_starting_at) as unique_status_counter\n\n from ticket_status_history", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_status\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.378365Z", "completed_at": "2024-08-26T20:48:50.588565Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.593794Z", "completed_at": "2024-08-26T20:48:50.593800Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.22140192985534668, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__ticket_schedules", "compiled": true, "compiled_code": "\n\nwith ticket as (\n \n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_schedule as (\n \n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_schedule\"\n\n), schedule as (\n \n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__schedule\"\n\n\n), default_schedule_events as (\n-- Goal: understand the working schedules applied to tickets, so that we can then determine the applicable business hours/schedule.\n-- Your default schedule is used for all tickets, unless you set up a trigger to apply a specific schedule to specific tickets.\n\n-- This portion of the query creates ticket_schedules for these \"default\" schedules, as the 
ticket_schedule table only includes\n-- trigger schedules\n\n\n\n \n\n \n\n \n\n select\n ticket.ticket_id,\n ticket.created_at as schedule_created_at,\n '360000310393' as schedule_id\n from ticket\n left join ticket_schedule as first_schedule\n on first_schedule.ticket_id = ticket.ticket_id\n and \n\n first_schedule.created_at + ((interval '1 second') * (-5))\n\n <= ticket.created_at\n and first_schedule.created_at >= ticket.created_at \n where first_schedule.ticket_id is null\n\n), schedule_events as (\n \n select\n *\n from default_schedule_events\n \n union all\n \n select \n ticket_id,\n created_at as schedule_created_at,\n schedule_id\n from ticket_schedule\n\n), ticket_schedules as (\n \n select \n ticket_id,\n schedule_id,\n schedule_created_at,\n coalesce(lead(schedule_created_at) over (partition by ticket_id order by schedule_created_at)\n , \n\n \n current_timestamp::timestamp\n + ((interval '1 hour') * (1000))\n\n ) as schedule_invalidated_at\n from schedule_events\n\n)\nselect\n *\nfrom ticket_schedules", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_schedules\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.590142Z", "completed_at": "2024-08-26T20:48:50.597712Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.600101Z", "completed_at": "2024-08-26T20:48:50.600105Z"}], "thread_id": "Thread-1 (worker)", "execution_time": 0.01365518569946289, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__reply_time_calendar_hours", "compiled": true, "compiled_code": "--REPLY TIME SLA\n-- step 2, figure out when the SLA will breach for SLAs in calendar hours. The calculation is relatively straightforward.\n\nwith sla_policy_applied as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__sla_policy_applied\"\n\n), final as (\n select\n *,\n \n\n sla_applied_at + ((interval '1 minute') * (cast(target as integer )))\n\n as sla_breach_at\n from sla_policy_applied\n where not in_business_hours\n and metric in ('next_reply_time', 'first_reply_time')\n\n)\n\nselect *\nfrom final", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__reply_time_calendar_hours\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.594571Z", "completed_at": "2024-08-26T20:48:50.599833Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.602383Z", "completed_at": "2024-08-26T20:48:50.602387Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.01700305938720703, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__user_aggregates", "compiled": true, "compiled_code": "with users as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user\"\n\n--If you use user tags this will be included, if not it will be ignored.\n\n), user_tags as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user_tag\"\n \n), user_tag_aggregate as (\n select\n user_tags.user_id,\n \n string_agg(user_tags.tags, ', ')\n\n as user_tags\n from user_tags\n group by 1\n\n\n\n), final as (\n select \n users.*\n\n --If you use user tags this will be included, if not it will be ignored.\n \n ,user_tag_aggregate.user_tags\n \n from users\n\n --If you use user tags this will be included, if not it will be ignored.\n \n left join user_tag_aggregate\n 
using(user_id)\n \n)\n\nselect *\nfrom final", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__user_aggregates\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.603162Z", "completed_at": "2024-08-26T20:48:50.610608Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.614719Z", "completed_at": "2024-08-26T20:48:50.614725Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.01877284049987793, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.not_null_stg_zendesk__user_user_id.102d572926", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect user_id\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user\"\nwhere user_id is null\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.607064Z", "completed_at": "2024-08-26T20:48:50.613612Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.619022Z", "completed_at": "2024-08-26T20:48:50.619027Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.01873302459716797, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk_source.unique_stg_zendesk__user_user_id.3d3e346b11", "compiled": true, "compiled_code": "\n \n \n\nselect\n user_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user\"\nwhere user_id is not null\ngroup by user_id\nhaving count(*) > 1\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.614987Z", "completed_at": "2024-08-26T20:48:50.621298Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.623840Z", "completed_at": "2024-08-26T20:48:50.623844Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.016805171966552734, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__agent_work_time_filtered_statuses", "compiled": true, "compiled_code": "with agent_work_time_sla as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__sla_policy_applied\"\n where metric = 'agent_work_time'\n\n), ticket_historical_status as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n \n--This captures the statuses of the ticket while the agent work time sla was active for the ticket.\n), agent_work_time_filtered_statuses as (\n\n select \n ticket_historical_status.ticket_id,\n greatest(ticket_historical_status.valid_starting_at, agent_work_time_sla.sla_applied_at) as valid_starting_at,\n coalesce(\n ticket_historical_status.valid_ending_at, \n \n\n \n current_timestamp::timestamp\n + ((interval '1 day') * (30))\n\n ) as valid_ending_at, --assumes current status continues into the future. 
This is necessary to predict future SLA breaches (not just past).\n ticket_historical_status.status as ticket_status,\n agent_work_time_sla.sla_applied_at,\n agent_work_time_sla.target, \n agent_work_time_sla.sla_policy_name,\n agent_work_time_sla.ticket_created_at,\n agent_work_time_sla.in_business_hours\n from ticket_historical_status\n join agent_work_time_sla\n on ticket_historical_status.ticket_id = agent_work_time_sla.ticket_id\n where ticket_historical_status.status in ('new', 'open') -- these are the only statuses that count as \"agent work time\"\n and sla_applied_at < valid_ending_at\n\n)\nselect *\nfrom agent_work_time_filtered_statuses", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__agent_work_time_filtered_statuses\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.624513Z", "completed_at": "2024-08-26T20:48:50.635069Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.637176Z", "completed_at": "2024-08-26T20:48:50.637181Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.02486896514892578, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__requester_wait_time_filtered_statuses", "compiled": true, "compiled_code": "with requester_wait_time_sla as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__sla_policy_applied\"\n where metric = 'requester_wait_time'\n\n), ticket_historical_status as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n \n--This captures the statuses of the ticket while the requester wait time sla was active for the ticket.\n), requester_wait_time_filtered_statuses as (\n\n select \n ticket_historical_status.ticket_id,\n greatest(ticket_historical_status.valid_starting_at, requester_wait_time_sla.sla_applied_at) as valid_starting_at,\n coalesce(\n ticket_historical_status.valid_ending_at, \n \n\n \n current_timestamp::timestamp\n + ((interval '1 day') * (30))\n\n ) as valid_ending_at, --assumes current status continues into the future. 
This is necessary to predict future SLA breaches (not just past).\n ticket_historical_status.status as ticket_status,\n requester_wait_time_sla.sla_applied_at,\n requester_wait_time_sla.target,\n requester_wait_time_sla.sla_policy_name,\n requester_wait_time_sla.ticket_created_at,\n requester_wait_time_sla.in_business_hours\n from ticket_historical_status\n join requester_wait_time_sla\n on ticket_historical_status.ticket_id = requester_wait_time_sla.ticket_id\n where ticket_historical_status.status in ('new', 'open', 'on-hold', 'hold') -- these are the only statuses that count as \"requester wait time\"\n and sla_applied_at < valid_ending_at\n\n)\nselect *\nfrom requester_wait_time_filtered_statuses", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__requester_wait_time_filtered_statuses\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.653730Z", "completed_at": "2024-08-26T20:48:50.672858Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.684987Z", "completed_at": "2024-08-26T20:48:50.684994Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.03695201873779297, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__reply_time_business_hours", "compiled": true, "compiled_code": "\n\n-- step 3, determine when an SLA will breach for SLAs that are in business hours\n\nwith ticket_schedules as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), sla_policy_applied as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__sla_policy_applied\"\n\n), users as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), ticket_updates as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__updates\"\n\n), ticket_solved_times as (\n select\n ticket_id,\n valid_starting_at as solved_at\n from ticket_updates\n where field_name = 'status'\n and value in ('solved','closed')\n\n), reply_time as (\n select \n ticket_comment.ticket_id,\n ticket_comment.valid_starting_at as reply_at,\n commenter.role\n from ticket_updates as ticket_comment\n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n where field_name = 'comment' \n and ticket_comment.is_public\n and commenter.role in ('agent','admin')\n\n), schedule_business_hours as (\n\n select \n schedule_id,\n sum(end_time - start_time) as total_schedule_weekly_business_minutes\n -- referring to stg_zendesk__schedule instead of int_zendesk__schedule_spine just to calculate total minutes\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__schedule\"\n group by 1\n\n), ticket_sla_applied_with_schedules as (\n\n select \n sla_policy_applied.*,\n ticket_schedules.schedule_id,\n (\n (\n (\n (\n ((cast(sla_policy_applied.sla_applied_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_policy_applied.sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(sla_policy_applied.sla_applied_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n 
sla_policy_applied.sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(sla_policy_applied.sla_applied_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_policy_applied.sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(sla_policy_applied.sla_applied_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_policy_applied.sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n schedule_business_hours.total_schedule_weekly_business_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_policy_applied.sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n\n from sla_policy_applied\n left join ticket_schedules on sla_policy_applied.ticket_id = ticket_schedules.ticket_id\n and \n\n ticket_schedules.schedule_created_at + ((interval '1 second') * (-1))\n\n <= sla_policy_applied.sla_applied_at\n and \n\n ticket_schedules.schedule_invalidated_at + ((interval '1 second') * (-1))\n\n > sla_policy_applied.sla_applied_at\n left join schedule_business_hours \n on ticket_schedules.schedule_id = schedule_business_hours.schedule_id\n where sla_policy_applied.in_business_hours\n and metric in ('next_reply_time', 'first_reply_time')\n\n), first_reply_solve_times as (\n select\n ticket_sla_applied_with_schedules.ticket_id,\n ticket_sla_applied_with_schedules.ticket_created_at,\n ticket_sla_applied_with_schedules.valid_starting_at,\n ticket_sla_applied_with_schedules.ticket_current_status,\n ticket_sla_applied_with_schedules.metric,\n ticket_sla_applied_with_schedules.latest_sla,\n ticket_sla_applied_with_schedules.sla_applied_at,\n ticket_sla_applied_with_schedules.target,\n ticket_sla_applied_with_schedules.in_business_hours,\n ticket_sla_applied_with_schedules.sla_policy_name,\n ticket_sla_applied_with_schedules.schedule_id,\n ticket_sla_applied_with_schedules.start_time_in_minutes_from_week,\n ticket_sla_applied_with_schedules.total_schedule_weekly_business_minutes,\n ticket_sla_applied_with_schedules.start_week_date,\n min(reply_time.reply_at) as first_reply_time,\n min(ticket_solved_times.solved_at) as first_solved_time\n from ticket_sla_applied_with_schedules\n left join reply_time\n on reply_time.ticket_id = ticket_sla_applied_with_schedules.ticket_id\n and reply_time.reply_at > ticket_sla_applied_with_schedules.sla_applied_at\n left join ticket_solved_times\n on ticket_sla_applied_with_schedules.ticket_id = ticket_solved_times.ticket_id\n and ticket_solved_times.solved_at > ticket_sla_applied_with_schedules.sla_applied_at\n group by 1,2,3,4,5,6,7,8,9,10,11,12,13,14\n\n), week_index_calc as (\n select \n *,\n \n (\n ((least(coalesce(first_reply_time, now()), coalesce(first_solved_time, now())))::date - (sla_applied_at)::date)\n / 7 + case\n when date_part('dow', (sla_applied_at)::timestamp) <= date_part('dow', (least(coalesce(first_reply_time, now()), coalesce(first_solved_time, now())))::timestamp) then\n case when sla_applied_at <= least(coalesce(first_reply_time, now()), coalesce(first_solved_time, now())) then 0 else -1 end\n else\n case when 
sla_applied_at <= least(coalesce(first_reply_time, now()), coalesce(first_solved_time, now())) then 1 else 0 end\n end)\n + 1 as week_index\n from first_reply_solve_times\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 52\n order by generated_number\n\n\n\n), weeks_cross_ticket_sla_applied as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select\n week_index_calc.*,\n cast(weeks.generated_number - 1 as integer) as week_number\n\n from week_index_calc\n cross join weeks\n where week_index >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_sla_applied.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n cast((7*24*60) as integer) as ticket_week_end_time\n from weeks_cross_ticket_sla_applied\n\n), intercepted_periods as (\n\n select \n weekly_periods.*,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n (schedule.end_time_utc - greatest(ticket_week_start_time,schedule.start_time_utc)) as lapsed_business_minutes,\n sum(schedule.end_time_utc - greatest(ticket_week_start_time,schedule.start_time_utc)) over \n (partition by ticket_id, metric, sla_applied_at \n order by week_number, schedule.start_time_utc\n rows between unbounded preceding and current row) as sum_lapsed_business_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast (\n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast (\n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n\n), intercepted_periods_with_breach_flag as (\n \n select \n *,\n target - sum_lapsed_business_minutes as remaining_minutes,\n case when (target - sum_lapsed_business_minutes) < 0 \n and \n (lag(target - sum_lapsed_business_minutes) over\n (partition by ticket_id, metric, sla_applied_at order by week_number, schedule_start_time) >= 0 \n or \n lag(target - sum_lapsed_business_minutes) over\n (partition by ticket_id, metric, sla_applied_at order by week_number, schedule_start_time) is null) \n then true else false end as is_breached_during_schedule -- this flags the scheduled period on which the breach took place\n from intercepted_periods\n\n), intercepted_periods_with_breach_flag_calculated as 
(\n\n select\n *,\n schedule_end_time + remaining_minutes as breached_at_minutes,\n date_trunc('week', sla_applied_at) as starting_point,\n \n\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) + ((interval '1 minute') * (cast(((7*24*60) * week_number) + (schedule_end_time + remaining_minutes) as integer )))\n\n as sla_breach_at,\n \n\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) + ((interval '1 minute') * (cast(((7*24*60) * week_number) + (schedule_start_time) as integer )))\n\n as sla_schedule_start_at,\n \n\n cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as timestamp) + ((interval '1 minute') * (cast(((7*24*60) * week_number) + (schedule_end_time) as integer )))\n\n as sla_schedule_end_at,\n cast(\n\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n sla_applied_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) + ((interval '1 day') * (6))\n\n as date) as week_end_date\n from intercepted_periods_with_breach_flag\n\n), reply_time_business_hours_sla as (\n\n select\n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n greatest(sla_applied_at,sla_schedule_start_at) as sla_schedule_start_at,\n sla_schedule_end_at,\n target,\n sum_lapsed_business_minutes,\n in_business_hours,\n sla_breach_at,\n is_breached_during_schedule,\n total_schedule_weekly_business_minutes,\n max(case when is_breached_during_schedule then sla_breach_at else null end) over (partition by ticket_id, metric, sla_applied_at, target) as sla_breach_exact_time,\n week_number\n from intercepted_periods_with_breach_flag_calculated\n\n) \n\nselect * \nfrom reply_time_business_hours_sla", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__reply_time_business_hours\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.666584Z", "completed_at": "2024-08-26T20:48:50.673652Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.685564Z", "completed_at": "2024-08-26T20:48:50.685568Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.04808497428894043, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.zendesk__ticket_enriched", "compiled": true, "compiled_code": "-- this model enriches the ticket table with ticket-related dimensions. 
This table will not include any metrics.\n-- for metrics, see ticket_metrics!\n\nwith ticket as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_aggregates\"\n\n--If you use using_ticket_form_history this will be included, if not it will be ignored.\n\n), latest_ticket_form as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__latest_ticket_form\"\n\n\n), latest_satisfaction_ratings as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_satisfaction\"\n\n), users as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), requester_updates as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__requester_updates\"\n\n), assignee_updates as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__assignee_updates\"\n\n), ticket_group as (\n \n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__group\"\n\n), organization as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__organization_aggregates\"\n\n), joined as (\n\n select \n\n ticket.*,\n\n --If you use using_ticket_form_history this will be included, if not it will be ignored.\n \n latest_ticket_form.name as ticket_form_name,\n \n\n latest_satisfaction_ratings.count_satisfaction_scores as ticket_total_satisfaction_scores,\n latest_satisfaction_ratings.first_satisfaction_score as ticket_first_satisfaction_score,\n latest_satisfaction_ratings.latest_satisfaction_score as ticket_satisfaction_score,\n latest_satisfaction_ratings.latest_satisfaction_comment as ticket_satisfaction_comment,\n latest_satisfaction_ratings.latest_satisfaction_reason as ticket_satisfaction_reason,\n latest_satisfaction_ratings.is_good_to_bad_satisfaction_score,\n latest_satisfaction_ratings.is_bad_to_good_satisfaction_score,\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n organization.domain_names as ticket_organization_domain_names,\n requester_org.domain_names as requester_organization_domain_names,\n \n\n requester.external_id as requester_external_id,\n requester.created_at as requester_created_at,\n requester.updated_at as requester_updated_at,\n requester.role as requester_role,\n requester.email as requester_email,\n requester.name as requester_name,\n requester.is_active as is_requester_active,\n requester.locale as requester_locale,\n requester.time_zone as requester_time_zone,\n coalesce(requester_updates.total_updates, 0) as requester_ticket_update_count,\n requester_updates.last_updated as requester_ticket_last_update_at,\n requester.last_login_at as requester_last_login_at,\n requester.organization_id as requester_organization_id,\n requester_org.name as requester_organization_name,\n\n --If you use organization tags this will be included, if not it will be ignored.\n \n requester_org.organization_tags as requester_organization_tags,\n \n requester_org.external_id as requester_organization_external_id,\n requester_org.created_at as requester_organization_created_at,\n requester_org.updated_at as requester_organization_updated_at,\n submitter.external_id as submitter_external_id,\n submitter.role as submitter_role,\n case when submitter.role in ('agent','admin') \n then true \n else false\n end as is_agent_submitted,\n 
submitter.email as submitter_email,\n submitter.name as submitter_name,\n submitter.is_active as is_submitter_active,\n submitter.locale as submitter_locale,\n submitter.time_zone as submitter_time_zone,\n assignee.external_id as assignee_external_id,\n assignee.role as assignee_role,\n assignee.email as assignee_email,\n assignee.name as assignee_name,\n assignee.is_active as is_assignee_active,\n assignee.locale as assignee_locale,\n assignee.time_zone as assignee_time_zone,\n coalesce(assignee_updates.total_updates, 0) as assignee_ticket_update_count,\n assignee_updates.last_updated as assignee_ticket_last_update_at,\n assignee.last_login_at as assignee_last_login_at,\n ticket_group.name as group_name,\n organization.name as organization_name\n\n --If you use using_user_tags this will be included, if not it will be ignored.\n \n ,requester.user_tags as requester_tag,\n submitter.user_tags as submitter_tag,\n assignee.user_tags as assignee_tag\n \n\n \n from ticket\n\n --Requester Joins\n join users as requester\n on requester.user_id = ticket.requester_id\n\n left join organization as requester_org\n on requester_org.organization_id = requester.organization_id\n\n left join requester_updates\n on requester_updates.ticket_id = ticket.ticket_id\n and requester_updates.requester_id = ticket.requester_id\n \n --Submitter Joins\n join users as submitter\n on submitter.user_id = ticket.submitter_id\n \n --Assignee Joins\n left join users as assignee\n on assignee.user_id = ticket.assignee_id\n\n left join assignee_updates\n on assignee_updates.ticket_id = ticket.ticket_id\n and assignee_updates.assignee_id = ticket.assignee_id\n\n --Ticket, Org, and Brand Joins\n left join ticket_group\n on ticket_group.group_id = ticket.group_id\n\n --If you use using_ticket_form_history this will be included, if not it will be ignored.\n \n left join latest_ticket_form\n on latest_ticket_form.ticket_form_id = ticket.ticket_form_id\n \n\n left join organization\n on organization.organization_id = ticket.organization_id\n\n left join latest_satisfaction_ratings\n on latest_satisfaction_ratings.ticket_id = ticket.ticket_id\n)\n\nselect *\nfrom joined", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"zendesk__ticket_enriched\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.674240Z", "completed_at": "2024-08-26T20:48:50.686367Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.700002Z", "completed_at": "2024-08-26T20:48:50.700008Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.03015589714050293, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__comment_metrics", "compiled": true, "compiled_code": "with __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public 
tickets.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n), ticket_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n),\n\ncomment_counts as (\n select\n ticket_id,\n last_comment_added_at,\n sum(case when commenter_role = 'internal_comment' and is_public = true\n then 1\n else 0\n end) as count_public_agent_comments,\n sum(case when commenter_role = 'internal_comment'\n then 1\n else 0\n end) as count_agent_comments,\n sum(case when commenter_role = 'external_comment'\n then 1\n else 0\n end) as count_end_user_comments,\n sum(case when is_public = true\n then 1\n else 0\n end) as count_public_comments,\n sum(case when is_public = false\n then 1\n else 0\n end) as count_internal_comments,\n count(*) as total_comments,\n count(distinct case when commenter_role = 'internal_comment'\n then user_id\n end) as count_ticket_handoffs,\n sum(case when commenter_role = 'internal_comment' and is_public = true and previous_commenter_role != 'first_comment'\n then 1\n else 0\n end) as count_agent_replies\n from ticket_comments\n\n group by 1, 2\n),\n\nfinal as (\n select\n *,\n count_public_agent_comments = 1 as is_one_touch_resolution,\n count_public_agent_comments = 2 as is_two_touch_resolution\n from comment_counts\n)\n\nselect * \nfrom final", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__comment_metrics\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.702604Z", "completed_at": "2024-08-26T20:48:50.718427Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.807133Z", "completed_at": "2024-08-26T20:48:50.807140Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.10892486572265625, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__agent_work_time_business_hours", "compiled": true, "compiled_code": "\n\n-- AGENT WORK TIME\n-- This is complicated, as SLAs minutes are only counted while the ticket is in 'new' or 'open' status.\n\n-- Additionally, for business hours, only 'new' or 'open' status hours are counted if they are also during business hours\nwith agent_work_time_filtered_statuses as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__agent_work_time_filtered_statuses\"\n where in_business_hours\n\n), schedule as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_schedules as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n \n-- cross schedules with work time\n), ticket_status_crossed_with_schedule as (\n \n select\n agent_work_time_filtered_statuses.ticket_id,\n 
agent_work_time_filtered_statuses.sla_applied_at,\n agent_work_time_filtered_statuses.target, \n agent_work_time_filtered_statuses.sla_policy_name, \n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as valid_starting_at,\n least(valid_ending_at, schedule_invalidated_at) as valid_ending_at,\n\n -- bringing in the following to determine which schedule (Daylight Savings vs Standard time) to use\n valid_starting_at as status_valid_starting_at,\n valid_ending_at as status_valid_ending_at\n\n from agent_work_time_filtered_statuses\n left join ticket_schedules\n on agent_work_time_filtered_statuses.ticket_id = ticket_schedules.ticket_id\n where \n (\n (\n (\n ((least(valid_ending_at, schedule_invalidated_at))::date - (greatest(valid_starting_at, schedule_created_at))::date)\n * 24 + date_part('hour', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('hour', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + date_part('minute', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('minute', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + floor(date_part('second', (least(valid_ending_at, schedule_invalidated_at))::timestamp)) - floor(date_part('second', (greatest(valid_starting_at, schedule_created_at))::timestamp)))\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n sla_applied_at,\n target, \n sla_policy_name, \n schedule_id,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n (\n (\n (\n (\n ((cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as valid_starting_at_in_minutes_from_week,\n (\n (\n (\n (\n ((ticket_status_crossed_with_schedule.valid_ending_at)::date - (ticket_status_crossed_with_schedule.valid_starting_at)::date)\n * 24 + date_part('hour', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp) - date_part('hour', 
(ticket_status_crossed_with_schedule.valid_starting_at)::timestamp))\n * 60 + date_part('minute', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp) - date_part('minute', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp))\n * 60 + floor(date_part('second', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp)) - floor(date_part('second', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp)))\n /60\n ) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7,8,9,10\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as integer) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((valid_starting_at_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_period_agent_work_time as (\n\n select \n\n ticket_id,\n sla_applied_at,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n target,\n sla_policy_name,\n valid_starting_at_in_minutes_from_week,\n raw_delta_in_minutes,\n week_number,\n schedule_id,\n start_week_date,\n cast(greatest(0, valid_starting_at_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time_minute,\n cast(least(valid_starting_at_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time_minute\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods_agent as (\n \n select \n weekly_period_agent_work_time.ticket_id,\n weekly_period_agent_work_time.sla_applied_at,\n weekly_period_agent_work_time.target,\n weekly_period_agent_work_time.sla_policy_name,\n weekly_period_agent_work_time.valid_starting_at,\n weekly_period_agent_work_time.valid_ending_at,\n weekly_period_agent_work_time.week_number,\n weekly_period_agent_work_time.ticket_week_start_time_minute,\n weekly_period_agent_work_time.ticket_week_end_time_minute,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time_minute, schedule.end_time_utc) - greatest(weekly_period_agent_work_time.ticket_week_start_time_minute, schedule.start_time_utc) as scheduled_minutes\n from weekly_period_agent_work_time\n join schedule on 
ticket_week_start_time_minute <= schedule.end_time_utc \n and ticket_week_end_time_minute >= schedule.start_time_utc\n and weekly_period_agent_work_time.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time_minute))\n\n as timestamp) > cast(schedule.valid_from as timestamp)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time_minute))\n\n as timestamp) < cast(schedule.valid_until as timestamp)\n\n), intercepted_periods_with_running_total as (\n \n select \n *,\n sum(scheduled_minutes) over \n (partition by ticket_id, sla_applied_at \n order by valid_starting_at, week_number, schedule_end_time\n rows between unbounded preceding and current row)\n as running_total_scheduled_minutes\n\n from intercepted_periods_agent\n\n\n), intercepted_periods_agent_with_breach_flag as (\n select \n intercepted_periods_with_running_total.*,\n target - running_total_scheduled_minutes as remaining_target_minutes,\n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) as lag_check,\n case when (target - running_total_scheduled_minutes) = 0 then true\n when (target - running_total_scheduled_minutes) < 0 \n and \n (lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) > 0 \n or \n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) is null) \n then true else false end as is_breached_during_schedule\n \n from intercepted_periods_with_running_total\n\n), intercepted_periods_agent_filtered as (\n\n select\n *,\n (remaining_target_minutes + scheduled_minutes) as breach_minutes,\n greatest(ticket_week_start_time_minute, schedule_start_time) + (remaining_target_minutes + scheduled_minutes) as breach_minutes_from_week\n from intercepted_periods_agent_with_breach_flag\n \n), agent_work_business_breach as (\n \n select \n *,\n \n\n date_trunc('week', valid_starting_at) + ((interval '1 minute') * (cast(((7*24*60) * week_number) + breach_minutes_from_week as integer )))\n\n as sla_breach_at\n from intercepted_periods_agent_filtered\n\n)\n\nselect * \nfrom agent_work_business_breach", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__agent_work_time_business_hours\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.713597Z", "completed_at": "2024-08-26T20:48:50.724563Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.807577Z", "completed_at": "2024-08-26T20:48:50.807587Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.10878705978393555, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__agent_work_time_calendar_hours", "compiled": true, "compiled_code": "-- Calculate breach time for agent work time, calendar hours\nwith agent_work_time_filtered_statuses as (\n\n select *\n from 
\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__agent_work_time_filtered_statuses\"\n where not in_business_hours\n\n), agent_work_time_calendar_minutes as (\n\n select \n *,\n \n (\n (\n ((valid_ending_at)::date - (valid_starting_at)::date)\n * 24 + date_part('hour', (valid_ending_at)::timestamp) - date_part('hour', (valid_starting_at)::timestamp))\n * 60 + date_part('minute', (valid_ending_at)::timestamp) - date_part('minute', (valid_starting_at)::timestamp))\n as calendar_minutes,\n sum(\n (\n (\n ((valid_ending_at)::date - (valid_starting_at)::date)\n * 24 + date_part('hour', (valid_ending_at)::timestamp) - date_part('hour', (valid_starting_at)::timestamp))\n * 60 + date_part('minute', (valid_ending_at)::timestamp) - date_part('minute', (valid_starting_at)::timestamp))\n ) \n over (partition by ticket_id, sla_applied_at order by valid_starting_at rows between unbounded preceding and current row) as running_total_calendar_minutes\n from agent_work_time_filtered_statuses\n\n), agent_work_time_calendar_minutes_flagged as (\n\nselect \n agent_work_time_calendar_minutes.*,\n target - running_total_calendar_minutes as remaining_target_minutes,\n case when (target - running_total_calendar_minutes) < 0 \n and \n (lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) >= 0 \n or \n lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) is null) \n then true else false end as is_breached_during_schedule\n \nfrom agent_work_time_calendar_minutes\n\n), final as (\n select\n *,\n (remaining_target_minutes + calendar_minutes) as breach_minutes,\n \n\n valid_starting_at + ((interval '1 minute') * ((remaining_target_minutes + calendar_minutes)))\n\n as sla_breach_at\n from agent_work_time_calendar_minutes_flagged\n\n)\n\nselect *\nfrom final", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__agent_work_time_calendar_hours\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.813471Z", "completed_at": "2024-08-26T20:48:50.833870Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.842729Z", "completed_at": "2024-08-26T20:48:50.842736Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.03311586380004883, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__requester_wait_time_business_hours", "compiled": true, "compiled_code": "\n\n-- REQUESTER WAIT TIME\n-- This is complicated, as SLAs minutes are only counted while the ticket is in 'new', 'open', and 'on-hold' status.\n\n-- Additionally, for business hours, only 'new', 'open', and 'on-hold' status hours are counted if they are also during business hours\nwith requester_wait_time_filtered_statuses as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__requester_wait_time_filtered_statuses\"\n where in_business_hours\n\n), schedule as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_schedules as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n \n-- cross schedules with work time\n), ticket_status_crossed_with_schedule as (\n \n select\n requester_wait_time_filtered_statuses.ticket_id,\n requester_wait_time_filtered_statuses.sla_applied_at,\n 
requester_wait_time_filtered_statuses.target,\n requester_wait_time_filtered_statuses.sla_policy_name,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as valid_starting_at,\n least(valid_ending_at, schedule_invalidated_at) as valid_ending_at,\n\n -- bringing in the following to determine which schedule (Daylight Savings vs Standard time) to use\n valid_starting_at as status_valid_starting_at,\n valid_ending_at as status_valid_ending_at\n\n from requester_wait_time_filtered_statuses\n left join ticket_schedules\n on requester_wait_time_filtered_statuses.ticket_id = ticket_schedules.ticket_id\n where \n (\n (\n (\n ((least(valid_ending_at, schedule_invalidated_at))::date - (greatest(valid_starting_at, schedule_created_at))::date)\n * 24 + date_part('hour', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('hour', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + date_part('minute', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('minute', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + floor(date_part('second', (least(valid_ending_at, schedule_invalidated_at))::timestamp)) - floor(date_part('second', (greatest(valid_starting_at, schedule_created_at))::timestamp)))\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n sla_applied_at,\n target,\n sla_policy_name,\n schedule_id,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n (\n (\n (\n (\n ((cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_status_crossed_with_schedule.valid_starting_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as valid_starting_at_in_minutes_from_week,\n (\n (\n (\n (\n ((ticket_status_crossed_with_schedule.valid_ending_at)::date - (ticket_status_crossed_with_schedule.valid_starting_at)::date)\n * 24 + date_part('hour', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp) - date_part('hour', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp))\n * 60 + date_part('minute', 
(ticket_status_crossed_with_schedule.valid_ending_at)::timestamp) - date_part('minute', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp))\n * 60 + floor(date_part('second', (ticket_status_crossed_with_schedule.valid_ending_at)::timestamp)) - floor(date_part('second', (ticket_status_crossed_with_schedule.valid_starting_at)::timestamp)))\n /60\n ) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.valid_starting_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n\n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7,8,9,10\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as integer) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((valid_starting_at_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_period_requester_wait_time as (\n\n select \n\n ticket_id,\n sla_applied_at,\n valid_starting_at,\n valid_ending_at,\n status_valid_starting_at,\n status_valid_ending_at,\n target,\n sla_policy_name,\n valid_starting_at_in_minutes_from_week,\n raw_delta_in_minutes,\n week_number,\n schedule_id,\n start_week_date,\n cast(greatest(0, valid_starting_at_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time_minute,\n cast(least(valid_starting_at_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time_minute\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods_agent as (\n \n select \n weekly_period_requester_wait_time.ticket_id,\n weekly_period_requester_wait_time.sla_applied_at,\n weekly_period_requester_wait_time.target,\n weekly_period_requester_wait_time.sla_policy_name,\n weekly_period_requester_wait_time.valid_starting_at,\n weekly_period_requester_wait_time.valid_ending_at,\n weekly_period_requester_wait_time.week_number,\n weekly_period_requester_wait_time.ticket_week_start_time_minute,\n weekly_period_requester_wait_time.ticket_week_end_time_minute,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time_minute, schedule.end_time_utc) - greatest(weekly_period_requester_wait_time.ticket_week_start_time_minute, schedule.start_time_utc) as scheduled_minutes\n from weekly_period_requester_wait_time\n join schedule on ticket_week_start_time_minute <= schedule.end_time_utc \n and 
ticket_week_end_time_minute >= schedule.start_time_utc\n and weekly_period_requester_wait_time.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time_minute))\n\n as timestamp) > cast(schedule.valid_from as timestamp)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time_minute))\n\n as timestamp) < cast(schedule.valid_until as timestamp)\n \n), intercepted_periods_with_running_total as (\n \n select \n *,\n sum(scheduled_minutes) over \n (partition by ticket_id, sla_applied_at \n order by valid_starting_at, week_number, schedule_end_time\n rows between unbounded preceding and current row)\n as running_total_scheduled_minutes\n\n from intercepted_periods_agent\n\n\n), intercepted_periods_agent_with_breach_flag as (\n select \n intercepted_periods_with_running_total.*,\n target - running_total_scheduled_minutes as remaining_target_minutes,\n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) as lag_check,\n case when (target - running_total_scheduled_minutes) = 0 then true\n when (target - running_total_scheduled_minutes) < 0 \n and \n (lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) > 0 \n or \n lag(target - running_total_scheduled_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at, week_number, schedule_end_time) is null) \n then true else false end as is_breached_during_schedule\n \n from intercepted_periods_with_running_total\n\n), intercepted_periods_agent_filtered as (\n\n select\n *,\n (remaining_target_minutes + scheduled_minutes) as breach_minutes,\n greatest(ticket_week_start_time_minute, schedule_start_time) + (remaining_target_minutes + scheduled_minutes) as breach_minutes_from_week\n from intercepted_periods_agent_with_breach_flag\n\n), requester_wait_business_breach as (\n \n select \n *,\n \n\n date_trunc('week', valid_starting_at) + ((interval '1 minute') * (cast(((7*24*60) * week_number) + breach_minutes_from_week as integer )))\n\n as sla_breach_at\n from intercepted_periods_agent_filtered\n\n)\n\nselect * \nfrom requester_wait_business_breach", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__requester_wait_time_business_hours\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.824066Z", "completed_at": "2024-08-26T20:48:50.842968Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.845622Z", "completed_at": "2024-08-26T20:48:50.845626Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.035264015197753906, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__requester_wait_time_calendar_hours", "compiled": true, "compiled_code": "-- Calculate breach time for requester wait time, calendar hours\nwith requester_wait_time_filtered_statuses as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__requester_wait_time_filtered_statuses\"\n 
where not in_business_hours\n\n), requester_wait_time_calendar_minutes as (\n\n select \n *,\n \n (\n (\n ((valid_ending_at)::date - (valid_starting_at)::date)\n * 24 + date_part('hour', (valid_ending_at)::timestamp) - date_part('hour', (valid_starting_at)::timestamp))\n * 60 + date_part('minute', (valid_ending_at)::timestamp) - date_part('minute', (valid_starting_at)::timestamp))\n as calendar_minutes,\n sum(\n (\n (\n ((valid_ending_at)::date - (valid_starting_at)::date)\n * 24 + date_part('hour', (valid_ending_at)::timestamp) - date_part('hour', (valid_starting_at)::timestamp))\n * 60 + date_part('minute', (valid_ending_at)::timestamp) - date_part('minute', (valid_starting_at)::timestamp))\n ) \n over (partition by ticket_id, sla_applied_at order by valid_starting_at rows between unbounded preceding and current row) as running_total_calendar_minutes\n from requester_wait_time_filtered_statuses\n\n), requester_wait_time_calendar_minutes_flagged as (\n\nselect \n requester_wait_time_calendar_minutes.*,\n target - running_total_calendar_minutes as remaining_target_minutes,\n case when (target - running_total_calendar_minutes) < 0 \n and \n (lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) >= 0 \n or \n lag(target - running_total_calendar_minutes) over\n (partition by ticket_id, sla_applied_at order by valid_starting_at) is null) \n then true else false end as is_breached_during_schedule\n \nfrom requester_wait_time_calendar_minutes\n\n), final as (\n select\n *,\n (remaining_target_minutes + calendar_minutes) as breach_minutes,\n \n\n valid_starting_at + ((interval '1 minute') * ((remaining_target_minutes + calendar_minutes)))\n\n as sla_breach_at\n from requester_wait_time_calendar_minutes_flagged\n\n)\n\nselect *\nfrom final", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__requester_wait_time_calendar_hours\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.834365Z", "completed_at": "2024-08-26T20:48:50.844207Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.846727Z", "completed_at": "2024-08-26T20:48:50.846731Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.024590015411376953, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__reply_time_combined", "compiled": true, "compiled_code": "with reply_time_calendar_hours_sla as (\n \n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__reply_time_calendar_hours\"\n\n\n\n), reply_time_business_hours_sla as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__reply_time_business_hours\"\n\n\n\n), ticket_updates as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__updates\"\n\n), users as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), reply_time_breached_at as (\n\n select \n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n sla_applied_at as sla_schedule_start_at,\n cast(null as timestamp) as sla_schedule_end_at,\n cast(null as numeric(28,6)) as sum_lapsed_business_minutes,\n target,\n in_business_hours,\n sla_breach_at,\n cast(null as numeric(28,6)) as week_number,\n cast(null as numeric(28,6)) as total_schedule_weekly_business_minutes\n from reply_time_calendar_hours_sla\n\n\n\n union all\n\n select 
\n ticket_id,\n sla_policy_name,\n metric,\n ticket_created_at,\n sla_applied_at,\n sla_schedule_start_at,\n sla_schedule_end_at,\n sum_lapsed_business_minutes,\n target,\n in_business_hours,\n sla_breach_exact_time as sla_breach_at,\n week_number,\n total_schedule_weekly_business_minutes\n from reply_time_business_hours_sla\n\n\n-- Now that we have the breach time, see when the first reply after the sla policy was applied took place.\n), ticket_solved_times as (\n select\n ticket_id,\n valid_starting_at as solved_at\n from ticket_updates\n where field_name = 'status'\n and value in ('solved','closed')\n\n), reply_time as (\n select \n ticket_comment.ticket_id,\n ticket_comment.valid_starting_at as reply_at,\n commenter.role\n from ticket_updates as ticket_comment\n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n where field_name = 'comment' \n and ticket_comment.is_public\n and commenter.role in ('agent','admin')\n\n), reply_time_breached_at_with_next_reply_timestamp as (\n\n select \n reply_time_breached_at.ticket_id,\n reply_time_breached_at.sla_policy_name,\n reply_time_breached_at.metric,\n reply_time_breached_at.ticket_created_at,\n reply_time_breached_at.sla_applied_at,\n reply_time_breached_at.sum_lapsed_business_minutes,\n reply_time_breached_at.target,\n reply_time_breached_at.in_business_hours,\n reply_time_breached_at.sla_breach_at,\n reply_time_breached_at.week_number,\n min(reply_time_breached_at.sla_schedule_start_at) as sla_schedule_start_at,\n min(reply_time_breached_at.sla_schedule_end_at) as sla_schedule_end_at,\n min(reply_at) as agent_reply_at,\n min(solved_at) as next_solved_at\n from reply_time_breached_at\n left join reply_time\n on reply_time.ticket_id = reply_time_breached_at.ticket_id\n and reply_time.reply_at > reply_time_breached_at.sla_applied_at\n left join ticket_solved_times\n on reply_time_breached_at.ticket_id = ticket_solved_times.ticket_id\n and ticket_solved_times.solved_at > reply_time_breached_at.sla_applied_at\n group by 1,2,3,4,5,6,7,8,9,10\n\n), lagging_time_block as (\n select\n *,\n row_number() over (partition by ticket_id, metric, sla_applied_at order by sla_schedule_start_at) as day_index,\n lead(sla_schedule_start_at) over (partition by ticket_id, sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at) as next_schedule_start,\n min(sla_breach_at) over (partition by sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at rows unbounded preceding) as first_sla_breach_at,\n\t\tcoalesce(lag(sum_lapsed_business_minutes) over (partition by sla_policy_name, metric, sla_applied_at order by sla_schedule_start_at), 0) as sum_lapsed_business_minutes_new,\n \n (\n (\n (\n ((agent_reply_at)::date - (sla_schedule_start_at)::date)\n * 24 + date_part('hour', (agent_reply_at)::timestamp) - date_part('hour', (sla_schedule_start_at)::timestamp))\n * 60 + date_part('minute', (agent_reply_at)::timestamp) - date_part('minute', (sla_schedule_start_at)::timestamp))\n * 60 + floor(date_part('second', (agent_reply_at)::timestamp)) - floor(date_part('second', (sla_schedule_start_at)::timestamp)))\n / 60 as total_runtime_minutes -- total minutes from sla_schedule_start_at and agent reply time, before taking into account SLA end time\n from reply_time_breached_at_with_next_reply_timestamp\n\n), filtered_reply_times as (\n select\n *\n from lagging_time_block\n where (\n in_business_hours\n and ((\n agent_reply_at >= sla_schedule_start_at and agent_reply_at <= sla_schedule_end_at) -- ticket is replied to between a 
schedule window\n or (agent_reply_at < sla_schedule_start_at and sum_lapsed_business_minutes_new = 0 and sla_breach_at = first_sla_breach_at and day_index = 1) -- ticket is replied to before any schedule begins and no business minutes have been spent on it\n or (agent_reply_at is null and next_solved_at >= sla_schedule_start_at and next_solved_at < next_schedule_start) -- There are no reply times, but the ticket is closed and we should capture the closed date as the first and/or next reply time if there is not one preceding.\n or (next_solved_at is null and agent_reply_at is null and now() >= sla_schedule_start_at and (now() < next_schedule_start or next_schedule_start is null)) -- ticket is not replied to and therefore active. But only bring through the active SLA record that is most recent (after the last SLA schedule starts but before the next, or if there does not exist a next SLA schedule start time) \n or (agent_reply_at > sla_schedule_end_at and (agent_reply_at < next_schedule_start or next_schedule_start is null)) -- ticket is replied to outside sla schedule hours\n ) and sla_schedule_start_at <= now()) -- To help limit the data we do not want to bring through any schedule rows in the future.\n or not in_business_hours\n\n), reply_time_breached_at_remove_old_sla as (\n select\n *,\n now() as current_time_check,\n lead(sla_applied_at) over (partition by ticket_id, metric, in_business_hours order by sla_applied_at) as updated_sla_policy_starts_at,\n case when \n lead(sla_applied_at) over (partition by ticket_id, metric, in_business_hours order by sla_applied_at) --updated sla policy start at time\n < sla_breach_at then true else false end as is_stale_sla_policy,\n case when (sla_breach_at < agent_reply_at and sla_breach_at < next_solved_at)\n or (sla_breach_at < agent_reply_at and next_solved_at is null)\n or (agent_reply_at is null and sla_breach_at < next_solved_at)\n or (agent_reply_at is null and next_solved_at is null)\n then true\n else false\n end as is_sla_breached,\n sum_lapsed_business_minutes_new + total_runtime_minutes as total_new_minutes -- add total runtime to sum_lapsed_business_minutes_new (the sum_lapsed_business_minutes from prior row)\n from filtered_reply_times\n\n), reply_time_breach as ( \n select \n *,\n case when is_sla_breached\n then sla_breach_at -- If the SLA was breached then record that time as the breach \n else coalesce(agent_reply_at, next_solved_at) -- If the SLA was not breached then record either the agent_reply_at or next_solve_at as the breach event time as it was achieved.\n end as sla_update_at,\n case when total_runtime_minutes < 0 -- agent has already replied to prior to this SLA schedule\n then 0 -- so don't add new minutes to the SLA\n when total_new_minutes > sum_lapsed_business_minutes -- if total runtime, regardless of when the SLA schedule ended, is more than the total lapsed business minutes, that means the agent replied after the SLA schedule\n then sum_lapsed_business_minutes -- the elapsed time after the SLA end time should not be calculated as part of the business minutes, therefore sla_elapsed_time should only be sum_lapsed_business_minutes\n else sum_lapsed_business_minutes_new + (\n (\n (\n (\n ((coalesce(agent_reply_at, next_solved_at, current_time_check))::date - (sla_schedule_start_at)::date)\n * 24 + date_part('hour', (coalesce(agent_reply_at, next_solved_at, current_time_check))::timestamp) - date_part('hour', (sla_schedule_start_at)::timestamp))\n * 60 + date_part('minute', (coalesce(agent_reply_at, next_solved_at, 
current_time_check))::timestamp) - date_part('minute', (sla_schedule_start_at)::timestamp))\n * 60 + floor(date_part('second', (coalesce(agent_reply_at, next_solved_at, current_time_check))::timestamp)) - floor(date_part('second', (sla_schedule_start_at)::timestamp)))\n / 60) -- otherwise, the sla_elapsed_time will be sum_lapsed_business_minutes_new (the prior record's sum_lapsed_business_minutes) plus the minutes between SLA schedule start and agent_reply_time. If the agent hasn't replied yet, then the minute counter is still running, hence the coalesce of agent_reply_time and current_time_check.\n end as sla_elapsed_time\n from reply_time_breached_at_remove_old_sla \n)\n\nselect *\nfrom reply_time_breach", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__reply_time_combined\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.851817Z", "completed_at": "2024-08-26T20:48:50.874375Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.907020Z", "completed_at": "2024-08-26T20:48:50.907028Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.062139034271240234, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk.unique_zendesk__ticket_enriched_ticket_id.7c3c6ca9ef", "compiled": true, "compiled_code": "\n \n \n\nselect\n ticket_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"zendesk__ticket_enriched\"\nwhere ticket_id is not null\ngroup by ticket_id\nhaving count(*) > 1\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.848762Z", "completed_at": "2024-08-26T20:48:50.880573Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.907362Z", "completed_at": "2024-08-26T20:48:50.907367Z"}], "thread_id": "Thread-1 (worker)", "execution_time": 0.06310009956359863, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk.not_null_zendesk__ticket_enriched_ticket_id.e3efc5bf0a", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_id\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"zendesk__ticket_enriched\"\nwhere ticket_id is null\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.920294Z", "completed_at": "2024-08-26T20:48:50.974449Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.975044Z", "completed_at": "2024-08-26T20:48:50.975052Z"}], "thread_id": "Thread-1 (worker)", "execution_time": 0.06402897834777832, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.zendesk__sla_policies", "compiled": true, "compiled_code": "--final step where we union together all of the reply time, agent work time, and requester wait time sla's\n\nwith reply_time_sla as (\n\n select * \n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__reply_time_combined\"\n\n), agent_work_calendar_sla as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__agent_work_time_calendar_hours\"\n\n), requester_wait_calendar_sla as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__requester_wait_time_calendar_hours\"\n\n\n\n), agent_work_business_sla as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__agent_work_time_business_hours\"\n\n), 
requester_wait_business_sla as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__requester_wait_time_business_hours\"\n\n\n\n), all_slas_unioned as (\n select\n ticket_id,\n sla_policy_name,\n metric,\n sla_applied_at,\n target,\n in_business_hours,\n sla_update_at as sla_breach_at,\n sla_elapsed_time,\n is_sla_breached\n from reply_time_sla\n\nunion all\n\n select\n ticket_id,\n sla_policy_name,\n 'agent_work_time' as metric,\n sla_applied_at,\n target,\n false as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_calendar_minutes) as sla_elapsed_time,\n \n\n bool_or( is_breached_during_schedule )\n\n\n from agent_work_calendar_sla\n\n group by 1, 2, 3, 4, 5, 6\n\nunion all\n\n select\n ticket_id,\n sla_policy_name,\n 'requester_wait_time' as metric,\n sla_applied_at,\n target,\n false as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_calendar_minutes) as sla_elapsed_time,\n \n\n bool_or( is_breached_during_schedule )\n\n\n from requester_wait_calendar_sla\n\n group by 1, 2, 3, 4, 5, 6\n\n\n\n\nunion all \n\n select \n ticket_id,\n sla_policy_name,\n 'agent_work_time' as metric,\n sla_applied_at,\n target,\n true as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_scheduled_minutes) as sla_elapsed_time,\n \n\n bool_or( is_breached_during_schedule )\n\n\n from agent_work_business_sla\n \n group by 1, 2, 3, 4, 5, 6\n\nunion all \n\n select \n ticket_id,\n sla_policy_name,\n 'requester_wait_time' as metric,\n sla_applied_at,\n target,\n true as in_business_hours,\n max(sla_breach_at) as sla_breach_at,\n max(running_total_scheduled_minutes) as sla_elapsed_time,\n \n\n bool_or( is_breached_during_schedule )\n\n\n \n from requester_wait_business_sla\n \n group by 1, 2, 3, 4, 5, 6\n\n\n\n)\n\nselect \n md5(cast(coalesce(cast(ticket_id as TEXT), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(metric as TEXT), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(sla_applied_at as TEXT), '_dbt_utils_surrogate_key_null_') as TEXT)) as sla_event_id,\n ticket_id,\n sla_policy_name,\n metric,\n sla_applied_at,\n target,\n in_business_hours,\n sla_breach_at,\n case when sla_elapsed_time is null\n then (\n (\n (\n (\n ((\n current_timestamp::timestamp\n)::date - (sla_applied_at)::date)\n * 24 + date_part('hour', (\n current_timestamp::timestamp\n)::timestamp) - date_part('hour', (sla_applied_at)::timestamp))\n * 60 + date_part('minute', (\n current_timestamp::timestamp\n)::timestamp) - date_part('minute', (sla_applied_at)::timestamp))\n * 60 + floor(date_part('second', (\n current_timestamp::timestamp\n)::timestamp)) - floor(date_part('second', (sla_applied_at)::timestamp)))\n / 60) --This will create an entry for active sla's\n else sla_elapsed_time\n end as sla_elapsed_time,\n sla_breach_at > current_timestamp as is_active_sla,\n case when (sla_breach_at > \n current_timestamp::timestamp\n)\n then null\n else is_sla_breached\n end as is_sla_breach\nfrom all_slas_unioned", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"zendesk__sla_policies\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.927136Z", "completed_at": "2024-08-26T20:48:50.975979Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.977428Z", "completed_at": "2024-08-26T20:48:50.977434Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.06627178192138672, "adapter_response": {}, "message": null, "failures": null, 
"unique_id": "model.zendesk.zendesk__ticket_metrics", "compiled": true, "compiled_code": "with __dbt__cte__int_zendesk__ticket_resolution_times_calendar as (\nwith historical_solved_status as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n where status = 'solved'\n\n), ticket as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_historical_assignee as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_assignee\"\n\n), ticket_historical_group as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_group\"\n\n), solved_times as (\n \n select\n \n ticket_id,\n min(valid_starting_at) as first_solved_at,\n max(valid_starting_at) as last_solved_at,\n count(status) as solved_count \n\n from historical_solved_status\n group by 1\n\n)\n\n select\n\n ticket.ticket_id,\n ticket.created_at,\n solved_times.first_solved_at,\n solved_times.last_solved_at,\n ticket_historical_assignee.unique_assignee_count,\n ticket_historical_assignee.assignee_stations_count,\n ticket_historical_group.group_stations_count,\n ticket_historical_assignee.first_assignee_id,\n ticket_historical_assignee.last_assignee_id,\n ticket_historical_assignee.first_agent_assignment_date,\n ticket_historical_assignee.last_agent_assignment_date,\n ticket_historical_assignee.ticket_unassigned_duration_calendar_minutes,\n solved_times.solved_count as total_resolutions,\n case when solved_times.solved_count <= 1\n then 0\n else solved_times.solved_count - 1 --subtracting one as the first solve is not a reopen.\n end as count_reopens,\n\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.first_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.first_agent_assignment_date)::timestamp))\n as first_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket_historical_assignee.last_agent_assignment_date)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket_historical_assignee.last_agent_assignment_date)::timestamp))\n as last_assignment_to_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.first_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.first_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.first_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as first_resolution_calendar_minutes,\n \n (\n (\n ((solved_times.last_solved_at)::date - (ticket.created_at)::date)\n * 24 + date_part('hour', (solved_times.last_solved_at)::timestamp) - date_part('hour', (ticket.created_at)::timestamp))\n * 60 + date_part('minute', (solved_times.last_solved_at)::timestamp) - date_part('minute', (ticket.created_at)::timestamp))\n as final_resolution_calendar_minutes\n\n from ticket\n\n left join ticket_historical_assignee\n 
using(ticket_id)\n\n left join ticket_historical_group\n using(ticket_id)\n\n left join solved_times\n using(ticket_id)\n), __dbt__cte__int_zendesk__comments_enriched as (\nwith ticket_comment as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__updates\"\n where field_name = 'comment'\n\n), users as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user\"\n\n), joined as (\n\n select \n\n ticket_comment.*,\n case when commenter.role = 'end-user' then 'external_comment'\n when commenter.role in ('agent','admin') then 'internal_comment'\n else 'unknown' end as commenter_role\n \n from ticket_comment\n \n join users as commenter\n on commenter.user_id = ticket_comment.user_id\n\n), add_previous_commenter_role as (\n /*\n In int_zendesk__ticket_reply_times we will only be focusing on reply times between public comments.\n The below union explicitly identifies the previous commenter roles of public and not public comments.\n */\n select\n *,\n coalesce(\n lag(commenter_role) over (partition by ticket_id order by valid_starting_at, commenter_role)\n , 'first_comment') \n as previous_commenter_role\n from joined\n where is_public\n\n union all\n\n select\n *,\n 'non_public_comment' as previous_commenter_role\n from joined\n where not is_public\n)\n\nselect \n *,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_comment_added_at,\n sum(case when not is_public then 1 else 0 end) over (partition by ticket_id order by valid_starting_at rows between unbounded preceding and current row) as previous_internal_comment_count\nfrom add_previous_commenter_role\n), __dbt__cte__int_zendesk__ticket_reply_times as (\nwith ticket_public_comments as (\n\n select *\n from __dbt__cte__int_zendesk__comments_enriched\n where is_public\n\n), end_user_comments as (\n \n select \n ticket_id,\n valid_starting_at as end_user_comment_created_at,\n ticket_created_date,\n commenter_role,\n previous_internal_comment_count,\n previous_commenter_role = 'first_comment' as is_first_comment\n from ticket_public_comments \n where (commenter_role = 'external_comment'\n and ticket_public_comments.previous_commenter_role != 'external_comment') -- we only care about net new end user comments\n or previous_commenter_role = 'first_comment' -- We also want to take into consideration internal first comment replies\n\n), reply_timestamps as ( \n\n select\n end_user_comments.ticket_id,\n -- If the commenter was internal, a first comment, and had previous non public internal comments then we want the ticket created date to be the end user comment created date\n -- Otherwise we will want the end user comment created date\n case when is_first_comment then end_user_comments.ticket_created_date else end_user_comments.end_user_comment_created_at end as end_user_comment_created_at,\n end_user_comments.is_first_comment,\n min(case when is_first_comment \n and end_user_comments.commenter_role != 'external_comment' \n and (end_user_comments.previous_internal_comment_count > 0)\n then end_user_comments.end_user_comment_created_at \n else agent_comments.valid_starting_at end) as agent_responded_at\n from end_user_comments\n left join ticket_public_comments as agent_comments\n on agent_comments.ticket_id = end_user_comments.ticket_id\n and agent_comments.commenter_role = 'internal_comment'\n and agent_comments.valid_starting_at > end_user_comments.end_user_comment_created_at\n 
group by 1,2,3\n\n)\n\n select\n *,\n (\n (\n (\n (\n ((agent_responded_at)::date - (end_user_comment_created_at)::date)\n * 24 + date_part('hour', (agent_responded_at)::timestamp) - date_part('hour', (end_user_comment_created_at)::timestamp))\n * 60 + date_part('minute', (agent_responded_at)::timestamp) - date_part('minute', (end_user_comment_created_at)::timestamp))\n * 60 + floor(date_part('second', (agent_responded_at)::timestamp)) - floor(date_part('second', (end_user_comment_created_at)::timestamp)))\n / 60) as reply_time_calendar_minutes\n from reply_timestamps\n order by 1,2\n), __dbt__cte__int_zendesk__ticket_reply_times_calendar as (\nwith ticket as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), ticket_reply_times as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times\n\n)\n\nselect\n\n ticket.ticket_id,\n sum(case when is_first_comment then reply_time_calendar_minutes\n else null end) as first_reply_time_calendar_minutes,\n sum(reply_time_calendar_minutes) as total_reply_time_calendar_minutes --total combined time the customer waits for internal response\n \nfrom ticket\nleft join ticket_reply_times\n using (ticket_id)\n\ngroup by 1\n), __dbt__cte__int_zendesk__ticket_work_time_calendar as (\nwith ticket_historical_status as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n\n), calendar_minutes as (\n \n select \n ticket_id,\n status,\n case when status in ('pending') then status_duration_calendar_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold') then status_duration_calendar_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when status in ('new', 'open', 'hold', 'pending') then status_duration_calendar_minutes \n else 0 end as solve_time_in_minutes, \n case when status in ('new', 'open') then status_duration_calendar_minutes\n else 0 end as agent_work_time_in_minutes,\n case when status in ('hold') then status_duration_calendar_minutes\n else 0 end as on_hold_time_in_minutes,\n case when status = 'new' then status_duration_calendar_minutes\n else 0 end as new_status_duration_minutes,\n case when status = 'open' then status_duration_calendar_minutes\n else 0 end as open_status_duration_minutes,\n case when status = 'deleted' then 1\n else 0 end as ticket_deleted,\n first_value(valid_starting_at) over (partition by ticket_id order by valid_starting_at desc, ticket_id rows unbounded preceding) as last_status_assignment_date,\n case when lag(status) over (partition by ticket_id order by valid_starting_at) = 'deleted' and status != 'deleted'\n then 1\n else 0\n end as ticket_recoveries\n\n from ticket_historical_status\n\n)\n\nselect \n ticket_id,\n last_status_assignment_date,\n sum(ticket_deleted) as ticket_deleted_count,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_calendar_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_calendar_minutes,\n sum(solve_time_in_minutes) as solve_time_in_calendar_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_calendar_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_calendar_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_calendar_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_calendar_minutes,\n sum(ticket_recoveries) as total_ticket_recoveries\nfrom calendar_minutes\ngroup by 1, 2\n), 
__dbt__cte__int_zendesk__ticket_first_resolution_time_business as (\n\n\nwith ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_first_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.first_solved_at) as first_solved_at,\n\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.first_solved_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n 
), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_resolution_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_first_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_first_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_first_resolution_time\n\n), intercepted_periods as (\n\n select ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as timestamp) > cast(schedule.valid_from as timestamp)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as timestamp) < cast(schedule.valid_until as timestamp)\n\n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as first_resolution_business_minutes\n from intercepted_periods\n group by 1\n), __dbt__cte__int_zendesk__ticket_full_resolution_time_business as (\n\n\nwith ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_full_resolution_time as (\n\n select \n ticket_resolution_times_calendar.ticket_id,\n ticket_schedules.schedule_created_at,\n 
ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(ticket_resolution_times_calendar.last_solved_at) as last_solved_at,\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::date - (ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(ticket_resolution_times_calendar.last_solved_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from ticket_resolution_times_calendar\n join ticket_schedules on ticket_resolution_times_calendar.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n 
\n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_resolution_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_full_resolution_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_full_resolution_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n\n weeks_cross_ticket_full_resolution_time.*,\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_full_resolution_time\n\n), intercepted_periods as (\n\n select \n ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as timestamp) > cast(schedule.valid_from as timestamp)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as timestamp) < cast(schedule.valid_until as timestamp)\n \n)\n\n select \n ticket_id,\n sum(scheduled_minutes) as full_resolution_business_minutes\n from intercepted_periods\n group by 1\n), __dbt__cte__int_zendesk__ticket_work_time_business as (\n\n\nwith ticket_historical_status as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_historical_status\"\n\n), ticket_schedules as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), ticket_status_crossed_with_schedule as (\n \n select\n ticket_historical_status.ticket_id,\n ticket_historical_status.status as ticket_status,\n ticket_schedules.schedule_id,\n\n -- take the intersection of the intervals in which the status and the schedule were both active, for calculating the business minutes spent working on the ticket\n greatest(valid_starting_at, schedule_created_at) as status_schedule_start,\n least(valid_ending_at, schedule_invalidated_at) as status_schedule_end,\n\n -- bringing the following in the determine which schedule (Daylight Savings vs Standard time) to use\n ticket_historical_status.valid_starting_at as status_valid_starting_at,\n ticket_historical_status.valid_ending_at as status_valid_ending_at\n\n from 
ticket_historical_status\n left join ticket_schedules\n on ticket_historical_status.ticket_id = ticket_schedules.ticket_id\n -- making sure there is indeed real overlap\n where \n (\n (\n (\n ((least(valid_ending_at, schedule_invalidated_at))::date - (greatest(valid_starting_at, schedule_created_at))::date)\n * 24 + date_part('hour', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('hour', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + date_part('minute', (least(valid_ending_at, schedule_invalidated_at))::timestamp) - date_part('minute', (greatest(valid_starting_at, schedule_created_at))::timestamp))\n * 60 + floor(date_part('second', (least(valid_ending_at, schedule_invalidated_at))::timestamp)) - floor(date_part('second', (greatest(valid_starting_at, schedule_created_at))::timestamp)))\n > 0\n\n), ticket_full_solved_time as (\n\n select \n ticket_id,\n ticket_status,\n schedule_id,\n status_schedule_start,\n status_schedule_end,\n status_valid_starting_at,\n status_valid_ending_at,\n (\n (\n (\n (\n ((cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_status_crossed_with_schedule.status_schedule_start as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n (\n (\n (\n (\n ((ticket_status_crossed_with_schedule.status_schedule_end)::date - (ticket_status_crossed_with_schedule.status_schedule_start)::date)\n * 24 + date_part('hour', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp) - date_part('hour', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp))\n * 60 + date_part('minute', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp) - date_part('minute', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp))\n * 60 + floor(date_part('second', (ticket_status_crossed_with_schedule.status_schedule_end)::timestamp)) - floor(date_part('second', (ticket_status_crossed_with_schedule.status_schedule_start)::timestamp)))\n /60\n ) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_status_crossed_with_schedule.status_schedule_start + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as 
start_week_date\n\n from ticket_status_crossed_with_schedule\n group by 1,2,3,4,5,6,7\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_full_solved_time as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n ticket_full_solved_time.*,\n cast(generated_number - 1 as integer) as week_number\n from ticket_full_solved_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number -1\n\n), weekly_periods as (\n\n select\n\n weeks_cross_ticket_full_solved_time.*,\n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n \n from weeks_cross_ticket_full_solved_time\n\n), intercepted_periods as (\n \n select \n weekly_periods.ticket_id,\n weekly_periods.week_number,\n weekly_periods.schedule_id,\n weekly_periods.ticket_status,\n weekly_periods.ticket_week_start_time,\n weekly_periods.ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(weekly_periods.ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on \n ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as timestamp) > cast(schedule.valid_from as timestamp)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as timestamp) < cast(schedule.valid_until as timestamp)\n \n), business_minutes as (\n \n select \n ticket_id,\n ticket_status,\n case when ticket_status in ('pending') then scheduled_minutes\n else 0 end as agent_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold') then scheduled_minutes\n else 0 end as requester_wait_time_in_minutes,\n case when ticket_status in ('new', 'open', 'hold', 'pending') then 
scheduled_minutes\n else 0 end as solve_time_in_minutes,\n case when ticket_status in ('new', 'open') then scheduled_minutes\n else 0 end as agent_work_time_in_minutes,\n case when ticket_status in ('hold') then scheduled_minutes\n else 0 end as on_hold_time_in_minutes,\n case when ticket_status = 'new' then scheduled_minutes\n else 0 end as new_status_duration_minutes,\n case when ticket_status = 'open' then scheduled_minutes\n else 0 end as open_status_duration_minutes\n from intercepted_periods\n\n)\n \n select \n ticket_id,\n sum(agent_wait_time_in_minutes) as agent_wait_time_in_business_minutes,\n sum(requester_wait_time_in_minutes) as requester_wait_time_in_business_minutes,\n sum(solve_time_in_minutes) as solve_time_in_business_minutes,\n sum(agent_work_time_in_minutes) as agent_work_time_in_business_minutes,\n sum(on_hold_time_in_minutes) as on_hold_time_in_business_minutes,\n sum(new_status_duration_minutes) as new_status_duration_in_business_minutes,\n sum(open_status_duration_minutes) as open_status_duration_in_business_minutes\n from business_minutes\n group by 1\n), __dbt__cte__int_zendesk__ticket_first_reply_time_business as (\n\n\nwith ticket_reply_times as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times\n\n), ticket_schedules as (\n\n select \n *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__ticket_schedules\"\n\n), schedule as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__schedule_spine\"\n\n), first_reply_time as (\n\n select\n ticket_id,\n end_user_comment_created_at,\n agent_responded_at\n\n from ticket_reply_times\n where is_first_comment\n\n), ticket_first_reply_time as (\n\n select \n first_reply_time.ticket_id,\n ticket_schedules.schedule_created_at,\n ticket_schedules.schedule_invalidated_at,\n ticket_schedules.schedule_id,\n\n -- bringing this in the determine which schedule (Daylight Savings vs Standard time) to use\n min(first_reply_time.agent_responded_at) as agent_responded_at,\n\n (\n (\n (\n (\n ((cast(ticket_schedules.schedule_created_at as timestamp))::date - (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::date)\n * 24 + date_part('hour', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('hour', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + date_part('minute', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp) - date_part('minute', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp))\n * 60 + floor(date_part('second', (cast(ticket_schedules.schedule_created_at as timestamp))::timestamp)) - floor(date_part('second', (cast(-- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date)as timestamp))::timestamp)))\n /60\n ) as start_time_in_minutes_from_week,\n greatest(0,\n (\n \n (\n (\n (\n ((least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::date - 
(ticket_schedules.schedule_created_at)::date)\n * 24 + date_part('hour', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp) - date_part('hour', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + date_part('minute', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp) - date_part('minute', (ticket_schedules.schedule_created_at)::timestamp))\n * 60 + floor(date_part('second', (least(ticket_schedules.schedule_invalidated_at, min(first_reply_time.agent_responded_at)))::timestamp)) - floor(date_part('second', (ticket_schedules.schedule_created_at)::timestamp)))\n /60\n )) as raw_delta_in_minutes,\n -- Sunday as week start date\ncast(\n\n date_trunc('week', \n\n ticket_schedules.schedule_created_at + ((interval '1 day') * (1))\n\n) + ((interval '1 day') * (-1))\n\n as date) as start_week_date\n \n from first_reply_time\n join ticket_schedules on first_reply_time.ticket_id = ticket_schedules.ticket_id\n group by 1, 2, 3, 4\n\n), weeks as (\n\n \n\n \n\n with p as (\n select 0 as generated_number union all select 1\n ), unioned as (\n\n select\n\n \n p0.generated_number * power(2, 0)\n + \n \n p1.generated_number * power(2, 1)\n + \n \n p2.generated_number * power(2, 2)\n + \n \n p3.generated_number * power(2, 3)\n + \n \n p4.generated_number * power(2, 4)\n + \n \n p5.generated_number * power(2, 5)\n + \n \n p6.generated_number * power(2, 6)\n + \n \n p7.generated_number * power(2, 7)\n \n \n + 1\n as generated_number\n\n from\n\n \n p as p0\n cross join \n \n p as p1\n cross join \n \n p as p2\n cross join \n \n p as p3\n cross join \n \n p as p4\n cross join \n \n p as p5\n cross join \n \n p as p6\n cross join \n \n p as p7\n \n \n\n )\n\n select *\n from unioned\n where generated_number <= 208\n order by generated_number\n\n\n\n), weeks_cross_ticket_first_reply as (\n -- because time is reported in minutes since the beginning of the week, we have to split up time spent on the ticket into calendar weeks\n select \n\n ticket_first_reply_time.*,\n cast(generated_number - 1 as integer) as week_number\n\n from ticket_first_reply_time\n cross join weeks\n where floor((start_time_in_minutes_from_week + raw_delta_in_minutes) / (7*24*60)) >= generated_number - 1\n\n), weekly_periods as (\n \n select \n weeks_cross_ticket_first_reply.*, \n -- for each week, at what minute do we start counting?\n cast(greatest(0, start_time_in_minutes_from_week - week_number * (7*24*60)) as integer) as ticket_week_start_time,\n -- for each week, at what minute do we stop counting?\n cast(least(start_time_in_minutes_from_week + raw_delta_in_minutes - week_number * (7*24*60), (7*24*60)) as integer) as ticket_week_end_time\n from weeks_cross_ticket_first_reply\n\n), intercepted_periods as (\n\n select ticket_id,\n week_number,\n weekly_periods.schedule_id,\n ticket_week_start_time,\n ticket_week_end_time,\n schedule.start_time_utc as schedule_start_time,\n schedule.end_time_utc as schedule_end_time,\n least(ticket_week_end_time, schedule.end_time_utc) - greatest(ticket_week_start_time, schedule.start_time_utc) as scheduled_minutes\n from weekly_periods\n join schedule on ticket_week_start_time <= schedule.end_time_utc \n and ticket_week_end_time >= schedule.start_time_utc\n and weekly_periods.schedule_id = schedule.schedule_id\n -- this chooses the Daylight Savings Time or Standard Time version of the schedule\n -- We have everything calculated within a week, so take us to the appropriate week first by adding 
the week_number * minutes-in-a-week to the minute-mark where we start and stop counting for the week\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_end_time))\n\n as date) > cast(schedule.valid_from as date)\n and cast( \n\n start_week_date + ((interval '1 minute') * (week_number * (7*24*60) + ticket_week_start_time))\n\n as date) < cast(schedule.valid_until as date)\n \n)\n\n select ticket_id,\n sum(scheduled_minutes) as first_reply_time_business_minutes\n from intercepted_periods\n group by 1\n), ticket_enriched as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"zendesk__ticket_enriched\"\n\n), ticket_resolution_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_resolution_times_calendar\n\n), ticket_reply_times_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_reply_times_calendar\n\n), ticket_comments as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__comment_metrics\"\n\n), ticket_work_time_calendar as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_work_time_calendar\n\n-- business hour CTEs\n\n\n), ticket_first_resolution_time_business as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_first_resolution_time_business\n\n), ticket_full_resolution_time_business as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_full_resolution_time_business\n\n), ticket_work_time_business as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_work_time_business\n\n), ticket_first_reply_time_business as (\n\n select *\n from __dbt__cte__int_zendesk__ticket_first_reply_time_business\n\n\n-- end business hour CTEs\n\n), calendar_hour_metrics as (\n\nselect\n ticket_enriched.*,\n case when coalesce(ticket_comments.count_public_agent_comments, 0) = 0\n then null\n else ticket_reply_times_calendar.first_reply_time_calendar_minutes\n end as first_reply_time_calendar_minutes,\n case when coalesce(ticket_comments.count_public_agent_comments, 0) = 0\n then null\n else ticket_reply_times_calendar.total_reply_time_calendar_minutes\n end as total_reply_time_calendar_minutes,\n coalesce(ticket_comments.count_agent_comments, 0) as count_agent_comments,\n coalesce(ticket_comments.count_public_agent_comments, 0) as count_public_agent_comments,\n coalesce(ticket_comments.count_end_user_comments, 0) as count_end_user_comments,\n coalesce(ticket_comments.count_public_comments, 0) as count_public_comments,\n coalesce(ticket_comments.count_internal_comments, 0) as count_internal_comments,\n coalesce(ticket_comments.total_comments, 0) as total_comments,\n coalesce(ticket_comments.count_ticket_handoffs, 0) as count_ticket_handoffs, -- the number of distinct internal users who commented on the ticket\n ticket_comments.last_comment_added_at as ticket_last_comment_date,\n ticket_resolution_times_calendar.unique_assignee_count,\n ticket_resolution_times_calendar.assignee_stations_count,\n ticket_resolution_times_calendar.group_stations_count,\n ticket_resolution_times_calendar.first_assignee_id,\n ticket_resolution_times_calendar.last_assignee_id,\n ticket_resolution_times_calendar.first_agent_assignment_date,\n ticket_resolution_times_calendar.last_agent_assignment_date,\n ticket_resolution_times_calendar.first_solved_at,\n ticket_resolution_times_calendar.last_solved_at,\n case when ticket_enriched.status in ('solved', 'closed')\n then ticket_resolution_times_calendar.first_assignment_to_resolution_calendar_minutes\n else null\n end as 
first_assignment_to_resolution_calendar_minutes,\n case when ticket_enriched.status in ('solved', 'closed')\n then ticket_resolution_times_calendar.last_assignment_to_resolution_calendar_minutes\n else null\n end as last_assignment_to_resolution_calendar_minutes,\n ticket_resolution_times_calendar.ticket_unassigned_duration_calendar_minutes,\n ticket_resolution_times_calendar.first_resolution_calendar_minutes,\n ticket_resolution_times_calendar.final_resolution_calendar_minutes,\n ticket_resolution_times_calendar.total_resolutions as count_resolutions,\n ticket_resolution_times_calendar.count_reopens,\n ticket_work_time_calendar.ticket_deleted_count,\n ticket_work_time_calendar.total_ticket_recoveries,\n ticket_work_time_calendar.last_status_assignment_date,\n ticket_work_time_calendar.new_status_duration_in_calendar_minutes,\n ticket_work_time_calendar.open_status_duration_in_calendar_minutes,\n ticket_work_time_calendar.agent_wait_time_in_calendar_minutes,\n ticket_work_time_calendar.requester_wait_time_in_calendar_minutes,\n ticket_work_time_calendar.solve_time_in_calendar_minutes,\n ticket_work_time_calendar.agent_work_time_in_calendar_minutes,\n ticket_work_time_calendar.on_hold_time_in_calendar_minutes,\n coalesce(ticket_comments.count_agent_replies, 0) as total_agent_replies,\n \n case when ticket_enriched.is_requester_active = true and ticket_enriched.requester_last_login_at is not null\n then (\n (\n (\n (\n ((\n current_timestamp::timestamp\n)::date - (ticket_enriched.requester_last_login_at)::date)\n * 24 + date_part('hour', (\n current_timestamp::timestamp\n)::timestamp) - date_part('hour', (ticket_enriched.requester_last_login_at)::timestamp))\n * 60 + date_part('minute', (\n current_timestamp::timestamp\n)::timestamp) - date_part('minute', (ticket_enriched.requester_last_login_at)::timestamp))\n * 60 + floor(date_part('second', (\n current_timestamp::timestamp\n)::timestamp)) - floor(date_part('second', (ticket_enriched.requester_last_login_at)::timestamp)))\n /60)\n end as requester_last_login_age_minutes,\n case when ticket_enriched.is_assignee_active = true and ticket_enriched.assignee_last_login_at is not null\n then (\n (\n (\n (\n ((\n current_timestamp::timestamp\n)::date - (ticket_enriched.assignee_last_login_at)::date)\n * 24 + date_part('hour', (\n current_timestamp::timestamp\n)::timestamp) - date_part('hour', (ticket_enriched.assignee_last_login_at)::timestamp))\n * 60 + date_part('minute', (\n current_timestamp::timestamp\n)::timestamp) - date_part('minute', (ticket_enriched.assignee_last_login_at)::timestamp))\n * 60 + floor(date_part('second', (\n current_timestamp::timestamp\n)::timestamp)) - floor(date_part('second', (ticket_enriched.assignee_last_login_at)::timestamp)))\n /60)\n end as assignee_last_login_age_minutes,\n case when lower(ticket_enriched.status) not in ('solved','closed')\n then (\n (\n (\n (\n ((\n current_timestamp::timestamp\n)::date - (ticket_enriched.created_at)::date)\n * 24 + date_part('hour', (\n current_timestamp::timestamp\n)::timestamp) - date_part('hour', (ticket_enriched.created_at)::timestamp))\n * 60 + date_part('minute', (\n current_timestamp::timestamp\n)::timestamp) - date_part('minute', (ticket_enriched.created_at)::timestamp))\n * 60 + floor(date_part('second', (\n current_timestamp::timestamp\n)::timestamp)) - floor(date_part('second', (ticket_enriched.created_at)::timestamp)))\n /60)\n end as unsolved_ticket_age_minutes,\n case when lower(ticket_enriched.status) not in ('solved','closed')\n then (\n (\n (\n (\n ((\n 
current_timestamp::timestamp\n)::date - (ticket_enriched.updated_at)::date)\n * 24 + date_part('hour', (\n current_timestamp::timestamp\n)::timestamp) - date_part('hour', (ticket_enriched.updated_at)::timestamp))\n * 60 + date_part('minute', (\n current_timestamp::timestamp\n)::timestamp) - date_part('minute', (ticket_enriched.updated_at)::timestamp))\n * 60 + floor(date_part('second', (\n current_timestamp::timestamp\n)::timestamp)) - floor(date_part('second', (ticket_enriched.updated_at)::timestamp)))\n /60)\n end as unsolved_ticket_age_since_update_minutes,\n case when lower(ticket_enriched.status) in ('solved','closed') and ticket_comments.is_one_touch_resolution \n then true\n else false\n end as is_one_touch_resolution,\n case when lower(ticket_enriched.status) in ('solved','closed') and ticket_comments.is_two_touch_resolution \n then true\n else false \n end as is_two_touch_resolution,\n case when lower(ticket_enriched.status) in ('solved','closed') and not ticket_comments.is_one_touch_resolution\n and not ticket_comments.is_two_touch_resolution \n then true\n else false \n end as is_multi_touch_resolution\n\n\nfrom ticket_enriched\n\nleft join ticket_reply_times_calendar\n using (ticket_id)\n\nleft join ticket_resolution_times_calendar\n using (ticket_id)\n\nleft join ticket_work_time_calendar\n using (ticket_id)\n\nleft join ticket_comments\n using(ticket_id)\n\n\n\n), business_hour_metrics as (\n\n select \n ticket_enriched.ticket_id,\n ticket_first_resolution_time_business.first_resolution_business_minutes,\n ticket_full_resolution_time_business.full_resolution_business_minutes,\n ticket_first_reply_time_business.first_reply_time_business_minutes,\n ticket_work_time_business.agent_wait_time_in_business_minutes,\n ticket_work_time_business.requester_wait_time_in_business_minutes,\n ticket_work_time_business.solve_time_in_business_minutes,\n ticket_work_time_business.agent_work_time_in_business_minutes,\n ticket_work_time_business.on_hold_time_in_business_minutes,\n ticket_work_time_business.new_status_duration_in_business_minutes,\n ticket_work_time_business.open_status_duration_in_business_minutes\n\n from ticket_enriched\n\n left join ticket_first_resolution_time_business\n using (ticket_id)\n\n left join ticket_full_resolution_time_business\n using (ticket_id)\n \n left join ticket_first_reply_time_business\n using (ticket_id) \n \n left join ticket_work_time_business\n using (ticket_id)\n\n)\n\nselect\n calendar_hour_metrics.*,\n case when calendar_hour_metrics.status in ('solved', 'closed')\n then coalesce(business_hour_metrics.first_resolution_business_minutes,0)\n else null\n end as first_resolution_business_minutes,\n case when calendar_hour_metrics.status in ('solved', 'closed')\n then coalesce(business_hour_metrics.full_resolution_business_minutes,0)\n else null\n end as full_resolution_business_minutes,\n case when coalesce(calendar_hour_metrics.count_public_agent_comments, 0) = 0\n then null\n else coalesce(business_hour_metrics.first_reply_time_business_minutes,0)\n end as first_reply_time_business_minutes,\n coalesce(business_hour_metrics.agent_wait_time_in_business_minutes,0) as agent_wait_time_in_business_minutes,\n coalesce(business_hour_metrics.requester_wait_time_in_business_minutes,0) as requester_wait_time_in_business_minutes,\n coalesce(business_hour_metrics.solve_time_in_business_minutes,0) as solve_time_in_business_minutes,\n coalesce(business_hour_metrics.agent_work_time_in_business_minutes,0) as agent_work_time_in_business_minutes,\n 
coalesce(business_hour_metrics.on_hold_time_in_business_minutes,0) as on_hold_time_in_business_minutes,\n coalesce(business_hour_metrics.new_status_duration_in_business_minutes,0) as new_status_duration_in_business_minutes,\n coalesce(business_hour_metrics.open_status_duration_in_business_minutes,0) as open_status_duration_in_business_minutes\n\nfrom calendar_hour_metrics\n\nleft join business_hour_metrics \n using (ticket_id)\n\n", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"zendesk__ticket_metrics\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.978579Z", "completed_at": "2024-08-26T20:48:50.984885Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.991202Z", "completed_at": "2024-08-26T20:48:50.991210Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.014867067337036133, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk.unique_zendesk__sla_policies_sla_event_id.5daff4d2bd", "compiled": true, "compiled_code": "\n \n \n\nselect\n sla_event_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"zendesk__sla_policies\"\nwhere sla_event_id is not null\ngroup by sla_event_id\nhaving count(*) > 1\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.985290Z", "completed_at": "2024-08-26T20:48:50.992478Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.994534Z", "completed_at": "2024-08-26T20:48:50.994541Z"}], "thread_id": "Thread-1 (worker)", "execution_time": 0.015221118927001953, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.zendesk__ticket_summary", "compiled": true, "compiled_code": "with ticket_metrics as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"zendesk__ticket_metrics\"\n\n), user_table as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user\"\n\n), user_sum as (\n select\n cast(1 as integer) as summary_helper,\n sum(case when is_active = true\n then 1\n else 0\n end) as user_count,\n sum(case when lower(role) != 'end-user' and is_active = true\n then 1\n else 0\n end) as active_agent_count,\n sum(case when is_active = false\n then 1\n else 0\n end) as deleted_user_count,\n sum(case when lower(role) = 'end-user' and is_active = true\n then 1\n else 0\n end) as end_user_count,\n sum(case when is_suspended = true\n then 1\n else 0\n end) as suspended_user_count\n from user_table\n\n group by 1\n\n), ticket_metric_sum as (\n select \n cast(1 as integer) as summary_helper,\n sum(case when lower(status) = 'new'\n then 1\n else 0\n end) as new_ticket_count,\n sum(case when lower(status) = 'hold'\n then 1\n else 0\n end) as on_hold_ticket_count,\n sum(case when lower(status) = 'open'\n then 1\n else 0\n end) as open_ticket_count,\n sum(case when lower(status) = 'pending'\n then 1\n else 0\n end) as pending_ticket_count,\n sum(case when lower(type) = 'problem'\n then 1\n else 0\n end) as problem_ticket_count,\n sum(case when first_assignee_id != last_assignee_id\n then 1\n else 0\n end) as reassigned_ticket_count,\n sum(case when count_reopens > 0\n then 1\n else 0\n end) as reopened_ticket_count,\n\n sum(case when lower(ticket_satisfaction_score) in ('offered', 'good', 'bad')\n then 1\n else 0\n end) as surveyed_satisfaction_ticket_count,\n\n sum(case when assignee_id is null and lower(status) not in ('solved', 
'closed')\n then 1\n else 0\n end) as unassigned_unsolved_ticket_count,\n sum(case when total_agent_replies < 0\n then 1\n else 0\n end) as unreplied_ticket_count,\n sum(case when total_agent_replies < 0 and lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unreplied_unsolved_ticket_count,\n sum(case when lower(status) not in ('solved', 'closed')\n then 1\n else 0\n end) as unsolved_ticket_count,\n sum(case when lower(status) in ('solved', 'closed')\n then 1\n else 0\n end) as solved_ticket_count,\n sum(case when lower(status) in ('deleted')\n then 1\n else 0\n end) as deleted_ticket_count,\n sum(case when total_ticket_recoveries > 0\n then 1\n else 0\n end) as recovered_ticket_count,\n sum(case when assignee_stations_count > 0\n then 1\n else 0\n end) as assigned_ticket_count,\n count(count_internal_comments) as total_internal_comments,\n count(count_public_comments) as total_public_comments,\n count(total_comments)\n from ticket_metrics\n \n group by 1\n\n\n), final as (\n select\n user_sum.user_count,\n user_sum.active_agent_count,\n user_sum.deleted_user_count,\n user_sum.end_user_count,\n user_sum.suspended_user_count,\n ticket_metric_sum.new_ticket_count,\n ticket_metric_sum.on_hold_ticket_count,\n ticket_metric_sum.open_ticket_count,\n ticket_metric_sum.pending_ticket_count,\n ticket_metric_sum.solved_ticket_count,\n ticket_metric_sum.problem_ticket_count,\n ticket_metric_sum.assigned_ticket_count,\n ticket_metric_sum.reassigned_ticket_count,\n ticket_metric_sum.reopened_ticket_count,\n ticket_metric_sum.surveyed_satisfaction_ticket_count,\n ticket_metric_sum.unassigned_unsolved_ticket_count,\n ticket_metric_sum.unreplied_ticket_count,\n ticket_metric_sum.unreplied_unsolved_ticket_count,\n ticket_metric_sum.unsolved_ticket_count,\n ticket_metric_sum.recovered_ticket_count,\n ticket_metric_sum.deleted_ticket_count\n from user_sum\n\n left join ticket_metric_sum\n using(summary_helper)\n)\n\nselect *\nfrom final", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"zendesk__ticket_summary\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.988106Z", "completed_at": "2024-08-26T20:48:50.992835Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:50.994882Z", "completed_at": "2024-08-26T20:48:50.994886Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.01527094841003418, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk.not_null_zendesk__ticket_metrics_ticket_id.3466b76bbd", "compiled": true, "compiled_code": "\n \n \n\n\n\nselect ticket_id\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"zendesk__ticket_metrics\"\nwhere ticket_id is null\n\n\n", "relation_name": null}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.995245Z", "completed_at": "2024-08-26T20:48:50.999931Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:51.000604Z", "completed_at": "2024-08-26T20:48:51.000609Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.007960796356201172, "adapter_response": {}, "message": null, "failures": null, "unique_id": "test.zendesk.unique_zendesk__ticket_metrics_ticket_id.f3dc8eba5c", "compiled": true, "compiled_code": "\n \n \n\nselect\n ticket_id as unique_field,\n count(*) as n_records\n\nfrom \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"zendesk__ticket_metrics\"\nwhere ticket_id is not null\ngroup by ticket_id\nhaving count(*) > 1\n\n\n", "relation_name": null}, 
{"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:50.913060Z", "completed_at": "2024-08-26T20:48:51.199900Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:51.200509Z", "completed_at": "2024-08-26T20:48:51.200516Z"}], "thread_id": "Thread-4 (worker)", "execution_time": 0.2902050018310547, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__field_history_pivot", "compiled": true, "compiled_code": "-- depends_on: \"postgres\".\"zendesk_integration_tests_55\".\"ticket_field_history_data\"\n\n\n\n\n \nwith __dbt__cte__int_zendesk__updater_information as (\nwith users as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__user_aggregates\"\n\n), organizations as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__organization_aggregates\"\n\n), final as (\n select\n users.user_id as updater_user_id\n ,users.name as updater_name\n ,users.role as updater_role\n ,users.email as updater_email\n ,users.external_id as updater_external_id\n ,users.locale as updater_locale\n ,users.is_active as updater_is_active\n\n --If you use user tags this will be included, if not it will be ignored.\n \n ,users.user_tags as updater_user_tags\n \n\n ,users.last_login_at as updater_last_login_at\n ,users.time_zone as updater_time_zone\n ,organizations.organization_id as updater_organization_id\n\n --If you use using_domain_names tags this will be included, if not it will be ignored.\n \n ,organizations.domain_names as updater_organization_domain_names\n \n\n --If you use organization tags this will be included, if not it will be ignored.\n \n ,organizations.organization_tags as updater_organization_organization_tags\n \n from users\n\n left join organizations\n using(organization_id)\n)\n\nselect * \nfrom final\n), __dbt__cte__int_zendesk__field_history_enriched as (\nwith ticket_field_history as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket_field_history\"\n\n), updater_info as (\n select *\n from __dbt__cte__int_zendesk__updater_information\n\n), final as (\n select\n ticket_field_history.*\n\n \n\n from ticket_field_history\n\n left join updater_info\n on ticket_field_history.user_id = updater_info.updater_user_id\n)\nselect *\nfrom final\n), field_history as (\n\n select\n ticket_id,\n field_name,\n valid_ending_at,\n valid_starting_at\n\n --Only runs if the user passes updater fields through the final ticket field history model\n \n\n -- doing this to figure out what values are actually null and what needs to be backfilled in zendesk__ticket_field_history\n ,case when value is null then 'is_null' else value end as value\n\n from __dbt__cte__int_zendesk__field_history_enriched\n \n where cast( date_trunc('day', valid_starting_at) as date) >= (select max(date_day) from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__field_history_pivot\")\n \n\n), event_order as (\n\n select \n *,\n row_number() over (\n partition by cast(valid_starting_at as date), ticket_id, field_name\n order by valid_starting_at desc\n ) as row_num\n from field_history\n\n), filtered as (\n\n -- Find the last event that occurs on each day for each ticket\n\n select *\n from event_order\n where row_num = 1\n\n), pivots as (\n\n -- For each column that is in both the ticket_field_history_columns variable and the field_history table,\n -- pivot out the value into it's own column. 
This will feed the daily slowly changing dimension model.\n\n select \n ticket_id,\n cast(date_trunc('day', valid_starting_at) as date) as date_day\n\n \n \n ,min(case when lower(field_name) = 'status' then filtered.value end) as status\n\n --Only runs if the user passes updater fields through the final ticket field history model\n \n \n \n ,min(case when lower(field_name) = 'assignee_id' then filtered.value end) as assignee_id\n\n --Only runs if the user passes updater fields through the final ticket field history model\n \n \n \n ,min(case when lower(field_name) = 'priority' then filtered.value end) as priority\n\n --Only runs if the user passes updater fields through the final ticket field history model\n \n \n \n from filtered\n group by 1,2\n\n), surrogate_key as (\n\n select \n *,\n md5(cast(coalesce(cast(ticket_id as TEXT), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(date_day as TEXT), '_dbt_utils_surrogate_key_null_') as TEXT)) as ticket_day_id\n from pivots\n\n)\n\nselect *\nfrom surrogate_key", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__field_history_pivot\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:51.202937Z", "completed_at": "2024-08-26T20:48:51.461067Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:51.461598Z", "completed_at": "2024-08-26T20:48:51.461605Z"}], "thread_id": "Thread-1 (worker)", "execution_time": 0.28623104095458984, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.int_zendesk__field_history_scd", "compiled": true, "compiled_code": "-- model needs to materialize as a table to avoid erroneous null values\n \n\n\n\nwith change_data as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__field_history_pivot\"\n\n), set_values as (\n\n-- each row of the pivoted table includes field values if that field was updated on that day\n-- we need to backfill to persist values that have been previously updated and are still valid \n select \n date_day as valid_from,\n ticket_id,\n ticket_day_id\n\n \n\n ,status\n ,sum(case when status is null \n then 0 \n else 1 \n end) over (order by ticket_id, date_day rows unbounded preceding) as status_field_partition\n \n\n ,assignee_id\n ,sum(case when assignee_id is null \n then 0 \n else 1 \n end) over (order by ticket_id, date_day rows unbounded preceding) as assignee_id_field_partition\n \n\n ,priority\n ,sum(case when priority is null \n then 0 \n else 1 \n end) over (order by ticket_id, date_day rows unbounded preceding) as priority_field_partition\n \n\n from change_data\n\n), fill_values as (\n select\n valid_from, \n ticket_id,\n ticket_day_id\n\n \n\n ,first_value( status ) over (partition by status_field_partition, ticket_id order by valid_from asc rows between unbounded preceding and current row) as status\n \n \n\n ,first_value( assignee_id ) over (partition by assignee_id_field_partition, ticket_id order by valid_from asc rows between unbounded preceding and current row) as assignee_id\n \n \n\n ,first_value( priority ) over (partition by priority_field_partition, ticket_id order by valid_from asc rows between unbounded preceding and current row) as priority\n \n \n from set_values\n) \n\nselect *\nfrom fill_values", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__field_history_scd\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:51.492396Z", 
"completed_at": "2024-08-26T20:48:51.749252Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:51.749807Z", "completed_at": "2024-08-26T20:48:51.749813Z"}], "thread_id": "Thread-3 (worker)", "execution_time": 0.28700709342956543, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.zendesk__ticket_field_history", "compiled": true, "compiled_code": "with change_data as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__field_history_scd\"\n \n \n where valid_from >= (select max(date_day) from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"zendesk__ticket_field_history\")\n\n-- If no issue fields have been updated since the last incremental run, the pivoted_daily_history CTE will return no record/rows.\n-- When this is the case, we need to grab the most recent day's records from the previously built table so that we can persist \n-- those values into the future.\n\n), most_recent_data as ( \n\n select \n *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"zendesk__ticket_field_history\"\n where date_day = (select max(date_day) from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"zendesk__ticket_field_history\" )\n\n\n\n), calendar as (\n\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"int_zendesk__field_calendar_spine\"\n where date_day <= current_date\n \n and date_day >= (select max(date_day) from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"zendesk__ticket_field_history\")\n \n\n), joined as (\n\n select \n calendar.date_day,\n calendar.ticket_id\n \n \n , coalesce(change_data.status, most_recent_data.status) as status\n \n , coalesce(change_data.assignee_id, most_recent_data.assignee_id) as assignee_id\n \n , coalesce(change_data.priority, most_recent_data.priority) as priority\n \n \n \n\n from calendar\n left join change_data\n on calendar.ticket_id = change_data.ticket_id\n and calendar.date_day = change_data.valid_from\n \n \n left join most_recent_data\n on calendar.ticket_id = most_recent_data.ticket_id\n and calendar.date_day = most_recent_data.date_day\n \n\n), set_values as (\n\n select\n date_day,\n ticket_id\n\n \n , status\n -- create a batch/partition once a new value is provided\n , sum( case when status is null then 0 else 1 end) over ( partition by ticket_id\n order by date_day rows unbounded preceding) as status_field_partition\n\n \n , assignee_id\n -- create a batch/partition once a new value is provided\n , sum( case when assignee_id is null then 0 else 1 end) over ( partition by ticket_id\n order by date_day rows unbounded preceding) as assignee_id_field_partition\n\n \n , priority\n -- create a batch/partition once a new value is provided\n , sum( case when priority is null then 0 else 1 end) over ( partition by ticket_id\n order by date_day rows unbounded preceding) as priority_field_partition\n\n \n\n from joined\n),\n\nfill_values as (\n\n select \n date_day,\n ticket_id\n\n \n -- grab the value that started this batch/partition\n , first_value( status ) over (\n partition by ticket_id, status_field_partition \n order by date_day asc rows between unbounded preceding and current row) as status\n \n -- grab the value that started this batch/partition\n , first_value( assignee_id ) over (\n partition by ticket_id, assignee_id_field_partition \n order by date_day asc rows between unbounded preceding and current row) as assignee_id\n \n -- grab the value that started this batch/partition\n , 
first_value( priority ) over (\n partition by ticket_id, priority_field_partition \n order by date_day asc rows between unbounded preceding and current row) as priority\n \n\n from set_values\n\n), fix_null_values as (\n\n select \n date_day,\n ticket_id\n \n\n -- we de-nulled the true null values earlier in order to differentiate them from nulls that just needed to be backfilled\n , case when cast( status as TEXT ) = 'is_null' then null else status end as status\n \n\n -- we de-nulled the true null values earlier in order to differentiate them from nulls that just needed to be backfilled\n , case when cast( assignee_id as TEXT ) = 'is_null' then null else assignee_id end as assignee_id\n \n\n -- we de-nulled the true null values earlier in order to differentiate them from nulls that just needed to be backfilled\n , case when cast( priority as TEXT ) = 'is_null' then null else priority end as priority\n \n\n from fill_values\n\n), surrogate_key as (\n\n select\n md5(cast(coalesce(cast(date_day as TEXT), '_dbt_utils_surrogate_key_null_') || '-' || coalesce(cast(ticket_id as TEXT), '_dbt_utils_surrogate_key_null_') as TEXT)) as ticket_day_id,\n *\n\n from fix_null_values\n)\n\nselect *\nfrom surrogate_key", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"zendesk__ticket_field_history\""}, {"status": "success", "timing": [{"name": "compile", "started_at": "2024-08-26T20:48:51.785648Z", "completed_at": "2024-08-26T20:48:51.795179Z"}, {"name": "execute", "started_at": "2024-08-26T20:48:51.795742Z", "completed_at": "2024-08-26T20:48:51.795749Z"}], "thread_id": "Thread-2 (worker)", "execution_time": 0.012038946151733398, "adapter_response": {}, "message": null, "failures": null, "unique_id": "model.zendesk.zendesk__ticket_backlog", "compiled": true, "compiled_code": "--This model will only run if 'status' is included within the `ticket_field_history_columns` variable.\n\n\nwith ticket_field_history as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"zendesk__ticket_field_history\"\n\n), tickets as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__ticket\"\n\n), group_names as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__group\"\n\n), users as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__user\"\n\n), brands as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__brand\"\n\n--The below model is excluded if the user does not include ticket_form_id in the variable as a low percentage of accounts use ticket forms.\n\n\n), organizations as (\n select *\n from \"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"stg_zendesk__organization\"\n\n), backlog as (\n select\n ticket_field_history.date_day\n ,ticket_field_history.ticket_id\n ,ticket_field_history.status\n ,tickets.created_channel\n --Looking at all history fields the users passed through in their dbt_project.yml file\n --Standard ID field where the name can easily be joined from stg model.\n ,assignee.name as assignee_name\n\n \n --Looking at all history fields the users passed through in their dbt_project.yml file\n --All other fields are not ID's and can simply be included in the query.\n ,ticket_field_history.priority\n \n \n\n from ticket_field_history\n\n left join tickets\n on tickets.ticket_id = ticket_field_history.ticket_id\n\n \n\n \n\n --Join not needed if fields is not located in variable, 
otherwise it is included.\n left join users as assignee\n on assignee.user_id = cast(ticket_field_history.assignee_id as bigint)\n \n\n \n\n \n\n \n\n where ticket_field_history.status not in ('closed', 'solved', 'deleted')\n)\n\nselect *\nfrom backlog", "relation_name": "\"postgres\".\"zendesk_integration_tests_55_zendesk_dev\".\"zendesk__ticket_backlog\""}], "elapsed_time": 5.023838996887207, "args": {"cache_selected_only": false, "strict_mode": false, "empty_catalog": false, "static_parser": true, "indirect_selection": "eager", "compile": true, "log_level_file": "debug", "enable_legacy_logger": false, "select": [], "static": false, "printer_width": 80, "invocation_command": "dbt docs generate -t postgres", "log_format": "default", "vars": {}, "log_format_file": "debug", "quiet": false, "favor_state": false, "use_colors_file": true, "project_dir": "/Users/joseph.markiewicz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests", "defer": false, "target": "postgres", "macro_debugging": false, "log_path": "/Users/joseph.markiewicz/Documents/dbt_packages/zendesk/dbt_zendesk/integration_tests/logs", "use_colors": true, "write_json": true, "send_anonymous_usage_stats": true, "introspect": true, "partial_parse_file_diff": true, "log_level": "info", "exclude": [], "profiles_dir": "/Users/joseph.markiewicz/.dbt", "partial_parse": true, "which": "generate", "print": true, "populate_cache": true, "warn_error_options": {"include": [], "exclude": []}, "log_file_max_bytes": 10485760, "show_resource_report": false, "version_check": true}} \ No newline at end of file diff --git a/integration_tests/dbt_project.yml b/integration_tests/dbt_project.yml index 53532764..bc918b9d 100644 --- a/integration_tests/dbt_project.yml +++ b/integration_tests/dbt_project.yml @@ -1,7 +1,7 @@ config-version: 2 name: 'zendesk_integration_tests' -version: '0.16.0' +version: '0.17.0' profile: 'integration_tests' @@ -33,8 +33,7 @@ vars: # using_organization_tags: false # fivetran_integrity_sla_first_reply_time_exclusion_tickets: (1,56,80) # fivetran_consistency_ticket_metrics_exclusion_tickets: (11092,11093,11094) - # fivetran_integrity_sla_count_match_tickets: (76) - + # fivetran_integrity_sla_count_match_tickets: (76,11106) ## Upcoming PR will address this. 
models: +schema: "zendesk_{{ var('directed_schema','dev') }}" diff --git a/integration_tests/seeds/ticket_data.csv b/integration_tests/seeds/ticket_data.csv index 8e3cca5c..c92be7f1 100644 --- a/integration_tests/seeds/ticket_data.csv +++ b/integration_tests/seeds/ticket_data.csv @@ -2,7 +2,7 @@ id,_fivetran_synced,allow_channelback,assignee_id,brand_id,created_at,descriptio 1595,2020-03-20 02:32:49.426,FALSE,,360003529474,2020-02-19 01:54:52,description1,,,,360006965034,FALSE,TRUE,370295712714,,,email@email.com,396331237134,deleted,subject1,396331237134,,360002048693,incident,2020-02-19 01:55:11,https://zendesk.com/api/v2/tickets/1595.json,web,,,,example@email.com,,[],,, 16988,2021-01-13 20:09:16.325,FALSE,418284131934,360003529474,2020-12-22 00:19:23,description1,,,,360013366274,FALSE,TRUE,370469077513,,,email@email.com,1500656884401,solved,subject1,1500656884401,,360002048693,,2021-01-13 18:42:39,https://zendesk.com/api/v2/tickets/16988.json,email,,,,example@email.com,Support,[],,[], 14173,2020-11-11 20:08:45.130,FALSE,396371699653,360003529474,2020-10-28 12:03:02,description1,,,,360006965034,FALSE,TRUE,370321120273,,,email@email.com,424883466453,closed,subject1,424883466453,,360002048693,,2020-11-11 17:01:32,https://zendesk.com/api/v2/tickets/14173.json,email,,,,example@email.com,Support,[],,, -11071,2020-10-02 14:08:33.216,FALSE,,360003529474,2020-08-28 18:06:36,description1,,,,,FALSE,TRUE,,,,email@email.com,419755385214,deleted,subject1,419755385214,,360002048693,,2020-09-02 11:01:27,https://zendesk.com/api/v2/tickets/11071.json,email,,,,X,Support,[],,, +11071,2020-10-02 14:08:33.216,FALSE,1111,360003529474,2020-08-28 18:06:36,Ticket to test field history changes,,,,360006965034,FALSE,TRUE,,urgent,,email@email.com,2222,closed,subject1,2222,,360002048693,,2020-11-15 11:01:27,https://zendesk.com/api/v2/tickets/11071.json,email,,,,X,Support,[],,, 1966,2020-03-25 20:32:23.617,FALSE,396315360434,360003529474,2020-02-27 06:05:08,description1,,,,360006965034,FALSE,TRUE,370295721514,,,email@email.com,402813302773,closed,subject1,402813302773,,360002048693,,2020-03-25 16:03:26,https://zendesk.com/api/v2/tickets/1966.json,email,,,,example@email.com,Support,[1967],,, 11013,2020-10-02 20:08:20.449,FALSE,402851697393,360003529474,2020-08-27 23:09:52,description1,,,,360008376313,FALSE,TRUE,370297881854,,,email@email.com,419688934974,deleted,subject1,419688934974,,360002048693,,2020-09-02 15:53:16,https://zendesk.com/api/v2/tickets/11013.json,email,,,,X,Support,[],,, 1404,2020-03-05 04:53:46.466,FALSE,396371699653,360003529474,2020-02-13 21:43:58,description1,,,,360006965034,FALSE,TRUE,370295709874,,,email@email.com,403125197514,closed,subject1,403125197514,,360002048693,,2020-02-28 01:01:57,https://zendesk.com/api/v2/tickets/1404.json,email,,,,example@email.com,Support,,,, diff --git a/integration_tests/seeds/ticket_field_history_data.csv b/integration_tests/seeds/ticket_field_history_data.csv index 138a9a68..62794bc6 100644 --- a/integration_tests/seeds/ticket_field_history_data.csv +++ b/integration_tests/seeds/ticket_field_history_data.csv @@ -1,8 +1,25 @@ field_name,ticket_id,updated,_fivetran_synced,user_id,value +status,11071,2020-08-28 11:01:27,2020-10-02 14:08:33.216,,open +assignee_id,11071,2020-08-28 11:01:27,2020-03-11 14:32:23.872,,1111 +priority,11071,2020-08-28 11:01:27,2020-03-11 14:32:23.872,,normal +status,11071,2020-08-29 11:01:27,2020-10-02 14:08:33.216,,pending +status,11071,2020-09-02 11:01:27,2020-10-02 14:08:33.216,,on-hold +status,11071,2020-09-05 11:01:27,2020-10-02 
14:08:33.216,,pending +status,11071,2020-09-15 11:01:27,2020-10-02 14:08:33.216,,open +status,11071,2020-09-30 11:01:27,2020-10-02 14:08:33.216,,pending +priority,11071,2020-09-30 11:01:27,2020-03-11 14:32:23.872,,medium +status,11071,2020-10-01 11:01:27,2020-10-02 14:08:33.216,,on-hold +status,11071,2020-10-15 11:01:27,2020-10-02 14:08:33.216,,open +priority,11071,2020-10-15 11:01:27,2020-03-11 14:32:23.872,,high +status,11071,2020-10-17 11:01:27,2020-10-02 14:08:33.216,,on-hold +status,11071,2020-10-20 11:01:27,2020-10-02 14:08:33.216,,pending +status,11071,2020-11-02 11:01:27,2020-10-02 14:08:33.216,,open +priority,11071,2020-11-02 11:01:27,2020-03-11 14:32:23.872,,urgent +status,11071,2020-11-14 11:01:27,2020-10-02 14:08:33.216,,solved +status,11071,2020-11-15 11:01:27,2020-10-02 14:08:33.216,,closed status,6964,2020-06-01 21:11:59,2020-07-02 02:09:05.984,,solved status,974,2020-02-10 21:47:41,2020-03-12 02:32:23.808,,solved priority,980,2020-02-10 22:06:57,2020-03-12 02:32:23.808,,solved -status,11071,2020-09-02 11:01:27,2020-10-02 14:08:33.216,,solved status,8205,2020-07-07 23:01:47,2020-08-07 02:09:08.192,,solved status,103,2020-02-10 08:36:38,2020-03-11 14:32:23.872,,solved assignee_id,108,2020-02-10 08:36:38,2020-03-11 14:32:23.872,,1111 diff --git a/integration_tests/seeds/user_data.csv b/integration_tests/seeds/user_data.csv index 42291f9f..18862e43 100644 --- a/integration_tests/seeds/user_data.csv +++ b/integration_tests/seeds/user_data.csv @@ -1,6 +1,6 @@ id,_fivetran_synced,active,alias,authenticity_token,chat_only,created_at,details,email,external_id,last_login_at,locale,locale_id,moderator,name,notes,only_private_comments,organization_id,phone,remote_photo_url,restricted_agent,role,shared,shared_agent,signature,suspended,ticket_restriction,time_zone,two_factor_auth_enabled,updated_at,url,verified -403958466973,2020-03-05 05:03:34.208,TRUE,,,FALSE,2020-02-26 22:55:12,,example@email.com,,2023-02-26 22:55:12,en-US,1,FALSE,John Doe,,FALSE,370297696174,,,TRUE,end-user,FALSE,FALSE,,FALSE,requested,Pacific Time (US & Canada),FALSE,2020-02-26 22:55:12,https://fivetran1813.zendesk.com/api/v2/users/403958466973.json,TRUE -403969371634,2020-03-05 05:03:34.208,TRUE,,,FALSE,2020-02-26 22:41:37,,example@email.com,,2023-02-26 22:55:12,en-US,1,FALSE,John Doe,,FALSE,,,,TRUE,end-user,FALSE,FALSE,,FALSE,requested,Pacific Time (US & Canada),FALSE,2020-02-26 22:41:37,https://fivetran1813.zendesk.com/api/v2/users/403969371634.json,TRUE +1111,2020-03-05 05:03:34.208,TRUE,,,FALSE,2020-02-26 22:55:12,,example@email.com,,2023-02-26 22:55:12,en-US,1,FALSE,Arthur Agent,,FALSE,370326203233,,,TRUE,agent,FALSE,FALSE,,FALSE,requested,Pacific Time (US & Canada),FALSE,2020-02-26 22:55:12,https://fivetran1813.zendesk.com/api/v2/users/403958466973.json,TRUE +2222,2020-03-05 05:03:34.208,TRUE,,,FALSE,2020-02-26 22:41:37,,example@email.com,,2023-02-26 22:55:12,en-US,1,FALSE,Earnest End User,,FALSE,370297696174,,,TRUE,end-user,FALSE,FALSE,,FALSE,requested,Pacific Time (US & Canada),FALSE,2020-02-26 22:41:37,https://fivetran1813.zendesk.com/api/v2/users/403969371634.json,TRUE 403957746773,2020-03-05 05:03:34.208,TRUE,,,FALSE,2020-02-26 22:35:14,,example@email.com,,2023-02-26 22:55:12,en-US,1,FALSE,John Doe,,FALSE,370319191913,,,TRUE,end-user,FALSE,FALSE,,FALSE,requested,Pacific Time (US & Canada),FALSE,2020-02-26 22:35:15,https://fivetran1813.zendesk.com/api/v2/users/403957746773.json,TRUE 403970285734,2020-03-05 05:03:34.208,TRUE,,,FALSE,2020-02-26 23:07:41,,example@email.com,,2023-02-26 
22:55:12,en-US,1,FALSE,John Doe,,FALSE,370326203233,,,TRUE,end-user,FALSE,FALSE,,FALSE,requested,Pacific Time (US & Canada),FALSE,2020-02-26 23:07:41,https://fivetran1813.zendesk.com/api/v2/users/403970285734.json,TRUE 403969943274,2020-03-05 05:03:34.208,TRUE,,,FALSE,2020-02-26 22:57:51,,example@email.com,,2023-02-26 22:55:12,en-US,1,FALSE,John Doe,,FALSE,370326203233,,,TRUE,end-user,FALSE,FALSE,,FALSE,requested,Pacific Time (US & Canada),FALSE,2020-02-26 22:57:52,https://fivetran1813.zendesk.com/api/v2/users/403969943274.json,TRUE diff --git a/integration_tests/tests/consistency/consistency_sla_policy_count.sql b/integration_tests/tests/consistency/consistency_sla_policy_count.sql index 65e31d7f..cc09c9c4 100644 --- a/integration_tests/tests/consistency/consistency_sla_policy_count.sql +++ b/integration_tests/tests/consistency/consistency_sla_policy_count.sql @@ -6,7 +6,7 @@ with prod as ( select - 1 as join_key, + ticket_id, count(*) as total_slas from {{ target.schema }}_zendesk_prod.zendesk__sla_policies group by 1 @@ -14,7 +14,7 @@ with prod as ( dev as ( select - 1 as join_key, + ticket_id, count(*) as total_slas from {{ target.schema }}_zendesk_dev.zendesk__sla_policies group by 1 @@ -22,12 +22,13 @@ dev as ( final as ( select - prod.join_key, + prod.ticket_id, + dev.ticket_id, prod.total_slas as prod_sla_total, dev.total_slas as dev_sla_total from prod full outer join dev - on dev.join_key = prod.join_key + on dev.ticket_id = prod.ticket_id ) select * diff --git a/models/ticket_history/int_zendesk__field_calendar_spine.sql b/models/ticket_history/int_zendesk__field_calendar_spine.sql index 1e821310..5e961604 100644 --- a/models/ticket_history/int_zendesk__field_calendar_spine.sql +++ b/models/ticket_history/int_zendesk__field_calendar_spine.sql @@ -1,7 +1,7 @@ {{ config( materialized='incremental', - partition_by = {'field': 'date_day', 'data_type': 'date'} if target.type not in ['spark', 'databricks'] else ['date_day'], + partition_by = {'field': 'date_day', 'data_type': 'date', 'granularity': 'month'} if target.type not in ['spark', 'databricks'] else ['date_day'], unique_key='ticket_day_id', incremental_strategy = 'merge' if target.type not in ('snowflake', 'postgres', 'redshift') else 'delete+insert', file_format='delta' diff --git a/models/ticket_history/int_zendesk__field_history_pivot.sql b/models/ticket_history/int_zendesk__field_history_pivot.sql index 56600191..3fa83345 100644 --- a/models/ticket_history/int_zendesk__field_history_pivot.sql +++ b/models/ticket_history/int_zendesk__field_history_pivot.sql @@ -3,7 +3,7 @@ {{ config( materialized='incremental', - partition_by = {'field': 'date_day', 'data_type': 'date'} if target.type not in ['spark', 'databricks'] else ['date_day'], + partition_by = {'field': 'date_day', 'data_type': 'date', 'granularity': 'month'} if target.type not in ['spark', 'databricks'] else ['date_day'], unique_key='ticket_day_id', incremental_strategy = 'merge' if target.type not in ('snowflake', 'postgres', 'redshift') else 'delete+insert', file_format='delta' diff --git a/models/zendesk__ticket_field_history.sql b/models/zendesk__ticket_field_history.sql index 5879e458..5de1e100 100644 --- a/models/zendesk__ticket_field_history.sql +++ b/models/zendesk__ticket_field_history.sql @@ -1,7 +1,7 @@ {{ config( materialized='incremental', - partition_by = {'field': 'date_day', 'data_type': 'date'} if target.type not in ['spark', 'databricks'] else ['date_day'], + partition_by = {'field': 'date_day', 'data_type': 'date', 'granularity': 'month' } if 
target.type not in ['spark', 'databricks'] else ['date_day'], unique_key='ticket_day_id', incremental_strategy = 'merge' if target.type not in ('snowflake', 'postgres', 'redshift') else 'delete+insert', file_format='delta'
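
For reference, the practical effect of the `'granularity': 'month'` key added in the three `partition_by` configs above: on BigQuery (the only warehouse where the dict form of `partition_by` applies), dbt renders the partition clause through `date_trunc`, collapsing day-level partitions into monthly ones. Below is a rough, self-contained sketch of the resulting DDL — the project, dataset, and `_demo` table name are illustrative placeholders, and the `select` is toy data from the seeds above, not the package's compiled SQL:

```sql
-- Toy illustration of the month-granularity partitioning dbt-bigquery now
-- configures for these models. The previous config rendered the equivalent
-- of `partition by date_day` (one partition per day); truncating to month
-- keeps long ticket histories well under BigQuery's 4,000-partitions-per-
-- table limit that caused the `too many partitions` error.
create table `my-project.my_dataset.zendesk__ticket_field_history_demo`
partition by date_trunc(date_day, month)
as
select
  date '2020-11-15' as date_day,
  11071 as ticket_id,
  'closed' as status;
```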
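Beyond the partitioning change, two patterns in the compiled `run_results.json` code earlier in this diff are worth standalone sketches. First, the ticket-field-history models backfill sparse daily values with a pair of window functions: a running `sum(case when <field> is null then 0 else 1 end)` opens a new "batch" at every non-null update, `first_value()` then propagates the opening value across the null days that follow, and the `'is_null'` string planted upstream lets the final model tell a genuinely cleared field apart from a day that simply had no update. A minimal sketch of the same pattern, assuming a hypothetical `daily_status(ticket_id, date_day, status)` table with one row per ticket per day:

```sql
-- Minimal sketch of the backfill pattern used in int_zendesk__field_history_scd
-- and zendesk__ticket_field_history. `daily_status` is a hypothetical input;
-- `status` is populated only on days the field changed, with the literal
-- 'is_null' marking an explicit update to null.
with set_values as (

    select
        ticket_id,
        date_day,
        status,
        -- running count of non-null updates: each new value opens a new
        -- batch, and the null days that follow inherit that batch number
        sum(case when status is null then 0 else 1 end)
            over (partition by ticket_id
                  order by date_day
                  rows unbounded preceding) as status_field_partition
    from daily_status

), fill_values as (

    select
        ticket_id,
        date_day,
        -- every row in a batch shares the value that opened it
        first_value(status) over (
            partition by ticket_id, status_field_partition
            order by date_day asc
            rows between unbounded preceding and current row) as status
    from set_values

)

select
    ticket_id,
    date_day,
    -- re-null the sentinel now that backfilling is done, so true nulls
    -- are distinguishable from gaps that merely needed backfilling
    case when status = 'is_null' then null else status end as status
from fill_values
```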
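Second, each incremental model in the compiled output guards its input with a look-back against the latest `date_day` already built, which is what keeps incremental runs cheap after the full refresh this release requires. The sketch below expresses that guard in dbt's standard `is_incremental()` / `{{ this }}` idiom; it matches the compiled filter above in shape, though the package's actual source may differ in detail:

```sql
select *
from {{ ref('int_zendesk__field_history_scd') }}

{% if is_incremental() %}
-- On incremental runs, only reprocess rows on or after the most recent day
-- already present in the target table; on a full refresh this filter is
-- compiled away entirely.
where valid_from >= (select max(date_day) from {{ this }})
{% endif %}
```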