From 1dca903290d32d7148423ca0fd5349994657aaa2 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9Czzhaobraze=E2=80=9D?= Date: Thu, 11 Jul 2024 16:37:26 -0400 Subject: [PATCH 01/32] fix ja table of content nav --- assets/js/documents.js | 25 ++++++++++++++++++++++++- assets/js/toc.js | 12 +++--------- vercel.json | 3 --- 3 files changed, 27 insertions(+), 13 deletions(-) diff --git a/assets/js/documents.js b/assets/js/documents.js index a3f7cc34e9d..ae7a5319467 100644 --- a/assets/js/documents.js +++ b/assets/js/documents.js @@ -15,6 +15,29 @@ function generateUUID() { // Public Domain/MIT return (c === 'x' ? r : (r & 0x3 | 0x8)).toString(16); }); } + +function unEncodeURIComponent(str) { + let decodedStr = decodeURIComponent(str); + decodedStr = decodedStr.replace(/%21|%27|%28|%29|%2A/g, function(match) { + switch (match) { + case '%21': + return '!'; + case '%27': + return "'"; + case '%28': + return '('; + case '%29': + return ')'; + case '%2A': + return '*'; + default: + return match; + } + }); + + return decodedStr; +} + function string_to_slug(str) { if (str) { str = str.toLowerCase().replace(/\s/g, '-').replace(/[^\w-]/g, ''); @@ -219,7 +242,7 @@ $(document).ready(function() { // Prevent default anchor click behavior event.preventDefault(); // Store hash - var hash = this.hash; + var hash = unEncodeURIComponent(this.hash); // Using jQuery's animate() method to add smooth page scroll // The optional number (800) specifies the number of milliseconds it takes to scroll to the specified area $('html, body').animate({ diff --git a/assets/js/toc.js b/assets/js/toc.js index bdb80357dbc..a5ae032f65b 100644 --- a/assets/js/toc.js +++ b/assets/js/toc.js @@ -19,12 +19,6 @@ }, settings = $.extend(defaults, options); - function fixedEncodeURIComponent (str) { - return encodeURIComponent(str).replace(/[!'()*]/g, function(c) { - return '%' + c.charCodeAt(0).toString(16); - }); - } - var headers = $(settings.headers).filter(function() { // get all headers with an ID var 
previousSiblingName = $(this).prev().attr( "name" ); @@ -77,18 +71,18 @@ } // extra div tags at html += before to prevent highlighting of parent if (this_level === level) // same level as before; same indenting - html += "
" + header.innerHTML + "
"; + html += "
" + header.innerHTML + "
"; else if (this_level <= level){ // higher level than before; end parent ol for(i = this_level; i < level; i++) { html += "" } - html += "
" + header.innerHTML + "
"; + html += "
" + header.innerHTML + "
"; } else if (this_level > level) { // lower level than before; expand the previous to contain a ol for(i = this_level; i > level; i--) { html += "<"+ settings.listType + settings.bootstrapStyling +">" } - html += "
" + header.innerHTML + "
"; + html += "
" + header.innerHTML + "
"; } level = this_level; // update for the next one }); diff --git a/vercel.json b/vercel.json index ab4c741c347..e835fed3242 100644 --- a/vercel.json +++ b/vercel.json @@ -18,9 +18,6 @@ "destination": "/api/404.html" } ], - "github": { - "silent": true - }, "headers": [ { "source": "/(.*)", From 34073ea0c9ed451dcdd2de00ad4a30c26e5e672e Mon Sep 17 00:00:00 2001 From: Lydia Xie <87040416+lydia-xie@users.noreply.github.com> Date: Thu, 11 Jul 2024 13:58:20 -0700 Subject: [PATCH 02/32] Update template_catalog_item_liquid.md --- _docs/_hidden/private_betas/template_catalog_item_liquid.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/_docs/_hidden/private_betas/template_catalog_item_liquid.md b/_docs/_hidden/private_betas/template_catalog_item_liquid.md index fc45aedca26..2382edb5169 100644 --- a/_docs/_hidden/private_betas/template_catalog_item_liquid.md +++ b/_docs/_hidden/private_betas/template_catalog_item_liquid.md @@ -9,10 +9,10 @@ hidden: true # Templating catalog items including Liquid - Similar to [Connected Content]({{site.baseurl}}/user_guide/personalization_and_dynamic_content/connected_content), the `:rerender` flag must be included in the Liquid tag in order to render its Liquid content. Note that the `:rerender` flag is only one level deep, meaning that it won't apply to any nested Liquid tag calls. + Similar to [Connected Content]({{site.baseurl}}/user_guide/personalization_and_dynamic_content/connected_content), the `:rerender` flag must be included in the Liquid tag to render its Liquid content. Note that the `:rerender` flag is only one level deep, meaning it won't apply to any nested Liquid tag calls. {% alert important %} - Templating catalog items that include Liquid is in early access. Contact your Braze account manager if you're interested in participating in the early access. + Templating catalog items that include Liquid is in early access. 
Reach out to your Braze account manager if you're interested in participating in the early access. {% endalert %} If a catalog item contains user profile fields (within a Liquid personalization tag), these values must be defined earlier in the message via Liquid before the templating in order to render the Liquid properly. If `:rerender` flag is not provided, it will render the raw Liquid content. @@ -46,4 +46,4 @@ Welcome to our store, Peter! Catalog Liquid tags can't be used recursively inside catalogs. {% endalert %} -[15]: {% image_buster /assets/img_archive/catalog_liquid_templating.png %} \ No newline at end of file +[15]: {% image_buster /assets/img_archive/catalog_liquid_templating.png %} From 408749073b1cb04e04a4c2604e7d0d5d3d867ae1 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=80=9Czzhaobraze=E2=80=9D?= Date: Fri, 12 Jul 2024 10:13:11 -0400 Subject: [PATCH 03/32] fix layout redirect url --- _plugins/urlnavmenu_generator.rb | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/_plugins/urlnavmenu_generator.rb b/_plugins/urlnavmenu_generator.rb index 0bd907f14d2..afa54eb4a7a 100644 --- a/_plugins/urlnavmenu_generator.rb +++ b/_plugins/urlnavmenu_generator.rb @@ -283,7 +283,7 @@ def build_menu_html(menu_hash,parent_key,level) end cur_url = @baseurl + curinfo.url - if curinfo['redirect_to'] + if curinfo['redirect_to'] && level == 0 cur_url = curinfo['redirect_to'].gsub!(/^\/docs\//, "#{@baseurl}\/") end From 3088a51c127caf99555c25b250c726f55b135410 Mon Sep 17 00:00:00 2001 From: btoo <8883465+btoo@users.noreply.github.com> Date: Fri, 12 Jul 2024 11:08:27 -0400 Subject: [PATCH 04/32] rename appboy to braze in sdk_authentication.md `Register a callback function for invalid tokens` --- _docs/_developer_guide/platform_wide/sdk_authentication.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/_docs/_developer_guide/platform_wide/sdk_authentication.md b/_docs/_developer_guide/platform_wide/sdk_authentication.md index 
6c24e1fe8e0..12b1db46972 100644 --- a/_docs/_developer_guide/platform_wide/sdk_authentication.md +++ b/_docs/_developer_guide/platform_wide/sdk_authentication.md @@ -274,7 +274,7 @@ braze.subscribeToSdkAuthenticationFailures((error) => { // TODO: Optionally log to your error-reporting service // TODO: Check if the `user_id` within the `error` matches the currently logged-in user const updated_jwt = await getNewTokenSomehow(error); - appboy.setSdkAuthenticationSignature(updated_jwt); + braze.setSdkAuthenticationSignature(updated_jwt); }); ``` {% endtab %} From c3813ffb4a3d55ca6e5ee07a793a472f0b9f7c7a Mon Sep 17 00:00:00 2001 From: Matthew Hicks Date: Fri, 12 Jul 2024 10:19:08 -0600 Subject: [PATCH 05/32] update price drop docs to look like back in stock --- .../catalogs/back_in_stock_notifications.md | 2 +- .../catalogs/price_drop_notifications.md | 50 +++++++++++++++---- 2 files changed, 40 insertions(+), 12 deletions(-) diff --git a/_docs/_user_guide/personalization_and_dynamic_content/catalogs/back_in_stock_notifications.md b/_docs/_user_guide/personalization_and_dynamic_content/catalogs/back_in_stock_notifications.md index 18d11096161..b7ba7e7a5e5 100644 --- a/_docs/_user_guide/personalization_and_dynamic_content/catalogs/back_in_stock_notifications.md +++ b/_docs/_user_guide/personalization_and_dynamic_content/catalogs/back_in_stock_notifications.md @@ -66,7 +66,7 @@ Follow these steps to set up back-in-stock notifications in a specific catalog. Notification rules in these settings do not replace Canvas notification settings, such as Quiet Hours. {% endalert %} -## Using back-in-stock notifications in a Canvas or campaign +## Using back-in-stock notifications in a Canvas After setting up the back-in-stock feature in a catalog, follow these steps to use with Canvas. 
diff --git a/_docs/_user_guide/personalization_and_dynamic_content/catalogs/price_drop_notifications.md b/_docs/_user_guide/personalization_and_dynamic_content/catalogs/price_drop_notifications.md index d3bb1455ca4..d4f1a78995f 100644 --- a/_docs/_user_guide/personalization_and_dynamic_content/catalogs/price_drop_notifications.md +++ b/_docs/_user_guide/personalization_and_dynamic_content/catalogs/price_drop_notifications.md @@ -10,13 +10,19 @@ description: "This reference article describes how to create price drop notifica > Using a combination of price drop notifications through Braze catalogs and a Canvas, you can notify customers when an item's price has decreased. Any time a customer performs a selected custom event, they can be automatically subscribed to be notified when the item's price is reduced. -## How it works +{% alert important %} +Price drop notifications for catalogs are currently in early access. Contact your account manager if you're interested in participating in this early access. +{% endalert %} + +When a user triggers a custom event for an item, we'll automatically subscribe them to receive price drop notifications for that item. When the item's price meets your inventory rule (such as a drop larger than 50%), all subscribers will be eligible for notifications through a campaign or Canvas. However, only users who opted into notifications will receive notifications. -When a selected custom event is performed by a user and has a `type` property that includes `price_drop`, it can be used to create a price drop subscription for a user and a catalog item it occurred for. When the item's price drops by the specified amount, users can be notified through a Canvas. +## How price drop notifications work -Price drop notifications are determined by notification rules. If the user has not opted in to notifications, they will be filtered out and will not receive messaging. 
+You'll set up a custom event to use as a subscription event, such as a `product_clicked` event. This event must contain a property of the item ID (catalog item IDs). We suggest you include a catalog name, but this isn't required. You'll also provide the name of a price field, which must be a number-data type. When a selected custom event is performed by a user and has a `type` property that includes `price_drop`, it can be used to create a price drop subscription for a user and a catalog item it occurred for. -Users are subscribed for 90 days. If an item does not drop in price in 90 days, the user is removed from the subscription. Braze will process up to 10 item updates per minute. This means if you update 11 items in one minute, only the first 10 items will trigger the notifications. +When an item has a price change that meets your price rule, we'll look up all your users who are subscribed to that item (users who did the subscription event) and send a Braze custom event that you can use to trigger a campaign or Canvas. + +The event properties are sent alongside your user, so you can template in the item details into the campaign or Canvas that sends! ## Setting up price drop notifications @@ -25,18 +31,33 @@ Follow these steps to set up price drop notifications in a specific catalog. 1. Go to your catalog and select the **Settings** tab.
2. Select the **Price Drop** toggle.
3. If the global catalog settings have not been configured, you will be prompted to set up the custom events and properties that will be used to trigger notifications: -

+
![Catalog settings drawer.][2]{: style="max-width:70%;"} - **Custom event for subscribing:** The Braze custom event used to subscribe a user for catalog notifications. When this event occurs, the user who performed the event will be subscribed. - **Custom event for unsubscribing:** The Braze custom event used to unsubscribe a user from notifications. - **Item ID event property:** The property on the above custom event used to determine the item for a subscription or unsubscription. This property on the custom event should contain an item ID that exists in a catalog. The custom event must contain a `catalog_name` property to specify which catalog this item is in. - **Fallback Catalog:** The catalog used for the subscription if there isn't a `catalog_name` property in the custom event. + - A sample custom event would look like + ```json + { + "events": [ + { + "external_id": "", + "name": "subscription", + "time": "2024-04-15T19:22:28Z", + "properties": { + "id": "shirt-xl", + "catalog_name": "on_sale_products", + "type": ["price_drop"] + } + } + ] + } + ```

4. Select **Save** and continue to the catalog's **Settings** page.
-5. Set your notification rule. There are two options: **Notify all subscribed users** and **Notify a certain number of users per a certain number of minutes**. -

- - Selecting **Notify all subscribed users** notifies all customers who are waiting when the item's price drops. - - Selecting **Notify a certain number of users per a certain number of minutes** notifies a specified number of customers per your configured notification period. Braze will notify the specified numbers of customers in increments until there are no more customers to notify, or until the item's price goes back up. Your notification rate cannot exceed notifying 10,000 users per minute. -

+5. Set your notification rule. There are two options: + - **Notify all subscribed users** notifies all customers who are waiting when the item's price drops. + - **Notify a certain number of users per a certain number of minutes** notifies a specified number of customers per your configured notification period. Braze will notify the specified numbers of customers in increments until there are no more customers to notify, or until the item's price goes back up. Your notification rate cannot exceed notifying 10,000 users per minute. 6. Set the **Price field in catalog**. This is the catalog field that will be used to determine the item's price. It must be a number type.
7. Set the **Price drop rule**. This is the logic used to determine if a notification should be sent. A price drop can be configured as a percentage price change or how much value the price field has changed by.
8. Select **Save settings**. @@ -66,4 +87,11 @@ Using {%raw%}``{{canvas_entry_properties.${catalog_update}.item_id}}``{%endraw%} Use this Liquid tag {%raw%}``{% catalog_items {{canvas_entry_properties.${catalog_update}.item_id}} %}}``{%endraw%} at the top of your message, then use {%raw%}`{{items[0].}}`{%endraw%} to access data about that item throughout the message. -[1]: {% image_buster /assets/img/price_drop_notifications.png %} \ No newline at end of file +## Considerations + +- Users are subscribed for 90 days. If an item does not drop in price in 90 days, the user is removed from the subscription. +- When using the **Notify all subscribed users** notification rule, Braze will notify 100,000 over 10 minutes. +- Braze will process up to 10 item updates per minute. This means if you update 11 items in one minute, only the first 10 items can trigger a price drop the notification. + +[1]: {% image_buster /assets/img/price_drop_notifications.png %} +[2]: {% image_buster /assets/img/catalog_settings_drawer.png %} From b12297b2ee3448cc5460d2773e1b3ca7021d8009 Mon Sep 17 00:00:00 2001 From: isaiah robinson <95643215+internetisaiah@users.noreply.github.com> Date: Fri, 12 Jul 2024 15:59:30 -0700 Subject: [PATCH 06/32] Update cross_referencing.md --- .../content_management/cross_referencing.md | 65 ++----------------- 1 file changed, 7 insertions(+), 58 deletions(-) diff --git a/_docs/_contributing/content_management/cross_referencing.md b/_docs/_contributing/content_management/cross_referencing.md index 16af1eb52f7..11f95674c7a 100644 --- a/_docs/_contributing/content_management/cross_referencing.md +++ b/_docs/_contributing/content_management/cross_referencing.md @@ -14,15 +14,14 @@ noindex: true ## Create a cross-reference -When creating a cross-reference, you can either use the in-line method or reference-style method. The in-line method prioritizes clarity, while the reference-style method prioritizes readability. 
+{% alert important %} +Since Liquid's {% raw %}`{% tab %}`{% endraw %} tag does not support reference-style links, only in-line links are documented below. Existing reference links will continue to work, but are no longer recommended. +{% endalert %} -{% tabs %} -{% tab in-line %} Open the relevant Markdown file, then create your in-line link. -{% subtabs %} -{% subtab Markdown %} - +{% tabs %} +{% tab Markdown %} {% raw %} ```markdown [LINK_TEXT]({{site.baseurl}}/SHORT_URL) @@ -44,9 +43,9 @@ Your in-line link should be similar to the following: Before continuing, [create your SSH token]({{site.baseurl}}/docs/developer_guide/platform_wide/sdk_authentication). ``` {% endraw %} -{% endsubtab %} +{% endtab %} -{% subtab HTML %} +{% tab HTML %} {% raw %} ```markdown @@ -69,55 +68,5 @@ Your in-line link should be similar to the following: To learn about the different custom attribute data types you can use to segment users, view Custom attribute data types. ``` {% endraw %} -{% endsubtab %} -{% endsubtabs %} - -{% endtab %} - -{% tab reference-style %} -Open the relevant Markdown file, then create your reference-style link. - -```markdown -[LINK_TEXT][REFERENCE_NUMBER] -``` - -Replace the following: - -| Placeholder | Description | -|--------------------|--------------------------------------------------------------------------| -| `LINK_TEXT` | The page title or related action. | -| `REFERENCE_NUMBER` | Assign any positive integer that's not already assigned to another reference-style link on this page. | -{: .reset-td-br-1 .reset-td-br-2} - -Your references should be similar to the following: - -```markdown -Before continuing, [create your SSH token][2]. When you're finished, see [Step 2: Uploading your token][5]. -``` - -At the bottom of the page, you'll add the related links. 
- -{% raw %} -```markdown -[REFERENCE_NUMBER]: {{site.baseurl}}SHORT_URL -``` -{% endraw %} - -Replace the following: - -| Placeholder | Description | -|--------------------|---------------------------------------------------------| -| `REFERENCE_NUMBER` | The number of the reference you'd like to link to. | -| `SHORT_URL` | The page URL with `https://www.braze.com/docs` removed. | -{: .reset-td-br-1 .reset-td-br-2} - -Your links should be similar to the following: - -{% raw %} -```markdown -[2]: {{site.baseurl}}/developer_guide/platform_wide/sdk_authentication/ -[5]: {{site.baseurl}}/developer_guide/platform_wide/swift#step-2-uploading-your-token -``` -{% endraw %} {% endtab %} {% endtabs %} From cf7d5650de3c617f2b33470dc9b8dd8a7e2c2928 Mon Sep 17 00:00:00 2001 From: isaiah robinson <95643215+internetisaiah@users.noreply.github.com> Date: Fri, 12 Jul 2024 16:01:30 -0700 Subject: [PATCH 07/32] Update images.md --- .../content_management/images.md | 70 +------------------ 1 file changed, 3 insertions(+), 67 deletions(-) diff --git a/_docs/_contributing/content_management/images.md b/_docs/_contributing/content_management/images.md index b4f17690975..cc2dc747d6e 100644 --- a/_docs/_contributing/content_management/images.md +++ b/_docs/_contributing/content_management/images.md @@ -46,10 +46,10 @@ braze-docs ### Step 2: Link to the image -When linking to your new image, you can either use in-line or reference-style syntax. In-line syntax prioritizes clarity, while reference-style syntax prioritizes readability. +{% alert important %} +Since Liquid's {% raw %}`{% tab %}`{% endraw %} tag does not support reference-style links, only in-line links are documented below. Existing reference links will continue to work, but are no longer recommended. +{% endalert %} -{% tabs %} -{% tab in-line %} In your Markdown file, link to your new image using the in-line syntax. 
{% raw %} @@ -73,58 +73,6 @@ Your in-line image should be similar to the following: ![The form for creating a new pull request on GitHub.]({% image_buster /assets/img/contributing/getting_started/github_pull_request.png %}) ``` {% endraw %} -{% endtab %} - -{% tab reference-style %} -In your Markdown file, link to your new image using the reference-style syntax. - -{% raw %} -```markdown -![ALT_TEXT.][REFERENCE_NUMBER] -``` -{% endraw %} - -Replace the following: - -| Placeholder | Description | -|--------------------|-------------------------------------------------------------------------------------------------------------------------| -| `ALT_TEXT` | The alt text for the image. This is required to make Braze Docs equally accessible for those using screen readers. | -| `REFERENCE_NUMBER` | Assign any positive integer that's not already assigned to another reference-style link on this page. | -{: .reset-td-br-1 .reset-td-br-2} - -Your in-line image should be similar to the following: - -{% raw %} -```markdown -![The form for creating a new pull request on GitHub.][10] -``` -{% endraw %} - -At the bottom of the page, add your reference. - -{% raw %} -```markdown -[REFERENCE_NUMBER]: {% image_buster /assets/img/DIRECTORY/IMAGE.png %} -``` -{% endraw %} - -Replace the following: - -| Placeholder | Description | -|--------------------|---------------------------------------------------------| -| `REFERENCE_NUMBER` | The number of the reference you'd like to link to. | -| `IMAGE` | The relative path to your image starting from the `img` directory. 
| -{: .reset-td-br-1 .reset-td-br-2} - -Your links should be similar to the following: - -{% raw %} -```markdown -[10]: {% image_buster /assets/img/contributing/getting_started/github_pull_request.png %} -``` -{% endraw %} -{% endtab %} -{% endtabs %} ### Step 3: Set the image's maximum width (optional) @@ -138,23 +86,11 @@ You can set the image's maximum width by appending the following Liquid code to Replace `NUMBER` with the maximum width you'd like to set as a percentage. Your image link should be similar to the following: -{% tabs %} -{% tab in-line %} {% raw %} ```markdown ![The form for creating a new pull request on GitHub.]({% image_buster /assets/img/contributing/getting_started/github_pull_request.png %}){: style="max-width:65%;"} ``` {% endraw %} -{% endtab %} - -{% tab reference-style %} -{% raw %} -```markdown -![The form for creating a new pull request on GitHub.][10]{: style="max-width:65%;"} -``` -{% endraw %} -{% endtab %} -{% endtabs %} ## Updating an image From eb6936a244fdb6969ed22b7f1e06aa0fb1922490 Mon Sep 17 00:00:00 2001 From: isaiah robinson <95643215+internetisaiah@users.noreply.github.com> Date: Fri, 12 Jul 2024 16:05:19 -0700 Subject: [PATCH 08/32] Update cross_referencing.md --- _docs/_contributing/content_management/cross_referencing.md | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/_docs/_contributing/content_management/cross_referencing.md b/_docs/_contributing/content_management/cross_referencing.md index 11f95674c7a..9069d1b6576 100644 --- a/_docs/_contributing/content_management/cross_referencing.md +++ b/_docs/_contributing/content_management/cross_referencing.md @@ -18,10 +18,10 @@ noindex: true Since Liquid's {% raw %}`{% tab %}`{% endraw %} tag does not support reference-style links, only in-line links are documented below. Existing reference links will continue to work, but are no longer recommended. {% endalert %} -Open the relevant Markdown file, then create your in-line link. 
- {% tabs %} {% tab Markdown %} +Open the relevant Markdown file, then create your in-line link. + {% raw %} ```markdown [LINK_TEXT]({{site.baseurl}}/SHORT_URL) @@ -46,6 +46,7 @@ Before continuing, [create your SSH token]({{site.baseurl}}/docs/developer_guide {% endtab %} {% tab HTML %} +Open the relevant Markdown file, then create your in-line link. {% raw %} ```markdown From 2c537a93172f03ebb7f286954caae6d7f41730c9 Mon Sep 17 00:00:00 2001 From: Rachel Feinberg <135255868+rachel-feinberg@users.noreply.github.com> Date: Fri, 12 Jul 2024 16:10:08 -0700 Subject: [PATCH 09/32] Update _docs/_contributing/content_management/cross_referencing.md --- _docs/_contributing/content_management/cross_referencing.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/_docs/_contributing/content_management/cross_referencing.md b/_docs/_contributing/content_management/cross_referencing.md index 9069d1b6576..1acf598bd61 100644 --- a/_docs/_contributing/content_management/cross_referencing.md +++ b/_docs/_contributing/content_management/cross_referencing.md @@ -15,7 +15,7 @@ noindex: true ## Create a cross-reference {% alert important %} -Since Liquid's {% raw %}`{% tab %}`{% endraw %} tag does not support reference-style links, only in-line links are documented below. Existing reference links will continue to work, but are no longer recommended. +Because Liquid's {% raw %}`{% tab %}`{% endraw %} tag does not support reference-style links, only in-line links are documented below. Existing reference links will continue to work, but are no longer recommended. 
{% endalert %} {% tabs %} From b13b9eef65d68b8d2e028dbcd055529f6b6f1395 Mon Sep 17 00:00:00 2001 From: Matthew Hicks Date: Mon, 15 Jul 2024 07:57:33 -0600 Subject: [PATCH 10/32] Update _docs/_user_guide/personalization_and_dynamic_content/catalogs/price_drop_notifications.md Co-authored-by: Dave Hensley --- .../catalogs/price_drop_notifications.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/_docs/_user_guide/personalization_and_dynamic_content/catalogs/price_drop_notifications.md b/_docs/_user_guide/personalization_and_dynamic_content/catalogs/price_drop_notifications.md index d4f1a78995f..7f0c0415484 100644 --- a/_docs/_user_guide/personalization_and_dynamic_content/catalogs/price_drop_notifications.md +++ b/_docs/_user_guide/personalization_and_dynamic_content/catalogs/price_drop_notifications.md @@ -91,7 +91,7 @@ Use this Liquid tag {%raw%}``{% catalog_items {{canvas_en - Users are subscribed for 90 days. If an item does not drop in price in 90 days, the user is removed from the subscription. - When using the **Notify all subscribed users** notification rule, Braze will notify 100,000 over 10 minutes. -- Braze will process up to 10 item updates per minute. This means if you update 11 items in one minute, only the first 10 items can trigger a price drop the notification. +- Braze will process up to 10 item updates per minute. This means if you update 11 items in one minute, only the first 10 items can trigger a price drop notification. 
[1]: {% image_buster /assets/img/price_drop_notifications.png %} [2]: {% image_buster /assets/img/catalog_settings_drawer.png %} From e0b649975d3519825a11a6a90b3ba742d7c3c342 Mon Sep 17 00:00:00 2001 From: Lydia Xie <87040416+lydia-xie@users.noreply.github.com> Date: Mon, 15 Jul 2024 08:09:02 -0700 Subject: [PATCH 11/32] Apply suggestions from code review --- .../catalogs/price_drop_notifications.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/_docs/_user_guide/personalization_and_dynamic_content/catalogs/price_drop_notifications.md b/_docs/_user_guide/personalization_and_dynamic_content/catalogs/price_drop_notifications.md index 7f0c0415484..ba1b9ec6052 100644 --- a/_docs/_user_guide/personalization_and_dynamic_content/catalogs/price_drop_notifications.md +++ b/_docs/_user_guide/personalization_and_dynamic_content/catalogs/price_drop_notifications.md @@ -90,7 +90,7 @@ Use this Liquid tag {%raw%}``{% catalog_items {{canvas_en ## Considerations - Users are subscribed for 90 days. If an item does not drop in price in 90 days, the user is removed from the subscription. -- When using the **Notify all subscribed users** notification rule, Braze will notify 100,000 over 10 minutes. +- When using the **Notify all subscribed users** notification rule, Braze will notify 100,000 users over 10 minutes. - Braze will process up to 10 item updates per minute. This means if you update 11 items in one minute, only the first 10 items can trigger a price drop notification. 
[1]: {% image_buster /assets/img/price_drop_notifications.png %} From 9c2d6f74e2c5648d82fec9e93a459d352912a3ee Mon Sep 17 00:00:00 2001 From: Lydia Xie <87040416+lydia-xie@users.noreply.github.com> Date: Mon, 15 Jul 2024 08:10:18 -0700 Subject: [PATCH 12/32] Apply suggestions from code review --- .../catalogs/price_drop_notifications.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/_docs/_user_guide/personalization_and_dynamic_content/catalogs/price_drop_notifications.md b/_docs/_user_guide/personalization_and_dynamic_content/catalogs/price_drop_notifications.md index ba1b9ec6052..efc6b668311 100644 --- a/_docs/_user_guide/personalization_and_dynamic_content/catalogs/price_drop_notifications.md +++ b/_docs/_user_guide/personalization_and_dynamic_content/catalogs/price_drop_notifications.md @@ -53,7 +53,7 @@ Follow these steps to set up price drop notifications in a specific catalog. ] } ``` -

+
4. Select **Save** and continue to the catalog's **Settings** page.
5. Set your notification rule. There are two options: - **Notify all subscribed users** notifies all customers who are waiting when the item's price drops. From 83f918dc7a7ce0f646e30c6509dbbb78bd4b31c6 Mon Sep 17 00:00:00 2001 From: Lydia Xie Date: Mon, 15 Jul 2024 09:29:39 -0700 Subject: [PATCH 13/32] BD-3255: Remove note about Canvas Flow names coming soon --- .../event_glossary/message_engagement_events.md | 4 ---- 1 file changed, 4 deletions(-) diff --git a/_docs/_user_guide/data_and_analytics/braze_currents/event_glossary/message_engagement_events.md b/_docs/_user_guide/data_and_analytics/braze_currents/event_glossary/message_engagement_events.md index 4c0c25e097a..ca3ff4f890f 100644 --- a/_docs/_user_guide/data_and_analytics/braze_currents/event_glossary/message_engagement_events.md +++ b/_docs/_user_guide/data_and_analytics/braze_currents/event_glossary/message_engagement_events.md @@ -43,10 +43,6 @@ Certain events return a `platform` value that specifies the platform of the user Storage schemas apply to the flat file event data we send to Data Warehouse partners (Google Cloud Storage, Amazon S3, and Microsoft Azure Blob Storage). Some event and destination combinations listed here are not yet generally available. For information on which events are supported by various partners, refer to our list of [available partners]({{site.baseurl}}/user_guide/data_and_analytics/braze_currents/available_partners/) and check their respective pages.

Additionally, note that Currents will drop events with excessively large payloads of greater than 900 KB. {% endalert %} -{% alert note %} -Human-readable names for objects related to Canvas Flow are coming soon to Currents. In the meantime, the IDs can be used for grouping, and translated to human-readable names via the [Export Canvas details endpoint]({{site.baseurl}}/api/endpoints/export/canvas/get_canvas_details/). -{% endalert %} - {% api %} ## WhatsApp read events From 2d45944ec147cb428e1cf00c72bc4661f385d60a Mon Sep 17 00:00:00 2001 From: "Tony Pitkin (Braze)" <84037309+tonypitkinbraze@users.noreply.github.com> Date: Mon, 15 Jul 2024 10:12:21 -0700 Subject: [PATCH 14/32] Update overview.md Updating Github link --- .../_user_guide/data_and_analytics/cloud_ingestion/overview.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/_docs/_user_guide/data_and_analytics/cloud_ingestion/overview.md b/_docs/_user_guide/data_and_analytics/cloud_ingestion/overview.md index 869620b3de8..f2f73208f30 100644 --- a/_docs/_user_guide/data_and_analytics/cloud_ingestion/overview.md +++ b/_docs/_user_guide/data_and_analytics/cloud_ingestion/overview.md @@ -466,7 +466,7 @@ We use the `UPDATED_AT` timestamp to track what data has been synced successfull ### Table configuration -We have a public [GitHub repository](https://github.com/braze-inc/braze-examples/tree/main/data-ingestion) for customers to share best practices or code snippets. To contribute your own snippets, create a pull request! +We have a public [GitHub repository](https://github.com/braze-inc/braze-examples/tree/main/cloud-data-ingestion) for customers to share best practices or code snippets. To contribute your own snippets, create a pull request! 
### Data formatting From acf99c472259dd1a0f81a7e77e81e6da96796ce0 Mon Sep 17 00:00:00 2001 From: Rachel Feinberg Date: Mon, 15 Jul 2024 10:13:08 -0700 Subject: [PATCH 15/32] BD-3260 Snowflake Data Retention --- .../snowflake/data_retention.md | 279 ++++++++++++++++++ .../data_warehouses/snowflake/faqs.md | 2 +- 2 files changed, 280 insertions(+), 1 deletion(-) create mode 100644 _docs/_partners/data_and_infrastructure_agility/data_warehouses/snowflake/data_retention.md diff --git a/_docs/_partners/data_and_infrastructure_agility/data_warehouses/snowflake/data_retention.md b/_docs/_partners/data_and_infrastructure_agility/data_warehouses/snowflake/data_retention.md new file mode 100644 index 00000000000..e79c90a2ed3 --- /dev/null +++ b/_docs/_partners/data_and_infrastructure_agility/data_warehouses/snowflake/data_retention.md @@ -0,0 +1,279 @@ +--- +nav_title: "Data Retention" +article_title: Snowflake Data Retention +page_order: 3 +description: "This page covers how to retain full events data when the Braze retention policy is applied." +page_type: partner +search_tag: Partner +--- + +# Snowflake data retention + +> Braze anonymizes (removes personally identifiable information (PII)) from all events data that is older than two years old. If you use Snowflake data sharing, you may choose to retain the full events data in your environment by storing a copy in your Snowflake account before the retention policy is applied. + +This page presents two ways you can retain non-anonymized data: + +- Copy your data to another Snowflake database +- Unload your data to a stage + +{% alert warning %} +Braze automatically anonymizes events data for users that are deleted from Braze, as described in [Data Protection Technical Assistance]({{site.baseurl}}/dp-technical-assistance/). Any data copied outside of the shared database will not be included in this process, as it’s no longer managed by Braze. 
+{% endalert %} + +## Copy all data to another Snowflake database + +You can retain non-anonymized data by copying your data from the shared `BRAZE_RAW_EVENTS` schema to another database and schema in Snowflake. To do so, follow these steps: + +1. Create the procedure `COPY_BRAZE_SHARE`, which will be used to copy all the data shared by Braze to another database and schema within Snowflake. + +{% raw %} +```json +-- +-- @param string SOURCE_DATABASE - database name of the braze data share +-- @param string SOURCE_SCHEMA - schema name of the braze data share +-- @param string DESTINATION_DATABASE - +-- @param string DESTINATION_SCHEMA - +-- @param string MAX_DATE - copy data on / before the max date default DATEADD(year, -2, CURRENT_DATE()) +-- @param string TABLE_NAME_FILTER - filter to select table that will be unloaded, default to 'USER_%' +-- + +CREATE PROCEDURE COPY_BRAZE_SHARE( + SOURCE_DATABASE STRING, + SOURCE_SCHEMA STRING, + DESTINATION_DATABASE STRING, + DESTINATION_SCHEMA STRING, + MAX_DATE DATE default DATEADD(year, -2, CURRENT_DATE()), + TABLE_NAME_FILTER STRING default 'USERS_%' +) +RETURNS TABLE (TABLE_NAME STRING, SUCCESS BOOLEAN, INFO STRING) +LANGUAGE PYTHON +RUNTIME_VERSION = '3.8' +PACKAGES = ('snowflake-snowpark-python') +HANDLER = 'run' +AS +$$ +import snowflake.snowpark as snowpark +from snowflake.snowpark.exceptions import SnowparkSQLException + +def run(session: snowpark.Session, SOURCE_DATABASE: str, SOURCE_SCHEMA: str, DESTINATION_DATABASE: str, DESTINATION_SCHEMA: str, MAX_DATE: str, TABLE_NAME_FILTER: str): + result = [] + + # Get the list of filtered table names + table_query = f""" + SELECT table_name + FROM {SOURCE_DATABASE}.INFORMATION_SCHEMA.TABLES + WHERE TABLE_SCHEMA = '{SOURCE_SCHEMA}' AND table_name LIKE '{TABLE_NAME_FILTER}' + """ + + tables = session.sql(table_query).collect() + + # Iterate through each table and copy data + for row in tables: + table_name = row['TABLE_NAME'] + + # skip archive tables + if 
table_name.endswith('_ARCHIVED'): + continue + + # Check if the destination table exists + check_table_query = f""" + SELECT COUNT(*) as count + FROM {DESTINATION_DATABASE}.INFORMATION_SCHEMA.TABLES + WHERE TABLE_SCHEMA = '{DESTINATION_SCHEMA}' AND TABLE_NAME = '{table_name}' + """ + table_exists = session.sql(check_table_query).collect()[0]['COUNT'] > 0 + + if table_exists: + # find the current max SF_CREATED_AT in the existing table + cur_max_date = None + + date_query = f""" + SELECT MAX(SF_CREATED_AT) as CUR_MAX_DATE + FROM {DESTINATION_DATABASE}.{DESTINATION_SCHEMA}.{table_name} + """ + date_result = session.sql(date_query).collect() + + if date_result: + cur_max_date = date_result[0]['CUR_MAX_DATE'] + + if cur_max_date: + # If the destination table is not empty, only add data that is newer than cur_max_date and older than MAX_DATE + copy_query = f""" + INSERT INTO {DESTINATION_DATABASE}.{DESTINATION_SCHEMA}.{table_name} + SELECT * FROM {SOURCE_DATABASE}.{SOURCE_SCHEMA}.{table_name} + WHERE SF_CREATED_AT <= '{MAX_DATE}' + AND SF_CREATED_AT > '{cur_max_date}' + """ + else: + # If the destination table is empty, copy all data before MAX_DATE + copy_query = f""" + INSERT INTO {DESTINATION_DATABASE}.{DESTINATION_SCHEMA}.{table_name} + SELECT * FROM {SOURCE_DATABASE}.{SOURCE_SCHEMA}.{table_name} + WHERE SF_CREATED_AT <= '{MAX_DATE}' + """ + else: + # If table doesn't exist, create it and copy data + copy_query = f""" + CREATE TABLE {DESTINATION_DATABASE}.{DESTINATION_SCHEMA}.{table_name} AS + SELECT * FROM {SOURCE_DATABASE}.{SOURCE_SCHEMA}.{table_name} + WHERE SF_CREATED_AT <= '{MAX_DATE}' + """ + + try: + session.sql(copy_query).collect() + result.append([table_name, True, ""]) + except SnowparkSQLException as e: + result.append([table_name, False, str(e)]) + + # Return the results + return session.create_dataframe(result, schema=['TABLE_NAME', 'SUCCESS', 'INFO']) +$$; +``` +{% endraw %} + +{: start="2"} +2. Run the below command to execute the procedure. 
By default, the procedure will back up data older than two years for all `USERS_*` event types. + +{% raw %} +```json +-- this will copy all the rows that are 2 years or older in all the 'USERS_*' tables +-- from 'SOURCE_DB'.'SOURCE_SCHEMA' to 'DEST_DB'.'DEST_SCHEMA' + +CALL COPY_BRAZE_SHARE('SOURCE_DB', 'SOURCE_SCHEMA', 'DEST_DB', 'DEST_SCHEMA') +``` +{% endraw %} + +{: start="3"} +3. (Optional) Specify a filter to choose what age data to back up, and specify a table name filter to back up only selected events tables. + +{% raw %} +```json +-- this will copy all the rows that are 1 year or older in all the 'USERS_BEHAVIORS_*' tables +-- from 'SOURCE_DB'.'SOURCE_SCHEMA' to 'DEST_DB'.'DEST_SCHEMA' + +CALL COPY_BRAZE_SHARE('SOURCE_DB', 'SOURCE_SCHEMA', 'DEST_DB', 'DEST_SCHEMA', DATEADD(year, -1, CURRENT_DATE()), 'USERS_BEHAVIORS_%') +``` +{% endraw %} + +{% alert note %} +Repeat running of the procedure shouldn't create duplicate records, as it will check the max (`SF_CREATED_AT`) and only back up data newer than that. +{% endalert %} + +## Unload data to stage + +You can retain non-anonymized data by unloading data from the shared `BRAZE_RAW_EVENTS` schema to a stage. To do so, follow these steps: + +1. Create the procedure `UNLOAD_BRAZE_SHARE`, which will be used to copy all the data shared by Braze to the specified stage. 
+ +{% raw %} +```json +-- +-- @param string DATABASE_NAME - database name of the braze data share +-- @param string SCHEMA_NAME - schema name of the braze data share +-- @param string STAGE_NAME - Snowflake stage where the data will be unloaded +-- @param int MIN_DATE - copy data from this date (inclusive) +-- @param int MAX_DATE - copy data till this date (exclusive) +-- @param string TABLE_NAME_FILTER - filter to select table that will be unloaded, default to 'USER_%' +-- +CREATE PROCEDURE UNLOAD_BRAZE_SHARE( + SOURCE_DATABASE STRING, + SOURCE_SCHEMA STRING, + STAGE_NAME STRING, + MIN_DATE DATE, + MAX_DATE DATE, + TABLE_NAME_FILTER STRING default 'USERS_%' +) +RETURNS TABLE (TABLE_NAME STRING, SUCCESS BOOLEAN, INFO STRING) +LANGUAGE PYTHON +RUNTIME_VERSION = '3.8' +PACKAGES = ('snowflake-snowpark-python') +HANDLER = 'run' +AS +$$ +import snowflake.snowpark as snowpark +from snowflake.snowpark.exceptions import SnowparkSQLException + +def run(session: snowpark.Session, DATABASE_NAME: str, SCHEMA_NAME: str, STAGE_NAME: str, MIN_DATE: str, MAX_DATE: str, TABLE_NAME_FILTER: str): + result = [] + + if MIN_DATE >= MAX_DATE: + result.append(["MIN_DATE cannot be more recent than MAX_DATE", False, ""]) + return session.create_dataframe(result, schema=['TABLE_NAME', 'SUCCESS', 'INFO']) + + # Get list of tables + table_query = f""" + SELECT TABLE_NAME + FROM {DATABASE_NAME}.INFORMATION_SCHEMA.TABLES + WHERE TABLE_SCHEMA = '{SCHEMA_NAME}' AND TABLE_NAME LIKE '{TABLE_NAME_FILTER}' + """ + tables = session.sql(table_query).collect() + + for table in tables: + table_name = table['TABLE_NAME'] + + # skip archive tables + if table_name.endswith('_ARCHIVED'): + continue + + # Create CSV file name + csv_file_name = f"{table_name}_{MIN_DATE}_{MAX_DATE}.csv" + + # Construct COPY INTO command with date filter + copy_cmd = f""" + COPY INTO @{STAGE_NAME}/{csv_file_name} + FROM ( + SELECT * + FROM {DATABASE_NAME}.{SCHEMA_NAME}.{table_name} + WHERE SF_CREATED_AT >= TO_DATE('{MIN_DATE}') 
and SF_CREATED_AT < TO_DATE('{MAX_DATE}')
+            )
+            FILE_FORMAT = (TYPE = CSV FIELD_OPTIONALLY_ENCLOSED_BY = '"')
+            HEADER = TRUE
+            OVERWRITE = FALSE
+        """
+
+        # Execute COPY INTO command
+        try:
+            session.sql(copy_cmd).collect()
+            result.append([table_name, True, csv_file_name])
+        except SnowparkSQLException as e:
+            result.append([table_name, False, str(e)])
+
+    return session.create_dataframe(result, schema=['TABLE_NAME', 'SUCCESS', 'INFO'])
+$$;
+```
+{% endraw %}
+
+{: start="2"}
+2. Run the below commands to execute the procedure. If you don’t specify a table name filter, the default behavior will copy all tables with `USERS_` prefix.
+
+{% raw %}
+```json
+-- create a Snowflake stage to store the file
+create stage MY_EXPORT_STAGE;
+
+-- call the procedure
+-- this will unload date between '2020-01-01' and '2021-01-01'
+-- from tables with 'USERS_' prefix in 'DATABASE_NAME'.'SCHEMA'
+CALL UNLOAD_BRAZE_SHARE('DATABASE_NAME', 'SCHEMA', 'MY_EXPORT_STAGE', '2020-01-01', '2021-01-01');
+
+-- should list the files that's unloaded
+LIST @MY_EXPORT_STAGE;
+```
+{% endraw %}
+
+{: start="3"}
+3. (Optional) Specify a filter in the procedure to unload only specified tables.
+
+{% raw %}
+```json
+-- create a Snowflake stage to store the file
+create stage MY_EXPORT_STAGE;
+
+-- this will unload date between '2020-01-01' and '2021-01-01'
+-- from tables with 'USERS_BEHAVIORS_' prefix in 'DATABASE_NAME'.'SCHEMA'
+CALL UNLOAD_BRAZE_SHARE('DATABASE_NAME', 'SCHEMA', 'MY_EXPORT_STAGE', '2020-01-01', '2021-01-01', 'USERS_BEHAVIORS_%');
+
+-- should list the files that's unloaded
+LIST @MY_EXPORT_STAGE;
+```
+{% endraw %}
diff --git a/_docs/_partners/data_and_infrastructure_agility/data_warehouses/snowflake/faqs.md b/_docs/_partners/data_and_infrastructure_agility/data_warehouses/snowflake/faqs.md
index 8f13a057bb2..439cec749db 100644
--- a/_docs/_partners/data_and_infrastructure_agility/data_warehouses/snowflake/faqs.md
+++ b/_docs/_partners/data_and_infrastructure_agility/data_warehouses/snowflake/faqs.md
@@ -1,7 +1,7 @@
 ---
 nav_title: FAQs
 article_title: Snowflake Data Sharing FAQs
-page_order: 3
+page_order: 4
 page_type: FAQ
 description: "This article answers frequently asked questions about Snowflake data sharing."
From f8fa315e08612591efc4d5aec833340e26fef921 Mon Sep 17 00:00:00 2001 From: Rachel Feinberg Date: Mon, 15 Jul 2024 15:21:59 -0700 Subject: [PATCH 16/32] Edits --- .../snowflake/data_retention.md | 131 +++++++++--------- 1 file changed, 65 insertions(+), 66 deletions(-) diff --git a/_docs/_partners/data_and_infrastructure_agility/data_warehouses/snowflake/data_retention.md b/_docs/_partners/data_and_infrastructure_agility/data_warehouses/snowflake/data_retention.md index e79c90a2ed3..db52524cbbe 100644 --- a/_docs/_partners/data_and_infrastructure_agility/data_warehouses/snowflake/data_retention.md +++ b/_docs/_partners/data_and_infrastructure_agility/data_warehouses/snowflake/data_retention.md @@ -24,26 +24,17 @@ Braze automatically anonymizes events data for users that are deleted from Braze You can retain non-anonymized data by copying your data from the shared `BRAZE_RAW_EVENTS` schema to another database and schema in Snowflake. To do so, follow these steps: -1. Create the procedure `COPY_BRAZE_SHARE`, which will be used to copy all the data shared by Braze to another database and schema within Snowflake. +1. In your Snowflake account, create the procedure `COPY_BRAZE_SHARE`, which will be used to copy all the data shared by Braze to another database and schema within Snowflake. 
{% raw %} -```json --- --- @param string SOURCE_DATABASE - database name of the braze data share --- @param string SOURCE_SCHEMA - schema name of the braze data share --- @param string DESTINATION_DATABASE - --- @param string DESTINATION_SCHEMA - --- @param string MAX_DATE - copy data on / before the max date default DATEADD(year, -2, CURRENT_DATE()) --- @param string TABLE_NAME_FILTER - filter to select table that will be unloaded, default to 'USER_%' --- - +```sql CREATE PROCEDURE COPY_BRAZE_SHARE( - SOURCE_DATABASE STRING, - SOURCE_SCHEMA STRING, - DESTINATION_DATABASE STRING, - DESTINATION_SCHEMA STRING, - MAX_DATE DATE default DATEADD(year, -2, CURRENT_DATE()), - TABLE_NAME_FILTER STRING default 'USERS_%' + SOURCE_DATABASE STRING, -- Database name of the braze data share + SOURCE_SCHEMA STRING, -- Schema name of the braze data share + DESTINATION_DATABASE STRING, -- Name of the database to which you want to copy shared the data + DESTINATION_SCHEMA STRING, -- Name of the schema to which you want to copy shared the data + MAX_DATE DATE default DATEADD(year, -2, CURRENT_DATE()), -- Copy data on or before the maximum date default DATEADD(year, -2, CURRENT_DATE()) + TABLE_NAME_FILTER STRING default 'USERS_%' -- Filter to select table that will be unloaded, default to 'USER_%' ) RETURNS TABLE (TABLE_NAME STRING, SUCCESS BOOLEAN, INFO STRING) LANGUAGE PYTHON @@ -58,7 +49,7 @@ from snowflake.snowpark.exceptions import SnowparkSQLException def run(session: snowpark.Session, SOURCE_DATABASE: str, SOURCE_SCHEMA: str, DESTINATION_DATABASE: str, DESTINATION_SCHEMA: str, MAX_DATE: str, TABLE_NAME_FILTER: str): result = [] - # Get the list of filtered table names + -- Get the list of filtered table names table_query = f""" SELECT table_name FROM {SOURCE_DATABASE}.INFORMATION_SCHEMA.TABLES @@ -67,15 +58,15 @@ def run(session: snowpark.Session, SOURCE_DATABASE: str, SOURCE_SCHEMA: str, DES tables = session.sql(table_query).collect() - # Iterate through each table and copy 
data + -- Iterate through each table and copy data for row in tables: table_name = row['TABLE_NAME'] - # skip archive tables + -- Skip archive tables if table_name.endswith('_ARCHIVED'): continue - # Check if the destination table exists + -- Check if the destination table exists check_table_query = f""" SELECT COUNT(*) as count FROM {DESTINATION_DATABASE}.INFORMATION_SCHEMA.TABLES @@ -84,7 +75,7 @@ def run(session: snowpark.Session, SOURCE_DATABASE: str, SOURCE_SCHEMA: str, DES table_exists = session.sql(check_table_query).collect()[0]['COUNT'] > 0 if table_exists: - # find the current max SF_CREATED_AT in the existing table + -- Find the current, most recent `SF_CREATED_AT` in the existing table cur_max_date = None date_query = f""" @@ -97,7 +88,7 @@ def run(session: snowpark.Session, SOURCE_DATABASE: str, SOURCE_SCHEMA: str, DES cur_max_date = date_result[0]['CUR_MAX_DATE'] if cur_max_date: - # If the destination table is not empty, only add data that is newer than cur_max_date and older than MAX_DATE + -- If the destination table is not empty, only add data that is newer than `cur_max_date` and older than`MAX_DATE` copy_query = f""" INSERT INTO {DESTINATION_DATABASE}.{DESTINATION_SCHEMA}.{table_name} SELECT * FROM {SOURCE_DATABASE}.{SOURCE_SCHEMA}.{table_name} @@ -105,14 +96,14 @@ def run(session: snowpark.Session, SOURCE_DATABASE: str, SOURCE_SCHEMA: str, DES AND SF_CREATED_AT > '{cur_max_date}' """ else: - # If the destination table is empty, copy all data before MAX_DATE + -- If the destination table is empty, copy all data before `MAX_DATE` copy_query = f""" INSERT INTO {DESTINATION_DATABASE}.{DESTINATION_SCHEMA}.{table_name} SELECT * FROM {SOURCE_DATABASE}.{SOURCE_SCHEMA}.{table_name} WHERE SF_CREATED_AT <= '{MAX_DATE}' """ else: - # If table doesn't exist, create it and copy data + -- If the table doesn't exist, create it and copy data copy_query = f""" CREATE TABLE {DESTINATION_DATABASE}.{DESTINATION_SCHEMA}.{table_name} AS SELECT * FROM 
{SOURCE_DATABASE}.{SOURCE_SCHEMA}.{table_name} @@ -125,38 +116,46 @@ def run(session: snowpark.Session, SOURCE_DATABASE: str, SOURCE_SCHEMA: str, DES except SnowparkSQLException as e: result.append([table_name, False, str(e)]) - # Return the results + -- Return the results return session.create_dataframe(result, schema=['TABLE_NAME', 'SUCCESS', 'INFO']) $$; ``` {% endraw %} {: start="2"} -2. Run the below command to execute the procedure. By default, the procedure will back up data older than two years for all `USERS_*` event types. +2. Run one of the below commands in your Snowflake account to execute the procedure. + +{% tabs %} +{% tab Default %} + +By default, the procedure will back up data older than two years for all `USERS_*` event types. {% raw %} -```json --- this will copy all the rows that are 2 years or older in all the 'USERS_*' tables +```sql +-- Copy all the rows that are two years or older in all the 'USERS_*' tables -- from 'SOURCE_DB'.'SOURCE_SCHEMA' to 'DEST_DB'.'DEST_SCHEMA' CALL COPY_BRAZE_SHARE('SOURCE_DB', 'SOURCE_SCHEMA', 'DEST_DB', 'DEST_SCHEMA') ``` {% endraw %} +{% endtab %} +{% tab Filtered %} -{: start="3"} -3. (Optional) Specify a filter to choose what age data to back up, and specify a table name filter to back up only selected events tables. +Specify a filter to choose what age data to back up, and specify a table name filter to back up only selected events tables. 
{% raw %} -```json --- this will copy all the rows that are 1 year or older in all the 'USERS_BEHAVIORS_*' tables +```sql +-- Copy all the rows that are one year or older in all the 'USERS_BEHAVIORS_*' tables -- from 'SOURCE_DB'.'SOURCE_SCHEMA' to 'DEST_DB'.'DEST_SCHEMA' CALL COPY_BRAZE_SHARE('SOURCE_DB', 'SOURCE_SCHEMA', 'DEST_DB', 'DEST_SCHEMA', DATEADD(year, -1, CURRENT_DATE()), 'USERS_BEHAVIORS_%') ``` {% endraw %} +{% endtab %} +{% endtabs %} {% alert note %} -Repeat running of the procedure shouldn't create duplicate records, as it will check the max (`SF_CREATED_AT`) and only back up data newer than that. +Repeat running of the procedure won't create duplicate records, as the procedure will check the most recent `SF_CREATED_AT` and only back up data newer than that. {% endalert %} ## Unload data to stage @@ -166,22 +165,14 @@ You can retain non-anonymized data by unloading data from the shared `BRAZE_RAW_ 1. Create the procedure `UNLOAD_BRAZE_SHARE`, which will be used to copy all the data shared by Braze to the specified stage. 
{% raw %} -```json --- --- @param string DATABASE_NAME - database name of the braze data share --- @param string SCHEMA_NAME - schema name of the braze data share --- @param string STAGE_NAME - Snowflake stage where the data will be unloaded --- @param int MIN_DATE - copy data from this date (inclusive) --- @param int MAX_DATE - copy data till this date (exclusive) --- @param string TABLE_NAME_FILTER - filter to select table that will be unloaded, default to 'USER_%' --- +```sql CREATE PROCEDURE UNLOAD_BRAZE_SHARE( - SOURCE_DATABASE STRING, - SOURCE_SCHEMA STRING, - STAGE_NAME STRING, - MIN_DATE DATE, - MAX_DATE DATE, - TABLE_NAME_FILTER STRING default 'USERS_%' + SOURCE_DATABASE STRING, -- Database name of the braze data share + SOURCE_SCHEMA STRING, -- Schema name of the braze data share + STAGE_NAME STRING, -- Snowflake stage where the data will be unloaded + MIN_DATE DATE, -- Copy data from this date (inclusive) + MAX_DATE DATE, -- Copy data till this date (exclusive) + TABLE_NAME_FILTER STRING default 'USERS_%' -- Filter to select table that will be unloaded, default to 'USER_%' ) RETURNS TABLE (TABLE_NAME STRING, SUCCESS BOOLEAN, INFO STRING) LANGUAGE PYTHON @@ -200,7 +191,7 @@ def run(session: snowpark.Session, DATABASE_NAME: str, SCHEMA_NAME: str, STAGE_N result.append(["MIN_DATE cannot be more recent than MAX_DATE", False, ""]) return session.create_dataframe(result, schema=['TABLE_NAME', 'SUCCESS', 'INFO']) - # Get list of tables + -- Get list of tables table_query = f""" SELECT TABLE_NAME FROM {DATABASE_NAME}.INFORMATION_SCHEMA.TABLES @@ -211,14 +202,14 @@ def run(session: snowpark.Session, DATABASE_NAME: str, SCHEMA_NAME: str, STAGE_N for table in tables: table_name = table['TABLE_NAME'] - # skip archive tables + -- Skip archive tables if table_name.endswith('_ARCHIVED'): continue - # Create CSV file name + -- Create CSV file name csv_file_name = f"{table_name}_{MIN_DATE}_{MAX_DATE}.csv" - # Construct COPY INTO command with date filter + -- Construct 
`COPY INTO` command with date filter copy_cmd = f""" COPY INTO @{STAGE_NAME}/{csv_file_name} FROM ( @@ -231,7 +222,7 @@ def run(session: snowpark.Session, DATABASE_NAME: str, SCHEMA_NAME: str, STAGE_N OVERWRITE = FALSE """ - # Execute COPY INTO command + -- Execute COPY INTO command try: session.sql(copy_cmd).collect() result.append([table_name, True, csv_file_name]) @@ -244,36 +235,44 @@ $$; {% endraw %} {: start="2"} -2. Run the below commands to execute the procedure. If you don’t specify a table name filter, the default behavior will copy all tables with `USERS_` prefix. +2. Run one of the below commands to execute the procedure. + +{% tabs %} +{% tab Default %} + +By default, the procedure will copy all tables with `USERS_` prefix. {% raw %} -```json --- create a Snowflake stage to store the file +```sql +-- Create a Snowflake stage to store the file create stage MY_EXPORT_STAGE; --- call the procedure --- this will unload date between '2020-01-01' and '2021-01-01' +-- Call the procedure +-- to unload date between '2020-01-01' and '2021-01-01' -- from tables with 'USERS_' prefix in 'DATABASE_NAME'.'SCHEMA' CALL UNLOAD_BRAZE_SHARE('DATABASE_NAME', 'SCHEMA', 'MY_EXPORT_STAGE', '2020-01-01', 2021-01-01'); --- should list the files that's unloaded +-- List the files that are unloaded LIST @MY_EXPORT_STAGE; ``` {% endraw %} +{% endtab %} +{% tab Filter %} -{: start="3"} -3. (Optional) Specify a filter in the procedure to unload only specified tables. +Specify a filter in the procedure to unload only specified tables. 
{% raw %} -```json --- create a Snowflake stage to store the file +```sql +-- Create a Snowflake stage to store the file create stage MY_EXPORT_STAGE; --- this will unload date between '2020-01-01' and '2021-01-01' +-- Unload date between '2020-01-01' and '2021-01-01' -- from tables with 'USERS_BEHAVIORS_' prefix in 'DATABASE_NAME'.'SCHEMA' CALL EXPORT_BRAZE_SHARE_TO_STAGE('DATABASE_NAME', 'SCHEMA', 'MY_EXPORT_STAGE', '2020-01-01', 2021-01-01', 'USERS_BEHAVIORS_%'); --- should list the files that's unloaded +-- List the files that are unloaded LIST @MY_EXPORT_STAGE; ``` {% endraw %} +{% endtab %} +{% endtabs %} From ce3aad4204c769db78778bfa7d5e1d981c2ef073 Mon Sep 17 00:00:00 2001 From: Rachel Feinberg Date: Mon, 15 Jul 2024 15:30:03 -0700 Subject: [PATCH 17/32] Fix tab --- .../data_warehouses/snowflake/data_retention.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/_docs/_partners/data_and_infrastructure_agility/data_warehouses/snowflake/data_retention.md b/_docs/_partners/data_and_infrastructure_agility/data_warehouses/snowflake/data_retention.md index db52524cbbe..3e6ed7e8541 100644 --- a/_docs/_partners/data_and_infrastructure_agility/data_warehouses/snowflake/data_retention.md +++ b/_docs/_partners/data_and_infrastructure_agility/data_warehouses/snowflake/data_retention.md @@ -257,7 +257,7 @@ LIST @MY_EXPORT_STAGE; ``` {% endraw %} {% endtab %} -{% tab Filter %} +{% tab Filtered %} Specify a filter in the procedure to unload only specified tables. 
From 99e92241b2ae90a3f3a8d428983dcfa42b2a772d Mon Sep 17 00:00:00 2001
From: Rachel Feinberg
Date: Mon, 15 Jul 2024 16:25:08 -0700
Subject: [PATCH 18/32] BD-3252 Add FAQ to Link aliasing

---
 .../message_building_by_channel/email/templates/faq.md | 7 +++++++
 1 file changed, 7 insertions(+)

diff --git a/_docs/_user_guide/message_building_by_channel/email/templates/faq.md b/_docs/_user_guide/message_building_by_channel/email/templates/faq.md
index db7b8b90157..d6b135f1c2d 100644
--- a/_docs/_user_guide/message_building_by_channel/email/templates/faq.md
+++ b/_docs/_user_guide/message_building_by_channel/email/templates/faq.md
@@ -69,3 +69,10 @@ For all new Content Blocks that are created, link aliasing is applied across wor
 Existing Content Blocks won't be modified when link aliasing is enabled. While existing link templates won't be modified, the existing link template section in a message will be removed.
 
 Check out [Link aliasing in Content Blocks]({{site.baseurl}}/user_guide/message_building_by_channel/email/templates/link_aliasing/#link-aliasing-in-content-blocks) for more information.
+### Can I use Liquid conditional logic entirely within an HTML anchor tag?
+
+No, Braze link aliasing won't recognize the HTML properly.
+
+When logic like this is used in tandem with features that need to parse the HTML (such as a preheader or link templating), the library used to scan the HTML can modify the anchor tag in a way that will prevent the proper `href` from being templated. The library will then determine that the HTML is invalid because it's agnostic to the Liquid code.
+
+Instead, use Liquid logic that contains a complete anchor tag at each stage. This won't interfere with HTML parsing because the logic contains multiple instances of valid HTML. You can also simplify your logic by assigning and then templating a variable into the appropriate anchor tag.
\ No newline at end of file From c850e2a48c36d5ed1603fa0b33353473caba85f8 Mon Sep 17 00:00:00 2001 From: Rachel Feinberg Date: Mon, 15 Jul 2024 16:39:57 -0700 Subject: [PATCH 19/32] BD-3250 Add treatment sample group to User Archival --- _docs/_hidden/other/user_archival.md | 6 +++++- .../reporting/global_control_group_reporting.md | 4 ++++ 2 files changed, 9 insertions(+), 1 deletion(-) diff --git a/_docs/_hidden/other/user_archival.md b/_docs/_hidden/other/user_archival.md index 84056ef937d..286e1ce9965 100644 --- a/_docs/_hidden/other/user_archival.md +++ b/_docs/_hidden/other/user_archival.md @@ -52,7 +52,11 @@ In this case, these users cannot be messaged and are not engaging with your bran ## Global Control Group users -Users in the Global Control Group will never be archived, even if they meet the definition of inactive or dormant users. +Users in the Global Control Group will never be archived, even if they meet the definition of inactive or dormant users. + +### Treatment sample group + +Treatment sample group users are excluded from archiving within a Global Control Group report. ## Test users diff --git a/_docs/_user_guide/data_and_analytics/reporting/global_control_group_reporting.md b/_docs/_user_guide/data_and_analytics/reporting/global_control_group_reporting.md index d4c91910736..f0daf14d977 100644 --- a/_docs/_user_guide/data_and_analytics/reporting/global_control_group_reporting.md +++ b/_docs/_user_guide/data_and_analytics/reporting/global_control_group_reporting.md @@ -30,6 +30,10 @@ Keep in mind that the percentage metrics in your report are rounded. For instanc Lastly, as with several other reports on our platform, this report displays a [confidence]({{site.baseurl}}/user_guide/engagement_tools/testing/multivariant_testing/#understanding-confidence) percentage for your change from control metric. 
Note that in cases where the conversion rate between your control and treatment are identical, a confidence of 0% is to be expected; this indicates that there is a 0% chance that there is a difference in performance between the two groups. +### Group sizes + +Before May 2024, the Global Control Group was excluded from user archival. This could result in your treatment sample group and Global Control Group having significantly different sizes. The next time you reset your Global Control Group, this discrepancy will go away and you'll see similar group sizes. + ## Report metrics | Metric | Definition | Calculation | From 66bd075b466a278b4d0c7ae725c060d300c04e57 Mon Sep 17 00:00:00 2001 From: internetisaiah <95643215+internetisaiah@users.noreply.github.com> Date: Tue, 16 Jul 2024 09:05:46 -0700 Subject: [PATCH 20/32] Reverting SDK-5061 doc updates --- .../swift/push_notifications/integration.md | 102 +++++++----------- 1 file changed, 41 insertions(+), 61 deletions(-) diff --git a/_docs/_developer_guide/platform_integration_guides/swift/push_notifications/integration.md b/_docs/_developer_guide/platform_integration_guides/swift/push_notifications/integration.md index 55f7e156f87..99474b1d32c 100644 --- a/_docs/_developer_guide/platform_integration_guides/swift/push_notifications/integration.md +++ b/_docs/_developer_guide/platform_integration_guides/swift/push_notifications/integration.md @@ -50,7 +50,7 @@ The Swift SDK provides a configuration-only approach to automate the processing To enable the automatic push integration, set the `automation` property of the `push` configuration to `true`: {% tabs %} -{% tab Swift %} +{% tab swift %} ```swift let configuration = Braze.Configuration(apiKey: "{YOUR-BRAZE-API-KEY}", endpoint: "{YOUR-BRAZE-API-ENDPOINT}") configuration.push.automation = true @@ -85,7 +85,7 @@ The SDK must be initialized on the main thread to enable push notification autom For more granular control, each automation step can be enabled or disabled 
individually: {% tabs %} -{% tab Swift %} +{% tab swift %} ```swift // Enable all automations and disable the automatic notification authorization request at launch. @@ -130,7 +130,7 @@ The following code sample includes integration for provisional push authenticati {% endalert %} {% tabs %} -{% tab Swift %} +{% tab swift %} ```swift application.registerForRemoteNotifications() @@ -178,7 +178,7 @@ You must assign your delegate object using `center.delegate = self` synchronousl Once APNs registration is complete, pass the resulting `deviceToken` to Braze to enable for push notifications for the user. {% tabs %} -{% tab Swift %} +{% tab swift %} Add the following code to your app's `application(_:didRegisterForRemoteNotificationsWithDeviceToken:)` method: @@ -206,11 +206,10 @@ The `application:didRegisterForRemoteNotificationsWithDeviceToken:` delegate met Next, pass the received push notifications along to Braze. This step is necessary for logging push analytics and link handling. Ensure that you call all push integration code in your application's main thread. 
-#### Default push handling - {% tabs %} -{% tab Swift %} -To enable Braze's default push handling, add the following code to your app's `application(_:didReceiveRemoteNotification:fetchCompletionHandler:)` method: +{% tab swift %} + +Add the following code to your app's `application(_:didReceiveRemoteNotification:fetchCompletionHandler:)` method: ```swift if let braze = AppDelegate.braze, braze.notifications.handleBackgroundNotification( @@ -222,7 +221,7 @@ if let braze = AppDelegate.braze, braze.notifications.handleBackgroundNotificati completionHandler(.noData) ``` -Next, add the following to your app's `userNotificationCenter(_:didReceive:withCompletionHandler:)` method: +Next, add the following code to your app's `userNotificationCenter(_:didReceive:withCompletionHandler:)` method: ```swift if let braze = AppDelegate.braze, braze.notifications.handleUserNotification( @@ -233,10 +232,29 @@ if let braze = AppDelegate.braze, braze.notifications.handleUserNotification( } completionHandler() ``` -{% endtab %} +**Foreground push handling** + +To display a push notification while the app is in the foreground, implement `userNotificationCenter(_:willPresent:withCompletionHandler:)`: + +```swift +func userNotificationCenter(_ center: UNUserNotificationCenter, + willPresent notification: UNNotification, + withCompletionHandler completionHandler: @escaping (UNNotificationPresentationOptions) -> Void) { + if #available(iOS 14.0, *) { + completionHandler([.list, .banner]) + } else { + completionHandler([.alert]) + } +} +``` + +If the foreground notification is clicked, the push delegate `userNotificationCenter(_:didReceive:withCompletionHandler:)` will be called, and Braze will log a push click event. 
+ +{% endtab %} {% tab OBJECTIVE-C %} -To enable Braze's default push handling, add the following code to your application's `application:didReceiveRemoteNotification:fetchCompletionHandler:` method: + +Add the following code to your application's `application:didReceiveRemoteNotification:fetchCompletionHandler:` method: ```objc BOOL processedByBraze = AppDelegate.braze != nil && [AppDelegate.braze.notifications handleBackgroundNotificationWithUserInfo:userInfo @@ -259,47 +277,15 @@ if (processedByBraze) { completionHandler(); ``` -{% endtab %} -{% endtabs %} - -#### Foreground push handling -{% tabs %} -{% tab Swift %} -To enable foreground push notifications and let Braze recognize them when they're received, implement `UNUserNotificationCenter.userNotificationCenter(_:willPresent:withCompletionHandler:)`. If a user taps your foreground notification, the `userNotificationCenter(_:didReceive:withCompletionHandler:)` push delegate will be called and Braze will log the push click event. - -```swift -func userNotificationCenter( - _ center: UNUserNotificationCenter, - willPresent notification: UNNotification, - withCompletionHandler completionHandler: @escaping (UNNotificationPresentationOptions -) -> Void) { - // Forward notification payload to Braze for processing. - AppDelegate.braze?.notifications.handleForegroundNotification(notification: notification) - - // Configure application's foreground notification display options. - if #available(iOS 14.0, *) { - completionHandler([.list, .banner]) - } else { - completionHandler([.alert]) - } -} -``` -{% endtab %} +**Foreground push handling** -{% tab OBJECTIVE-C %} -To enable foreground push notifications and let Braze recognize them when they're received, implement `userNotificationCenter:willPresentNotification:withCompletionHandler:`. 
If a user taps your foreground notification, the `userNotificationCenter:didReceiveNotificationResponse:withCompletionHandler:` push delegate will be called and Braze will log the push click event. +To display a push notification while the app is in the foreground, implement `userNotificationCenter:willPresentNotification:withCompletionHandler:`: ```objc - (void)userNotificationCenter:(UNUserNotificationCenter *)center willPresentNotification:(UNNotification *)notification withCompletionHandler:(void (^)(UNNotificationPresentationOptions options))completionHandler { - if (AppDelegate.braze != nil) { - // Forward notification payload to Braze for processing. - [AppDelegate.braze.notifications handleForegroundNotificationWithNotification:notification]; - } - - // Configure application's foreground notification display options. if (@available(iOS 14.0, *)) { completionHandler(UNNotificationPresentationOptionList | UNNotificationPresentationOptionBanner); } else { @@ -307,6 +293,9 @@ To enable foreground push notifications and let Braze recognize them when they'r } } ``` + +If the foreground notification is clicked, the push delegate `userNotificationCenter:didReceiveNotificationResponse:withCompletionHandler:` will be called, and Braze will log a push click event. + {% endtab %} {% endtabs %} @@ -316,41 +305,32 @@ Deep linking from a push into the app is automatically handled via our standard ## Subscribing to push notifications updates +To access the push notification payloads processed by Braze, use the [`Braze.Notifications.subscribeToUpdates(_:)`](https://braze-inc.github.io/braze-swift-sdk/documentation/brazekit/braze/notifications-swift.class/subscribetoupdates(_:)/) method. + {% tabs %} -{% tab Swift %} -To access the push notification payloads processed by Braze, use the [`Braze.Notifications.subscribeToUpdates(_:)`](https://braze-inc.github.io/braze-swift-sdk/documentation/brazekit/braze/notifications-swift.class/subscribetoupdates(_:)/) method. 
You can use the `payloadTypes` parameter to specify whether you'd like to subscribe to notifications involving push open events, foreground push received events, or both. +{% tab swift %} ```swift // This subscription is maintained through a Braze cancellable, which will observe for changes until the subscription is cancelled. // You must keep a strong reference to the cancellable to keep the subscription active. // The subscription is canceled either when the cancellable is deinitialized or when you call its `.cancel()` method. -let cancellable = AppDelegate.braze?.notifications.subscribeToUpdates(payloadTypes: [.open, .received]) { payload in +let cancellable = AppDelegate.braze?.notifications.subscribeToUpdates { payload in print("Braze processed notification with title '\(payload.title)' and body '\(payload.body)'") } ``` -{% endtab %} +{% endtab %} {% tab OBJECTIVE-C %} -To access the push notification payloads processed by Braze, use the [`Braze.Notifications.subscribeToUpdates(_:)`](https://braze-inc.github.io/braze-swift-sdk/documentation/brazekit/braze/notifications-swift.class/subscribetoupdates(_:)/) method. You can use the `payloadTypes` parameter to specify whether you'd like to subscribe to notifications involving push open events, foreground push received events, or both. 
```objc -BRZCancellable *cancellable = [notifications subscribeToUpdates:^(BRZNotificationsPayload * _Nonnull payload) { +BRZCancellable *cancellable = [notifications subscribeToUpdatesWithInternalNotifications:NO update:^(BRZNotificationsPayload * _Nonnull payload) { NSLog(@"Braze processed notification with title '%@' and body '%@'", payload.title, payload.body); }]; ``` -Or, to specify the types of push events you'd like to subscribe to: - -```objc -NSInteger filtersValue = BRZNotificationsPayloadTypeFilter.opened.rawValue | BRZNotificationsPayloadTypeFilter.received.rawValue; -BRZNotificationsPayloadTypeFilter *filters = [[BRZNotificationsPayloadTypeFilter alloc] initWithRawValue: filtersValue]; -BRZCancellable *cancellable = [notifications subscribeToUpdatesWithPayloadTypes:filters:^(BRZNotificationsPayload * _Nonnull payload) { - NSLog(@"Braze processed notification with title '%@' and body '%@'", payload.title, payload.body); -}]; -``` {% endtab %} - {% endtabs %} + {% alert note %} When using the automatic push integration, `subscribeToUpdates(_:)` is the only way to be notified of remote notifications processed by Braze. The `UIAppDelegate` and `UNUserNotificationCenterDelegate` system methods are not called when the notification is automatically processed by Braze. 
{% endalert %} From a660a982796e822843f478833809c2a3f95d969f Mon Sep 17 00:00:00 2001 From: Rachel Feinberg <135255868+rachel-feinberg@users.noreply.github.com> Date: Tue, 16 Jul 2024 09:20:53 -0700 Subject: [PATCH 21/32] Update _docs/_user_guide/message_building_by_channel/email/templates/faq.md Co-authored-by: Lydia Xie <87040416+lydia-xie@users.noreply.github.com> --- .../message_building_by_channel/email/templates/faq.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/_docs/_user_guide/message_building_by_channel/email/templates/faq.md b/_docs/_user_guide/message_building_by_channel/email/templates/faq.md index d6b135f1c2d..a068d79767d 100644 --- a/_docs/_user_guide/message_building_by_channel/email/templates/faq.md +++ b/_docs/_user_guide/message_building_by_channel/email/templates/faq.md @@ -75,4 +75,4 @@ No, Braze link aliasing won't recognize the HTML properly. When logic like this is used in tandem with features that need to parse the HTML (such as a preheader or link templating), the library used to scan the HTML can modify the anchor tag in a way that will prevent the proper `href` from being templated. The library will then determine that the HTML is invalid because it's agnostic to the Liquid code. -Instead, use Liquid logic that contains a complete anchor tag at each stage. This won't interfere with HTML parsing because the logic contains multiple instances of valid HTML. YOu can also also simplify your logic by assigning and then templating a variable into the appropriate anchor tag. \ No newline at end of file +Instead, use Liquid logic that contains a complete anchor tag at each stage. This won't interfere with HTML parsing because the logic includes multiple instances of valid HTML. You can also simplify your logic by assigning and then templating a variable into the appropriate anchor tag. 
\ No newline at end of file From ea4bb19f4e936fd4c13205c23b505ac235db21b1 Mon Sep 17 00:00:00 2001 From: Rachel Feinberg <135255868+rachel-feinberg@users.noreply.github.com> Date: Tue, 16 Jul 2024 09:35:39 -0700 Subject: [PATCH 22/32] Apply suggestions from code review Co-authored-by: Lydia Xie <87040416+lydia-xie@users.noreply.github.com> --- .../data_warehouses/snowflake/data_retention.md | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/_docs/_partners/data_and_infrastructure_agility/data_warehouses/snowflake/data_retention.md b/_docs/_partners/data_and_infrastructure_agility/data_warehouses/snowflake/data_retention.md index 3e6ed7e8541..48b6b5ba808 100644 --- a/_docs/_partners/data_and_infrastructure_agility/data_warehouses/snowflake/data_retention.md +++ b/_docs/_partners/data_and_infrastructure_agility/data_warehouses/snowflake/data_retention.md @@ -9,7 +9,7 @@ search_tag: Partner # Snowflake data retention -> Braze anonymizes (removes personally identifiable information (PII)) from all events data that is older than two years old. If you use Snowflake data sharing, you may choose to retain the full events data in your environment by storing a copy in your Snowflake account before the retention policy is applied. +> Braze anonymizes—removes personally identifiable information (PII)—from all events data that is older than two years old. If you use Snowflake data sharing, you may choose to retain the full events data in your environment by storing a copy in your Snowflake account before the retention policy is applied. This page presents two ways you can retain non-anonymized data: @@ -17,7 +17,7 @@ This page presents two ways you can retain non-anonymized data: - Unload your data to a stage {% alert warning %} -Braze automatically anonymizes events data for users that are deleted from Braze, as described in [Data Protection Technical Assistance]({{site.baseurl}}/dp-technical-assistance/). 
Any data copied outside of the shared database will not be included in this process, as it’s no longer managed by Braze. +Braze automatically anonymizes events data for users that are deleted from Braze, as described in [Data Protection Technical Assistance]({{site.baseurl}}/dp-technical-assistance/). Any data copied outside of the shared database will not be included in this process, as Braze no longer manages it. {% endalert %} ## Copy all data to another Snowflake database @@ -155,7 +155,7 @@ CALL COPY_BRAZE_SHARE('SOURCE_DB', 'SOURCE_SCHEMA', 'DEST_DB', 'DEST_SCHEMA', DA {% endtabs %} {% alert note %} -Repeat running of the procedure won't create duplicate records, as the procedure will check the most recent `SF_CREATED_AT` and only back up data newer than that. +Repeatedly running the procedure won't create duplicate records since this procedure checks the most recent `SF_CREATED_AT` and only backs up data newer than that. {% endalert %} ## Unload data to stage From f5388e7c0348fd3ff90392a04a5d1b4560aae50d Mon Sep 17 00:00:00 2001 From: zzhaobraze Date: Tue, 16 Jul 2024 12:53:50 -0400 Subject: [PATCH 23/32] fix missing content after a period. --- assets/js/documents.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/assets/js/documents.js b/assets/js/documents.js index ae7a5319467..9caa8166114 100644 --- a/assets/js/documents.js +++ b/assets/js/documents.js @@ -528,7 +528,7 @@ $(document).ready(function() { if ($(this)[0].nextSibling){ punctuation = ($(this)[0].nextSibling.nodeValue || '').substr(0,1); if (punctuations.includes(punctuation)) { - $(this)[0].nextSibling.remove(); + $(this)[0].nextSibling = $(this)[0].nextSibling.substring(1); has_punchtuation = true; } } From 44705d3a7ae0b3aaab789fc8cc81a516089a4d5d Mon Sep 17 00:00:00 2001 From: Dave Hensley Date: Tue, 16 Jul 2024 13:28:13 -0400 Subject: [PATCH 24/32] BD-3263 Add the icon for generating a nested event property schema. 
--- .../custom_data/custom_events/nested_objects.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/_docs/_user_guide/data_and_analytics/custom_data/custom_events/nested_objects.md b/_docs/_user_guide/data_and_analytics/custom_data/custom_events/nested_objects.md index dcd4b6b24e5..8d5b7d0bad7 100644 --- a/_docs/_user_guide/data_and_analytics/custom_data/custom_events/nested_objects.md +++ b/_docs/_user_guide/data_and_analytics/custom_data/custom_events/nested_objects.md @@ -31,7 +31,7 @@ To access the nested data in your custom event, generate a schema for each event 1. Go to **Data Settings** > **Custom Events**. 2. Select **Manage Properties** for the events with nested properties. -3. Click the icon to generate the schema. To view the schema, click the plus button. +3. Click the button to generate the schema. To view the schema, click the button. ![][6]{: style="max-width:80%;"} From 70f8dff18038e332cd85c2006eb657181713db8b Mon Sep 17 00:00:00 2001 From: Lydia Xie <87040416+lydia-xie@users.noreply.github.com> Date: Tue, 16 Jul 2024 10:42:03 -0700 Subject: [PATCH 25/32] Apply suggestions from code review --- .../custom_data/custom_events/nested_objects.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/_docs/_user_guide/data_and_analytics/custom_data/custom_events/nested_objects.md b/_docs/_user_guide/data_and_analytics/custom_data/custom_events/nested_objects.md index 8d5b7d0bad7..2427f43bfa0 100644 --- a/_docs/_user_guide/data_and_analytics/custom_data/custom_events/nested_objects.md +++ b/_docs/_user_guide/data_and_analytics/custom_data/custom_events/nested_objects.md @@ -31,7 +31,7 @@ To access the nested data in your custom event, generate a schema for each event 1. Go to **Data Settings** > **Custom Events**. 2. Select **Manage Properties** for the events with nested properties. -3. Click the button to generate the schema. To view the schema, click the button. +3. Select the button to generate the schema. 
To view the schema, select the plus button. ![][6]{: style="max-width:80%;"} From 2abd5836700c8b2946d4d32bf755aa4547058f0a Mon Sep 17 00:00:00 2001 From: Rachel Feinberg Date: Tue, 16 Jul 2024 10:48:41 -0700 Subject: [PATCH 26/32] Fix warning --- .../data_warehouses/snowflake/data_retention.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/_docs/_partners/data_and_infrastructure_agility/data_warehouses/snowflake/data_retention.md b/_docs/_partners/data_and_infrastructure_agility/data_warehouses/snowflake/data_retention.md index 48b6b5ba808..4dd9ad50dd9 100644 --- a/_docs/_partners/data_and_infrastructure_agility/data_warehouses/snowflake/data_retention.md +++ b/_docs/_partners/data_and_infrastructure_agility/data_warehouses/snowflake/data_retention.md @@ -155,7 +155,7 @@ CALL COPY_BRAZE_SHARE('SOURCE_DB', 'SOURCE_SCHEMA', 'DEST_DB', 'DEST_SCHEMA', DA {% endtabs %} {% alert note %} -Repeatedly running the procedure won't create duplicate records since this procedure checks the most recent `SF_CREATED_AT` and only backs up data newer than that. +Repeatedly running the procedure won't create duplicate records because this procedure checks the most recent `SF_CREATED_AT` and only backs up data newer than that. 
{% endalert %} ## Unload data to stage From dbe137629094884fbfee50585da11a9040a382a8 Mon Sep 17 00:00:00 2001 From: Lydia Xie Date: Tue, 16 Jul 2024 10:50:31 -0700 Subject: [PATCH 27/32] BD-3261: Add redirect for email --- assets/js/broken_redirect_list.js | 1 + 1 file changed, 1 insertion(+) diff --git a/assets/js/broken_redirect_list.js b/assets/js/broken_redirect_list.js index 0b57a00b43b..f1ec26d590e 100644 --- a/assets/js/broken_redirect_list.js +++ b/assets/js/broken_redirect_list.js @@ -1158,5 +1158,6 @@ validurls['/docs/user_guide/message_building_by_channel/content_cards/integratio validurls['/docs/user_guide/data_and_analytics/your_reports/'] = '/docs/user_guide/data_and_analytics/reporting/' validurls['/docs/user_guide/message_building_by_channel/email/best_practices/managing_email_subscriptions/'] = '/docs/user_guide/message_building_by_channel/email/best_practices/duplicate_emails/'; +validurls['/docs/help/best_practices/email/managing_email_subscriptions/'] = '/docs/user_guide/message_building_by_channel/email/managing_user_subscriptions'; // validurls['OLD'] = 'NEW'; From f0fa01860b08592a384a088c4f45c17cbf36ba3f Mon Sep 17 00:00:00 2001 From: Lydia Xie Date: Tue, 16 Jul 2024 10:55:22 -0700 Subject: [PATCH 28/32] fix old redirects --- assets/js/broken_redirect_list.js | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/assets/js/broken_redirect_list.js b/assets/js/broken_redirect_list.js index f1ec26d590e..dee3ceafcde 100644 --- a/assets/js/broken_redirect_list.js +++ b/assets/js/broken_redirect_list.js @@ -4,8 +4,8 @@ validurls['/docs/best_practices/#android-push-priority'] = '/docs/help/best_prac validurls['/docs/best_practices/#android-push-visibility'] = '/docs/help/best_practices/push/additional_android_best_practices/#android-push-visibility'; validurls['/docs/best_practices/#body-styling'] = '/docs/help/best_practices/email/email_styling_tips/#body-styling'; validurls['/docs/best_practices/#email'] = 
'/docs/help/best_practices/email/overview/'; -validurls['/docs/best_practices/email#managing-email-subscriptions'] = '/docs/help/best_practices/email/managing_email_subscriptions/'; -validurls['/docs/best_practices/email'] = '/docs/help/best_practices/email/managing_email_subscriptions/'; +validurls['/docs/best_practices/email#managing-email-subscriptions'] = '/docs/user_guide/message_building_by_channel/email/managing_user_subscriptions'; +validurls['/docs/best_practices/email'] = '/docs/user_guide/message_building_by_channel/email/managing_user_subscriptions'; validurls['/docs/best_practices/'] = '/docs/help/home/'; validurls['/docs/whatsapp_response_messaging/'] = '/docs/user_guide/message_building_by_channel/whatsapp/whatsapp_campaign/create/#response-messages'; @@ -421,7 +421,7 @@ validurls['/docs/help/best_practices/email/email_styling_tips/'] = '/docs/user_g validurls['/docs/help/best_practices/email/content-specific/'] = '/docs/user_guide/message_building_by_channel/email/best_practices/guidelines_and_tips/#content-specific-tips--tricks'; validurls['/docs/help/best_practices/email/email_services/'] = '/docs/user_guide/message_building_by_channel/email/best_practices/email_services/'; validurls['/docs/help/best_practices/email/sunset_policies/'] = '/docs/user_guide/message_building_by_channel/email/best_practices/sunset_policies/'; -validurls['/docs/help/best_practices/email/managing_email_subscriptions/'] = '/docs/user_guide/message_building_by_channel/email/best_practices/managing_email_subscriptions/'; +validurls['/docs/help/best_practices/email/managing_email_subscriptions/'] = '/docs/user_guide/message_building_by_channel/email/managing_user_subscriptions'; validurls['/docs/help/best_practices/in-app_messages/prep_guide/'] = '/docs/user_guide/message_building_by_channel/in-app_messages/best_practices/prep_guide/'; validurls['/docs/help/best_practices/in-app_messages/previous_in-app_message_generations/'] = 
'/docs/user_guide/message_building_by_channel/in-app_messages/best_practices/previous_in-app_message_generations/'; validurls['/docs/user_guide/message_building_by_channel/in-app_messages/best_practices/previous_in-app_message_generations/'] = '/docs/developer_guide/platform_integration_guides/sdk_changelogs'; @@ -1158,6 +1158,5 @@ validurls['/docs/user_guide/message_building_by_channel/content_cards/integratio validurls['/docs/user_guide/data_and_analytics/your_reports/'] = '/docs/user_guide/data_and_analytics/reporting/' validurls['/docs/user_guide/message_building_by_channel/email/best_practices/managing_email_subscriptions/'] = '/docs/user_guide/message_building_by_channel/email/best_practices/duplicate_emails/'; -validurls['/docs/help/best_practices/email/managing_email_subscriptions/'] = '/docs/user_guide/message_building_by_channel/email/managing_user_subscriptions'; // validurls['OLD'] = 'NEW'; From 51ea3b1983527fed74ebfa926071c0ff4de680cd Mon Sep 17 00:00:00 2001 From: Lydia Xie <87040416+lydia-xie@users.noreply.github.com> Date: Tue, 16 Jul 2024 11:58:15 -0700 Subject: [PATCH 29/32] Minor edits --- .../cloud_ingestion/integrations.md | 14 +++++++------- 1 file changed, 7 insertions(+), 7 deletions(-) diff --git a/_docs/_user_guide/data_and_analytics/cloud_ingestion/integrations.md b/_docs/_user_guide/data_and_analytics/cloud_ingestion/integrations.md index 460853a3fb9..ff79cc0069f 100644 --- a/_docs/_user_guide/data_and_analytics/cloud_ingestion/integrations.md +++ b/_docs/_user_guide/data_and_analytics/cloud_ingestion/integrations.md @@ -76,7 +76,7 @@ CREATE OR REPLACE TABLE BRAZE_CLOUD_PRODUCTION.INGESTION.USERS_ATTRIBUTES_SYNC ( You can name the database, schema, and table as you'd like, but the column names should match the preceding definition. - `UPDATED_AT` - The time this row was updated in or added to the table. We will only sync rows that have been added or updated since the last sync. -- User identifier columns. 
Your table may contain one or more user identifier columns. Each row should only contain one identifier (either `external_id`, the combination of `alias_name` and `alias_label`, or `braze_id`. A source table may have columns for one, two, or all three identifier types. +- **User identifier columns**: Your table may contain one or more user identifier columns. Each row should only contain one identifier (either `external_id`, the combination of `alias_name` and `alias_label`, or `braze_id`). A source table may have columns for one, two, or all three identifier types. - `EXTERNAL_ID` - This identifies the user you want to update. This should match the `external_id` value used in Braze. - `ALIAS_NAME` and `ALIAS_LABEL` - These two columns create a user alias object. `alias_name` should be a unique identifier, and `alias_label` specifies the type of alias. Users may have multiple aliases with different labels but only one `alias_name` per `alias_label`. - `BRAZE_ID` - The Braze user identifier. This is generated by the Braze SDK, and new users cannot be created using a Braze ID through Cloud Data Ingestion. To create new users, specify an external user ID or user alias. @@ -523,14 +523,14 @@ Return to the Braze dashboard and click **Test connection**. If successful, you' {% endtab %} {% tab BigQuery %} -Once all configuration details for your sync are entered, click **Test connection**. If successful, you'll see a preview of the data. If, for some reason, we can't connect, we'll display an error message to help you troubleshoot the issue. +After all configuration details for your sync are entered, click **Test connection**. If successful, you'll see a preview of the data. If, for some reason, we can't connect, we'll display an error message to help you troubleshoot the issue. ![]({% image_buster /assets/img/cloud_ingestion/ingestion_13.png %}) {% endtab %} {% tab Databricks %} -Once all configuration details for your sync are entered, click **Test connection**. 
If successful, you'll see a preview of the data. If, for some reason, we can't connect, we'll display an error message to help you troubleshoot the issue. +After all configuration details for your sync are entered, click **Test connection**. If successful, you'll see a preview of the data. If, for some reason, we can't connect, we'll display an error message to help you troubleshoot the issue. ![]({% image_buster /assets/img/cloud_ingestion/ingestion_13.png %}) @@ -582,27 +582,27 @@ If you reuse the same user across integrations, you cannot delete the user in th {% tabs %} {% tab Snowflake %} -Once activated, your sync will run on the schedule configured during setup. If you want to run the sync outside the normal testing schedule or to fetch the most recent data, click **Sync Now**. This run will not impact regularly scheduled future syncs. +When activated, your sync will run on the schedule configured during setup. If you want to run the sync outside the normal testing schedule or to fetch the most recent data, click **Sync Now**. This run will not impact regularly scheduled future syncs. ![]({% image_buster /assets/img/cloud_ingestion/ingestion_5.png %}) {% endtab %} {% tab Redshift %} -Once activated, your sync will run on the schedule configured during setup. If you want to run the sync outside the normal testing schedule or to fetch the most recent data, click **Sync Now**. This run will not impact regularly scheduled future syncs. +When activated, your sync will run on the schedule configured during setup. If you want to run the sync outside the normal testing schedule or to fetch the most recent data, click **Sync Now**. This run will not impact regularly scheduled future syncs. ![]({% image_buster /assets/img/cloud_ingestion/ingestion_10.png %}) {% endtab %} {% tab BigQuery %} -Once activated, your sync will run on the schedule configured during setup. 
If you want to run the sync outside the normal testing schedule or to fetch the most recent data, click **Sync Now**. This run will not impact regularly scheduled future syncs. +When activated, your sync will run on the schedule configured during setup. If you want to run the sync outside the normal testing schedule or to fetch the most recent data, click **Sync Now**. This run will not impact regularly scheduled future syncs. ![]({% image_buster /assets/img/cloud_ingestion/ingestion_15.png %}) {% endtab %} {% tab Databricks %} -Once activated, your sync will run on the schedule configured during setup. If you want to run the sync outside the normal testing schedule or to fetch the most recent data, click **Sync Now**. This run will not impact regularly scheduled future syncs. +When activated, your sync will run on the schedule configured during setup. If you want to run the sync outside the normal testing schedule or to fetch the most recent data, click **Sync Now**. This run will not impact regularly scheduled future syncs. 
![]({% image_buster /assets/img/cloud_ingestion/ingestion_18.png %}) From 4339f6a6b887b31e3a2b366a9bd3910403ee416c Mon Sep 17 00:00:00 2001 From: Lydia Xie <87040416+lydia-xie@users.noreply.github.com> Date: Tue, 16 Jul 2024 12:00:11 -0700 Subject: [PATCH 30/32] Apply suggestions from code review --- .../data_and_analytics/cloud_ingestion/integrations.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/_docs/_user_guide/data_and_analytics/cloud_ingestion/integrations.md b/_docs/_user_guide/data_and_analytics/cloud_ingestion/integrations.md index ff79cc0069f..bcd23a60910 100644 --- a/_docs/_user_guide/data_and_analytics/cloud_ingestion/integrations.md +++ b/_docs/_user_guide/data_and_analytics/cloud_ingestion/integrations.md @@ -76,7 +76,7 @@ CREATE OR REPLACE TABLE BRAZE_CLOUD_PRODUCTION.INGESTION.USERS_ATTRIBUTES_SYNC ( You can name the database, schema, and table as you'd like, but the column names should match the preceding definition. - `UPDATED_AT` - The time this row was updated in or added to the table. We will only sync rows that have been added or updated since the last sync. -- **User identifier columns**: Your table may contain one or more user identifier columns. Each row should only contain one identifier (either `external_id`, the combination of `alias_name` and `alias_label`, or `braze_id`). A source table may have columns for one, two, or all three identifier types. +- **User identifier columns** - Your table may contain one or more user identifier columns. Each row should only contain one identifier (either `external_id`, the combination of `alias_name` and `alias_label`, or `braze_id`). A source table may have columns for one, two, or all three identifier types. - `EXTERNAL_ID` - This identifies the user you want to update. This should match the `external_id` value used in Braze. - `ALIAS_NAME` and `ALIAS_LABEL` - These two columns create a user alias object. 
`alias_name` should be a unique identifier, and `alias_label` specifies the type of alias. Users may have multiple aliases with different labels but only one `alias_name` per `alias_label`. - `BRAZE_ID` - The Braze user identifier. This is generated by the Braze SDK, and new users cannot be created using a Braze ID through Cloud Data Ingestion. To create new users, specify an external user ID or user alias. From dbff7ab591ec5a558924b8060bc263ca87eacd58 Mon Sep 17 00:00:00 2001 From: Lydia Xie <87040416+lydia-xie@users.noreply.github.com> Date: Tue, 16 Jul 2024 13:11:52 -0700 Subject: [PATCH 31/32] Apply suggestions from code review Co-authored-by: Rachel Feinberg <135255868+rachel-feinberg@users.noreply.github.com> --- .../cloud_ingestion/integrations.md | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/_docs/_user_guide/data_and_analytics/cloud_ingestion/integrations.md b/_docs/_user_guide/data_and_analytics/cloud_ingestion/integrations.md index bcd23a60910..4a2ee03975d 100644 --- a/_docs/_user_guide/data_and_analytics/cloud_ingestion/integrations.md +++ b/_docs/_user_guide/data_and_analytics/cloud_ingestion/integrations.md @@ -523,14 +523,14 @@ Return to the Braze dashboard and click **Test connection**. If successful, you' {% endtab %} {% tab BigQuery %} -After all configuration details for your sync are entered, click **Test connection**. If successful, you'll see a preview of the data. If, for some reason, we can't connect, we'll display an error message to help you troubleshoot the issue. +After all configuration details for your sync are entered, select **Test connection**. If successful, you'll see a preview of the data. If, for some reason, we can't connect, we'll display an error message to help you troubleshoot the issue. ![]({% image_buster /assets/img/cloud_ingestion/ingestion_13.png %}) {% endtab %} {% tab Databricks %} -After all configuration details for your sync are entered, click **Test connection**. 
If successful, you'll see a preview of the data. If, for some reason, we can't connect, we'll display an error message to help you troubleshoot the issue. +After all configuration details for your sync are entered, select **Test connection**. If successful, you'll see a preview of the data. If, for some reason, we can't connect, we'll display an error message to help you troubleshoot the issue. ![]({% image_buster /assets/img/cloud_ingestion/ingestion_13.png %}) @@ -582,27 +582,27 @@ If you reuse the same user across integrations, you cannot delete the user in th {% tabs %} {% tab Snowflake %} -When activated, your sync will run on the schedule configured during setup. If you want to run the sync outside the normal testing schedule or to fetch the most recent data, click **Sync Now**. This run will not impact regularly scheduled future syncs. +When activated, your sync will run on the schedule configured during setup. If you want to run the sync outside the normal testing schedule or to fetch the most recent data, select **Sync Now**. This run will not impact regularly scheduled future syncs. ![]({% image_buster /assets/img/cloud_ingestion/ingestion_5.png %}) {% endtab %} {% tab Redshift %} -When activated, your sync will run on the schedule configured during setup. If you want to run the sync outside the normal testing schedule or to fetch the most recent data, click **Sync Now**. This run will not impact regularly scheduled future syncs. +When activated, your sync will run on the schedule configured during setup. If you want to run the sync outside the normal testing schedule or to fetch the most recent data, select **Sync Now**. This run will not impact regularly scheduled future syncs. ![]({% image_buster /assets/img/cloud_ingestion/ingestion_10.png %}) {% endtab %} {% tab BigQuery %} -When activated, your sync will run on the schedule configured during setup. 
If you want to run the sync outside the normal testing schedule or to fetch the most recent data, click **Sync Now**. This run will not impact regularly scheduled future syncs. +When activated, your sync will run on the schedule configured during setup. If you want to run the sync outside the normal testing schedule or to fetch the most recent data, select **Sync Now**. This run will not impact regularly scheduled future syncs. ![]({% image_buster /assets/img/cloud_ingestion/ingestion_15.png %}) {% endtab %} {% tab Databricks %} -When activated, your sync will run on the schedule configured during setup. If you want to run the sync outside the normal testing schedule or to fetch the most recent data, click **Sync Now**. This run will not impact regularly scheduled future syncs. +When activated, your sync will run on the schedule configured during setup. If you want to run the sync outside the normal testing schedule or to fetch the most recent data, select **Sync Now**. This run will not impact regularly scheduled future syncs. ![]({% image_buster /assets/img/cloud_ingestion/ingestion_18.png %}) From c497e2bb4c567e1ccefb9a96d1b1785fb9deed1a Mon Sep 17 00:00:00 2001 From: Rachel Feinberg Date: Tue, 16 Jul 2024 13:14:48 -0700 Subject: [PATCH 32/32] Change click to select --- .../cloud_ingestion/integrations.md | 22 +++++++++---------- 1 file changed, 11 insertions(+), 11 deletions(-) diff --git a/_docs/_user_guide/data_and_analytics/cloud_ingestion/integrations.md b/_docs/_user_guide/data_and_analytics/cloud_ingestion/integrations.md index 4a2ee03975d..bb151334a85 100644 --- a/_docs/_user_guide/data_and_analytics/cloud_ingestion/integrations.md +++ b/_docs/_user_guide/data_and_analytics/cloud_ingestion/integrations.md @@ -348,11 +348,11 @@ You can name the schema and table as you'd like, but the column names should mat In order for Braze to access Databricks, a personal access token needs to be created. -1. 
In your Databricks workspace, click your Databricks username in the top bar, and then select **User Settings** from the drop-down. -2. On the Access tokens tab, click **Generate new token**. +1. In your Databricks workspace, select your Databricks username in the top bar, and then select **User Settings** from the drop-down. +2. On the Access tokens tab, select **Generate new token**. 3. Enter a comment that helps you to identify this token, such as "Braze CDI", and change the token’s lifetime to no lifetime by leaving the Lifetime (days) box empty (blank). -4. Click **Generate**. -5. Copy the displayed token, and then click **Done**. +4. Select **Generate**. +5. Copy the displayed token, and then select **Done**. Keep the token in a safe place until you need to enter it on the Braze dashboard during the credential creation step. @@ -381,7 +381,7 @@ If you have network policies in place, you must give Braze network access to you {% tabs %} {% tab Snowflake %} -Go to **Partner Integrations** > **Technology Partners**. Find the Snowflake page and click **Create new import sync**. +Go to **Partner Integrations** > **Technology Partners**. Find the Snowflake page and select **Create new import sync**. {% alert note %} If you are using the [older navigation]({{site.baseurl}}/navigation), go to **Technology Partners**. @@ -418,7 +418,7 @@ ALTER USER BRAZE_INGESTION_USER SET rsa_public_key='Braze12345...'; {% endtab %} {% tab Redshift %} -Go to **Partner Integrations** > **Technology Partners**. Find the Redshift page and click **Create new import sync**. +Go to **Partner Integrations** > **Technology Partners**. Find the Redshift page and select **Create new import sync**. {% alert note %} If you are using the [older navigation]({{site.baseurl}}/navigation), go to **Technology Partners**. @@ -445,7 +445,7 @@ You will also choose the data type and sync frequency. 
Frequency can be anywhere {% endtab %} {% tab BigQuery %} -Go to **Partner Integrations** > **Technology Partners**. Find the BigQuery page and click **Create new import sync**. +Go to **Partner Integrations** > **Technology Partners**. Find the BigQuery page and select **Create new import sync**. {% alert note %} If you are using the [older navigation]({{site.baseurl}}/navigation), go to **Technology Partners**. @@ -473,7 +473,7 @@ You will also choose the data type and sync frequency. Frequency can be anywhere {% endtab %} {% tab Databricks %} -Go to **Partner Integrations** > **Technology Partners**. Find the Databricks page and click **Create new import sync**. +Go to **Partner Integrations** > **Technology Partners**. Find the Databricks page and select **Create new import sync**. {% alert note %} If you are using the [older navigation]({{site.baseurl}}/navigation), go to **Technology Partners**. @@ -506,18 +506,18 @@ You will also choose the data type and sync frequency. Frequency can be anywhere {% tabs %} {% tab Snowflake %} -Return to the Braze dashboard and click **Test connection**. If successful, you'll see a preview of the data. If, for some reason, we can't connect, we'll display an error message to help you troubleshoot the issue. +Return to the Braze dashboard and select **Test connection**. If successful, you'll see a preview of the data. If, for some reason, we can't connect, we'll display an error message to help you troubleshoot the issue. ![]({% image_buster /assets/img/cloud_ingestion/ingestion_3.png %}) {% endtab %} {% tab Redshift %} -Return to the Braze dashboard and click **Test connection**. If successful, you'll see a preview of the data. If, for some reason, we can't connect, we'll display an error message to help you troubleshoot the issue. +Return to the Braze dashboard and select **Test connection**. If successful, you'll see a preview of the data. 
If, for some reason, we can't connect, we'll display an error message to help you troubleshoot the issue. ![]({% image_buster /assets/img/cloud_ingestion/ingestion_8.png %}) {% endtab %} {% tab Redshift Private Network %} -Return to the Braze dashboard and click **Test connection**. If successful, you'll see a preview of the data. If, for some reason, we can't connect, we'll display an error message to help you troubleshoot the issue. +Return to the Braze dashboard and select **Test connection**. If successful, you'll see a preview of the data. If, for some reason, we can't connect, we'll display an error message to help you troubleshoot the issue. ![]({% image_buster /assets/img/cloud_ingestion/ingestion_19.png %}) {% endtab %}