From 861ce45aac5cfe1595172ae24872d0ee7176cfda Mon Sep 17 00:00:00 2001 From: Glenn Gillen Date: Sun, 10 Nov 2024 22:50:04 +1100 Subject: [PATCH 1/3] content: pull from kafka --- src/content/blog/snowflake-pull-kafka.mdx | 57 +++++++++++++++++------ 1 file changed, 43 insertions(+), 14 deletions(-) diff --git a/src/content/blog/snowflake-pull-kafka.mdx b/src/content/blog/snowflake-pull-kafka.mdx index 8661aebb..bfb38816 100644 --- a/src/content/blog/snowflake-pull-kafka.mdx +++ b/src/content/blog/snowflake-pull-kafka.mdx @@ -2,7 +2,7 @@ title: Real-Time Data Ingestion from Kafka to Snowflake codetour: true category: Learning -date: '2024-10-10' +date: '2024-11-10' description: Setup a private point-to-point data stream from Kafka to Snowflake. image: /blog/snowflake-pull-kafka/cover.png author: Glenn Gillen @@ -17,19 +17,26 @@ featuredOrder: 7 {/* */} -In today's data-driven world, organizations are constantly seeking ways to harness -the power of real-time data for analytics and decision-making. Apache Kafka has -emerged as a powerhouse for handling high-volume, real-time data streams, while -Snowflake offers unparalleled capabilities for data warehousing and analytics. - -What if you could seamlessly combine the strengths of both platforms in less -than 15 minutes, without the headaches of managing IP allow -lists, opening firewall ports, or navigating the complexities of services like -PrivateLink? +If you're business is using Kafka there is likely already _a lot_ of +messages travelling through it. If you're also a Snowflake customer it would be +great if those messages could make their way into your Snowflake data cloud. +Kafka is in your private network, Snowflake in their cloud, and getting data +between them isn't entirely straight-foward. It'd be much easier if those +two systems could look and feel like they were next to each other. Today, we're excited to introduce a solution that makes this vision a reality: the Pull from Kafka Connector! + + + + ## Snowflake 💙 Apache Kafka Apache Kafka (and Kafka-compatible alternatives) is the system of choice for building @@ -513,22 +520,44 @@ pull data through to our private Kafka broker. ![Snowflake push to Kafka setup complete](/blog/snowflake-push-kafka/aws-complete.png) -## Next steps +## Seeing it in action -Any updates to your data in your Kafka topic will now create a new row in your Snowflake table. +Any updates to your data in your Kafka topic will now create a new row in your +Snowflake table. Post the below message to the Kafka topic to verify the setup. -Replace `$BROKER_ADDRESS` with your actual Kafka broker address, and ensure the topic name (customers in this example) matches the one you've configured in your Snowflake Pull from Kafka Connector setup +Replace `$BROKER_ADDRESS` with your actual Kafka broker address, and ensure the +topic name (`customers` in this example) matches the one you've configured in +your Snowflake Pull from Kafka Connector setup ```bash echo '{"key": "customer123", "id": 1001, "name": "John Doe", "email": "john.doe@example.com"}' | \ kafka-console-producer --broker-list $BROKER_ADDRESS:9092 --topic customers ``` -The Snowflake connector will then pull these messages from Kafka and insert them into your CUSTOMERS table, mapping the JSON fields to the corresponding columns. +The Snowflake connector will then pull these messages from Kafka and insert them +into your `CUSTOMERS` table, mapping the JSON fields to the corresponding +columns. + +## Wrap up + +It's all done! 
In the course of a few minutes we've been able to:
+
+* Set up an Ockam node next to our Kafka broker.
+* Start an Ockam node within Snowflake.
+* Establish an Ockam Portal between our nodes &emdash; a secure point-to-point
+connection that is mutually authenticated with end-to-end encryption. With
+regular and automatic rotation of the encryption keys.
+* Then use the secure portal to consume messages from our private Kafka broker
+and load them directly into a Snowflake table.
+
+We've been able to achieve all of this without the need to expose our Kafka
+broker to the public internet, update firewall ingress rules, set up a VPN,
+or manage IP allow lists.

If you'd like to explore some other capabilities of Ockam I'd recommend:

* [Encrypting data _through_ Kafka](https://docs.ockam.io/portals/kafka)
+* [Pushing data from Snowflake to Kafka](/blog/snowflake-push-kafka)
* [Zero-trust data streaming with Redpanda Connect](/blog/redpanda_connect_with_ockam#connect-secure-and-streamall-in-one-simple-platform)
* [Adding security as a feature in your SaaS product](/blog/building-secure-saas-platforms)

From 7a8274c8fadb919bf138e60f69699c5e6dc837a9 Mon Sep 17 00:00:00 2001
From: Glenn Gillen
Date: Sun, 10 Nov 2024 23:00:01 +1100
Subject: [PATCH 2/3] content: links
---
 src/components/AllPageNotice.tsx          | 3 +++
 src/config.ts                             | 4 ++--
 src/content/blog/snowflake-pull-kafka.mdx | 2 +-
 3 files changed, 6 insertions(+), 3 deletions(-)

diff --git a/src/components/AllPageNotice.tsx b/src/components/AllPageNotice.tsx
index 90230983..fa1dcec6 100644
--- a/src/components/AllPageNotice.tsx
+++ b/src/components/AllPageNotice.tsx
@@ -2,6 +2,8 @@ import { CloseIcon } from '@chakra-ui/icons';
 import {
   Box,
   Heading,
+  Link,
+  LinkProps,
   Slide,
   Text,
   TextProps,
@@ -22,6 +24,7 @@ interface AllPageMessage {
 const components = {
   p: (props: TextProps): JSX.Element => <Text {...props} />,
   h1: Heading,
+  a: (props: LinkProps): JSX.Element => <Link {...props} />,
   Heading,
   code: CodeBlock,
 };

diff --git a/src/config.ts b/src/config.ts
index 0c7d6f1f..f41d72f3 100644
--- a/src/config.ts
+++ b/src/config.ts
@@ -55,7 +55,7 @@ export default {
   },
   // Controls global message shown on homepage.
   allPageMessage: {
-    message: `❄️ Latest: Snowflake customers — stream your data to Postgres! **[Learn more](/blog/snowflake-push-postgres)**! 🐘`,
-    except: ['/blog/snowflake-push-postgres', '/get-started'],
+    message: `❄️ Latest: Snowflake customers — Learn how to [connect Snowflake **everything**](/for/snowflake)! ❄️`,
+    except: ['/for/snowflake', '/get-started'],
   },
 };

diff --git a/src/content/blog/snowflake-pull-kafka.mdx b/src/content/blog/snowflake-pull-kafka.mdx
index bfb38816..be88aa5c 100644
--- a/src/content/blog/snowflake-pull-kafka.mdx
+++ b/src/content/blog/snowflake-pull-kafka.mdx
@@ -545,7 +545,7 @@ It's all done! In the course of a few minutes we've been able to:

* Set up an Ockam node next to our Kafka broker.
* Start an Ockam node within Snowflake.
-* Establish an Ockam Portal between our nodes &emdash; a secure point-to-point
+* Establish an Ockam Portal between our nodes — a secure point-to-point
connection that is mutually authenticated with end-to-end encryption. With
regular and automatic rotation of the encryption keys.
* Then use the secure portal to consume messages from our private Kafka broker From cb218c297abe00db1a907caee70eaad1dff2ecfc Mon Sep 17 00:00:00 2001 From: Glenn Gillen Date: Sun, 10 Nov 2024 23:01:20 +1100 Subject: [PATCH 3/3] content: grammar --- src/content/blog/snowflake-pull-kafka.mdx | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/content/blog/snowflake-pull-kafka.mdx b/src/content/blog/snowflake-pull-kafka.mdx index be88aa5c..1e325464 100644 --- a/src/content/blog/snowflake-pull-kafka.mdx +++ b/src/content/blog/snowflake-pull-kafka.mdx @@ -17,8 +17,8 @@ featuredOrder: 7 {/* */} -If you're business is using Kafka there is likely already _a lot_ of -messages travelling through it. If you're also a Snowflake customer it would be +If your business is using Kafka there is already _a lot_ of +messages travelling through your brokers. If you're also a Snowflake customer it would be great if those messages could make their way into your Snowflake data cloud. Kafka is in your private network, Snowflake in their cloud, and getting data between them isn't entirely straight-foward. It'd be much easier if those