diff --git a/src/components/AllPageNotice.tsx b/src/components/AllPageNotice.tsx
index 90230983..fa1dcec6 100644
--- a/src/components/AllPageNotice.tsx
+++ b/src/components/AllPageNotice.tsx
@@ -2,6 +2,8 @@ import { CloseIcon } from '@chakra-ui/icons';
import {
Box,
Heading,
+ Link,
+ LinkProps,
Slide,
Text,
TextProps,
@@ -22,6 +24,7 @@ interface AllPageMessage {
const components = {
p: (props: TextProps): JSX.Element => <Text {...props} />,
h1: Heading,
+ a: (props: LinkProps): JSX.Element => <Link {...props} />,
Heading,
code: CodeBlock,
};
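
The `a` entry above maps markdown anchors onto Chakra's `Link`, which is what lets the `[Learn more](...)` link in the banner message render as a themed link. A minimal sketch of how a components map like this is typically consumed, assuming the MDX content is wrapped in `@mdx-js/react`'s `MDXProvider` (the `NoticeMdx` wrapper name and the lack of extra styling props are assumptions, not this repo's actual code):

```tsx
import { MDXProvider } from '@mdx-js/react';
import { Link, LinkProps, Text, TextProps } from '@chakra-ui/react';
import type { ReactNode } from 'react';

// Map markdown elements onto Chakra components so the notice inherits theme styling.
const components = {
  p: (props: TextProps): JSX.Element => <Text {...props} />,
  a: (props: LinkProps): JSX.Element => <Link {...props} />,
};

// Hypothetical wrapper: any markdown rendered inside this provider uses the
// mappings above, e.g. **[Learn more](/for/snowflake)** becomes a Chakra <Link>.
export const NoticeMdx = ({ children }: { children: ReactNode }): JSX.Element => (
  <MDXProvider components={components}>{children}</MDXProvider>
);
```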
diff --git a/src/config.ts b/src/config.ts
index 0c7d6f1f..f41d72f3 100644
--- a/src/config.ts
+++ b/src/config.ts
@@ -55,7 +55,7 @@ export default {
},
// Controls global message shown on homepage.
allPageMessage: {
- message: `❄️ Latest: Snowflake customers — stream your data to Postgres! **[Learn more](/blog/snowflake-push-postgres)**! 🐘`,
- except: ['/blog/snowflake-push-postgres', '/get-started'],
+ message: `❄️ Latest: Snowflake customers — learn how to [connect Snowflake to **everything**](/for/snowflake)! ❄️`,
+ except: ['/for/snowflake', '/get-started'],
},
};
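
For context, the `except` list is what keeps the banner off the pages it links to. A hedged sketch of how that check could work (the `shouldShowAllPageMessage` helper and the exact-match logic are assumptions, not the repo's actual implementation):

```ts
import config from './config';

// Hypothetical helper: show the global banner unless the current route is excluded.
export const shouldShowAllPageMessage = (pathname: string): boolean => {
  const { message, except } = config.allPageMessage;
  if (!message) return false;
  // Exact match against the `except` paths; a real implementation might also
  // normalize trailing slashes or match path prefixes.
  return !except.includes(pathname);
};

// shouldShowAllPageMessage('/for/snowflake')          === false
// shouldShowAllPageMessage('/blog/snowflake-pull-kafka') === true
```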
diff --git a/src/content/blog/snowflake-pull-kafka.mdx b/src/content/blog/snowflake-pull-kafka.mdx
index 8661aebb..1e325464 100644
--- a/src/content/blog/snowflake-pull-kafka.mdx
+++ b/src/content/blog/snowflake-pull-kafka.mdx
@@ -2,7 +2,7 @@
title: Real-Time Data Ingestion from Kafka to Snowflake
codetour: true
category: Learning
-date: '2024-10-10'
+date: '2024-11-10'
description: Setup a private point-to-point data stream from Kafka to Snowflake.
image: /blog/snowflake-pull-kafka/cover.png
author: Glenn Gillen
@@ -17,19 +17,26 @@ featuredOrder: 7
{/* */}
-In today's data-driven world, organizations are constantly seeking ways to harness
-the power of real-time data for analytics and decision-making. Apache Kafka has
-emerged as a powerhouse for handling high-volume, real-time data streams, while
-Snowflake offers unparalleled capabilities for data warehousing and analytics.
-
-What if you could seamlessly combine the strengths of both platforms in less
-than 15 minutes, without the headaches of managing IP allow
-lists, opening firewall ports, or navigating the complexities of services like
-PrivateLink?
+If your business is using Kafka, there are already _a lot_ of
+messages travelling through your brokers. If you're also a Snowflake customer, it would be
+great if those messages could make their way into your Snowflake data cloud.
+Kafka is in your private network, Snowflake is in its own cloud, and getting data
+between them isn't entirely straightforward. It'd be much easier if those
+two systems could look and feel like they were next to each other.
Today, we're excited to introduce a solution that makes this vision a reality:
the Pull from Kafka Connector!
+
+
+
+
## Snowflake 💙 Apache Kafka
Apache Kafka (and Kafka-compatible alternatives) is the system of choice for building
@@ -513,22 +520,44 @@ pull data through to our private Kafka broker.
![Snowflake push to Kafka setup complete](/blog/snowflake-push-kafka/aws-complete.png)
-## Next steps
+## Seeing it in action
-Any updates to your data in your Kafka topic will now create a new row in your Snowflake table.
+Any updates to your data in your Kafka topic will now create a new row in your
+Snowflake table.
Post the below message to the Kafka topic to verify the setup.
-Replace `$BROKER_ADDRESS` with your actual Kafka broker address, and ensure the topic name (customers in this example) matches the one you've configured in your Snowflake Pull from Kafka Connector setup
+Replace `$BROKER_ADDRESS` with your actual Kafka broker address, and ensure the
+topic name (`customers` in this example) matches the one you've configured in
+your Snowflake Pull from Kafka Connector setup.
```bash
echo '{"key": "customer123", "id": 1001, "name": "John Doe", "email": "john.doe@example.com"}' | \
kafka-console-producer --broker-list $BROKER_ADDRESS:9092 --topic customers
```
-The Snowflake connector will then pull these messages from Kafka and insert them into your CUSTOMERS table, mapping the JSON fields to the corresponding columns.
+The Snowflake connector will then pull these messages from Kafka and insert them
+into your `CUSTOMERS` table, mapping the JSON fields to the corresponding
+columns.
+
+## Wrap up
+
+It's all done! In the course of a few minutes we've been able to:
+
+* Set up an Ockam node next to our Kafka broker.
+* Start an Ockam node within Snowflake.
+* Establish an Ockam Portal between our nodes: a secure point-to-point
+connection that is mutually authenticated with end-to-end encryption, with
+regular and automatic rotation of the encryption keys.
+* Then use the secure portal to consume messages from our private Kafka broker
+and load them directly into a Snowflake table.
+
+We've been able to achieve all of this without the need to expose our Kafka
+broker to the public internet, update firewall ingress rules, set up a VPN,
+or manage IP allow lists.
If you'd like to explore some other capabilities of Ockam I'd recommend:
* [Encrypting data _through_ Kafka](https://docs.ockam.io/portals/kafka)
+* [Pushing data from Snowflake to Kafka](/blog/snowflake-push-kafka)
* [Zero-trust data streaming with Redpanda Connect](/blog/redpanda_connect_with_ockam#connect-secure-and-streamall-in-one-simple-platform)
* [Adding security as a feature in your SaaS product](/blog/building-secure-saas-platforms)