feat: snowpipe streaming #5110

Merged · 7 commits · Dec 4, 2024
7 changes: 5 additions & 2 deletions .github/workflows/tests.yaml
@@ -122,7 +122,7 @@ jobs:
go-version-file: 'go.mod'
- run: go version
- run: go mod download # Not required, used to segregate module download vs test times
- run: make test exclude="/rudder-server/(jobsdb|integration_test|processor|regulation-worker|router|services|suppression-backup-service|warehouse)"
- run: FORCE_RUN_INTEGRATION_TESTS=true make test exclude="/rudder-server/(jobsdb|integration_test|processor|regulation-worker|router|services|suppression-backup-service|warehouse)"
- name: Upload coverage report
uses: actions/upload-artifact@v4
with:
@@ -145,6 +145,7 @@ jobs:
- integration_test/tracing
- integration_test/backendconfigunavailability
- integration_test/trackedusersreporting
- integration_test/snowpipestreaming
- processor
- regulation-worker
- router
@@ -186,7 +187,9 @@ jobs:
TEST_KAFKA_AZURE_EVENT_HUBS_CLOUD_CONNECTION_STRING: ${{ secrets.TEST_KAFKA_AZURE_EVENT_HUBS_CLOUD_CONNECTION_STRING }}
TEST_S3_DATALAKE_CREDENTIALS: ${{ secrets.TEST_S3_DATALAKE_CREDENTIALS }}
BIGQUERY_INTEGRATION_TEST_CREDENTIALS: ${{ secrets.BIGQUERY_INTEGRATION_TEST_CREDENTIALS }}
run: make test exclude="${{ matrix.exclude }}" package=${{ matrix.package }}
SNOWPIPE_STREAMING_KEYPAIR_UNENCRYPTED_INTEGRATION_TEST_CREDENTIALS: ${{ secrets.SNOWPIPE_STREAMING_KEYPAIR_UNENCRYPTED_INTEGRATION_TEST_CREDENTIALS }}
SNOWPIPE_STREAMING_KEYPAIR_ENCRYPTED_INTEGRATION_TEST_CREDENTIALS: ${{ secrets.SNOWPIPE_STREAMING_KEYPAIR_ENCRYPTED_INTEGRATION_TEST_CREDENTIALS }}
run: FORCE_RUN_INTEGRATION_TESTS=true make test exclude="${{ matrix.exclude }}" package=${{ matrix.package }}
- name: Sanitize name for Artifact
run: |
name=$(echo -n "${{ matrix.package }}" | sed -e 's/[ \t:\/\\"<>|*?]/-/g' -e 's/--*/-/g')
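The workflow now sets FORCE_RUN_INTEGRATION_TESTS=true for both test invocations, adds integration_test/snowpipestreaming to the package matrix, and wires in two key-pair credential secrets. As a rough illustration of how an integration test is typically gated on such a flag (the package name and guard below are assumptions, not the repository's actual code):

package integrationtest

import (
	"os"
	"testing"
)

// TestSnowpipeStreaming sketches the usual guard: skip unless the flag forces the run.
func TestSnowpipeStreaming(t *testing.T) {
	if os.Getenv("FORCE_RUN_INTEGRATION_TESTS") != "true" {
		t.Skip("set FORCE_RUN_INTEGRATION_TESTS=true to run this integration test")
	}
	// ... start dependencies and exercise the Snowpipe Streaming destination here.
}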
2 changes: 1 addition & 1 deletion gateway/handle.go
@@ -488,7 +488,7 @@ func (gw *Handle) getJobDataFromRequest(req *webRequestT) (jobData *jobFromReq,
}
receivedAt, ok := userEvent.events[0]["receivedAt"].(string)
if !ok || !arctx.ReplaySource {
receivedAt = time.Now().Format(misc.RFC3339Milli)
receivedAt = gw.now().Format(misc.RFC3339Milli)
}
singularEventBatch := SingularEventBatch{
Batch: userEvent.events,
4 changes: 3 additions & 1 deletion gateway/handle_lifecycle.go
@@ -24,6 +24,7 @@ import (
kithttputil "github.com/rudderlabs/rudder-go-kit/httputil"
"github.com/rudderlabs/rudder-go-kit/logger"
"github.com/rudderlabs/rudder-go-kit/stats"

"github.com/rudderlabs/rudder-server/app"
backendconfig "github.com/rudderlabs/rudder-server/backend-config"
"github.com/rudderlabs/rudder-server/gateway/throttler"
@@ -37,6 +38,7 @@ import (
"github.com/rudderlabs/rudder-server/services/transformer"
"github.com/rudderlabs/rudder-server/utils/crash"
"github.com/rudderlabs/rudder-server/utils/misc"
"github.com/rudderlabs/rudder-server/utils/timeutil"
)

/*
@@ -115,7 +117,7 @@ func (gw *Handle) Setup(
gw.processRequestTime = gw.stats.NewStat("gateway.process_request_time", stats.TimerType)
gw.emptyAnonIdHeaderStat = gw.stats.NewStat("gateway.empty_anonymous_id_header", stats.CountType)

gw.now = time.Now
gw.now = timeutil.Now
gw.diagnosisTicker = time.NewTicker(gw.conf.diagnosisTickerTime)
gw.userWorkerBatchRequestQ = make(chan *userWorkerBatchRequestT, gw.conf.maxDBBatchSize)
gw.batchUserWorkerBatchRequestQ = make(chan *batchUserWorkerBatchRequestT, gw.conf.maxDBWriterProcess)
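Both gateway changes replace a direct time.Now call with the handle's injectable now function (timeutil.Now in production), so receivedAt can be pinned to a fixed timestamp in tests. A simplified, self-contained sketch of that pattern, using stand-in types rather than the gateway's actual ones:

package main

import (
	"fmt"
	"time"
)

// handle mimics storing a now() func instead of calling time.Now directly.
type handle struct {
	now func() time.Time
}

func (h *handle) receivedAt() string {
	// RFC3339 with millisecond precision, similar in spirit to misc.RFC3339Milli.
	return h.now().UTC().Format("2006-01-02T15:04:05.000Z07:00")
}

func main() {
	h := &handle{now: time.Now} // production wiring
	fmt.Println(h.receivedAt())

	fixed := time.Date(2024, 12, 4, 0, 0, 0, 0, time.UTC)
	h.now = func() time.Time { return fixed } // test wiring
	fmt.Println(h.receivedAt()) // 2024-12-04T00:00:00.000Z
}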
1,663 changes: 1,663 additions & 0 deletions integration_test/snowpipestreaming/snowpipestreaming_test.go

Large diffs are not rendered by default.

@@ -0,0 +1,11 @@
version: "3.9"

services:
transformer:
image: "rudderstack/develop-rudder-transformer:fix.snowpipe-streaming-users"
ports:
- "9090:9090"
healthcheck:
test: wget --no-verbose --tries=1 --spider http://0.0.0.0:9090/health || exit 1
interval: 1s
retries: 25
3 changes: 3 additions & 0 deletions processor/transformer/transformer.go
@@ -539,6 +539,9 @@ func (trans *handle) destTransformURL(destType string) string {
return destinationEndPoint + "?" + whSchemaVersionQueryParam
}
}
if destType == warehouseutils.SnowpipeStreaming {
return fmt.Sprintf("%s?whSchemaVersion=%s&whIDResolve=%t", destinationEndPoint, trans.conf.GetString("Warehouse.schemaVersion", "v1"), warehouseutils.IDResolutionEnabled())
}
return destinationEndPoint
}

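For SNOWPIPE_STREAMING the transformer URL now carries the warehouse schema version and the ID-resolution flag as query parameters. A tiny sketch of the resulting shape, using a placeholder endpoint rather than the real transformer URL:

package main

import "fmt"

func main() {
	endpoint := "http://transformer:9090/v0/destinations/snowpipe_streaming" // placeholder for illustration
	schemaVersion := "v1"                                                    // default of Warehouse.schemaVersion
	idResolve := false                                                       // stand-in for warehouseutils.IDResolutionEnabled()
	fmt.Printf("%s?whSchemaVersion=%s&whIDResolve=%t\n", endpoint, schemaVersion, idResolve)
	// Prints: http://transformer:9090/v0/destinations/snowpipe_streaming?whSchemaVersion=v1&whIDResolve=false
}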
2 changes: 1 addition & 1 deletion router/batchrouter/asyncdestinationmanager/common/utils.go
@@ -3,7 +3,7 @@ package common
import "slices"

var (
asyncDestinations = []string{"MARKETO_BULK_UPLOAD", "BINGADS_AUDIENCE", "ELOQUA", "YANDEX_METRICA_OFFLINE_EVENTS", "BINGADS_OFFLINE_CONVERSIONS", "KLAVIYO_BULK_UPLOAD", "LYTICS_BULK_UPLOAD"}
asyncDestinations = []string{"MARKETO_BULK_UPLOAD", "BINGADS_AUDIENCE", "ELOQUA", "YANDEX_METRICA_OFFLINE_EVENTS", "BINGADS_OFFLINE_CONVERSIONS", "KLAVIYO_BULK_UPLOAD", "LYTICS_BULK_UPLOAD", "SNOWPIPE_STREAMING"}
sftpDestinations = []string{"SFTP"}
)

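Adding "SNOWPIPE_STREAMING" to asyncDestinations routes the destination through the async destination manager. Presumably the slice is consulted with a simple membership check; a self-contained sketch with a hypothetical helper name:

package main

import (
	"fmt"
	"slices"
)

var asyncDestinations = []string{"MARKETO_BULK_UPLOAD", "KLAVIYO_BULK_UPLOAD", "SNOWPIPE_STREAMING"} // abbreviated list

// isAsyncDestination is a hypothetical helper showing how such a list is typically queried.
func isAsyncDestination(destType string) bool {
	return slices.Contains(asyncDestinations, destType)
}

func main() {
	fmt.Println(isAsyncDestination("SNOWPIPE_STREAMING")) // true
}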
3 changes: 3 additions & 0 deletions router/batchrouter/asyncdestinationmanager/manager.go
@@ -16,6 +16,7 @@ import (
lyticsBulkUpload "github.com/rudderlabs/rudder-server/router/batchrouter/asyncdestinationmanager/lytics_bulk_upload"
marketobulkupload "github.com/rudderlabs/rudder-server/router/batchrouter/asyncdestinationmanager/marketo-bulk-upload"
"github.com/rudderlabs/rudder-server/router/batchrouter/asyncdestinationmanager/sftp"
"github.com/rudderlabs/rudder-server/router/batchrouter/asyncdestinationmanager/snowpipestreaming"
"github.com/rudderlabs/rudder-server/router/batchrouter/asyncdestinationmanager/yandexmetrica"
)

@@ -41,6 +42,8 @@ func newRegularManager(
return klaviyobulkupload.NewManager(logger, statsFactory, destination)
case "LYTICS_BULK_UPLOAD":
return lyticsBulkUpload.NewManager(logger, statsFactory, destination)
case "SNOWPIPE_STREAMING":
return snowpipestreaming.New(conf, logger, statsFactory, destination), nil
}
return nil, errors.New("invalid destination type")
}
124 changes: 124 additions & 0 deletions router/batchrouter/asyncdestinationmanager/snowpipestreaming/apiadapter.go
@@ -0,0 +1,124 @@
package snowpipestreaming

import (
"context"
"strconv"

"github.com/rudderlabs/rudder-go-kit/logger"
"github.com/rudderlabs/rudder-go-kit/stats"

backendconfig "github.com/rudderlabs/rudder-server/backend-config"
"github.com/rudderlabs/rudder-server/router/batchrouter/asyncdestinationmanager/snowpipestreaming/internal/model"
)

const (
createChannelAPI = "create_channel"
deleteChannelAPI = "delete_channel"
insertAPI = "insert"
statusAPI = "status"
)

func newApiAdapter(
logger logger.Logger,
statsFactory stats.Stats,
api api,
destination *backendconfig.DestinationT,
) api {
return &apiAdapter{
logger: logger,
statsFactory: statsFactory,
api: api,
destination: destination,
}
}

func (a *apiAdapter) defaultTags(apiName string) stats.Tags {
return stats.Tags{
"module": "batch_router",
"workspaceId": a.destination.WorkspaceID,
"destType": a.destination.DestinationDefinition.Name,
"destinationId": a.destination.ID,
"api": apiName,
}
}

func (a *apiAdapter) CreateChannel(ctx context.Context, req *model.CreateChannelRequest) (*model.ChannelResponse, error) {
a.logger.Infon("Creating channel",
logger.NewStringField("rudderIdentifier", req.RudderIdentifier),
logger.NewStringField("partition", req.Partition),
logger.NewStringField("database", req.TableConfig.Database),
logger.NewStringField("namespace", req.TableConfig.Schema),
logger.NewStringField("table", req.TableConfig.Table),
)

tags := a.defaultTags(createChannelAPI)
defer a.recordDuration(tags)()

resp, err := a.api.CreateChannel(ctx, req)
if err != nil {
tags["success"] = "false"
return nil, err
}
tags["success"] = strconv.FormatBool(resp.Success)
tags["code"] = resp.Code
return resp, nil
}

func (a *apiAdapter) DeleteChannel(ctx context.Context, channelID string, sync bool) error {
a.logger.Infon("Deleting channel",
logger.NewStringField("channelId", channelID),
logger.NewBoolField("sync", sync),
)

tags := a.defaultTags(deleteChannelAPI)
defer a.recordDuration(tags)()

err := a.api.DeleteChannel(ctx, channelID, sync)
if err != nil {
tags["success"] = "false"
return err
}
tags["success"] = "true"
return nil
}

func (a *apiAdapter) Insert(ctx context.Context, channelID string, insertRequest *model.InsertRequest) (*model.InsertResponse, error) {
a.logger.Debugn("Inserting data",
logger.NewStringField("channelId", channelID),
logger.NewIntField("rows", int64(len(insertRequest.Rows))),
logger.NewStringField("offset", insertRequest.Offset),
)

tags := a.defaultTags(insertAPI)
defer a.recordDuration(tags)()

resp, err := a.api.Insert(ctx, channelID, insertRequest)
if err != nil {
tags["success"] = "false"
return nil, err
}

tags["success"] = strconv.FormatBool(resp.Success)
tags["code"] = resp.Code
return resp, nil
}

func (a *apiAdapter) GetStatus(ctx context.Context, channelID string) (*model.StatusResponse, error) {
a.logger.Debugn("Getting status",
logger.NewStringField("channelId", channelID),
)

tags := a.defaultTags(statusAPI)
defer a.recordDuration(tags)()

resp, err := a.api.GetStatus(ctx, channelID)
if err != nil {
tags["success"] = "false"
return nil, err
}

tags["success"] = strconv.FormatBool(resp.Success)
return resp, nil
}

func (a *apiAdapter) recordDuration(tags stats.Tags) func() {
return a.statsFactory.NewTaggedStat("snowpipe_streaming_api_response_time", stats.TimerType, tags).RecordDuration()
}
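apiadapter.go decorates the underlying Snowpipe API client: every call is logged, timed on the snowpipe_streaming_api_response_time timer, and tagged with its outcome before the result is passed through. A simplified, self-contained sketch of the same decorator pattern, with stand-in types rather than the package's real interface:

package main

import (
	"fmt"
	"time"
)

// api is a stand-in for the package's Snowpipe API interface.
type api interface {
	GetStatus(channelID string) (bool, error)
}

type realAPI struct{}

func (realAPI) GetStatus(channelID string) (bool, error) { return true, nil }

// apiAdapter wraps another api, timing each call and recording the outcome,
// analogous to defaultTags plus recordDuration above.
type apiAdapter struct {
	next api
}

func (a apiAdapter) GetStatus(channelID string) (bool, error) {
	start := time.Now()
	success, err := a.next.GetStatus(channelID)
	fmt.Printf("api=status channel=%s success=%t duration=%s\n", channelID, err == nil && success, time.Since(start))
	return success, err
}

func main() {
	var client api = apiAdapter{next: realAPI{}}
	_, _ = client.GetStatus("channel-1")
}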