
Commit

Add sentry
istreeter committed Jul 17, 2023
1 parent 294a9be commit 0aa590b
Showing 4 changed files with 27 additions and 6 deletions.
2 changes: 1 addition & 1 deletion modules/core/src/main/resources/reference.conf
@@ -1,7 +1,7 @@
 {
   "inMemBatchBytes": 25600000
   "cpuParallelismFraction": 0.5
-  "windows": "5 minutes"
+  "windowing": "5 minutes"
   "spark": {
     "taskRetries": 3
     "conf": {

@@ -23,7 +23,7 @@ case class Config[+Source, +Sink](
   output: Config.Output[Sink],
   inMemBatchBytes: Long,
   cpuParallelismFraction: BigDecimal,
-  windows: FiniteDuration,
+  windowing: FiniteDuration,
   spark: Config.Spark,
   telemetry: Config.Telemetry,
   monitoring: Config.Monitoring
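The rename from "windows" to "windowing" spans the HOCON default above and the Config case class field. For reference, HOCON duration strings such as "5 minutes" map onto a Scala FiniteDuration; a minimal standalone sketch of that mapping (illustrative only, using plain Typesafe Config rather than the loader's own decoders):

import com.typesafe.config.ConfigFactory
import scala.concurrent.duration.FiniteDuration
import scala.jdk.DurationConverters._

// Parse a HOCON snippet containing the renamed key and read it back as a
// FiniteDuration (Typesafe Config returns java.time.Duration; toScala converts it).
val conf = ConfigFactory.parseString("""{ "windowing": "5 minutes" }""")
val windowing: FiniteDuration = conf.getDuration("windowing").toScala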
@@ -8,10 +8,11 @@
 package com.snowplowanalytics.snowplow.lakes

 import cats.implicits._
-import cats.effect.{Async, Resource}
+import cats.effect.{Async, Resource, Sync}
 import cats.effect.unsafe.implicits.global
 import org.http4s.client.Client
 import org.http4s.blaze.client.BlazeClientBuilder
+import io.sentry.Sentry

 import java.util.UUID

@@ -43,10 +44,11 @@ object Environment {
     sink: SinkConfig => Resource[F, Sink[F]]
   ): Resource[F, Environment[F]] =
     for {
+      _ <- Resource.eval(enableSentry[F])
       resolver <- mkResolver[F](config.iglu)
       httpClient <- BlazeClientBuilder[F].withExecutionContext(global.compute).resource
       badSink <- sink(config.main.output.bad)
-      windowing <- Resource.eval(EventProcessingConfig.TimedWindows.build(config.main.windows))
+      windowing <- Resource.eval(EventProcessingConfig.TimedWindows.build(config.main.windowing))
       lakeWriter <- LakeWriter.build[F](config.main.spark, config.main.output.good)
       appId <- Resource.eval(Async[F].delay(UUID.randomUUID.toString))
       metrics <- Resource.eval(Metrics.build(config.main.monitoring.metrics))
@@ -64,6 +66,12 @@
       windowing = windowing
     )

+  private def enableSentry[F[_]: Sync] = Sync[F].delay {
+    Sentry.init { options =>
+      options.setEnableExternalConfiguration(true);
+    }
+  }
+
   private def mkResolver[F[_]: Async](resolverConfig: Resolver.ResolverConfig): Resource[F, Resolver[F]] =
     Resource.eval {
       Resolver
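The new enableSentry step initialises the Sentry SDK with setEnableExternalConfiguration(true), which tells the SDK to pick up its settings, most importantly the DSN, from external sources such as the SENTRY_DSN environment variable, system properties, or a sentry.properties file; if no DSN is supplied, the SDK remains effectively disabled. A small sketch of how failures could then be reported (the logAndCapture helper below is hypothetical and not part of this commit):

import cats.effect.Sync
import cats.implicits._
import io.sentry.Sentry

// Hypothetical helper: run an action and, if it fails, send the exception
// to Sentry before the error is re-raised.
def logAndCapture[F[_]: Sync, A](action: F[A]): F[A] =
  action.onError { case t =>
    Sync[F].delay(Sentry.captureException(t)).void
  }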
17 changes: 15 additions & 2 deletions project/Dependencies.scala
@@ -27,7 +27,7 @@ object Dependencies {
     val pubsub = "1.123.17"

     // Spark
-    val spark = "3.4.0"
+    val spark = "3.4.1"
     val delta = "2.4.0"
     val iceberg = "1.3.0"
     val hadoop = "3.3.5"
@@ -37,13 +37,18 @@
     // java
     val slf4j = "2.0.7"
     val azureSdk = "1.9.1"
+    val sentry = "6.25.2"

     // Snowplow
     val schemaDdl = "0.18.2"
     val badrows = "2.2.0"
     val igluClient = "3.0.0"
     val tracker = "2.0.0"

+    // Transitive overrides
+    val protobuf = "3.19.6"
+    val snappy = "1.1.10.2"
+
     // tests
     val specs2 = "4.20.0"
     val catsEffectSpecs2 = "1.5.0"
@@ -79,6 +84,11 @@
     // java
     val slf4j = "org.slf4j" % "slf4j-simple" % V.slf4j
     val azureIdentity = "com.azure" % "azure-identity" % V.azureSdk
+    val sentry = "io.sentry" % "sentry" % V.sentry
+
+    // transitive overrides
+    val protobuf = "com.google.protobuf" % "protobuf-java" % V.protobuf
+    val snappy = "org.xerial.snappy" % "snappy-java" % V.snappy

     // snowplow: Note jackson-databind 2.14.x is incompatible with Spark
     val badrows = "com.snowplowanalytics" %% "snowplow-badrows" % V.badrows
@@ -112,7 +122,8 @@
   val kafkaDependencies = Seq(
     fs2Kafka,
     circeConfig,
-    circeGeneric
+    circeGeneric,
+    snappy
   )

   val pubsubDependencies = Seq(
@@ -136,6 +147,8 @@
     delta,
     tracker,
     trackerEmit,
+    sentry,
+    protobuf,
     specs2,
     catsEffectSpecs2,
     slf4j % Test
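The new "Transitive overrides" group pins protobuf-java and snappy-java, presumably to pull in patched releases ahead of the older copies that Spark and Hadoop bring in transitively. A build.sbt-style sketch of how such pins take effect (illustrative; not taken from this repository's build definition):

// Declaring the pinned artifacts as direct dependencies lets sbt's default
// "latest revision wins" eviction choose them over older transitive versions.
libraryDependencies ++= Seq(
  "com.google.protobuf" % "protobuf-java" % "3.19.6",
  "org.xerial.snappy"   % "snappy-java"   % "1.1.10.2"
)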
