Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Equinox v4 Updates #175

Merged
merged 2 commits on
Sep 13, 2022
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion src/Propulsion.CosmosStore/Propulsion.CosmosStore.fsproj
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
<ItemGroup>
<PackageReference Include="MinVer" Version="4.2.0" PrivateAssets="All" />

<PackageReference Include="Equinox.CosmosStore" Version="4.0.0-beta.12" />
<PackageReference Include="Equinox.CosmosStore" Version="4.0.0-rc.1" />
<PackageReference Include="FsCodec.SystemTextJson" Version="3.0.0-rc.7.1" />
</ItemGroup>

Expand Down
8 changes: 7 additions & 1 deletion src/Propulsion.CosmosStore/ReaderCheckpoint.fs
Original file line number Diff line number Diff line change
Expand Up @@ -70,11 +70,17 @@ module Fold =
/// We only want to generate a first class event every N minutes, while efficiently writing contingent on the current etag value
/// So, we post-process the events to remove `Updated` events (as opposed to `Checkpointed` ones),
/// knowing that the state already has that Updated event folded into it when we snapshot
#if COSMOSV2 || COSMOSV3
let transmute events state : Events.Event list * Events.Event list =
match events, state with
| [Events.Updated _], state -> [], [toSnapshot state]
| xs, state -> xs, [toSnapshot state]

#else
let transmute events state : Events.Event array * Events.Event array =
match events, state with
| [| Events.Updated _ |], state -> [||], [|toSnapshot state|]
| xs, state -> xs, [|toSnapshot state|]
#endif
let private mkCheckpoint at next pos = { at = at; nextCheckpointDue = next; pos = pos } : Events.Checkpoint
let private mk (at : DateTimeOffset) (interval : TimeSpan) pos : Events.Config * Events.Checkpoint =
let next = at.Add interval
Expand Down
2 changes: 1 addition & 1 deletion src/Propulsion.DynamoStore/Propulsion.DynamoStore.fsproj
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,7 @@
<ItemGroup>
<PackageReference Include="MinVer" Version="4.2.0" PrivateAssets="All" />

<PackageReference Include="Equinox.DynamoStore" Version="4.0.0-beta.12" />
<PackageReference Include="Equinox.DynamoStore" Version="4.0.0-rc.1" />
<PackageReference Include="FsCodec.SystemTextJson" Version="3.0.0-rc.7.1" />
</ItemGroup>

Expand Down
2 changes: 1 addition & 1 deletion src/Propulsion.EventStoreDb/EventStoreSource.fs
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@ module private Impl =
for e in events do
let sn = Propulsion.Streams.StreamName.internalParseSafe e.EventStreamId
if categoryFilter (FsCodec.StreamName.category sn) then
yield sn, toTimelineEvent e |]
yield sn, Equinox.EventStoreDb.ClientCodec.timelineEvent e |]
let private checkpointPos (xs : EventRecord array) =
match Array.tryLast xs with Some e -> int64 e.Position.CommitPosition | None -> -1L
|> Propulsion.Feed.Position.parse
Expand Down
2 changes: 1 addition & 1 deletion src/Propulsion.EventStoreDb/Propulsion.EventStoreDb.fsproj
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
<ItemGroup>
<PackageReference Include="MinVer" Version="4.2.0" PrivateAssets="All" />

<PackageReference Include="Equinox.EventStoreDb" Version="4.0.0-beta.12" />
<PackageReference Include="Equinox.EventStoreDb" Version="4.0.0-rc.1" />
</ItemGroup>

<ItemGroup>
Expand Down
2 changes: 1 addition & 1 deletion src/Propulsion.Feed/FeedReader.fs
Original file line number Diff line number Diff line change
Expand Up @@ -135,7 +135,7 @@ type FeedReader
let streamsCount = batch.items |> Seq.distinctBy ValueTuple.fst |> Seq.length
log.Debug("Page {latency:f0}ms Checkpoint {checkpoint} {eventCount}e {streamCount}s",
readLatency.TotalMilliseconds, batch.checkpoint, c, streamsCount)
let epoch, streamEvents : int64 * Propulsion.Streams.StreamEvent<_> seq = int64 batch.checkpoint, Seq.ofArray batch.items
let epoch, streamEvents : int64 * Propulsion.Streams.Default.StreamEvent seq = int64 batch.checkpoint, Seq.ofArray batch.items
let ingestTimer = Stopwatch.start ()
let! struct (cur, max) = submitBatch { epoch = epoch; checkpoint = commit batch.checkpoint; items = streamEvents; onCompletion = ignore }
stats.UpdateCurMax(ingestTimer.Elapsed, cur, max) }
Expand Down
2 changes: 1 addition & 1 deletion src/Propulsion.Feed/PeriodicSource.fs
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,7 @@ type PeriodicSource
let mutable elapsed = TimeSpan.Zero
for ts, xs in crawl trancheId do
elapsed <- elapsed + ts
let streamEvents : Propulsion.Streams.StreamEvent<_> seq = seq {
let streamEvents : Propulsion.Streams.Default.StreamEvent seq = seq {
for si in xs ->
let i = index
index <- index + 1L
Expand Down
18 changes: 10 additions & 8 deletions src/Propulsion.Kafka/Codec.fs
Original file line number Diff line number Diff line change
Expand Up @@ -64,12 +64,13 @@ module RenderedSpan =
i = span[0].Index
e = span |> Array.map (fun x -> { c = x.EventType; t = x.Timestamp; d = ta x.Data; m = ta x.Meta }) }

let enum (span: RenderedSpan) : StreamEvent<byte[]> seq =
let enum (span: RenderedSpan) : Default.StreamEvent seq =
let streamName = StreamName.internalParseSafe span.s
let inline mkEvent offset (e : RenderedEvent) = FsCodec.Core.TimelineEvent.Create(span.i+int64 offset, e.c, e.d, e.m, timestamp=e.t)
let td (x : byte array) : Default.EventBody = System.ReadOnlyMemory x
let inline mkEvent offset (e : RenderedEvent) = FsCodec.Core.TimelineEvent.Create(span.i+int64 offset, e.c, td e.d, td e.m, timestamp = e.t)
span.e |> Seq.mapi (fun i e -> streamName, mkEvent i e)

let parse (spanJson: string) : StreamEvent<_> seq =
let parse (spanJson: string) : Default.StreamEvent seq =
spanJson |> RenderedSpan.Parse |> enum

// Rendition of Summary Events representing the aggregated state of a Stream at a known point / version
Expand All @@ -89,17 +90,18 @@ type [<NoEquality; NoComparison>] RenderedSummary =
/// Helpers for mapping to/from `Propulsion.Streams` canonical event contract
module RenderedSummary =

let ofStreamEvents (streamName : FsCodec.StreamName) (index : int64) (events : FsCodec.IEventData<byte[]> seq) : RenderedSummary =
let ofStreamEvents (streamName : FsCodec.StreamName) (index : int64) (events : FsCodec.IEventData<Default.EventBody> seq) : RenderedSummary =
let ta (x : Default.EventBody) : byte array = x.ToArray()
{ s = FsCodec.StreamName.toString streamName
i = index
u = [| for x in events -> { c = x.EventType; t = x.Timestamp; d = x.Data; m = x.Meta } |] }
u = [| for x in events -> { c = x.EventType; t = x.Timestamp; d = ta x.Data; m = ta x.Meta } |] }

let ofStreamEvent (streamName : FsCodec.StreamName) (index : int64) (event : FsCodec.IEventData<byte[]>) : RenderedSummary =
let ofStreamEvent (streamName : FsCodec.StreamName) (index : int64) (event : FsCodec.IEventData<Default.EventBody>) : RenderedSummary =
ofStreamEvents streamName index (Seq.singleton event)

let enum (span: RenderedSummary) : StreamEvent<_> seq =
let enum (span: RenderedSummary) : Default.StreamEvent seq =
let streamName = StreamName.internalParseSafe span.s
seq { for e in span.u -> streamName, FsCodec.Core.TimelineEvent.Create(span.i, e.c, e.d, e.m, timestamp=e.t, isUnfold=true) }

let parse (spanJson: string) : StreamEvent<_> seq =
let parse (spanJson: string) : Default.StreamEvent seq =
spanJson |> RenderedSummary.Parse |> enum
8 changes: 4 additions & 4 deletions src/Propulsion.Kafka/Consumers.fs
Original file line number Diff line number Diff line change
Expand Up @@ -287,7 +287,7 @@ module Core =
static member Start<'Outcome>
( log : ILogger, config : KafkaConsumerConfig,
// often implemented via <c>StreamNameSequenceGenerator.KeyValueToStreamEvent</c>
keyValueToStreamEvents : KeyValuePair<string, string> -> StreamEvent<_> seq,
keyValueToStreamEvents : KeyValuePair<string, string> -> Default.StreamEvent seq,
handle : struct (StreamName * StreamSpan<_>) -> Async<struct (Streams.SpanResult * 'Outcome)>, maxDop,
stats : Scheduling.Stats<struct (StreamSpan.Metrics * 'Outcome), struct (StreamSpan.Metrics * exn)>, statsInterval,
?maxSubmissionsPerPartition, ?logExternalState,
Expand Down Expand Up @@ -379,12 +379,12 @@ type StreamNameSequenceGenerator() =
/// - Stores the topic, partition and offset as a <c>ConsumeResultContext</c> in the <c>ITimelineEvent.Context</c>
member x.ConsumeResultToStreamEvent(
// Placeholder category to use for StreamName where key is null and/or does not adhere to standard {category}-{streamId} form
?defaultCategory) : ConsumeResult<string, string> -> StreamEvent<Default.EventBody> seq =
?defaultCategory) : ConsumeResult<string, string> -> Default.StreamEvent seq =
let defaultCategory = defaultArg defaultCategory ""
x.ConsumeResultToStreamEvent(Core.toStreamName defaultCategory)

/// Takes the key and value as extracted from the ConsumeResult, mapping them respectively to the StreamName and ITimelineEvent.Data
member x.KeyValueToStreamEvent(KeyValue (k, v : string), ?eventType, ?defaultCategory) : StreamEvent<Default.EventBody> seq =
member x.KeyValueToStreamEvent(KeyValue (k, v : string), ?eventType, ?defaultCategory) : Default.StreamEvent seq =
let sn = Core.parseMessageKey (defaultArg defaultCategory String.Empty) k
let e = FsCodec.Core.TimelineEvent.Create(x.GenerateIndex sn, defaultArg eventType String.Empty, System.Text.Encoding.UTF8.GetBytes v |> ReadOnlyMemory)
Seq.singleton (sn, e)
Expand All @@ -401,7 +401,7 @@ type StreamsConsumer =
static member Start<'Outcome>
( log : ILogger, config : KafkaConsumerConfig,
// often implemented via <c>StreamNameSequenceGenerator.ConsumeResultToStreamEvent</c> where the incoming message does not have an embedded sequence number
consumeResultToStreamEvents : ConsumeResult<_, _> -> StreamEvent<_> seq,
consumeResultToStreamEvents : ConsumeResult<_, _> -> Default.StreamEvent seq,
// Handler responses:
// - first component: Index at which next processing will proceed (which can trigger discarding of earlier items on that stream)
// - second component: Outcome (can be simply <c>unit</c>), to pass to the <c>stats</c> processor
Expand Down
2 changes: 1 addition & 1 deletion src/Propulsion.MemoryStore/MemoryStoreSource.fs
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@ type MemoryStoreSource<'F>(log, store : Equinox.MemoryStore.VolatileStore<'F>, c
let mutable prepared = -1L

let enqueueSubmission, awaitSubmissions, tryDequeueSubmission =
let c = Channel.unboundedSr<Ingestion.Batch<Propulsion.Streams.StreamEvent<_> seq>> in let r, w = c.Reader, c.Writer
let c = Channel.unboundedSr<Ingestion.Batch<Propulsion.Streams.Default.StreamEvent seq>> in let r, w = c.Reader, c.Writer
Channel.write w, Channel.awaitRead r, Channel.tryRead r

let handleStoreCommitted struct (categoryName, aggregateId, events : FsCodec.ITimelineEvent<_> []) =
Expand Down
2 changes: 1 addition & 1 deletion src/Propulsion.MemoryStore/Propulsion.MemoryStore.fsproj
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
<PackageReference Include="MinVer" Version="4.2.0" PrivateAssets="All" />

<PackageReference Include="FsCodec.Box" Version="3.0.0-rc.7.1" />
<PackageReference Include="Equinox.MemoryStore" Version="4.0.0-beta.12" />
<PackageReference Include="Equinox.MemoryStore" Version="4.0.0-rc.1" />
</ItemGroup>

<ItemGroup>
Expand Down
4 changes: 2 additions & 2 deletions src/Propulsion/Streams.fs
Original file line number Diff line number Diff line change
Expand Up @@ -996,13 +996,13 @@ module Projector =
type StreamsIngester =

static member Start(log, partitionId, maxRead, submit, statsInterval) =
let submitBatch (items : StreamEvent<_> seq, onCompletion) =
let submitBatch (items : StreamEvent<'F> seq, onCompletion) =
let items = Array.ofSeq items
let streams = items |> Seq.map ValueTuple.fst |> HashSet
let batch : Submission.Batch<_, _> = { source = partitionId; onCompletion = onCompletion; messages = items }
submit batch
struct (streams.Count, items.Length)
Ingestion.Ingester<StreamEvent<_> seq>.Start(log, partitionId, maxRead, submitBatch, statsInterval)
Ingestion.Ingester<StreamEvent<'F> seq>.Start(log, partitionId, maxRead, submitBatch, statsInterval)

type StreamsSubmitter =

Expand Down