updating mark API to align with core library, exposing split_block and join_block through to Swift (#171)

* switches mark to take ScalarValue explicitly, not Value
* adds split_block and join_block, using u32 to approximate usize in the API (see the usage sketch below)
* shifts the Swift marks over to using ScalarValue to match the Rust core API change
heckj authored Jun 11, 2024
1 parent 26ccaf9 commit d12280d
Showing 7 changed files with 317 additions and 15 deletions.
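To make the new surface concrete, here is a minimal usage sketch (not part of the diff). It relies only on the signatures added below: `Doc.splitBlock(obj:index:)`, `Doc.joinBlock(obj:index:)`, and the `Mark` initializer that now takes a `ScalarValue`. The `doc` handle, the `textObj` identifier, and the `.Boolean` case on `ScalarValue` are assumptions made for illustration.

```swift
// Hypothetical sketch: assumes an existing low-level `doc: Doc` handle and the
// ObjId of a text object, `textObj`, obtained elsewhere.

// Block indexes are UInt32 in the Swift API, approximating Rust's usize.
let blockId: ObjId = try doc.splitBlock(obj: textObj, index: 4) // insert a block boundary at index 4
try doc.joinBlock(obj: textObj, index: 4)                       // remove that boundary again

// Marks now carry a ScalarValue rather than a general Value.
// (`.Boolean` is assumed here to be the relevant ScalarValue case.)
let bold = Mark(start: 0, end: 4, name: "bold", value: .Boolean(true))
```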
266 changes: 262 additions & 4 deletions AutomergeUniffi/automerge.swift
@@ -393,6 +393,19 @@ private struct FfiConverterUInt8: FfiConverterPrimitive {
}
}

private struct FfiConverterUInt32: FfiConverterPrimitive {
typealias FfiType = UInt32
typealias SwiftType = UInt32

public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> UInt32 {
try lift(readInt(&buf))
}

public static func write(_ value: SwiftType, into buf: inout [UInt8]) {
writeInt(&buf, lower(value))
}
}

private struct FfiConverterUInt64: FfiConverterPrimitive {
typealias FfiType = UInt64
typealias SwiftType = UInt64
@@ -554,6 +567,8 @@ public protocol DocProtocol: AnyObject {

func insertObjectInList(obj: ObjId, index: UInt64, objType: ObjType) throws -> ObjId

func joinBlock(obj: ObjId, index: UInt32) throws

func length(obj: ObjId) -> UInt64

func lengthAt(obj: ObjId, heads: [ChangeHash]) -> UInt64
@@ -600,6 +615,8 @@ public protocol DocProtocol: AnyObject {

func spliceText(obj: ObjId, start: UInt64, delete: Int64, chars: String) throws

func splitBlock(obj: ObjId, index: UInt32) throws -> ObjId

func text(obj: ObjId) throws -> String

func textAt(obj: ObjId, heads: [ChangeHash]) throws -> String
@@ -1008,6 +1025,16 @@ open class Doc:
})
}

open func joinBlock(obj: ObjId, index: UInt32) throws { try rustCallWithError(FfiConverterTypeDocError.lift) {
uniffi_uniffi_automerge_fn_method_doc_join_block(
self.uniffiClonePointer(),
FfiConverterTypeObjId.lower(obj),
FfiConverterUInt32.lower(index),
$0
)
}
}

open func length(obj: ObjId) -> UInt64 {
try! FfiConverterUInt64.lift(try! rustCall {
uniffi_uniffi_automerge_fn_method_doc_length(
@@ -1267,6 +1294,17 @@ open class Doc:
}
}

open func splitBlock(obj: ObjId, index: UInt32) throws -> ObjId {
try FfiConverterTypeObjId.lift(rustCallWithError(FfiConverterTypeDocError.lift) {
uniffi_uniffi_automerge_fn_method_doc_split_block(
self.uniffiClonePointer(),
FfiConverterTypeObjId.lower(obj),
FfiConverterUInt32.lower(index),
$0
)
})
}

open func text(obj: ObjId) throws -> String {
try FfiConverterString.lift(rustCallWithError(FfiConverterTypeDocError.lift) {
uniffi_uniffi_automerge_fn_method_doc_text(
@@ -1630,15 +1668,58 @@ public func FfiConverterTypeKeyValue_lower(_ value: KeyValue) -> RustBuffer {
FfiConverterTypeKeyValue.lower(value)
}

public struct MapValue {
public var value: [String: AmValue]

// Default memberwise initializers are never public by default, so we
// declare one manually.
public init(value: [String: AmValue]) {
self.value = value
}
}

extension MapValue: Equatable, Hashable {
public static func == (lhs: MapValue, rhs: MapValue) -> Bool {
if lhs.value != rhs.value {
return false
}
return true
}

public func hash(into hasher: inout Hasher) {
hasher.combine(value)
}
}

public struct FfiConverterTypeMapValue: FfiConverterRustBuffer {
public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> MapValue {
try MapValue(
value: FfiConverterDictionaryStringTypeAMValue.read(from: &buf)
)
}

public static func write(_ value: MapValue, into buf: inout [UInt8]) {
FfiConverterDictionaryStringTypeAMValue.write(value.value, into: &buf)
}
}

public func FfiConverterTypeMapValue_lift(_ buf: RustBuffer) throws -> MapValue {
try FfiConverterTypeMapValue.lift(buf)
}

public func FfiConverterTypeMapValue_lower(_ value: MapValue) -> RustBuffer {
FfiConverterTypeMapValue.lower(value)
}

public struct Mark {
public var start: UInt64
public var end: UInt64
public var name: String
public var value: Value
public var value: ScalarValue

// Default memberwise initializers are never public by default, so we
// declare one manually.
public init(start: UInt64, end: UInt64, name: String, value: Value) {
public init(start: UInt64, end: UInt64, name: String, value: ScalarValue) {
self.start = start
self.end = end
self.name = name
@@ -1677,15 +1758,15 @@ public struct FfiConverterTypeMark: FfiConverterRustBuffer {
start: FfiConverterUInt64.read(from: &buf),
end: FfiConverterUInt64.read(from: &buf),
name: FfiConverterString.read(from: &buf),
value: FfiConverterTypeValue.read(from: &buf)
value: FfiConverterTypeScalarValue.read(from: &buf)
)
}

public static func write(_ value: Mark, into buf: inout [UInt8]) {
FfiConverterUInt64.write(value.start, into: &buf)
FfiConverterUInt64.write(value.end, into: &buf)
FfiConverterString.write(value.name, into: &buf)
FfiConverterTypeValue.write(value.value, into: &buf)
FfiConverterTypeScalarValue.write(value.value, into: &buf)
}
}

@@ -1799,6 +1880,132 @@ public func FfiConverterTypePathElement_lower(_ value: PathElement) -> RustBuffer {
FfiConverterTypePathElement.lower(value)
}

public struct TextValue {
public var value: String
public var marks: [Mark]

// Default memberwise initializers are never public by default, so we
// declare one manually.
public init(value: String, marks: [Mark]) {
self.value = value
self.marks = marks
}
}

extension TextValue: Equatable, Hashable {
public static func == (lhs: TextValue, rhs: TextValue) -> Bool {
if lhs.value != rhs.value {
return false
}
if lhs.marks != rhs.marks {
return false
}
return true
}

public func hash(into hasher: inout Hasher) {
hasher.combine(value)
hasher.combine(marks)
}
}

public struct FfiConverterTypeTextValue: FfiConverterRustBuffer {
public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> TextValue {
try TextValue(
value: FfiConverterString.read(from: &buf),
marks: FfiConverterSequenceTypeMark.read(from: &buf)
)
}

public static func write(_ value: TextValue, into buf: inout [UInt8]) {
FfiConverterString.write(value.value, into: &buf)
FfiConverterSequenceTypeMark.write(value.marks, into: &buf)
}
}

public func FfiConverterTypeTextValue_lift(_ buf: RustBuffer) throws -> TextValue {
try FfiConverterTypeTextValue.lift(buf)
}

public func FfiConverterTypeTextValue_lower(_ value: TextValue) -> RustBuffer {
FfiConverterTypeTextValue.lower(value)
}

// Note that we don't yet support `indirect` for enums.
// See https://github.com/mozilla/uniffi-rs/issues/396 for further discussion.

public enum AmValue {
case scalar(
value: ScalarValue
)
case list(
value: [AmValue]
)
case map(
value: MapValue
)
case text(
value: TextValue
)
}

public struct FfiConverterTypeAMValue: FfiConverterRustBuffer {
typealias SwiftType = AmValue

public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> AmValue {
let variant: Int32 = try readInt(&buf)
switch variant {
case 1: return try .scalar(
value: FfiConverterTypeScalarValue.read(from: &buf)
)

case 2: return try .list(
value: FfiConverterSequenceTypeAMValue.read(from: &buf)
)

case 3: return try .map(
value: FfiConverterTypeMapValue.read(from: &buf)
)

case 4: return try .text(
value: FfiConverterTypeTextValue.read(from: &buf)
)

default: throw UniffiInternalError.unexpectedEnumCase
}
}

public static func write(_ value: AmValue, into buf: inout [UInt8]) {
switch value {
case let .scalar(value):
writeInt(&buf, Int32(1))
FfiConverterTypeScalarValue.write(value, into: &buf)

case let .list(value):
writeInt(&buf, Int32(2))
FfiConverterSequenceTypeAMValue.write(value, into: &buf)

case let .map(value):
writeInt(&buf, Int32(3))
FfiConverterTypeMapValue.write(value, into: &buf)

case let .text(value):
writeInt(&buf, Int32(4))
FfiConverterTypeTextValue.write(value, into: &buf)
}
}
}

public func FfiConverterTypeAMValue_lift(_ buf: RustBuffer) throws -> AmValue {
try FfiConverterTypeAMValue.lift(buf)
}

public func FfiConverterTypeAMValue_lower(_ value: AmValue) -> RustBuffer {
FfiConverterTypeAMValue.lower(value)
}

extension AmValue: Equatable, Hashable {}

public enum DecodeSyncStateError {
case Internal(message: String)
}
@@ -2685,6 +2892,28 @@ private struct FfiConverterSequenceTypePathElement: FfiConverterRustBuffer {
}
}

private struct FfiConverterSequenceTypeAMValue: FfiConverterRustBuffer {
typealias SwiftType = [AmValue]

public static func write(_ value: [AmValue], into buf: inout [UInt8]) {
let len = Int32(value.count)
writeInt(&buf, len)
for item in value {
FfiConverterTypeAMValue.write(item, into: &buf)
}
}

public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> [AmValue] {
let len: Int32 = try readInt(&buf)
var seq = [AmValue]()
seq.reserveCapacity(Int(len))
for _ in 0 ..< len {
try seq.append(FfiConverterTypeAMValue.read(from: &buf))
}
return seq
}
}

private struct FfiConverterSequenceTypeScalarValue: FfiConverterRustBuffer {
typealias SwiftType = [ScalarValue]

@@ -2751,6 +2980,29 @@ private struct FfiConverterSequenceTypeChangeHash: FfiConverterRustBuffer {
}
}

private struct FfiConverterDictionaryStringTypeAMValue: FfiConverterRustBuffer {
public static func write(_ value: [String: AmValue], into buf: inout [UInt8]) {
let len = Int32(value.count)
writeInt(&buf, len)
for (key, value) in value {
FfiConverterString.write(key, into: &buf)
FfiConverterTypeAMValue.write(value, into: &buf)
}
}

public static func read(from buf: inout (data: Data, offset: Data.Index)) throws -> [String: AmValue] {
let len: Int32 = try readInt(&buf)
var dict = [String: AmValue]()
dict.reserveCapacity(Int(len))
for _ in 0 ..< len {
let key = try FfiConverterString.read(from: &buf)
let value = try FfiConverterTypeAMValue.read(from: &buf)
dict[key] = value
}
return dict
}
}

private struct FfiConverterDictionaryStringTypeValue: FfiConverterRustBuffer {
public static func write(_ value: [String: Value], into buf: inout [UInt8]) {
let len = Int32(value.count)
@@ -3018,6 +3270,9 @@ private var initializationResult: InitializationResult {
if uniffi_uniffi_automerge_checksum_method_doc_insert_object_in_list() != 30538 {
return InitializationResult.apiChecksumMismatch
}
if uniffi_uniffi_automerge_checksum_method_doc_join_block() != 37348 {
return InitializationResult.apiChecksumMismatch
}
if uniffi_uniffi_automerge_checksum_method_doc_length() != 30352 {
return InitializationResult.apiChecksumMismatch
}
@@ -3087,6 +3342,9 @@ private var initializationResult: InitializationResult {
if uniffi_uniffi_automerge_checksum_method_doc_splice_text() != 20602 {
return InitializationResult.apiChecksumMismatch
}
if uniffi_uniffi_automerge_checksum_method_doc_split_block() != 10956 {
return InitializationResult.apiChecksumMismatch
}
if uniffi_uniffi_automerge_checksum_method_doc_text() != 64716 {
return InitializationResult.apiChecksumMismatch
}
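Aside from the block and mark changes, the hunks above also introduce `MapValue`, `TextValue`, and the `AmValue` enum, a hydrated tree representation of document values. The sketch below shows how those types compose; it uses only the cases and fields defined in the diff, and the `render` helper itself is hypothetical.

```swift
// Hypothetical helper: flattens an AmValue tree into a debug string,
// using only the cases and fields introduced above.
func render(_ value: AmValue) -> String {
    switch value {
    case let .scalar(value):
        return "scalar(\(value))"
    case let .list(value):
        return "[" + value.map(render).joined(separator: ", ") + "]"
    case let .map(value):
        let entries = value.value
            .map { "\($0.key): \(render($0.value))" }
            .sorted()
        return "{" + entries.joined(separator: ", ") + "}"
    case let .text(value):
        return "text(\"\(value.value)\", \(value.marks.count) marks)"
    }
}
```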
11 changes: 8 additions & 3 deletions Sources/Automerge/Marks.swift
@@ -17,7 +17,7 @@ public struct Mark: Equatable, Hashable, Sendable {
/// The name of the mark.
public let name: String
/// The value associated with the mark.
public let value: Value
public let value: ScalarValue

/// Creates a new mark.
///
@@ -53,15 +53,20 @@ public struct Mark: Equatable, Hashable, Sendable {
/// ```swift
/// Int64("🇬🇧".unicodeScalars.count)
/// ```
public init(start: UInt64, end: UInt64, name: String, value: Value) {
public init(start: UInt64, end: UInt64, name: String, value: ScalarValue) {
self.start = start
self.end = end
self.name = name
self.value = value
}

static func fromFfi(_ ffiMark: FfiMark) -> Self {
Self(start: ffiMark.start, end: ffiMark.end, name: ffiMark.name, value: Value.fromFfi(value: ffiMark.value))
Self(
start: ffiMark.start,
end: ffiMark.end,
name: ffiMark.name,
value: ScalarValue.fromFfi(value: ffiMark.value)
)
}
}
