Merge pull request #21 from jemc-savi/update/latest-savi
Update for latest Savi syntax.
jemc authored Jun 17, 2023
2 parents e081dd9 + fb9b57d commit 7c75596
Showing 6 changed files with 14 additions and 10 deletions.
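All six files make the same mechanical update: the conditions of `if`, `while`, and `unless` lose their surrounding parentheses (only the block that follows stays parenthesized), and a `yield` used inside a larger expression gains explicit parentheses. A minimal before/after sketch of the pattern, using placeholder names (`list`, `limit`, `item`, `index`) rather than code from this repository:

  // Before (older Savi syntax): the condition is wrapped in its own parentheses.
  if (list.size > 0) (
    handle_some
  |
    handle_none
  )
  while (list.size < limit) (list << item)
  error! unless (list.size == limit)

  // After (latest Savi syntax): the condition is bare; only the block keeps parentheses.
  if list.size > 0 (
    handle_some
  |
    handle_none
  )
  while list.size < limit (list << item)
  error! unless list.size == limit

  // A yield used as a sub-expression now takes explicit parentheses:
  list << (yield index)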
2 changes: 1 addition & 1 deletion src/Tensor.Gen.Random.savi
@@ -119,7 +119,7 @@
g.add(name
@_base_counter
g.const("\(name).const"
- if (@algorithm == Tensor.Gen.Random.Algorithm.Philox) (
+ if @algorithm == Tensor.Gen.Random.Algorithm.Philox (
Tensor(U64).from_array([0, offset])
|
Tensor(U64).from_array([offset])
4 changes: 2 additions & 2 deletions src/Tensor.Graph.Operation.savi
@@ -53,7 +53,7 @@
:fun ref add_input_list(can_outputs Array(Tensor.Graph.CanOutput)'box)
return @ if @_ptr.is_null
outputs = Array(_FFI.Output).new(can_outputs.size)
- can_outputs.each -> (can_output | try (outputs << can_output.output._to_ffi!))
+ can_outputs.each -> (can_output | try outputs << can_output.output._to_ffi!)
@_ffi.add_input_list(@_ptr, outputs.cpointer, outputs.size.i32)
@

@@ -88,7 +88,7 @@
return @ if @_ptr.is_null

dimensions_i64_ptr =
- if (USize.byte_width == I64.byte_width) (
+ if USize.byte_width == I64.byte_width (
_FFI.Cast(CPointer(USize), CPointer(I64)).pointer(dimensions.cpointer)
|
dimensions_i64 Array(I64) = []
2 changes: 1 addition & 1 deletion src/Tensor.Graph.Output.savi
@@ -28,7 +28,7 @@
)

shape = Array(I64).new(rank)
- while (shape.size < rank) (shape << I64.zero)
+ while shape.size < rank shape << I64.zero
// TODO: make the above process more efficient - use the
// `resize_possibly_including_uninitialized_memory` method on `Bytes`
// for inspiration, but generalize to `Array`.
4 changes: 2 additions & 2 deletions src/Tensor.Graph.savi
@@ -39,12 +39,12 @@
y_ffis = Array(_FFI.Output).new(y_list.size)
dy_ffis = Array(_FFI.Output).new(y_list.size)
y_list.each -> (y |
- try (y_ffis << y.output._to_ffi!)
+ try y_ffis << y.output._to_ffi!
dy_ffis << _FFI.Output._new_temporarily_null
)

x_ffis = Array(_FFI.Output).new(x_list.size)
- x_list.each -> (x | try (x_ffis << x.output._to_ffi!))
+ x_list.each -> (x | try x_ffis << x.output._to_ffi!)

@_ffi.add_gradients(
@_ptr
4 changes: 3 additions & 1 deletion src/Tensor.Shape.savi
@@ -52,7 +52,9 @@

out << "["
array.each_with_index -> (dimension, i |
- if (i > 0) (out << ", ")
+ if i > 0 (
+   out << ", "
+ )
dimension.into_string(out)
)
out << "]"
8 changes: 5 additions & 3 deletions src/Tensor.savi
@@ -71,7 +71,7 @@
// TODO: Do this without the intermediate array?
elements Array(T) = []
total_element_count.times -> (index |
- elements << yield index
+ elements << (yield index)
)
@from_array(elements)

@@ -84,7 +84,7 @@
shape.each -> (dimension |
element_count = element_count * dimension.usize
)
- error! unless (element_count == @element_count)
+ error! unless element_count == @element_count

@_ffi.set_shape(
@_ptr
@@ -98,7 +98,9 @@

size = @element_count.usize
data.reserve(orig_size + size)
- while (data.size < size) (data << T.zero)
+ while data.size < size (
+   data << T.zero
+ )
// TODO: make the above process more efficient - use the
// `resize_possibly_including_uninitialized_memory` method on `Bytes`
// for inspiration, but generalize to `Array`.
