Optimize expression range allocs by using a pool (#92)
zix99 authored Apr 29, 2023
1 parent 25df2c4 commit 1ee2932
Showing 6 changed files with 105 additions and 10 deletions.
4 changes: 3 additions & 1 deletion docs/usage/expressions.md
@@ -289,14 +289,16 @@ For example, given the array `[1,2,3]`, and the function

#### @reduce

Syntax: `{@reduce <arr> <reducefunc>}`
Syntax: `{@reduce <arr> <reducefunc> [initial=""]}`

Evaluates `reducefunc` against each element and a memo. `{0}` is the memo, and
`{1}` is the current value.

For example, given the array `[1,2,3]`, and the function
`{@reduce {array} "{sumi {0} {1}}"}`, it will return `6`.

If `initial` is unset, it will use `arr[0]` as the initial value.
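
As an illustrative addition (not part of the original docs): with an initial value, e.g. `{@reduce {array} "{sumi {0} {1}}" 10}`, the same array `[1,2,3]` would reduce to `16`, since the memo starts at `10` and every element is folded in.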

#### @filter

Syntax: `{@filter <arr> <filterfunc>}`
@@ -2,6 +2,7 @@ package stdlib

import (
. "rare/pkg/expressions" //lint:ignore ST1001 Legacy
"rare/pkg/slicepool"
"rare/pkg/stringSplitter"
"strconv"
"strings"
@@ -14,6 +15,7 @@ type subContext struct {
}

var _ KeyBuilderContext = &subContext{}
var subContextPool = slicepool.NewObjectPool[subContext](5)

func (s *subContext) GetMatch(idx int) string {
if idx < len(s.vals) {
@@ -111,9 +113,13 @@ func kfArrayMap(args []KeyBuilderStage) (KeyBuilderStage, error) {
}

return func(context KeyBuilderContext) string {
mapperContext := subContext{
mapperContext := subContextPool.Get()
defer subContextPool.Return(mapperContext)

*mapperContext = subContext{
parent: context,
}

return arrayOperator(
args[0](context),
ArraySeparatorString,
@@ -125,14 +131,18 @@ func kfArrayMap(args []KeyBuilderStage) (KeyBuilderStage, error) {
}, nil
}

// {@reduce <arr> <reducer>}
// {@reduce <arr> <reducer> [initial=""]}
func kfArrayReduce(args []KeyBuilderStage) (KeyBuilderStage, error) {
if len(args) != 2 {
return stageErrArgCount(args, 2)
if !isArgCountBetween(args, 2, 3) {
return stageErrArgRange(args, "2-3")
}

initial := EvalStageIndexOrDefault(args, 2, "")

return func(context KeyBuilderContext) string {
mapperContext := subContext{
mapperContext := subContextPool.Get()
defer subContextPool.Return(mapperContext)
*mapperContext = subContext{
parent: context,
}

@@ -141,10 +151,17 @@ func kfArrayReduce(args []KeyBuilderStage) (KeyBuilderStage, error) {
Delim: ArraySeparatorString,
}

memo := splitter.Next()
var memo string
if initial == "" {
memo = splitter.Next()
} else {
memo = initial
}

for !splitter.Done() {
memo = mapperContext.Eval(args[1], memo, splitter.Next())
}

return memo
}, nil
}
@@ -208,7 +225,9 @@ func kfArrayFilter(args []KeyBuilderStage) (KeyBuilderStage, error) {
Delim: ArraySeparatorString,
}

sub := subContext{
sub := subContextPool.Get()
defer subContextPool.Return(sub)
*sub = subContext{
parent: context,
}

@@ -120,7 +120,7 @@ func TestArrayReduce(t *testing.T) {
testExpressionErr(
t,
mockContext("0 1 2 5"),
`{@reduce {@split {0} " "} "{sumi {0} {1}}" bla}`,
`{@reduce {@split {0} " "} "{sumi {0} {1}}" bla 2}`,
"<ARGN>",
ErrArgCount,
)
@@ -130,6 +130,11 @@ func TestArrayReduce(t *testing.T) {
`{@reduce {0} "{sumi {0} {1}}"}`,
"10",
)

// With initial
testExpression(t,
mockContext(expressions.MakeArray("2", "1", "3", "5")),
`{@reduce {0} "{subi {0} {1}}" 0}`, "-11")
}

func TestArraySlice(t *testing.T) {
@@ -206,7 +211,7 @@ func TestArrayFilter(t *testing.T) {
)
}

// BenchmarkRangeSum-4 3456345 339.7 ns/op 48 B/op 1 allocs/op
// BenchmarkRangeSum-4 4414395 271.9 ns/op 0 B/op 0 allocs/op
func BenchmarkRangeSum(b *testing.B) {
exp := NewStdKeyBuilder()
ctx := mockContext(expressions.MakeArray("1", "1", "3", "5"))
3 changes: 3 additions & 0 deletions pkg/slicepool/intpool.go
@@ -5,6 +5,9 @@ type IntPool struct {
pool []int
}

// Creates a slice of integers and hands out chunks of it, one at a time, for read-write.
// If the pool runs out of space, it will allocate a new backing pool of `size`
// Use when you need lots of small int slices, as it limits total allocations
func NewIntPool(size int) *IntPool {
return &IntPool{
size: size,
43 changes: 43 additions & 0 deletions pkg/slicepool/objpool.go
@@ -0,0 +1,43 @@
package slicepool

import "sync"

// A thread-safe pool that can hand out and receive pointers to objects of type T
// Technically it can accept objects it didn't create, though that's discouraged since it will grow the pool beyond its configured size
// Operates in non-blocking mode (it will create a new object if it doesn't have one readily available)
type ObjectPool[T any] struct {
pool []*T
m sync.Mutex
}

// Creates an object pool with an initial size. It may grow later
func NewObjectPool[T any](size int) *ObjectPool[T] {
ret := &ObjectPool[T]{
pool: make([]*T, size),
}
for i := 0; i < size; i++ {
ret.pool[i] = new(T)
}
return ret
}

func (s *ObjectPool[T]) Get() (ret *T) {
s.m.Lock()
defer s.m.Unlock()

if len(s.pool) == 0 {
return new(T)
}

end := len(s.pool) - 1
ret = s.pool[end]
s.pool = s.pool[:end]
return
}

func (s *ObjectPool[T]) Return(obj *T) {
s.m.Lock()
defer s.m.Unlock()

s.pool = append(s.pool, obj)
}
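
A minimal usage sketch for ObjectPool, mirroring how the stdlib functions above borrow, reset, and return a subContext (illustrative only, not part of the commit; the `widget` type and `useWidget` function are stand-ins):

package slicepool_example

import "rare/pkg/slicepool"

type widget struct {
	name string
}

// shared pool; Get hands back a previously returned *widget when one is available
var widgetPool = slicepool.NewObjectPool[widget](5)

func useWidget() {
	w := widgetPool.Get()      // pooled *widget, or a fresh allocation if the pool is empty
	defer widgetPool.Return(w) // hand it back so later callers can reuse the allocation

	// reset: a pooled object retains whatever state it last held
	*w = widget{name: "example"}

	// ... use w ...
}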
23 changes: 23 additions & 0 deletions pkg/slicepool/objpool_test.go
@@ -0,0 +1,23 @@
package slicepool

import (
"testing"

"github.com/stretchr/testify/assert"
)

func TestSimpleObjPool(t *testing.T) {
type testObj struct{}

op := NewObjectPool[testObj](1)
assert.Len(t, op.pool, 1)
v1 := op.Get()
v2 := op.Get()
assert.Len(t, op.pool, 0)
assert.NotNil(t, v1)
assert.NotNil(t, v2)

op.Return(v1)
op.Return(v2)
assert.Len(t, op.pool, 2)
}
