Skip to content

Commit

Permalink
sql: adjust a couple of memory monitoring tests
Browse files Browse the repository at this point in the history
This commit adjusts a couple of memory monitoring related tests.

`TestAggregatesMonitorMemory` has been rewritten to use the
`distsql_workmem` limit (rather than small "global" `--max-sql-memory`
which can be too small). The caveat here is that currently the ordered
aggregator as well as some aggregate builtin functions don't respect the
workmem limit, so for now all test queries are commented out, effectively
skipping the test.

`TestEvaluatedMemoryIsChecked` is removed entirely. Initially,
this test was expected to verify that builtin functions like `repeat`
perform memory accounting of the intermediate result via our memory
accounting system. However, that changed a long time ago in 2b00f15
and now such builtins estimate their result size and return
`errStringTooLarge` error, so the test was no longer verifying what it
intended. This commit removes this test since we do verify the behavior
introduced in 2b00f15 elsewhere (in the
logic tests).

Release note: None
  • Loading branch information
yuzefovich committed Apr 4, 2023
1 parent a4012ba commit 5a8e6db
Showing 1 changed file with 29 additions and 81 deletions.
110 changes: 29 additions & 81 deletions pkg/sql/builtin_mem_usage_test.go
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@ package sql

import (
"context"
gosql "database/sql"
"testing"

"github.com/cockroachdb/cockroach/pkg/base"
Expand All @@ -24,100 +23,49 @@ import (
"github.com/lib/pq"
)

// rowSize is the length of the string present in each row of the table created
// by createTableWithLongStrings.
const rowSize = 50000

// numRows is the number of rows to insert in createTableWithLongStrings.
// numRows and rowSize were picked arbitrarily but so that rowSize * numRows >
// lowMemoryBudget, so that aggregating them all in a CONCAT_AGG or
// ARRAY_AGG will exhaust lowMemoryBudget.
const numRows = 100
// TestAggregatesMonitorMemory verifies that the aggregates incrementally
// record their memory usage as they build up their result.
func TestAggregatesMonitorMemory(t *testing.T) {
defer leaktest.AfterTest(t)()
defer log.Scope(t).Close(t)

// lowMemoryBudget is the memory budget used to test builtins are recording
// their memory use. The budget needs to be large enough to establish the
// initial database connection, but small enough to overflow easily. It's set
// to be comfortably large enough that the server can start up with a bit of
// extra space to overflow.
const lowMemoryBudget = rowSize*numRows - 1
s, sqlDB, _ := serverutils.StartServer(t, base.TestServerArgs{})
defer s.Stopper().Stop(context.Background())

// createTableWithLongStrings creates a table with a modest number of long strings,
// with the intention of using them to exhaust a memory budget.
func createTableWithLongStrings(sqlDB *gosql.DB) error {
// Create a table with a modest number of long strings, with the
// intention of using them to exhaust the distsql workmem budget (which
// is reduced below).
if _, err := sqlDB.Exec(`
CREATE DATABASE d;
CREATE TABLE d.t (a STRING)
`); err != nil {
return err
t.Fatal(err)
}

for i := 0; i < numRows; i++ {
if _, err := sqlDB.Exec(`INSERT INTO d.t VALUES (repeat('a', $1))`, rowSize); err != nil {
return err
for i := 0; i < 100; i++ {
if _, err := sqlDB.Exec(`INSERT INTO d.t VALUES (repeat('a', 50000))`); err != nil {
t.Fatal(err)
}
}
return nil
}

// TestConcatAggMonitorsMemory verifies that the aggregates incrementally
// record their memory usage as they build up their result.
func TestAggregatesMonitorMemory(t *testing.T) {
defer leaktest.AfterTest(t)()
defer log.Scope(t).Close(t)

// By avoiding printing the aggregate results we prevent anything
// besides the aggregate itself from being able to catch the
// large memory usage.
statements := []string{
`SELECT length(concat_agg(a)) FROM d.t`,
`SELECT array_length(array_agg(a), 1) FROM d.t`,
`SELECT json_typeof(json_agg(A)) FROM d.t`,
// We expect that the aggregated value will use about 5MB, so set the
// workmem limit to be slightly less.
if _, err := sqlDB.Exec(`SET distsql_workmem = '4MiB'`); err != nil {
t.Fatal(err)
}

for _, statement := range statements {
s, sqlDB, _ := serverutils.StartServer(t, base.TestServerArgs{
SQLMemoryPoolSize: lowMemoryBudget,
})

defer s.Stopper().Stop(context.Background())

if err := createTableWithLongStrings(sqlDB); err != nil {
t.Fatal(err)
}

for _, statement := range []string{
// By avoiding printing the aggregate results we prevent anything
// besides the aggregate itself from being able to catch the large
// memory usage.
// TODO(yuzefovich): for now, all of the test queries are commented out
// because they don't trigger the "memory budget exceeded" error due to
// #100548. Uncomment them once that issue is resolved.
// `SELECT length(concat_agg(a)) FROM d.t`,
// `SELECT array_length(array_agg(a), 1) FROM d.t`,
// `SELECT json_typeof(json_agg(a)) FROM d.t`,
} {
_, err := sqlDB.Exec(statement)

if pqErr := (*pq.Error)(nil); !errors.As(err, &pqErr) || pgcode.MakeCode(string(pqErr.Code)) != pgcode.OutOfMemory {
t.Fatalf("Expected \"%s\" to consume too much memory", statement)
}
}
}

func TestEvaluatedMemoryIsChecked(t *testing.T) {
defer leaktest.AfterTest(t)()
defer log.Scope(t).Close(t)

// We select the LENGTH here and elsewhere because if we passed the result of
// REPEAT up as a result, the memory error would be caught there even if
// REPEAT was not doing its accounting.
testData := []string{
`SELECT length(repeat('abc', 70000000))`,
`SELECT crdb_internal.no_constant_folding(length(repeat('abc', 70000000)))`,
}

for _, statement := range testData {
t.Run("", func(t *testing.T) {
s, sqlDB, _ := serverutils.StartServer(t, base.TestServerArgs{
SQLMemoryPoolSize: lowMemoryBudget,
})
defer s.Stopper().Stop(context.Background())

_, err := sqlDB.Exec(
statement,
)
if pqErr := (*pq.Error)(nil); !errors.As(err, &pqErr) || pgcode.MakeCode(string(pqErr.Code)) != pgcode.ProgramLimitExceeded {
t.Errorf(`expected %q to encounter "requested length too large" error, but it didn't`, statement)
}
})
}
}

0 comments on commit 5a8e6db

Please sign in to comment.