Skip to content

Commit

Permalink
build(flux): update flux to v0.171.0 (#23453)
Browse files · Browse the repository at this point in the history
* build(flux): update flux to v0.171.0

* chore: remove testing.loadStorage from tests

Also update skip lists in Flux test harness

* chore: remove now redundant Go end-to-end unit tests

This testing is all now provided by the `fluxtest` harness.
  • Loading branch information
Christopher M. Wolff authored Jun 14, 2022
1 parent 53580ea commit a492993
Show file tree
Hide file tree
Showing 11 changed files with 111 additions and 534 deletions.
92 changes: 86 additions & 6 deletions etc/test-flux.sh
Original file line number Diff line number Diff line change
Expand Up @@ -31,19 +31,99 @@ build_test_harness() {
"$GO" build -o fluxtest ./internal/cmd/fluxtest-harness-influxdb
}

# Many tests targeting 3rd party databases are not yet supported in CI and should be filtered out.
# NOTE(review): these comma-separated lists appear to be superseded by the
# skipped_tests() function below, which maintains the same names one-per-line;
# confirm nothing still reads DB_TESTS before relying on it.
# Tests that write to external databases (MQTT, SQLite, Vertica, MSSQL, MySQL, MariaDB, Postgres, SAP HANA).
DB_INTEGRATION_WRITE_TESTS=integration_mqtt_pub,integration_sqlite_write_to,integration_vertica_write_to,integration_mssql_write_to,integration_mysql_write_to,integration_mariadb_write_to,integration_pg_write_to,integration_hdb_write_to
# Tests that read from external databases (seeded and non-seeded variants).
DB_INTEGRATION_READ_TESTS=integration_sqlite_read_from_seed,integration_sqlite_read_from_nonseed,integration_vertica_read_from_seed,integration_vertica_read_from_nonseed,integration_mssql_read_from_seed,integration_mssql_read_from_nonseed,integration_mariadb_read_from_seed,integration_mariadb_read_from_nonseed,integration_mysql_read_from_seed,integration_mysql_read_from_nonseed,integration_pg_read_from_seed,integration_pg_read_from_nonseed,integration_hdb_read_from_seed,integration_hdb_read_from_nonseed
# SQL-injection tests against external databases.
DB_INTEGRATION_INJECTION_TESTS=integration_sqlite_injection,integration_hdb_injection,integration_pg_injection,integration_mysql_injection,integration_mariadb_injection,integration_mssql_injection
# Combined comma-separated skip list passed to fluxtest's --skip flag.
DB_TESTS="${DB_INTEGRATION_WRITE_TESTS},${DB_INTEGRATION_READ_TESTS},${DB_INTEGRATION_INJECTION_TESTS}"
# Emit the comma-separated list of Flux tests to skip (for fluxtest --skip).
#
# The list is maintained in the heredoc below, one test name per line, so it
# is easy to read and annotate. Text after '#' on a line is a comment; blank
# and comment-only lines are removed before the names are joined with commas.
skipped_tests() {
doc=$(cat <<ENDSKIPS
# Integration write tests
integration_mqtt_pub
integration_sqlite_write_to
integration_vertica_write_to
integration_mssql_write_to
integration_mysql_write_to
integration_mariadb_write_to
integration_pg_write_to
integration_hdb_write_to
# Integration read tests
integration_sqlite_read_from_seed
integration_sqlite_read_from_nonseed
integration_vertica_read_from_seed
integration_vertica_read_from_nonseed
integration_mssql_read_from_seed
integration_mssql_read_from_nonseed
integration_mariadb_read_from_seed
integration_mariadb_read_from_nonseed
integration_mysql_read_from_seed
integration_mysql_read_from_nonseed
integration_pg_read_from_seed
integration_pg_read_from_nonseed
integration_hdb_read_from_seed
integration_hdb_read_from_nonseed
# Integration injection tests
integration_sqlite_injection
integration_hdb_injection
integration_pg_injection
integration_mysql_injection
integration_mariadb_injection
integration_mssql_injection
# Other skipped tests
buckets # unbounded
columns # failing with differences
cov # unbounded
covariance # failing with differences
cumulative_sum # failing with differences
cumulative_sum_default # failing with differences
cumulative_sum_noop # failing with differences
difference_columns # failing with differences
distinct # failing with differences
fill # failing with differences
first # unbounded
group # unbounded
highestAverage # unbounded
highestMax # unbounded
histogram # unbounded
histogram_quantile # failing with differences
histogram_quantile_minvalue # failing with error
join # unbounded
join_missing_on_col # unbounded
join_panic # unbounded
key_values # unbounded
key_values_host_name # unbounded
keys # failing with differences
last # unbounded
lowestAverage # failing with differences
map # unbounded
max # unbounded
min # unbounded
pivot_mean # failing with differences
sample # unbounded
secrets # failing with error
selector_preserve_time # failing with differences
set # failing with differences
shapeData # failing with differences
shapeDataWithFilter # failing with differences
shift # unbounded
shift_negative_duration # unbounded
state_changes_big_any_to_any # unbounded
state_changes_big_info_to_ok # unbounded
state_changes_big_ok_to_info # unbounded
union # unbounded
union_heterogeneous # unbounded
unique # unbounded
window_null # failing with differences
ENDSKIPS
)
# Strip comments FIRST, then drop blank lines. Doing it in the other order
# turns comment-only lines into empty lines after comment-stripping, which
# then survive into the join and produce empty entries ("...,,..." and a
# leading ",") in the skip list.
echo "$doc" | sed 's/[[:space:]]*#.*$//' | sed '/^[[:space:]]*$/d' | tr '\n' ',' | sed 's/,$//'
}

# Run the Flux end-to-end test suites against influxdb via the fluxtest
# harness, skipping the tests known not to work in CI (see skipped_tests).
run_integration_tests() {
    log "Running integration tests..."
    # The scraped diff left both the old and new --skip lines in place, and the
    # old one lacked a trailing backslash, breaking the command continuation.
    # Only the generated skip list is passed now.
    ./fluxtest \
        -v \
        -p flux.zip \
        -p query/ \
        --skip "$(skipped_tests)"
}

cleanup() {
Expand Down
2 changes: 1 addition & 1 deletion go.mod
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ require (
github.com/google/go-jsonnet v0.17.0
github.com/hashicorp/vault/api v1.0.2
github.com/influxdata/cron v0.0.0-20201006132531-4bb0a200dcbe
github.com/influxdata/flux v0.169.0
github.com/influxdata/flux v0.171.0
github.com/influxdata/httprouter v1.3.1-0.20191122104820-ee83e2772f69
github.com/influxdata/influx-cli/v2 v2.2.1-0.20220318222112-88ba3464cd07
github.com/influxdata/influxql v1.1.1-0.20211004132434-7e7d61973256
Expand Down
4 changes: 2 additions & 2 deletions go.sum
Original file line number Diff line number Diff line change
Expand Up @@ -497,8 +497,8 @@ github.com/inconshreveable/mousetrap v1.0.0 h1:Z8tu5sraLXCXIcARxBp/8cbvlwVa7Z1NH
github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8=
github.com/influxdata/cron v0.0.0-20201006132531-4bb0a200dcbe h1:7j4SdN/BvQwN6WoUq7mv0kg5U9NhnFBxPGMafYRKym0=
github.com/influxdata/cron v0.0.0-20201006132531-4bb0a200dcbe/go.mod h1:XabtPPW2qsCg0tl+kjaPU+cFS+CjQXEXbT1VJvHT4og=
github.com/influxdata/flux v0.169.0 h1:jyrWWkmsqsbETryyal1RDX2ig+PWEC63rFbAqaHMNXM=
github.com/influxdata/flux v0.169.0/go.mod h1:eNApXyjdyUdCNs6LxUQRBHxjUVqK1XrJrlMPhIQSQpA=
github.com/influxdata/flux v0.171.0 h1:9s0MA0bGXPRmzeAvZPYl1412qYSdeTNQb1cgW83nu2M=
github.com/influxdata/flux v0.171.0/go.mod h1:fNtcZ8tqtVDjwWYcPRvCdlY5t3n+NYCc5xunKCmigQA=
github.com/influxdata/gosnowflake v1.6.9 h1:BhE39Mmh8bC+Rvd4QQsP2gHypfeYIH1wqW1AjGWxxrE=
github.com/influxdata/gosnowflake v1.6.9/go.mod h1:9W/BvCXOKx2gJtQ+jdi1Vudev9t9/UDOEHnlJZ/y1nU=
github.com/influxdata/httprouter v1.3.1-0.20191122104820-ee83e2772f69 h1:WQsmW0fXO4ZE/lFGIE84G6rIV5SJN3P3sjIXAP1a8eU=
Expand Down
6 changes: 0 additions & 6 deletions internal/cmd/fluxtest-harness-influxdb/test.go
Original file line number Diff line number Diff line change
Expand Up @@ -201,9 +201,6 @@ const writeOptSource = `
import "testing"
import c "csv"
option testing.loadStorage = (csv) => {
return c.from(csv: csv) |> to(bucket: bucket, org: org)
}
option testing.load = (tables=<-) => {
return tables |> to(bucket: bucket, org: org)
}
Expand All @@ -216,9 +213,6 @@ const readOptSource = `
import "testing"
import c "csv"
option testing.loadStorage = (csv) => {
return from(bucket: bucket)
}
option testing.load = (tables=<-) => {
return from(bucket: bucket)
}
Expand Down
3 changes: 2 additions & 1 deletion query/stdlib/influxdata/influxdb/filter_test.flux
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,8 @@ testcase filter {
,,0,2018-05-22T19:53:36Z,system,host.local,load1,1.63
")

got = testing.loadStorage(csv: input)
got = csv.from(csv: input)
|> testing.load()
|> range(start: -100y)
|> filter(fn: (r) => r._measurement == "system" and r._field == "load1")
|> drop(columns: ["_start", "_stop"])
Expand Down
18 changes: 12 additions & 6 deletions query/stdlib/influxdata/influxdb/multi_measure_test.flux
Original file line number Diff line number Diff line change
Expand Up @@ -55,7 +55,8 @@ input = "
"

testcase multi_measure {
got = testing.loadStorage(csv: input)
got = csv.from(csv: input)
|> testing.load()
|> range(start: 2018-01-01T00:00:00Z, stop: 2019-01-01T00:00:00Z)
|> filter(fn: (r) => r["_measurement"] == "system" or r["_measurement"] == "sys")
|> filter(fn: (r) => r["_field"] == "load1" or r["_field"] == "load3")
Expand Down Expand Up @@ -83,7 +84,8 @@ testcase multi_measure {
}

testcase multi_measure_match_all {
got = testing.loadStorage(csv: input)
got = csv.from(csv: input)
|> testing.load()
|> range(start: 2018-01-01T00:00:00Z, stop: 2019-01-01T00:00:00Z)
|> filter(fn: (r) => r["_measurement"] == "system" or r["_measurement"] == "sys" or r["_measurement"] == "var" or r["_measurement"] == "swap")
|> filter(fn: (r) => r["_field"] == "load1" or r["_field"] == "load3" or r["_field"] == "load5" or r["_field"] == "used_percent")
Expand Down Expand Up @@ -129,7 +131,8 @@ testcase multi_measure_match_all {
}

testcase multi_measure_tag_filter {
got = testing.loadStorage(csv: input)
got = csv.from(csv: input)
|> testing.load()
|> range(start: 2018-01-01T00:00:00Z, stop: 2019-01-01T00:00:00Z)
|> filter(fn: (r) => r["_measurement"] == "system" or r["_measurement"] == "swap")
|> filter(fn: (r) => r["_field"] == "load1" or r["_field"] == "load3" or r["_field"] == "used_percent")
Expand Down Expand Up @@ -158,7 +161,8 @@ testcase multi_measure_tag_filter {
}

testcase multi_measure_complex_or {
got = testing.loadStorage(csv: input)
got = csv.from(csv: input)
|> testing.load()
|> range(start: 2018-01-01T00:00:00Z, stop: 2019-01-01T00:00:00Z)
|> filter(fn: (r) => (r["_measurement"] == "system" or r["_measurement"] == "swap") or (r["_measurement"] != "var" and r["host"] == "host.local"))
|> drop(columns: ["_start", "_stop"])
Expand Down Expand Up @@ -197,7 +201,8 @@ testcase multi_measure_complex_or {
}

testcase multi_measure_complex_and {
got = testing.loadStorage(csv: input)
got = csv.from(csv: input)
|> testing.load()
|> range(start: 2018-01-01T00:00:00Z, stop: 2019-01-01T00:00:00Z)
|> filter(fn: (r) => r["_measurement"] != "system" or r["_measurement"] == "swap")
|> filter(fn: (r) => r["_measurement"] == "swap" or r["_measurement"] == "var")
Expand Down Expand Up @@ -225,7 +230,8 @@ testcase multi_measure_complex_and {
}

testcase multi_measure_negation {
got = testing.loadStorage(csv: input)
got = csv.from(csv: input)
|> testing.load()
|> range(start: 2018-01-01T00:00:00Z, stop: 2019-01-01T00:00:00Z)
|> filter(fn: (r) => r["_measurement"] != "system")
|> filter(fn: (r) => r["host"] == "host.local" or not exists r["host"])
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -63,7 +63,8 @@ input = "
"

testcase tag_values_measurement_or_predicate {
got = testing.loadStorage(csv: input)
got = csv.from(csv: input)
|> testing.load()
|> range(start: -100y)
|> filter(fn: (r) => r["_measurement"] == "cpu")
|> filter(fn: (r) => r["_measurement"] == "someOtherThing" or r["host"] == "euterpe.local")
Expand All @@ -86,7 +87,8 @@ testcase tag_values_measurement_or_predicate {
}

testcase tag_values_measurement_or_negation {
got = testing.loadStorage(csv: input)
got = csv.from(csv: input)
|> testing.load()
|> range(start: -100y)
|> filter(fn: (r) => r["_measurement"] != "cpu")
|> filter(fn: (r) => r["_measurement"] == "someOtherThing" or r["fstype"] != "apfs")
Expand All @@ -108,7 +110,8 @@ testcase tag_values_measurement_or_negation {
}

testcase tag_values_measurement_or_regex {
got = testing.loadStorage(csv: input)
got = csv.from(csv: input)
|> testing.load()
|> range(start: -100y)
|> filter(fn: (r) => r["_measurement"] =~ /cp.*/)
|> filter(fn: (r) => r["_measurement"] == "someOtherThing" or r["host"] !~ /mnemo.*/)
Expand Down
1 change: 0 additions & 1 deletion query/stdlib/packages.go
Original file line number Diff line number Diff line change
Expand Up @@ -4,5 +4,4 @@ package stdlib
import (
_ "github.com/influxdata/influxdb/v2/query/stdlib/influxdata/influxdb"
_ "github.com/influxdata/influxdb/v2/query/stdlib/influxdata/influxdb/v1"
_ "github.com/influxdata/influxdb/v2/query/stdlib/testing"
)
Loading

0 comments on commit a492993

Please sign in to comment.