From fea02f3c5dfbab1a427c818861e4bf3bcc2ebd4f Mon Sep 17 00:00:00 2001 From: Jan Nidzwetzki Date: Mon, 8 Jan 2024 13:21:16 +0100 Subject: [PATCH] Add plan-time chunk exclusion for real-time CAggs The watermark function for CAggs is declared as STABLE since the value of the function changes after every CAgg refresh. The function volatility prevents the planner from replacing the function invocation with a constant value and executing plan time chunk exclusion. This leads to high planning times on hypertables with many chunks. This PR replaces the function invocation with a constant value to allow plan time exclusion of chunks. We perform the replacement at plan time instead of changing the function volatility to IMMUTABLE, because we want to control the constification. Only queries that access the underlying hypertable in a query (i.e., no queries like SELECT cagg_watermark(...) without any FROM condition) are rewritten. This is done to make sure that the query is properly invalidated when the underlying table changes (e.g., the watermark is updated) and the query is replanned on the subsequent execution. 
Fixes: #6105, #6321 Co-authored-by: Fabrizio de Royes Mello --- .unreleased/enhancement_6325 | 1 + src/cross_module_fn.c | 7 + src/cross_module_fn.h | 1 + src/guc.c | 12 + src/guc.h | 1 + src/planner/planner.c | 3 + src/ts_catalog/continuous_aggs_watermark.c | 41 +- src/ts_catalog/continuous_aggs_watermark.h | 2 + tsl/src/continuous_aggs/CMakeLists.txt | 1 + tsl/src/continuous_aggs/common.c | 26 +- tsl/src/continuous_aggs/common.h | 2 + tsl/src/continuous_aggs/planner.c | 344 +++++ tsl/src/continuous_aggs/planner.h | 14 + tsl/src/init.c | 1 + tsl/src/planner.c | 16 + tsl/src/planner.h | 1 + tsl/test/expected/cagg_query-14.out | 821 ----------- tsl/test/expected/cagg_query-15.out | 849 ----------- tsl/test/expected/cagg_query-16.out | 849 ----------- .../{cagg_query-13.out => cagg_query.out} | 538 +++---- tsl/test/expected/cagg_watermark-13.out | 1271 ++++++++++++++++ tsl/test/expected/cagg_watermark-14.out | 1271 ++++++++++++++++ tsl/test/expected/cagg_watermark-15.out | 1272 +++++++++++++++++ tsl/test/expected/cagg_watermark-16.out | 1272 +++++++++++++++++ tsl/test/expected/cagg_watermark.out | 383 ----- tsl/test/expected/continuous_aggs-13.out | 124 +- tsl/test/expected/continuous_aggs-14.out | 124 +- tsl/test/expected/continuous_aggs-15.out | 113 +- tsl/test/expected/continuous_aggs-16.out | 113 +- tsl/test/expected/jit-13.out | 36 +- tsl/test/expected/jit-14.out | 36 +- tsl/test/expected/jit-15.out | 40 +- tsl/test/expected/jit-16.out | 40 +- .../cagg_watermark_concurrent_update.out | 230 +++ .../cagg_watermark_concurrent_update_1.out | 226 +++ tsl/test/isolation/specs/CMakeLists.txt | 1 + .../cagg_watermark_concurrent_update.spec | 85 ++ tsl/test/sql/.gitignore | 2 +- tsl/test/sql/CMakeLists.txt | 4 +- .../sql/{cagg_query.sql.in => cagg_query.sql} | 0 tsl/test/sql/cagg_watermark.sql | 201 --- tsl/test/sql/cagg_watermark.sql.in | 456 ++++++ 42 files changed, 7044 insertions(+), 3786 deletions(-) create mode 100644 .unreleased/enhancement_6325 create mode 100644 
tsl/src/continuous_aggs/planner.c create mode 100644 tsl/src/continuous_aggs/planner.h delete mode 100644 tsl/test/expected/cagg_query-14.out delete mode 100644 tsl/test/expected/cagg_query-15.out delete mode 100644 tsl/test/expected/cagg_query-16.out rename tsl/test/expected/{cagg_query-13.out => cagg_query.out} (51%) create mode 100644 tsl/test/expected/cagg_watermark-13.out create mode 100644 tsl/test/expected/cagg_watermark-14.out create mode 100644 tsl/test/expected/cagg_watermark-15.out create mode 100644 tsl/test/expected/cagg_watermark-16.out delete mode 100644 tsl/test/expected/cagg_watermark.out create mode 100644 tsl/test/isolation/expected/cagg_watermark_concurrent_update.out create mode 100644 tsl/test/isolation/expected/cagg_watermark_concurrent_update_1.out create mode 100644 tsl/test/isolation/specs/cagg_watermark_concurrent_update.spec rename tsl/test/sql/{cagg_query.sql.in => cagg_query.sql} (100%) delete mode 100644 tsl/test/sql/cagg_watermark.sql create mode 100644 tsl/test/sql/cagg_watermark.sql.in diff --git a/.unreleased/enhancement_6325 b/.unreleased/enhancement_6325 new file mode 100644 index 00000000000..1139c6d232a --- /dev/null +++ b/.unreleased/enhancement_6325 @@ -0,0 +1 @@ +Implements: #6325 Add plan-time chunk exclusion for real-time CAggs diff --git a/src/cross_module_fn.c b/src/cross_module_fn.c index 1230ae50600..0f7342b081f 100644 --- a/src/cross_module_fn.c +++ b/src/cross_module_fn.c @@ -264,6 +264,12 @@ ts_tsl_loaded(PG_FUNCTION_ARGS) PG_RETURN_BOOL(ts_cm_functions != &ts_cm_functions_default); } +static void +preprocess_query_tsl_default_fn_community(Query *parse) +{ + /* No op in community licensed code */ +} + /* * Define cross-module functions' default values: * If the submodule isn't activated, using one of the cm functions will throw an @@ -369,6 +375,7 @@ TSDLLEXPORT CrossModuleFunctions ts_cm_functions_default = { .chunk_create_empty_table = error_no_default_fn_pg_community, .recompress_chunk_segmentwise = 
error_no_default_fn_pg_community, .get_compressed_chunk_index_for_recompression = error_no_default_fn_pg_community, + .preprocess_query_tsl = preprocess_query_tsl_default_fn_community, }; TSDLLEXPORT CrossModuleFunctions *ts_cm_functions = &ts_cm_functions_default; diff --git a/src/cross_module_fn.h b/src/cross_module_fn.h index a2ef1075fb7..676f024088e 100644 --- a/src/cross_module_fn.h +++ b/src/cross_module_fn.h @@ -155,6 +155,7 @@ typedef struct CrossModuleFunctions PGFunction chunk_unfreeze_chunk; PGFunction recompress_chunk_segmentwise; PGFunction get_compressed_chunk_index_for_recompression; + void (*preprocess_query_tsl)(Query *parse); } CrossModuleFunctions; extern TSDLLEXPORT CrossModuleFunctions *ts_cm_functions; diff --git a/src/guc.c b/src/guc.c index 3e52655facd..2e715a0576f 100644 --- a/src/guc.c +++ b/src/guc.c @@ -61,6 +61,7 @@ bool ts_guc_enable_constraint_exclusion = true; bool ts_guc_enable_qual_propagation = true; bool ts_guc_enable_cagg_reorder_groupby = true; bool ts_guc_enable_now_constify = true; +TSDLLEXPORT bool ts_guc_enable_cagg_watermark_constify = true; bool ts_guc_enable_osm_reads = true; TSDLLEXPORT bool ts_guc_enable_dml_decompression = true; TSDLLEXPORT bool ts_guc_enable_transparent_decompression = true; @@ -408,6 +409,17 @@ _guc_init(void) NULL, NULL); + DefineCustomBoolVariable("timescaledb.enable_cagg_watermark_constify", + "Enable cagg watermark constify", + "Enable constifying cagg watermark for real-time caggs", + &ts_guc_enable_cagg_watermark_constify, + true, + PGC_USERSET, + 0, + NULL, + NULL, + NULL); + DefineCustomBoolVariable("timescaledb.enable_tiered_reads", "Enable tiered data reads", "Enable reading of tiered data by including a foreign table " diff --git a/src/guc.h b/src/guc.h index 27918d15eea..c87b90cb1b4 100644 --- a/src/guc.h +++ b/src/guc.h @@ -25,6 +25,7 @@ extern bool ts_guc_enable_runtime_exclusion; extern bool ts_guc_enable_constraint_exclusion; extern bool ts_guc_enable_cagg_reorder_groupby; extern 
bool ts_guc_enable_now_constify; +extern TSDLLEXPORT bool ts_guc_enable_cagg_watermark_constify; extern bool ts_guc_enable_osm_reads; extern TSDLLEXPORT bool ts_guc_enable_dml_decompression; extern TSDLLEXPORT bool ts_guc_enable_transparent_decompression; diff --git a/src/planner/planner.c b/src/planner/planner.c index 1c84e8a40f1..93203f5436c 100644 --- a/src/planner/planner.c +++ b/src/planner/planner.c @@ -474,6 +474,9 @@ timescaledb_planner(Query *parse, const char *query_string, int cursor_opts, * Preprocess the hypertables in the query and warm up the caches. */ preprocess_query((Node *) parse, &context); + + if (ts_guc_enable_optimizations) + ts_cm_functions->preprocess_query_tsl(parse); } if (prev_planner_hook != NULL) diff --git a/src/ts_catalog/continuous_aggs_watermark.c b/src/ts_catalog/continuous_aggs_watermark.c index 250ed8f398e..fd5ba96717b 100644 --- a/src/ts_catalog/continuous_aggs_watermark.c +++ b/src/ts_catalog/continuous_aggs_watermark.c @@ -13,8 +13,10 @@ #include #include #include +#include #include +#include "debug_point.h" #include "ts_catalog/continuous_agg.h" #include "ts_catalog/continuous_aggs_watermark.h" #include "hypertable.h" @@ -80,8 +82,8 @@ cagg_watermark_init_scan_by_mat_hypertable_id(ScanIterator *iterator, const int3 Int32GetDatum(mat_hypertable_id)); } -static int64 -cagg_watermark_get(Hypertable *mat_ht) +int64 +ts_cagg_watermark_get(int32 hypertable_id) { PG_USED_FOR_ASSERTS_ONLY short count = 0; Datum watermark = (Datum) 0; @@ -99,7 +101,7 @@ cagg_watermark_get(Hypertable *mat_ht) iterator.ctx.snapshot = GetTransactionSnapshot(); Assert(iterator.ctx.snapshot != NULL); - cagg_watermark_init_scan_by_mat_hypertable_id(&iterator, mat_ht->fd.id); + cagg_watermark_init_scan_by_mat_hypertable_id(&iterator, hypertable_id); ts_scanner_foreach(&iterator) { @@ -114,13 +116,13 @@ cagg_watermark_get(Hypertable *mat_ht) if (value_isnull) ereport(ERROR, (errcode(ERRCODE_INVALID_PARAMETER_VALUE), - errmsg("watermark not defined for 
continuous aggregate: %d", mat_ht->fd.id))); + errmsg("watermark not defined for continuous aggregate: %d", hypertable_id))); /* Log the read watermark, needed for MVCC tap tests */ ereport(DEBUG5, (errcode(ERRCODE_SUCCESSFUL_COMPLETION), errmsg("watermark for continuous aggregate, '%d' is: " INT64_FORMAT, - mat_ht->fd.id, + hypertable_id, DatumGetInt64(watermark)))); return DatumGetInt64(watermark); @@ -152,7 +154,7 @@ cagg_watermark_create(const ContinuousAgg *cagg, MemoryContext top_mctx) cagg->data.mat_hypertable_id))); /* Get the stored watermark */ - watermark->value = cagg_watermark_get(ht); + watermark->value = ts_cagg_watermark_get(cagg->data.mat_hypertable_id); return watermark; } @@ -357,7 +359,8 @@ cagg_watermark_update_scan_internal(TupleInfo *ti, void *data) } static void -cagg_watermark_update_internal(int32 mat_hypertable_id, int64 new_watermark, bool force_update) +cagg_watermark_update_internal(int32 mat_hypertable_id, Oid ht_relid, int64 new_watermark, + bool force_update, bool invalidate_rel_cache) { bool watermark_updated; ScanKeyData scankey[1]; @@ -384,6 +387,19 @@ cagg_watermark_update_internal(int32 mat_hypertable_id, int64 new_watermark, boo (errcode(ERRCODE_INVALID_PARAMETER_VALUE), errmsg("watermark not defined for continuous aggregate: %d", mat_hypertable_id))); } + + /* + * During query planning, the values of the watermark function are constified using the + * constify_cagg_watermark() function. However, this function's value changes when we update the + * Cagg (the volatility of the function is STABLE not IMMUTABLE). To ensure that caches, such as + * the query plan cache, are properly evicted, we send an invalidation message for the + * hypertable. 
+ */ + if (invalidate_rel_cache) + { + DEBUG_WAITPOINT("cagg_watermark_update_internal_before_refresh"); + CacheInvalidateRelcacheByRelid(ht_relid); + } } TSDLLEXPORT void @@ -397,8 +413,17 @@ ts_cagg_watermark_update(Hypertable *mat_ht, int64 watermark, bool watermark_isn (errcode(ERRCODE_INVALID_PARAMETER_VALUE), errmsg("invalid materialized hypertable ID: %d", mat_ht->fd.id))); + /* If we have a real-time CAgg, it uses a watermark function. So, we have to invalidate the rel + * cache to force a replanning of prepared statements. See cagg_watermark_update_internal for + * more information. */ + bool invalidate_rel_cache = !cagg->data.materialized_only; + watermark = cagg_compute_watermark(cagg, watermark, watermark_isnull); - cagg_watermark_update_internal(mat_ht->fd.id, watermark, force_update); + cagg_watermark_update_internal(mat_ht->fd.id, + mat_ht->main_table_relid, + watermark, + force_update, + invalidate_rel_cache); } TSDLLEXPORT void diff --git a/src/ts_catalog/continuous_aggs_watermark.h b/src/ts_catalog/continuous_aggs_watermark.h index 4f2f699699f..79f7933f8e1 100644 --- a/src/ts_catalog/continuous_aggs_watermark.h +++ b/src/ts_catalog/continuous_aggs_watermark.h @@ -15,3 +15,5 @@ extern TSDLLEXPORT void ts_cagg_watermark_insert(Hypertable *mat_ht, int64 water bool watermark_isnull); extern TSDLLEXPORT void ts_cagg_watermark_update(Hypertable *mat_ht, int64 watermark, bool watermark_isnull, bool force_update); + +extern TSDLLEXPORT int64 ts_cagg_watermark_get(int32 hypertable_id); diff --git a/tsl/src/continuous_aggs/CMakeLists.txt b/tsl/src/continuous_aggs/CMakeLists.txt index 11091cdf188..a3f16e7fc16 100644 --- a/tsl/src/continuous_aggs/CMakeLists.txt +++ b/tsl/src/continuous_aggs/CMakeLists.txt @@ -7,6 +7,7 @@ set(SOURCES ${CMAKE_CURRENT_SOURCE_DIR}/invalidation.c ${CMAKE_CURRENT_SOURCE_DIR}/materialize.c ${CMAKE_CURRENT_SOURCE_DIR}/options.c + ${CMAKE_CURRENT_SOURCE_DIR}/planner.c ${CMAKE_CURRENT_SOURCE_DIR}/refresh.c 
${CMAKE_CURRENT_SOURCE_DIR}/repair.c ${CMAKE_CURRENT_SOURCE_DIR}/utils.c) diff --git a/tsl/src/continuous_aggs/common.c b/tsl/src/continuous_aggs/common.c index 7c06fbb6121..2b4fa158522 100644 --- a/tsl/src/continuous_aggs/common.c +++ b/tsl/src/continuous_aggs/common.c @@ -15,8 +15,7 @@ static void caggtimebucketinfo_init(CAggTimebucketInfo *src, int32 hypertable_id static void caggtimebucket_validate(CAggTimebucketInfo *tbinfo, List *groupClause, List *targetList, bool is_cagg_create); static bool cagg_query_supported(const Query *query, StringInfo hint, StringInfo detail, - bool finalized); -static Oid cagg_get_boundary_converter_funcoid(Oid typoid); + const bool finalized); static FuncExpr *build_conversion_call(Oid type, FuncExpr *boundary); static FuncExpr *build_boundary_call(int32 ht_id, Oid type); static Const *cagg_boundary_make_lower_bound(Oid type); @@ -985,7 +984,7 @@ cagg_validate_query(const Query *query, const bool finalized, const char *cagg_s * Get oid of function to convert from our internal representation * to postgres representation. */ -static Oid +Oid cagg_get_boundary_converter_funcoid(Oid typoid) { char *function_name; @@ -1079,20 +1078,31 @@ build_conversion_call(Oid type, FuncExpr *boundary) } /* - * Build function call that returns boundary for a hypertable - * wrapped in type conversion calls when required. + * Return the Oid of the cagg_watermark function */ -static FuncExpr * -build_boundary_call(int32 ht_id, Oid type) +Oid +get_watermark_function_oid(void) { Oid argtyp[] = { INT4OID }; - FuncExpr *boundary; Oid boundary_func_oid = LookupFuncName(list_make2(makeString(FUNCTIONS_SCHEMA_NAME), makeString(BOUNDARY_FUNCTION)), lengthof(argtyp), argtyp, false); + + return boundary_func_oid; +} + +/* + * Build function call that returns boundary for a hypertable + * wrapped in type conversion calls when required. 
+ */ +static FuncExpr * +build_boundary_call(int32 ht_id, Oid type) +{ + FuncExpr *boundary; + Oid boundary_func_oid = get_watermark_function_oid(); List *func_args = list_make1(makeConst(INT4OID, -1, InvalidOid, 4, Int32GetDatum(ht_id), false, true)); diff --git a/tsl/src/continuous_aggs/common.h b/tsl/src/continuous_aggs/common.h index e78064764ec..bbc66e77bf7 100644 --- a/tsl/src/continuous_aggs/common.h +++ b/tsl/src/continuous_aggs/common.h @@ -109,3 +109,5 @@ extern Query *build_union_query(CAggTimebucketInfo *tbinfo, int matpartcolno, Qu extern void mattablecolumninfo_init(MatTableColumnInfo *matcolinfo, List *grouplist); extern void mattablecolumninfo_addinternal(MatTableColumnInfo *matcolinfo); extern bool function_allowed_in_cagg_definition(Oid funcid); +extern Oid get_watermark_function_oid(void); +extern Oid cagg_get_boundary_converter_funcoid(Oid typoid); diff --git a/tsl/src/continuous_aggs/planner.c b/tsl/src/continuous_aggs/planner.c new file mode 100644 index 00000000000..affb666dbe2 --- /dev/null +++ b/tsl/src/continuous_aggs/planner.c @@ -0,0 +1,344 @@ +/* + * This file and its contents are licensed under the Timescale License. + * Please see the included NOTICE for copyright information and + * LICENSE-TIMESCALE for a copy of the license. + */ +#include + +#include +#include +#include +#include +#include + +#include "continuous_aggs/common.h" +#include "planner.h" +#include "ts_catalog/continuous_aggs_watermark.h" + +/* A Cagg query has always the following structure: + * 1. a coalesce expression + * 2. an optional to timestamp conversion function (date, timestamp, timestamptz) + * 3. 
the actual watermark function + * + * We collect (1) and (2) in the following data structure + */ +typedef struct +{ + CoalesceExpr *parent_coalesce_expr; // see (1) + FuncExpr *parent_to_timestamp_func; // see (2) + Oid watermark_function_oid; // Oid of the watermark function + List *to_timestamp_func_oids; // List of Oids of the timestamp conversion functions + List *watermark_parent_functions; // List of parent functions of a watermark + List *watermark_functions; // List of watermark functions + List *relids; // List of relids + bool valid_query; // Is the query valid or not +} ConstifyWatermarkContext; + +/* + * Walk through the elements of the query and detect the watermark functions and their + * parent functions. + */ +static bool +constify_cagg_watermark_walker(Node *node, ConstifyWatermarkContext *context) +{ + if (node == NULL) + return false; + + if (IsA(node, FuncExpr)) + { + FuncExpr *funcExpr = castNode(FuncExpr, node); + + /* Handle watermark function */ + if (context->watermark_function_oid == funcExpr->funcid) + { + /* No coalesce expression found so far, we are not interested in this expression */ + if (context->parent_coalesce_expr == NULL) + { + context->valid_query = false; + return false; + } + + /* Our function takes exactly one argument */ + Assert(list_length(funcExpr->args) == 1); + + context->watermark_functions = lappend(context->watermark_functions, funcExpr); + + if (context->parent_to_timestamp_func != NULL) + { + if (linitial(context->parent_to_timestamp_func->args) != node) + { + context->valid_query = false; + return false; + } + + context->watermark_parent_functions = + lappend(context->watermark_parent_functions, context->parent_to_timestamp_func); + } + else + { + if (linitial(context->parent_coalesce_expr->args) != node) + { + context->valid_query = false; + return false; + } + + context->watermark_parent_functions = + lappend(context->watermark_parent_functions, context->parent_coalesce_expr); + } + } + + /* Capture the 
timestamp conversion function */ + if (list_member_oid(context->to_timestamp_func_oids, funcExpr->funcid)) + { + FuncExpr *old_func_expr = context->parent_to_timestamp_func; + context->parent_to_timestamp_func = funcExpr; + bool result = expression_tree_walker(node, constify_cagg_watermark_walker, context); + context->parent_to_timestamp_func = old_func_expr; + + return result; + } + } + else if (IsA(node, Query)) + { + /* Recurse into subselects */ + Query *query = castNode(Query, node); + return query_tree_walker(query, + constify_cagg_watermark_walker, + context, + QTW_EXAMINE_RTES_BEFORE); + } + else if (IsA(node, CoalesceExpr)) + { + /* Capture the CoalesceExpr */ + CoalesceExpr *parent_coalesce_expr = context->parent_coalesce_expr; + context->parent_coalesce_expr = castNode(CoalesceExpr, node); + bool result = expression_tree_walker(node, constify_cagg_watermark_walker, context); + context->parent_coalesce_expr = parent_coalesce_expr; + + return result; + } + else if (IsA(node, RangeTblEntry)) + { + /* Collect the Oid of the used range tables */ + RangeTblEntry *rte = (RangeTblEntry *) node; + + if (rte->rtekind == RTE_RELATION) + { + context->relids = list_append_unique_oid(context->relids, rte->relid); + } + + /* allow range_table_walker to continue */ + return false; + } + + return expression_tree_walker(node, constify_cagg_watermark_walker, context); +} + +/* Check if the given query is a union query */ +static bool +is_union_query(Query *query) +{ + if (query->setOperations == NULL || + (((SetOperationStmt *) query->setOperations)->op != SETOP_UNION && + ((SetOperationStmt *) query->setOperations)->all != true)) + { + return false; + } + + return true; +} + +/* + * To avoid overhead by traversing the query tree, we perform a check before to determine if the + * given query could be a real-time CAgg query. So, we search for a SELECT over two subqueries. 
+ */ +static bool pg_nodiscard +could_be_realtime_cagg_query(Query *query) +{ + if (query->commandType != CMD_SELECT) + return false; + + if (query->hasAggs || query->hasWindowFuncs || query->hasTargetSRFs) + return false; + + /* One range table, could be a query direct on a CAgg */ + if (list_length(query->rtable) == 1) + { + if (((RangeTblEntry *) linitial(query->rtable))->rtekind != RTE_SUBQUERY) + return false; + + Query *subquery = ((RangeTblEntry *) linitial(query->rtable))->subquery; + + return is_union_query(subquery); + } + /* More than one range table, could be the direct execution of the CAgg query */ + else if (list_length(query->rtable) > 1) + { + return is_union_query(query); + } + + /* No range tables involved, not a CAgg query */ + return false; +} + +/* + * The entry of the watermark HTAB. + */ +typedef struct WatermarkConstEntry +{ + int32 key; + Const *watermark_constant; +} WatermarkConstEntry; + +/* The query can contain multiple watermarks (i.e., two hierarchical real-time CAggs) + * We maintain a hash map (hypertable id -> constant) to ensure we use the same constant + * for the same watermark across the whole query. + */ +static HTAB *pg_nodiscard +init_watermark_map() +{ + struct HASHCTL hctl = { + .keysize = sizeof(int32), + .entrysize = sizeof(WatermarkConstEntry), + .hcxt = CurrentMemoryContext, + }; + + /* Use 4 initial elements to have enough space for normal and hierarchical CAggs */ + return hash_create("Watermark const values", 4, &hctl, HASH_ELEM | HASH_CONTEXT | HASH_BLOBS); +} + +/* + * Get a constant value for our watermark function. The constant is cached + * in a hash map to ensure we use the same constant for invocations of the + * watermark function with the same parameter across the whole query. 
+ */ +static Const * +get_watermark_const(HTAB *watermarks, int32 watermark_hypertable_id, List *range_table_oids) +{ + bool found; + WatermarkConstEntry *watermark_const = + hash_search(watermarks, &watermark_hypertable_id, HASH_ENTER, &found); + + if (!found) + { + /* + * Check that watermark table is also range table (for proper invalidation) + */ + Oid ht_relid = ts_hypertable_id_to_relid(watermark_hypertable_id, false); + + /* Given table is not a part of our range tables */ + if (!list_member_oid(range_table_oids, ht_relid)) + { + watermark_const->watermark_constant = NULL; + return NULL; + } + + /* Not found, create a new constant */ + int64 watermark = ts_cagg_watermark_get(watermark_hypertable_id); + Const *const_watermark = makeConst(INT8OID, + -1, + InvalidOid, + sizeof(int64), + Int64GetDatum(watermark), + false, + FLOAT8PASSBYVAL); + watermark_const->watermark_constant = const_watermark; + } + + return watermark_const->watermark_constant; +} + +static void +replace_watermark_const(ConstifyWatermarkContext *context) +{ + Assert(context != NULL); + Assert(context->valid_query); + + /* We need to have at least one watermark value */ + if (list_length(context->watermark_functions) < 1) + return; + + HTAB *watermarks = init_watermark_map(); + + /* Iterate over the function parents and the actual watermark functions. 
Get a + * const value for each function and replace the reference to the watermark function + * in the function parent + */ + Assert(list_length(context->watermark_parent_functions) == + list_length(context->watermark_functions)); + + ListCell *parent_lc, *watermark_lc; + forboth (parent_lc, + context->watermark_parent_functions, + watermark_lc, + context->watermark_functions) + { + FuncExpr *watermark_function = lfirst(watermark_lc); + Assert(context->watermark_function_oid == watermark_function->funcid); + Const *arg = (Const *) linitial(watermark_function->args); + int32 watermark_hypertable_id = DatumGetInt32(arg->constvalue); + + Const *watermark_const = + get_watermark_const(watermarks, watermark_hypertable_id, context->relids); + + /* No constant created, it means the watermark function's table is not a range + * table and no invalidations would be processed. */ + if (watermark_const == NULL) + continue; + + /* replace cagg_watermark FuncExpr node by a Const node */ + if (IsA(lfirst(parent_lc), FuncExpr)) + { + FuncExpr *parent_func_expr = castNode(FuncExpr, lfirst(parent_lc)); + linitial(parent_func_expr->args) = (Node *) watermark_const; + } + else + { + /* Check that the assumed parent function is our parent function */ + CoalesceExpr *parent_coalesce_expr = castNode(CoalesceExpr, lfirst(parent_lc)); + linitial(parent_coalesce_expr->args) = (Node *) watermark_const; + } + } + + /* Clean up the hash map */ + hash_destroy(watermarks); +} + +void +constify_cagg_watermark(Query *parse) +{ + if (parse == NULL) + return; + + if (!could_be_realtime_cagg_query(parse)) + return; + + Node *node = (Node *) parse; + + ConstifyWatermarkContext context = { 0 }; + context.watermark_function_oid = get_watermark_function_oid(); + context.valid_query = true; + + Ensure(OidIsValid(context.watermark_function_oid), + "unable to determine watermark function Oid"); + + /* Get Oid of all timestamp converter functions */ + context.to_timestamp_func_oids = NIL; + + 
context.to_timestamp_func_oids = + lappend_oid(context.to_timestamp_func_oids, cagg_get_boundary_converter_funcoid(DATEOID)); + + context.to_timestamp_func_oids = lappend_oid(context.to_timestamp_func_oids, + cagg_get_boundary_converter_funcoid(TIMESTAMPOID)); + + context.to_timestamp_func_oids = + lappend_oid(context.to_timestamp_func_oids, + cagg_get_boundary_converter_funcoid(TIMESTAMPTZOID)); + + /* Collect function information */ + constify_cagg_watermark_walker(node, &context); + + /* Replace watermark functions with const value */ + if (context.valid_query) + replace_watermark_const(&context); +} diff --git a/tsl/src/continuous_aggs/planner.h b/tsl/src/continuous_aggs/planner.h new file mode 100644 index 00000000000..58e072b079b --- /dev/null +++ b/tsl/src/continuous_aggs/planner.h @@ -0,0 +1,14 @@ +/* + * This file and its contents are licensed under the Timescale License. + * Please see the included NOTICE for copyright information and + * LICENSE-TIMESCALE for a copy of the license. 
+ */ + +#ifndef TIMESCALEDB_TSL_CONTINUOUS_AGGS_PLANNER_H +#define TIMESCALEDB_TSL_CONTINUOUS_AGGS_PLANNER_H + +#include "planner/planner.h" + +void constify_cagg_watermark(Query *parse); + +#endif diff --git a/tsl/src/init.c b/tsl/src/init.c index 7429e7ed098..33be636ff11 100644 --- a/tsl/src/init.c +++ b/tsl/src/init.c @@ -177,6 +177,7 @@ CrossModuleFunctions tsl_cm_functions = { .recompress_chunk_segmentwise = tsl_recompress_chunk_segmentwise, .get_compressed_chunk_index_for_recompression = tsl_get_compressed_chunk_index_for_recompression, + .preprocess_query_tsl = tsl_preprocess_query, }; static void diff --git a/tsl/src/planner.c b/tsl/src/planner.c index 19d116fc6d0..0491aedd34a 100644 --- a/tsl/src/planner.c +++ b/tsl/src/planner.c @@ -14,6 +14,7 @@ #include "nodes/skip_scan/skip_scan.h" #include "chunk.h" #include "compat/compat.h" +#include "continuous_aggs/planner.h" #include "debug_guc.h" #include "guc.h" #include "hypertable_cache.h" @@ -187,3 +188,18 @@ tsl_set_rel_pathlist(PlannerInfo *root, RelOptInfo *rel, Index rti, RangeTblEntr return; } } + +/* + * Run preprocess query optimizations + */ +void +tsl_preprocess_query(Query *parse) +{ + Assert(parse != NULL); + + /* Check if constification of watermark values is enabled */ + if (ts_guc_enable_cagg_watermark_constify) + { + constify_cagg_watermark(parse); + } +} diff --git a/tsl/src/planner.h b/tsl/src/planner.h index 2ce2acb14ec..d37c32c3bf2 100644 --- a/tsl/src/planner.h +++ b/tsl/src/planner.h @@ -16,3 +16,4 @@ void tsl_create_upper_paths_hook(PlannerInfo *, UpperRelationKind, RelOptInfo *, void tsl_set_rel_pathlist_query(PlannerInfo *, RelOptInfo *, Index, RangeTblEntry *, Hypertable *); void tsl_set_rel_pathlist_dml(PlannerInfo *, RelOptInfo *, Index, RangeTblEntry *, Hypertable *); void tsl_set_rel_pathlist(PlannerInfo *root, RelOptInfo *rel, Index rti, RangeTblEntry *rte); +void tsl_preprocess_query(Query *parse); diff --git a/tsl/test/expected/cagg_query-14.out 
b/tsl/test/expected/cagg_query-14.out deleted file mode 100644 index 734cbbc7e47..00000000000 --- a/tsl/test/expected/cagg_query-14.out +++ /dev/null @@ -1,821 +0,0 @@ --- This file and its contents are licensed under the Timescale License. --- Please see the included NOTICE for copyright information and --- LICENSE-TIMESCALE for a copy of the license. -\set TEST_BASE_NAME cagg_query -SELECT - format('%s/results/%s_results_view.out', :'TEST_OUTPUT_DIR', :'TEST_BASE_NAME') as "TEST_RESULTS_VIEW", - format('%s/results/%s_results_view_hashagg.out', :'TEST_OUTPUT_DIR', :'TEST_BASE_NAME') as "TEST_RESULTS_VIEW_HASHAGG", - format('%s/results/%s_results_table.out', :'TEST_OUTPUT_DIR', :'TEST_BASE_NAME') as "TEST_RESULTS_TABLE" -\gset -SELECT format('\! diff %s %s', :'TEST_RESULTS_VIEW', :'TEST_RESULTS_TABLE') as "DIFF_CMD", - format('\! diff %s %s', :'TEST_RESULTS_VIEW_HASHAGG', :'TEST_RESULTS_TABLE') as "DIFF_CMD2" -\gset -\set EXPLAIN 'EXPLAIN (VERBOSE, COSTS OFF)' -SET client_min_messages TO NOTICE; -CREATE TABLE conditions ( - timec TIMESTAMPTZ NOT NULL, - location TEXT NOT NULL, - temperature DOUBLE PRECISION NULL, - humidity DOUBLE PRECISION NULL - ); -select table_name from create_hypertable( 'conditions', 'timec'); - table_name ------------- - conditions -(1 row) - -insert into conditions values ( '2018-01-01 09:20:00-08', 'SFO', 55, 45); -insert into conditions values ( '2018-01-02 09:30:00-08', 'por', 100, 100); -insert into conditions values ( '2018-01-02 09:20:00-08', 'SFO', 65, 45); -insert into conditions values ( '2018-01-02 09:10:00-08', 'NYC', 65, 45); -insert into conditions values ( '2018-11-01 09:20:00-08', 'NYC', 45, 30); -insert into conditions values ( '2018-11-01 10:40:00-08', 'NYC', 55, 35); -insert into conditions values ( '2018-11-01 11:50:00-08', 'NYC', 65, 40); -insert into conditions values ( '2018-11-01 12:10:00-08', 'NYC', 75, 45); -insert into conditions values ( '2018-11-01 13:10:00-08', 'NYC', 85, 50); -insert into conditions values ( 
'2018-11-02 09:20:00-08', 'NYC', 10, 10); -insert into conditions values ( '2018-11-02 10:30:00-08', 'NYC', 20, 15); -insert into conditions values ( '2018-11-02 11:40:00-08', 'NYC', null, null); -insert into conditions values ( '2018-11-03 09:50:00-08', 'NYC', null, null); -create table location_tab( locid integer, locname text ); -insert into location_tab values( 1, 'SFO'); -insert into location_tab values( 2, 'NYC'); -insert into location_tab values( 3, 'por'); -create materialized view mat_m1( location, timec, minl, sumt , sumh) -WITH (timescaledb.continuous, timescaledb.materialized_only=false) -as -select location, time_bucket('1day', timec), min(location), sum(temperature),sum(humidity) -from conditions -group by time_bucket('1day', timec), location WITH NO DATA; ---compute time_bucketted max+bucket_width for the materialized view -SELECT time_bucket('1day' , q.timeval+ '1day'::interval) -FROM ( select max(timec)as timeval from conditions ) as q; - time_bucket ------------------------------- - Sat Nov 03 17:00:00 2018 PDT -(1 row) - -CALL refresh_continuous_aggregate('mat_m1', NULL, NULL); ---test first/last -create materialized view mat_m2(location, timec, firsth, lasth, maxtemp, mintemp) -WITH (timescaledb.continuous, timescaledb.materialized_only=false) -as -select location, time_bucket('1day', timec), first(humidity, timec), last(humidity, timec), max(temperature), min(temperature) -from conditions -group by time_bucket('1day', timec), location WITH NO DATA; ---time that refresh assumes as now() for repeatability -SELECT time_bucket('1day' , q.timeval+ '1day'::interval) -FROM ( select max(timec)as timeval from conditions ) as q; - time_bucket ------------------------------- - Sat Nov 03 17:00:00 2018 PDT -(1 row) - -CALL refresh_continuous_aggregate('mat_m2', NULL, NULL); ---normal view -- -create or replace view regview( location, timec, minl, sumt , sumh) -as -select location, time_bucket('1day', timec), min(location), sum(temperature),sum(humidity) 
-from conditions -group by location, time_bucket('1day', timec); -set enable_hashagg = false; --- NO pushdown cases --- ---when we have addl. attrs in order by that are not in the --- group by, we will still need a sort -:EXPLAIN -select * from mat_m1 order by sumh, sumt, minl, timec ; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Sort Key: _materialized_hypertable_2.sumh, _materialized_hypertable_2.sumt, _materialized_hypertable_2.minl, _materialized_hypertable_2.timec - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_2 - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_2_3_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_3_chunk - Output: _hyper_2_3_chunk.location, _hyper_2_3_chunk.timec, _hyper_2_3_chunk.minl, _hyper_2_3_chunk.sumt, _hyper_2_3_chunk.sumh - Index Cond: (_hyper_2_3_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) - -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk - Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: (_hyper_2_4_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), 
'-infinity'::timestamp with time zone)) - -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), min(conditions.location), sum(conditions.temperature), sum(conditions.humidity) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.temperature, conditions.humidity - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.temperature, conditions.humidity - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 1 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity - Index Cond: (_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) -(29 rows) - -:EXPLAIN -select * from regview order by timec desc; - QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _hyper_1_1_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)), (min(_hyper_1_1_chunk.location)), (sum(_hyper_1_1_chunk.temperature)), (sum(_hyper_1_1_chunk.humidity)) - Sort Key: (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)) DESC - -> GroupAggregate - Output: _hyper_1_1_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)), min(_hyper_1_1_chunk.location), sum(_hyper_1_1_chunk.temperature), sum(_hyper_1_1_chunk.humidity) - Group Key: 
_hyper_1_1_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)) - -> Sort - Output: _hyper_1_1_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)), _hyper_1_1_chunk.temperature, _hyper_1_1_chunk.humidity - Sort Key: _hyper_1_1_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)) - -> Result - Output: _hyper_1_1_chunk.location, time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec), _hyper_1_1_chunk.temperature, _hyper_1_1_chunk.humidity - -> Append - -> Seq Scan on _timescaledb_internal._hyper_1_1_chunk - Output: _hyper_1_1_chunk.location, _hyper_1_1_chunk.timec, _hyper_1_1_chunk.temperature, _hyper_1_1_chunk.humidity - -> Seq Scan on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity -(16 rows) - --- PUSHDOWN cases -- --- all group by elts in order by , reorder group by elts to match --- group by order --- This should prevent an additional sort after GroupAggregate -:EXPLAIN -select * from mat_m1 order by timec desc, location; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Sort Key: _materialized_hypertable_2.timec DESC, _materialized_hypertable_2.location - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_2 - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using 
_hyper_2_3_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_3_chunk - Output: _hyper_2_3_chunk.location, _hyper_2_3_chunk.timec, _hyper_2_3_chunk.minl, _hyper_2_3_chunk.sumt, _hyper_2_3_chunk.sumh - Index Cond: (_hyper_2_3_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) - -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk - Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: (_hyper_2_4_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) - -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), min(conditions.location), sum(conditions.temperature), sum(conditions.humidity) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.temperature, conditions.humidity - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.temperature, conditions.humidity - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 1 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity - Index Cond: (_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) -(29 rows) - -:EXPLAIN -select * from 
mat_m1 order by location, timec desc; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Sort Key: _materialized_hypertable_2.location, _materialized_hypertable_2.timec DESC - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_2 - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_2_3_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_3_chunk - Output: _hyper_2_3_chunk.location, _hyper_2_3_chunk.timec, _hyper_2_3_chunk.minl, _hyper_2_3_chunk.sumt, _hyper_2_3_chunk.sumh - Index Cond: (_hyper_2_3_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) - -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk - Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: (_hyper_2_4_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) - -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), min(conditions.location), sum(conditions.temperature), sum(conditions.humidity) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> 
Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.temperature, conditions.humidity - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.temperature, conditions.humidity - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 1 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity - Index Cond: (_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) -(29 rows) - -:EXPLAIN -select * from mat_m1 order by location, timec asc; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Sort Key: _materialized_hypertable_2.location, _materialized_hypertable_2.timec - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_2 - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_2_3_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_3_chunk - Output: _hyper_2_3_chunk.location, _hyper_2_3_chunk.timec, _hyper_2_3_chunk.minl, 
_hyper_2_3_chunk.sumt, _hyper_2_3_chunk.sumh - Index Cond: (_hyper_2_3_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) - -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk - Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: (_hyper_2_4_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) - -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), min(conditions.location), sum(conditions.temperature), sum(conditions.humidity) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.temperature, conditions.humidity - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.temperature, conditions.humidity - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 1 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity - Index Cond: (_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) -(29 rows) - -:EXPLAIN -select * from mat_m1 where timec > '2018-10-01' order by timec desc; - QUERY PLAN 
--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Sort Key: _materialized_hypertable_2.timec DESC - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_2 - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk - Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: ((_hyper_2_4_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) AND (_hyper_2_4_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), min(conditions.location), sum(conditions.temperature), sum(conditions.humidity) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.temperature, conditions.humidity - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, time_bucket('@ 1 
day'::interval, conditions.timec), conditions.temperature, conditions.humidity - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity - Index Cond: ((_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) -(27 rows) - --- outer sort is used by mat_m1 for grouping. But doesn't avoid a sort after the join --- -:EXPLAIN -select l.locid, mat_m1.* from mat_m1 , location_tab l where timec > '2018-10-01' and l.locname = mat_m1.location order by timec desc; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: l.locid, _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Sort Key: _materialized_hypertable_2.timec DESC - -> Hash Join - Output: l.locid, _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Hash Cond: (l.locname = _materialized_hypertable_2.location) - -> Seq Scan on public.location_tab l - Output: l.locid, l.locname - -> Hash - Output: _materialized_hypertable_2.location, 
_materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_2 - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk - Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: ((_hyper_2_4_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) AND (_hyper_2_4_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), min(conditions.location), sum(conditions.temperature), sum(conditions.humidity) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.temperature, conditions.humidity - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.temperature, conditions.humidity - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity - Index Cond: 
((_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) -(34 rows) - -:EXPLAIN -select * from mat_m2 where timec > '2018-10-01' order by timec desc; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Sort Key: _materialized_hypertable_3.timec DESC - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_3 - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_3_6_chunk__materialized_hypertable_3_timec_idx on _timescaledb_internal._hyper_3_6_chunk - Output: _hyper_3_6_chunk.location, _hyper_3_6_chunk.timec, _hyper_3_6_chunk.firsth, _hyper_3_6_chunk.lasth, _hyper_3_6_chunk.maxtemp, _hyper_3_6_chunk.mintemp - Index Cond: ((_hyper_3_6_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) AND (_hyper_3_6_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - -> 
GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), first(conditions.humidity, conditions.timec), last(conditions.humidity, conditions.timec), max(conditions.temperature), min(conditions.temperature) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.humidity, conditions.timec, conditions.temperature - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.humidity, conditions.timec, conditions.temperature - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.temperature - Index Cond: ((_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) -(27 rows) - -:EXPLAIN -select * from (select * from mat_m2 where timec > '2018-10-01' order by timec desc ) as q limit 1; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Limit - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, 
_materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - -> Sort - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Sort Key: _materialized_hypertable_3.timec DESC - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_3 - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_3_6_chunk__materialized_hypertable_3_timec_idx on _timescaledb_internal._hyper_3_6_chunk - Output: _hyper_3_6_chunk.location, _hyper_3_6_chunk.timec, _hyper_3_6_chunk.firsth, _hyper_3_6_chunk.lasth, _hyper_3_6_chunk.maxtemp, _hyper_3_6_chunk.mintemp - Index Cond: ((_hyper_3_6_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) AND (_hyper_3_6_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), first(conditions.humidity, conditions.timec), last(conditions.humidity, conditions.timec), max(conditions.temperature), min(conditions.temperature) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.humidity, conditions.timec, conditions.temperature - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Custom Scan (ChunkAppend) on public.conditions - 
Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.humidity, conditions.timec, conditions.temperature - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.temperature - Index Cond: ((_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) -(29 rows) - -:EXPLAIN -select * from (select * from mat_m2 where timec > '2018-10-01' order by timec desc , location asc nulls first) as q limit 1; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Limit - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - -> Sort - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Sort Key: _materialized_hypertable_3.timec DESC, _materialized_hypertable_3.location NULLS FIRST - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_3 - Output: _materialized_hypertable_3.location, 
_materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_3_6_chunk__materialized_hypertable_3_timec_idx on _timescaledb_internal._hyper_3_6_chunk - Output: _hyper_3_6_chunk.location, _hyper_3_6_chunk.timec, _hyper_3_6_chunk.firsth, _hyper_3_6_chunk.lasth, _hyper_3_6_chunk.maxtemp, _hyper_3_6_chunk.mintemp - Index Cond: ((_hyper_3_6_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) AND (_hyper_3_6_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), first(conditions.humidity, conditions.timec), last(conditions.humidity, conditions.timec), max(conditions.temperature), min(conditions.temperature) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.humidity, conditions.timec, conditions.temperature - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.humidity, conditions.timec, conditions.temperature - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.temperature - Index Cond: ((_hyper_1_2_chunk.timec >= 
COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) -(29 rows) - ---plans with CTE -:EXPLAIN -with m1 as ( -Select * from mat_m2 where timec > '2018-10-01' order by timec desc ) -select * from m1; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Sort Key: _materialized_hypertable_3.timec DESC - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_3 - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_3_6_chunk__materialized_hypertable_3_timec_idx on _timescaledb_internal._hyper_3_6_chunk - Output: _hyper_3_6_chunk.location, _hyper_3_6_chunk.timec, _hyper_3_6_chunk.firsth, _hyper_3_6_chunk.lasth, _hyper_3_6_chunk.maxtemp, _hyper_3_6_chunk.mintemp - Index Cond: ((_hyper_3_6_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) AND (_hyper_3_6_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp 
with time zone)) - -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), first(conditions.humidity, conditions.timec), last(conditions.humidity, conditions.timec), max(conditions.temperature), min(conditions.temperature) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.humidity, conditions.timec, conditions.temperature - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.humidity, conditions.timec, conditions.temperature - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.temperature - Index Cond: ((_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) -(27 rows) - --- should reorder mat_m1 group by only based on mat_m1 order-by -:EXPLAIN -select * from mat_m1, mat_m2 where mat_m1.timec > '2018-10-01' and mat_m1.timec = mat_m2.timec order by mat_m1.timec desc; - QUERY PLAN 
--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh, _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Sort Key: _materialized_hypertable_2.timec DESC - -> Hash Join - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh, _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Hash Cond: (_materialized_hypertable_3.timec = _materialized_hypertable_2.timec) - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_3 - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_3_5_chunk__materialized_hypertable_3_timec_idx on _timescaledb_internal._hyper_3_5_chunk - Output: _hyper_3_5_chunk.location, _hyper_3_5_chunk.timec, _hyper_3_5_chunk.firsth, _hyper_3_5_chunk.lasth, _hyper_3_5_chunk.maxtemp, 
_hyper_3_5_chunk.mintemp - Index Cond: (_hyper_3_5_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) - -> Index Scan using _hyper_3_6_chunk__materialized_hypertable_3_timec_idx on _timescaledb_internal._hyper_3_6_chunk - Output: _hyper_3_6_chunk.location, _hyper_3_6_chunk.timec, _hyper_3_6_chunk.firsth, _hyper_3_6_chunk.lasth, _hyper_3_6_chunk.maxtemp, _hyper_3_6_chunk.mintemp - Index Cond: (_hyper_3_6_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) - -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), first(conditions.humidity, conditions.timec), last(conditions.humidity, conditions.timec), max(conditions.temperature), min(conditions.temperature) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.humidity, conditions.timec, conditions.temperature - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.humidity, conditions.timec, conditions.temperature - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 1 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.temperature - Index Cond: (_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) - -> Hash - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, 
_materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_2 - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk - Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: ((_hyper_2_4_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) AND (_hyper_2_4_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - -> GroupAggregate - Output: conditions_1.location, (time_bucket('@ 1 day'::interval, conditions_1.timec)), min(conditions_1.location), sum(conditions_1.temperature), sum(conditions_1.humidity) - Group Key: (time_bucket('@ 1 day'::interval, conditions_1.timec)), conditions_1.location - -> Sort - Output: conditions_1.location, (time_bucket('@ 1 day'::interval, conditions_1.timec)), conditions_1.temperature, conditions_1.humidity - Sort Key: (time_bucket('@ 1 day'::interval, conditions_1.timec)), conditions_1.location - -> Custom Scan (ChunkAppend) on public.conditions conditions_1 - Output: conditions_1.location, time_bucket('@ 1 day'::interval, conditions_1.timec), conditions_1.temperature, conditions_1.humidity - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk _hyper_1_2_chunk_1 - Output: _hyper_1_2_chunk_1.location, _hyper_1_2_chunk_1.timec, _hyper_1_2_chunk_1.temperature, 
_hyper_1_2_chunk_1.humidity - Index Cond: ((_hyper_1_2_chunk_1.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) AND (_hyper_1_2_chunk_1.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk_1.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) -(58 rows) - ---should reorder only for mat_m1. -:EXPLAIN -select * from mat_m1, regview where mat_m1.timec > '2018-10-01' and mat_m1.timec = regview.timec order by mat_m1.timec desc; - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh, _hyper_1_1_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)), (min(_hyper_1_1_chunk.location)), (sum(_hyper_1_1_chunk.temperature)), (sum(_hyper_1_1_chunk.humidity)) - Sort Key: _materialized_hypertable_2.timec DESC - -> Hash Join - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh, _hyper_1_1_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)), (min(_hyper_1_1_chunk.location)), (sum(_hyper_1_1_chunk.temperature)), (sum(_hyper_1_1_chunk.humidity)) - Hash Cond: ((time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)) = _materialized_hypertable_2.timec) - -> GroupAggregate - Output: _hyper_1_1_chunk.location, (time_bucket('@ 1 
day'::interval, _hyper_1_1_chunk.timec)), min(_hyper_1_1_chunk.location), sum(_hyper_1_1_chunk.temperature), sum(_hyper_1_1_chunk.humidity) - Group Key: _hyper_1_1_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)) - -> Sort - Output: _hyper_1_1_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)), _hyper_1_1_chunk.temperature, _hyper_1_1_chunk.humidity - Sort Key: _hyper_1_1_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)) - -> Result - Output: _hyper_1_1_chunk.location, time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec), _hyper_1_1_chunk.temperature, _hyper_1_1_chunk.humidity - -> Append - -> Seq Scan on _timescaledb_internal._hyper_1_1_chunk - Output: _hyper_1_1_chunk.location, _hyper_1_1_chunk.timec, _hyper_1_1_chunk.temperature, _hyper_1_1_chunk.humidity - -> Seq Scan on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity - -> Hash - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_2 - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk - Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: ((_hyper_2_4_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) AND 
(_hyper_2_4_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - -> GroupAggregate - Output: conditions_1.location, (time_bucket('@ 1 day'::interval, conditions_1.timec)), min(conditions_1.location), sum(conditions_1.temperature), sum(conditions_1.humidity) - Group Key: (time_bucket('@ 1 day'::interval, conditions_1.timec)), conditions_1.location - -> Sort - Output: conditions_1.location, (time_bucket('@ 1 day'::interval, conditions_1.timec)), conditions_1.temperature, conditions_1.humidity - Sort Key: (time_bucket('@ 1 day'::interval, conditions_1.timec)), conditions_1.location - -> Custom Scan (ChunkAppend) on public.conditions conditions_1 - Output: conditions_1.location, time_bucket('@ 1 day'::interval, conditions_1.timec), conditions_1.temperature, conditions_1.humidity - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk _hyper_1_2_chunk_1 - Output: _hyper_1_2_chunk_1.location, _hyper_1_2_chunk_1.timec, _hyper_1_2_chunk_1.temperature, _hyper_1_2_chunk_1.humidity - Index Cond: ((_hyper_1_2_chunk_1.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) AND (_hyper_1_2_chunk_1.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk_1.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) -(45 rows) - -select l.locid, mat_m1.* from mat_m1 , location_tab l where timec > '2018-10-01' and l.locname = mat_m1.location order by timec desc; - locid | location | timec | minl | sumt | sumh --------+----------+------------------------------+------+------+------ - 2 | NYC | Fri Nov 02 17:00:00 2018 PDT | NYC | | - 2 | NYC | Thu Nov 01 17:00:00 2018 PDT | NYC | 30 | 25 - 2 | NYC | Wed Oct 31 17:00:00 2018 PDT | NYC | 325 | 200 -(3 rows) - -\set ECHO none 
----- Run the same queries with hash agg enabled now -set enable_hashagg = true; -\set ECHO none ---- Run the queries directly on the table now -set enable_hashagg = true; -\set ECHO none --- diff results view select and table select -:DIFF_CMD -:DIFF_CMD2 ---check if the guc works , reordering will not work -set timescaledb.enable_cagg_reorder_groupby = false; -set enable_hashagg = false; -:EXPLAIN -select * from mat_m1 order by timec desc, location; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Sort Key: _materialized_hypertable_2.timec DESC, _materialized_hypertable_2.location - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_2 - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_2_3_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_3_chunk - Output: _hyper_2_3_chunk.location, _hyper_2_3_chunk.timec, _hyper_2_3_chunk.minl, _hyper_2_3_chunk.sumt, _hyper_2_3_chunk.sumh - Index Cond: (_hyper_2_3_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) - -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk - Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: 
(_hyper_2_4_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) - -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), min(conditions.location), sum(conditions.temperature), sum(conditions.humidity) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.temperature, conditions.humidity - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.temperature, conditions.humidity - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 1 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity - Index Cond: (_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) -(29 rows) - ------------------------------------------------------------------------ --- Test the cagg_watermark function. The watermark gives the point --- where to UNION raw and materialized data in real-time --- aggregation. Specifically, test that the watermark caching works as --- expected. ------------------------------------------------------------------------ --- Insert some more data so that there is something to UNION in --- real-time aggregation. 
-insert into conditions values ( '2018-12-02 20:10:00-08', 'SFO', 55, 45); -insert into conditions values ( '2018-12-02 21:20:00-08', 'SFO', 65, 45); -insert into conditions values ( '2018-12-02 20:30:00-08', 'NYC', 65, 45); -insert into conditions values ( '2018-12-02 21:50:00-08', 'NYC', 45, 30); --- Test join of two caggs. Joining two caggs will force the cache to --- reset every time the watermark function is invoked on a different --- cagg in the same query. -SELECT mat_hypertable_id AS mat_id, - raw_hypertable_id AS raw_id, - schema_name AS mat_schema, - table_name AS mat_name, - format('%I.%I', schema_name, table_name) AS mat_table -FROM _timescaledb_catalog.continuous_agg ca, _timescaledb_catalog.hypertable h -WHERE user_view_name='mat_m1' -AND h.id = ca.mat_hypertable_id \gset -BEGIN; --- Query without join -SELECT m1.location, m1.timec, sumt, sumh -FROM mat_m1 m1 -ORDER BY m1.location COLLATE "C", m1.timec DESC -LIMIT 10; - location | timec | sumt | sumh -----------+------------------------------+------+------ - NYC | Sun Dec 02 16:00:00 2018 PST | 110 | 75 - NYC | Fri Nov 02 17:00:00 2018 PDT | | - NYC | Thu Nov 01 17:00:00 2018 PDT | 30 | 25 - NYC | Wed Oct 31 17:00:00 2018 PDT | 325 | 200 - NYC | Mon Jan 01 16:00:00 2018 PST | 65 | 45 - SFO | Sun Dec 02 16:00:00 2018 PST | 120 | 90 - SFO | Mon Jan 01 16:00:00 2018 PST | 65 | 45 - SFO | Sun Dec 31 16:00:00 2017 PST | 55 | 45 - por | Mon Jan 01 16:00:00 2018 PST | 100 | 100 -(9 rows) - --- Query that joins two caggs. This should force the watermark cache --- to reset when the materialized hypertable ID changes. A hash join --- could potentially read all values from mat_m1 then all values from --- mat_m2. This would be the optimal situation for cagg_watermark --- caching. We want to avoid it in tests to see that caching doesn't --- do anything wrong in worse situations (e.g., a nested loop join). 
-SET enable_hashjoin=false; -SELECT m1.location, m1.timec, sumt, sumh, firsth, lasth, maxtemp, mintemp -FROM mat_m1 m1 RIGHT JOIN mat_m2 m2 -ON (m1.location = m2.location -AND m1.timec = m2.timec) -ORDER BY m1.location COLLATE "C", m1.timec DESC -LIMIT 10; - location | timec | sumt | sumh | firsth | lasth | maxtemp | mintemp -----------+------------------------------+------+------+--------+-------+---------+--------- - NYC | Sun Dec 02 16:00:00 2018 PST | 110 | 75 | 45 | 30 | 65 | 45 - NYC | Fri Nov 02 17:00:00 2018 PDT | | | | | | - NYC | Thu Nov 01 17:00:00 2018 PDT | 30 | 25 | 10 | | 20 | 10 - NYC | Wed Oct 31 17:00:00 2018 PDT | 325 | 200 | 30 | 50 | 85 | 45 - NYC | Mon Jan 01 16:00:00 2018 PST | 65 | 45 | 45 | 45 | 65 | 65 - SFO | Sun Dec 02 16:00:00 2018 PST | 120 | 90 | 45 | 45 | 65 | 55 - SFO | Mon Jan 01 16:00:00 2018 PST | 65 | 45 | 45 | 45 | 65 | 65 - SFO | Sun Dec 31 16:00:00 2017 PST | 55 | 45 | 45 | 45 | 55 | 55 - por | Mon Jan 01 16:00:00 2018 PST | 100 | 100 | 100 | 100 | 100 | 100 -(9 rows) - --- Show the current watermark -SELECT _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:mat_id)); - to_timestamp ------------------------------- - Sat Nov 03 17:00:00 2018 PDT -(1 row) - --- The watermark should, in this case, be the same as the invalidation --- threshold -SELECT _timescaledb_functions.to_timestamp(watermark) -FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold -WHERE hypertable_id = :raw_id; - to_timestamp ------------------------------- - Sat Nov 03 17:00:00 2018 PDT -(1 row) - --- The watermark is the end of materialization (end of last bucket) --- while the MAX is the start of the last bucket -SELECT max(timec) FROM :mat_table; - max ------------------------------- - Fri Nov 02 17:00:00 2018 PDT -(1 row) - --- Drop the most recent chunk -SELECT chunk_name, range_start, range_end -FROM timescaledb_information.chunks -WHERE hypertable_name = :'mat_name'; - chunk_name | range_start | range_end 
-------------------+------------------------------+------------------------------ - _hyper_2_3_chunk | Wed Nov 29 16:00:00 2017 PST | Wed Feb 07 16:00:00 2018 PST - _hyper_2_4_chunk | Wed Sep 05 17:00:00 2018 PDT | Wed Nov 14 16:00:00 2018 PST -(2 rows) - -SELECT drop_chunks('mat_m1', newer_than=>'2018-01-01'::timestamptz); - drop_chunks ----------------------------------------- - _timescaledb_internal._hyper_2_4_chunk -(1 row) - -SELECT chunk_name, range_start, range_end -FROM timescaledb_information.chunks -WHERE hypertable_name = :'mat_name'; - chunk_name | range_start | range_end -------------------+------------------------------+------------------------------ - _hyper_2_3_chunk | Wed Nov 29 16:00:00 2017 PST | Wed Feb 07 16:00:00 2018 PST -(1 row) - --- The watermark should be updated to reflect the dropped data (i.e., --- the cache should be reset) -SELECT _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:mat_id)); - to_timestamp ------------------------------- - Tue Jan 02 16:00:00 2018 PST -(1 row) - --- Since we removed the last chunk, the invalidation threshold doesn't --- move back, while the watermark does. 
-SELECT _timescaledb_functions.to_timestamp(watermark) -FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold -WHERE hypertable_id = :raw_id; - to_timestamp ------------------------------- - Sat Nov 03 17:00:00 2018 PDT -(1 row) - --- Compare the new watermark to the MAX time in the table -SELECT max(timec) FROM :mat_table; - max ------------------------------- - Mon Jan 01 16:00:00 2018 PST -(1 row) - --- Try a subtransaction -SAVEPOINT clear_cagg; -SELECT m1.location, m1.timec, sumt, sumh, firsth, lasth, maxtemp, mintemp -FROM mat_m1 m1 RIGHT JOIN mat_m2 m2 -ON (m1.location = m2.location -AND m1.timec = m2.timec) -ORDER BY m1.location COLLATE "C", m1.timec DESC -LIMIT 10; - location | timec | sumt | sumh | firsth | lasth | maxtemp | mintemp -----------+------------------------------+------+------+--------+-------+---------+--------- - NYC | Sun Dec 02 16:00:00 2018 PST | 110 | 75 | 45 | 30 | 65 | 45 - NYC | Fri Nov 02 17:00:00 2018 PDT | | | | | | - NYC | Thu Nov 01 17:00:00 2018 PDT | 30 | 25 | 10 | | 20 | 10 - NYC | Wed Oct 31 17:00:00 2018 PDT | 325 | 200 | 30 | 50 | 85 | 45 - NYC | Mon Jan 01 16:00:00 2018 PST | 65 | 45 | 45 | 45 | 65 | 65 - SFO | Sun Dec 02 16:00:00 2018 PST | 120 | 90 | 45 | 45 | 65 | 55 - SFO | Mon Jan 01 16:00:00 2018 PST | 65 | 45 | 45 | 45 | 65 | 65 - SFO | Sun Dec 31 16:00:00 2017 PST | 55 | 45 | 45 | 45 | 55 | 55 - por | Mon Jan 01 16:00:00 2018 PST | 100 | 100 | 100 | 100 | 100 | 100 -(9 rows) - -ALTER MATERIALIZED VIEW mat_m1 SET (timescaledb.materialized_only=true); -SELECT m1.location, m1.timec, sumt, sumh, firsth, lasth, maxtemp, mintemp -FROM mat_m1 m1 RIGHT JOIN mat_m2 m2 -ON (m1.location = m2.location -AND m1.timec = m2.timec) -ORDER BY m1.location COLLATE "C" NULLS LAST, m1.timec DESC NULLS LAST, firsth NULLS LAST, - lasth NULLS LAST, mintemp NULLS LAST, maxtemp NULLS LAST -LIMIT 10; - location | timec | sumt | sumh | firsth | lasth | maxtemp | mintemp 
-----------+------------------------------+------+------+--------+-------+---------+--------- - NYC | Mon Jan 01 16:00:00 2018 PST | 65 | 45 | 45 | 45 | 65 | 65 - SFO | Mon Jan 01 16:00:00 2018 PST | 65 | 45 | 45 | 45 | 65 | 65 - SFO | Sun Dec 31 16:00:00 2017 PST | 55 | 45 | 45 | 45 | 55 | 55 - por | Mon Jan 01 16:00:00 2018 PST | 100 | 100 | 100 | 100 | 100 | 100 - | | | | 10 | | 20 | 10 - | | | | 30 | 50 | 85 | 45 - | | | | 45 | 30 | 65 | 45 - | | | | 45 | 45 | 65 | 55 - | | | | | | | -(9 rows) - -ROLLBACK; diff --git a/tsl/test/expected/cagg_query-15.out b/tsl/test/expected/cagg_query-15.out deleted file mode 100644 index 7b1e7dd6b6f..00000000000 --- a/tsl/test/expected/cagg_query-15.out +++ /dev/null @@ -1,849 +0,0 @@ --- This file and its contents are licensed under the Timescale License. --- Please see the included NOTICE for copyright information and --- LICENSE-TIMESCALE for a copy of the license. -\set TEST_BASE_NAME cagg_query -SELECT - format('%s/results/%s_results_view.out', :'TEST_OUTPUT_DIR', :'TEST_BASE_NAME') as "TEST_RESULTS_VIEW", - format('%s/results/%s_results_view_hashagg.out', :'TEST_OUTPUT_DIR', :'TEST_BASE_NAME') as "TEST_RESULTS_VIEW_HASHAGG", - format('%s/results/%s_results_table.out', :'TEST_OUTPUT_DIR', :'TEST_BASE_NAME') as "TEST_RESULTS_TABLE" -\gset -SELECT format('\! diff %s %s', :'TEST_RESULTS_VIEW', :'TEST_RESULTS_TABLE') as "DIFF_CMD", - format('\! 
diff %s %s', :'TEST_RESULTS_VIEW_HASHAGG', :'TEST_RESULTS_TABLE') as "DIFF_CMD2" -\gset -\set EXPLAIN 'EXPLAIN (VERBOSE, COSTS OFF)' -SET client_min_messages TO NOTICE; -CREATE TABLE conditions ( - timec TIMESTAMPTZ NOT NULL, - location TEXT NOT NULL, - temperature DOUBLE PRECISION NULL, - humidity DOUBLE PRECISION NULL - ); -select table_name from create_hypertable( 'conditions', 'timec'); - table_name ------------- - conditions -(1 row) - -insert into conditions values ( '2018-01-01 09:20:00-08', 'SFO', 55, 45); -insert into conditions values ( '2018-01-02 09:30:00-08', 'por', 100, 100); -insert into conditions values ( '2018-01-02 09:20:00-08', 'SFO', 65, 45); -insert into conditions values ( '2018-01-02 09:10:00-08', 'NYC', 65, 45); -insert into conditions values ( '2018-11-01 09:20:00-08', 'NYC', 45, 30); -insert into conditions values ( '2018-11-01 10:40:00-08', 'NYC', 55, 35); -insert into conditions values ( '2018-11-01 11:50:00-08', 'NYC', 65, 40); -insert into conditions values ( '2018-11-01 12:10:00-08', 'NYC', 75, 45); -insert into conditions values ( '2018-11-01 13:10:00-08', 'NYC', 85, 50); -insert into conditions values ( '2018-11-02 09:20:00-08', 'NYC', 10, 10); -insert into conditions values ( '2018-11-02 10:30:00-08', 'NYC', 20, 15); -insert into conditions values ( '2018-11-02 11:40:00-08', 'NYC', null, null); -insert into conditions values ( '2018-11-03 09:50:00-08', 'NYC', null, null); -create table location_tab( locid integer, locname text ); -insert into location_tab values( 1, 'SFO'); -insert into location_tab values( 2, 'NYC'); -insert into location_tab values( 3, 'por'); -create materialized view mat_m1( location, timec, minl, sumt , sumh) -WITH (timescaledb.continuous, timescaledb.materialized_only=false) -as -select location, time_bucket('1day', timec), min(location), sum(temperature),sum(humidity) -from conditions -group by time_bucket('1day', timec), location WITH NO DATA; ---compute time_bucketted max+bucket_width for the materialized 
view -SELECT time_bucket('1day' , q.timeval+ '1day'::interval) -FROM ( select max(timec)as timeval from conditions ) as q; - time_bucket ------------------------------- - Sat Nov 03 17:00:00 2018 PDT -(1 row) - -CALL refresh_continuous_aggregate('mat_m1', NULL, NULL); ---test first/last -create materialized view mat_m2(location, timec, firsth, lasth, maxtemp, mintemp) -WITH (timescaledb.continuous, timescaledb.materialized_only=false) -as -select location, time_bucket('1day', timec), first(humidity, timec), last(humidity, timec), max(temperature), min(temperature) -from conditions -group by time_bucket('1day', timec), location WITH NO DATA; ---time that refresh assumes as now() for repeatability -SELECT time_bucket('1day' , q.timeval+ '1day'::interval) -FROM ( select max(timec)as timeval from conditions ) as q; - time_bucket ------------------------------- - Sat Nov 03 17:00:00 2018 PDT -(1 row) - -CALL refresh_continuous_aggregate('mat_m2', NULL, NULL); ---normal view -- -create or replace view regview( location, timec, minl, sumt , sumh) -as -select location, time_bucket('1day', timec), min(location), sum(temperature),sum(humidity) -from conditions -group by location, time_bucket('1day', timec); -set enable_hashagg = false; --- NO pushdown cases --- ---when we have addl. 
attrs in order by that are not in the --- group by, we will still need a sort -:EXPLAIN -select * from mat_m1 order by sumh, sumt, minl, timec ; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Sort Key: _materialized_hypertable_2.sumh, _materialized_hypertable_2.sumt, _materialized_hypertable_2.minl, _materialized_hypertable_2.timec - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_2 - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_2_3_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_3_chunk - Output: _hyper_2_3_chunk.location, _hyper_2_3_chunk.timec, _hyper_2_3_chunk.minl, _hyper_2_3_chunk.sumt, _hyper_2_3_chunk.sumh - Index Cond: (_hyper_2_3_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) - -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk - Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: (_hyper_2_4_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) - -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), 
min(conditions.location), sum(conditions.temperature), sum(conditions.humidity) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.temperature, conditions.humidity - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Result - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.temperature, conditions.humidity - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, conditions.timec, conditions.temperature, conditions.humidity - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 1 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity - Index Cond: (_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) -(31 rows) - -:EXPLAIN -select * from regview order by timec desc; - QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _hyper_1_1_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)), (min(_hyper_1_1_chunk.location)), (sum(_hyper_1_1_chunk.temperature)), (sum(_hyper_1_1_chunk.humidity)) - Sort Key: (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)) DESC - -> GroupAggregate - Output: _hyper_1_1_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)), min(_hyper_1_1_chunk.location), sum(_hyper_1_1_chunk.temperature), sum(_hyper_1_1_chunk.humidity) - Group Key: _hyper_1_1_chunk.location, (time_bucket('@ 1 
day'::interval, _hyper_1_1_chunk.timec)) - -> Sort - Output: _hyper_1_1_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)), _hyper_1_1_chunk.temperature, _hyper_1_1_chunk.humidity - Sort Key: _hyper_1_1_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)) - -> Result - Output: _hyper_1_1_chunk.location, time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec), _hyper_1_1_chunk.temperature, _hyper_1_1_chunk.humidity - -> Append - -> Seq Scan on _timescaledb_internal._hyper_1_1_chunk - Output: _hyper_1_1_chunk.location, _hyper_1_1_chunk.timec, _hyper_1_1_chunk.temperature, _hyper_1_1_chunk.humidity - -> Seq Scan on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity -(16 rows) - --- PUSHDOWN cases -- --- all group by elts in order by , reorder group by elts to match --- group by order --- This should prevent an additional sort after GroupAggregate -:EXPLAIN -select * from mat_m1 order by timec desc, location; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Sort Key: _materialized_hypertable_2.timec DESC, _materialized_hypertable_2.location - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_2 - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using 
_hyper_2_3_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_3_chunk - Output: _hyper_2_3_chunk.location, _hyper_2_3_chunk.timec, _hyper_2_3_chunk.minl, _hyper_2_3_chunk.sumt, _hyper_2_3_chunk.sumh - Index Cond: (_hyper_2_3_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) - -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk - Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: (_hyper_2_4_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) - -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), min(conditions.location), sum(conditions.temperature), sum(conditions.humidity) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.temperature, conditions.humidity - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Result - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.temperature, conditions.humidity - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, conditions.timec, conditions.temperature, conditions.humidity - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 1 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity - Index Cond: (_hyper_1_2_chunk.timec >= 
COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) -(31 rows) - -:EXPLAIN -select * from mat_m1 order by location, timec desc; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Sort Key: _materialized_hypertable_2.location, _materialized_hypertable_2.timec DESC - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_2 - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_2_3_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_3_chunk - Output: _hyper_2_3_chunk.location, _hyper_2_3_chunk.timec, _hyper_2_3_chunk.minl, _hyper_2_3_chunk.sumt, _hyper_2_3_chunk.sumh - Index Cond: (_hyper_2_3_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) - -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk - Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: (_hyper_2_4_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) - -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), 
min(conditions.location), sum(conditions.temperature), sum(conditions.humidity) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.temperature, conditions.humidity - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Result - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.temperature, conditions.humidity - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, conditions.timec, conditions.temperature, conditions.humidity - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 1 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity - Index Cond: (_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) -(31 rows) - -:EXPLAIN -select * from mat_m1 order by location, timec asc; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Sort Key: _materialized_hypertable_2.location, _materialized_hypertable_2.timec - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_2 - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - 
Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_2_3_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_3_chunk - Output: _hyper_2_3_chunk.location, _hyper_2_3_chunk.timec, _hyper_2_3_chunk.minl, _hyper_2_3_chunk.sumt, _hyper_2_3_chunk.sumh - Index Cond: (_hyper_2_3_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) - -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk - Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: (_hyper_2_4_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) - -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), min(conditions.location), sum(conditions.temperature), sum(conditions.humidity) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.temperature, conditions.humidity - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Result - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.temperature, conditions.humidity - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, conditions.timec, conditions.temperature, conditions.humidity - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 1 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, 
_hyper_1_2_chunk.humidity - Index Cond: (_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) -(31 rows) - -:EXPLAIN -select * from mat_m1 where timec > '2018-10-01' order by timec desc; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Sort Key: _materialized_hypertable_2.timec DESC - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_2 - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk - Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: ((_hyper_2_4_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) AND (_hyper_2_4_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), min(conditions.location), sum(conditions.temperature), sum(conditions.humidity) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Sort - Output: 
conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.temperature, conditions.humidity - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Result - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.temperature, conditions.humidity - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, conditions.timec, conditions.temperature, conditions.humidity - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity - Index Cond: ((_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) -(29 rows) - --- outer sort is used by mat_m1 for grouping. 
But doesn't avoid a sort after the join --- -:EXPLAIN -select l.locid, mat_m1.* from mat_m1 , location_tab l where timec > '2018-10-01' and l.locname = mat_m1.location order by timec desc; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: l.locid, _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Sort Key: _materialized_hypertable_2.timec DESC - -> Hash Join - Output: l.locid, _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Hash Cond: (l.locname = _materialized_hypertable_2.location) - -> Seq Scan on public.location_tab l - Output: l.locid, l.locname - -> Hash - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_2 - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk - Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: ((_hyper_2_4_chunk.timec < 
COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) AND (_hyper_2_4_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), min(conditions.location), sum(conditions.temperature), sum(conditions.humidity) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.temperature, conditions.humidity - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Result - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.temperature, conditions.humidity - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, conditions.timec, conditions.temperature, conditions.humidity - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity - Index Cond: ((_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) -(36 rows) - -:EXPLAIN -select * from mat_m2 where timec > '2018-10-01' order by timec desc; - QUERY PLAN 
--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Sort Key: _materialized_hypertable_3.timec DESC - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_3 - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_3_6_chunk__materialized_hypertable_3_timec_idx on _timescaledb_internal._hyper_3_6_chunk - Output: _hyper_3_6_chunk.location, _hyper_3_6_chunk.timec, _hyper_3_6_chunk.firsth, _hyper_3_6_chunk.lasth, _hyper_3_6_chunk.maxtemp, _hyper_3_6_chunk.mintemp - Index Cond: ((_hyper_3_6_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) AND (_hyper_3_6_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), first(conditions.humidity, conditions.timec), last(conditions.humidity, conditions.timec), max(conditions.temperature), min(conditions.temperature) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.humidity, conditions.timec, 
conditions.temperature - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Result - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.humidity, conditions.timec, conditions.temperature - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, conditions.timec, conditions.humidity, conditions.temperature - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.temperature - Index Cond: ((_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) -(29 rows) - -:EXPLAIN -select * from (select * from mat_m2 where timec > '2018-10-01' order by timec desc ) as q limit 1; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Limit - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - -> Sort - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, 
_materialized_hypertable_3.mintemp - Sort Key: _materialized_hypertable_3.timec DESC - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_3 - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_3_6_chunk__materialized_hypertable_3_timec_idx on _timescaledb_internal._hyper_3_6_chunk - Output: _hyper_3_6_chunk.location, _hyper_3_6_chunk.timec, _hyper_3_6_chunk.firsth, _hyper_3_6_chunk.lasth, _hyper_3_6_chunk.maxtemp, _hyper_3_6_chunk.mintemp - Index Cond: ((_hyper_3_6_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) AND (_hyper_3_6_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), first(conditions.humidity, conditions.timec), last(conditions.humidity, conditions.timec), max(conditions.temperature), min(conditions.temperature) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.humidity, conditions.timec, conditions.temperature - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Result - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.humidity, conditions.timec, conditions.temperature - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, conditions.timec, conditions.humidity, conditions.temperature - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 
0 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.temperature - Index Cond: ((_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) -(31 rows) - -:EXPLAIN -select * from (select * from mat_m2 where timec > '2018-10-01' order by timec desc , location asc nulls first) as q limit 1; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Limit - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - -> Sort - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Sort Key: _materialized_hypertable_3.timec DESC, _materialized_hypertable_3.location NULLS FIRST - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_3 - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Startup Exclusion: true - Runtime Exclusion: 
false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_3_6_chunk__materialized_hypertable_3_timec_idx on _timescaledb_internal._hyper_3_6_chunk - Output: _hyper_3_6_chunk.location, _hyper_3_6_chunk.timec, _hyper_3_6_chunk.firsth, _hyper_3_6_chunk.lasth, _hyper_3_6_chunk.maxtemp, _hyper_3_6_chunk.mintemp - Index Cond: ((_hyper_3_6_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) AND (_hyper_3_6_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), first(conditions.humidity, conditions.timec), last(conditions.humidity, conditions.timec), max(conditions.temperature), min(conditions.temperature) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.humidity, conditions.timec, conditions.temperature - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Result - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.humidity, conditions.timec, conditions.temperature - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, conditions.timec, conditions.humidity, conditions.temperature - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.temperature - Index Cond: ((_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 
PDT'::timestamp with time zone)) - Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) -(31 rows) - ---plans with CTE -:EXPLAIN -with m1 as ( -Select * from mat_m2 where timec > '2018-10-01' order by timec desc ) -select * from m1; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Sort Key: _materialized_hypertable_3.timec DESC - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_3 - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_3_6_chunk__materialized_hypertable_3_timec_idx on _timescaledb_internal._hyper_3_6_chunk - Output: _hyper_3_6_chunk.location, _hyper_3_6_chunk.timec, _hyper_3_6_chunk.firsth, _hyper_3_6_chunk.lasth, _hyper_3_6_chunk.maxtemp, _hyper_3_6_chunk.mintemp - Index Cond: ((_hyper_3_6_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) AND (_hyper_3_6_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), first(conditions.humidity, conditions.timec), 
last(conditions.humidity, conditions.timec), max(conditions.temperature), min(conditions.temperature) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.humidity, conditions.timec, conditions.temperature - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Result - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.humidity, conditions.timec, conditions.temperature - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, conditions.timec, conditions.humidity, conditions.temperature - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.temperature - Index Cond: ((_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) -(29 rows) - --- should reorder mat_m1 group by only based on mat_m1 order-by -:EXPLAIN -select * from mat_m1, mat_m2 where mat_m1.timec > '2018-10-01' and mat_m1.timec = mat_m2.timec order by mat_m1.timec desc; - QUERY PLAN 
--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh, _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Sort Key: _materialized_hypertable_2.timec DESC - -> Hash Join - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh, _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Hash Cond: (_materialized_hypertable_3.timec = _materialized_hypertable_2.timec) - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_3 - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_3_5_chunk__materialized_hypertable_3_timec_idx on _timescaledb_internal._hyper_3_5_chunk - Output: _hyper_3_5_chunk.location, _hyper_3_5_chunk.timec, _hyper_3_5_chunk.firsth, _hyper_3_5_chunk.lasth, _hyper_3_5_chunk.maxtemp, 
_hyper_3_5_chunk.mintemp - Index Cond: (_hyper_3_5_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) - -> Index Scan using _hyper_3_6_chunk__materialized_hypertable_3_timec_idx on _timescaledb_internal._hyper_3_6_chunk - Output: _hyper_3_6_chunk.location, _hyper_3_6_chunk.timec, _hyper_3_6_chunk.firsth, _hyper_3_6_chunk.lasth, _hyper_3_6_chunk.maxtemp, _hyper_3_6_chunk.mintemp - Index Cond: (_hyper_3_6_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) - -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), first(conditions.humidity, conditions.timec), last(conditions.humidity, conditions.timec), max(conditions.temperature), min(conditions.temperature) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.humidity, conditions.timec, conditions.temperature - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Result - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.humidity, conditions.timec, conditions.temperature - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, conditions.timec, conditions.humidity, conditions.temperature - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 1 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.temperature - Index Cond: (_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time 
zone)) - -> Hash - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_2 - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk - Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: ((_hyper_2_4_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) AND (_hyper_2_4_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - -> GroupAggregate - Output: conditions_1.location, (time_bucket('@ 1 day'::interval, conditions_1.timec)), min(conditions_1.location), sum(conditions_1.temperature), sum(conditions_1.humidity) - Group Key: (time_bucket('@ 1 day'::interval, conditions_1.timec)), conditions_1.location - -> Sort - Output: conditions_1.location, (time_bucket('@ 1 day'::interval, conditions_1.timec)), conditions_1.temperature, conditions_1.humidity - Sort Key: (time_bucket('@ 1 day'::interval, conditions_1.timec)), conditions_1.location - -> Result - Output: conditions_1.location, time_bucket('@ 1 day'::interval, conditions_1.timec), conditions_1.temperature, conditions_1.humidity - -> Custom Scan (ChunkAppend) on public.conditions conditions_1 - Output: conditions_1.location, conditions_1.timec, conditions_1.temperature, conditions_1.humidity - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index 
Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk _hyper_1_2_chunk_1 - Output: _hyper_1_2_chunk_1.location, _hyper_1_2_chunk_1.timec, _hyper_1_2_chunk_1.temperature, _hyper_1_2_chunk_1.humidity - Index Cond: ((_hyper_1_2_chunk_1.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) AND (_hyper_1_2_chunk_1.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk_1.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) -(62 rows) - ---should reorder only for mat_m1. -:EXPLAIN -select * from mat_m1, regview where mat_m1.timec > '2018-10-01' and mat_m1.timec = regview.timec order by mat_m1.timec desc; - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh, _hyper_1_1_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)), (min(_hyper_1_1_chunk.location)), (sum(_hyper_1_1_chunk.temperature)), (sum(_hyper_1_1_chunk.humidity)) - Sort Key: _materialized_hypertable_2.timec DESC - -> Hash Join - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh, _hyper_1_1_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)), (min(_hyper_1_1_chunk.location)), (sum(_hyper_1_1_chunk.temperature)), 
(sum(_hyper_1_1_chunk.humidity)) - Hash Cond: ((time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)) = _materialized_hypertable_2.timec) - -> GroupAggregate - Output: _hyper_1_1_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)), min(_hyper_1_1_chunk.location), sum(_hyper_1_1_chunk.temperature), sum(_hyper_1_1_chunk.humidity) - Group Key: _hyper_1_1_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)) - -> Sort - Output: _hyper_1_1_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)), _hyper_1_1_chunk.temperature, _hyper_1_1_chunk.humidity - Sort Key: _hyper_1_1_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)) - -> Result - Output: _hyper_1_1_chunk.location, time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec), _hyper_1_1_chunk.temperature, _hyper_1_1_chunk.humidity - -> Append - -> Seq Scan on _timescaledb_internal._hyper_1_1_chunk - Output: _hyper_1_1_chunk.location, _hyper_1_1_chunk.timec, _hyper_1_1_chunk.temperature, _hyper_1_1_chunk.humidity - -> Seq Scan on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity - -> Hash - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_2 - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk - Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, 
_hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: ((_hyper_2_4_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) AND (_hyper_2_4_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - -> GroupAggregate - Output: conditions_1.location, (time_bucket('@ 1 day'::interval, conditions_1.timec)), min(conditions_1.location), sum(conditions_1.temperature), sum(conditions_1.humidity) - Group Key: (time_bucket('@ 1 day'::interval, conditions_1.timec)), conditions_1.location - -> Sort - Output: conditions_1.location, (time_bucket('@ 1 day'::interval, conditions_1.timec)), conditions_1.temperature, conditions_1.humidity - Sort Key: (time_bucket('@ 1 day'::interval, conditions_1.timec)), conditions_1.location - -> Result - Output: conditions_1.location, time_bucket('@ 1 day'::interval, conditions_1.timec), conditions_1.temperature, conditions_1.humidity - -> Custom Scan (ChunkAppend) on public.conditions conditions_1 - Output: conditions_1.location, conditions_1.timec, conditions_1.temperature, conditions_1.humidity - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk _hyper_1_2_chunk_1 - Output: _hyper_1_2_chunk_1.location, _hyper_1_2_chunk_1.timec, _hyper_1_2_chunk_1.temperature, _hyper_1_2_chunk_1.humidity - Index Cond: ((_hyper_1_2_chunk_1.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) AND (_hyper_1_2_chunk_1.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk_1.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) -(47 rows) - -select l.locid, mat_m1.* from mat_m1 , location_tab l where timec > '2018-10-01' and l.locname = mat_m1.location order 
by timec desc; - locid | location | timec | minl | sumt | sumh --------+----------+------------------------------+------+------+------ - 2 | NYC | Fri Nov 02 17:00:00 2018 PDT | NYC | | - 2 | NYC | Thu Nov 01 17:00:00 2018 PDT | NYC | 30 | 25 - 2 | NYC | Wed Oct 31 17:00:00 2018 PDT | NYC | 325 | 200 -(3 rows) - -\set ECHO none ----- Run the same queries with hash agg enabled now -set enable_hashagg = true; -\set ECHO none ---- Run the queries directly on the table now -set enable_hashagg = true; -\set ECHO none --- diff results view select and table select -:DIFF_CMD -:DIFF_CMD2 ---check if the guc works , reordering will not work -set timescaledb.enable_cagg_reorder_groupby = false; -set enable_hashagg = false; -:EXPLAIN -select * from mat_m1 order by timec desc, location; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Sort Key: _materialized_hypertable_2.timec DESC, _materialized_hypertable_2.location - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_2 - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_2_3_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_3_chunk - Output: _hyper_2_3_chunk.location, _hyper_2_3_chunk.timec, _hyper_2_3_chunk.minl, _hyper_2_3_chunk.sumt, _hyper_2_3_chunk.sumh - Index Cond: (_hyper_2_3_chunk.timec < 
COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) - -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk - Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: (_hyper_2_4_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) - -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), min(conditions.location), sum(conditions.temperature), sum(conditions.humidity) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.temperature, conditions.humidity - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Result - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.temperature, conditions.humidity - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, conditions.timec, conditions.temperature, conditions.humidity - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 1 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity - Index Cond: (_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) -(31 rows) - ------------------------------------------------------------------------ --- Test the cagg_watermark function. 
The watermark gives the point --- where to UNION raw and materialized data in real-time --- aggregation. Specifically, test that the watermark caching works as --- expected. ------------------------------------------------------------------------ --- Insert some more data so that there is something to UNION in --- real-time aggregation. -insert into conditions values ( '2018-12-02 20:10:00-08', 'SFO', 55, 45); -insert into conditions values ( '2018-12-02 21:20:00-08', 'SFO', 65, 45); -insert into conditions values ( '2018-12-02 20:30:00-08', 'NYC', 65, 45); -insert into conditions values ( '2018-12-02 21:50:00-08', 'NYC', 45, 30); --- Test join of two caggs. Joining two caggs will force the cache to --- reset every time the watermark function is invoked on a different --- cagg in the same query. -SELECT mat_hypertable_id AS mat_id, - raw_hypertable_id AS raw_id, - schema_name AS mat_schema, - table_name AS mat_name, - format('%I.%I', schema_name, table_name) AS mat_table -FROM _timescaledb_catalog.continuous_agg ca, _timescaledb_catalog.hypertable h -WHERE user_view_name='mat_m1' -AND h.id = ca.mat_hypertable_id \gset -BEGIN; --- Query without join -SELECT m1.location, m1.timec, sumt, sumh -FROM mat_m1 m1 -ORDER BY m1.location COLLATE "C", m1.timec DESC -LIMIT 10; - location | timec | sumt | sumh -----------+------------------------------+------+------ - NYC | Sun Dec 02 16:00:00 2018 PST | 110 | 75 - NYC | Fri Nov 02 17:00:00 2018 PDT | | - NYC | Thu Nov 01 17:00:00 2018 PDT | 30 | 25 - NYC | Wed Oct 31 17:00:00 2018 PDT | 325 | 200 - NYC | Mon Jan 01 16:00:00 2018 PST | 65 | 45 - SFO | Sun Dec 02 16:00:00 2018 PST | 120 | 90 - SFO | Mon Jan 01 16:00:00 2018 PST | 65 | 45 - SFO | Sun Dec 31 16:00:00 2017 PST | 55 | 45 - por | Mon Jan 01 16:00:00 2018 PST | 100 | 100 -(9 rows) - --- Query that joins two caggs. This should force the watermark cache --- to reset when the materialized hypertable ID changes. 
A hash join --- could potentially read all values from mat_m1 then all values from --- mat_m2. This would be the optimal situation for cagg_watermark --- caching. We want to avoid it in tests to see that caching doesn't --- do anything wrong in worse situations (e.g., a nested loop join). -SET enable_hashjoin=false; -SELECT m1.location, m1.timec, sumt, sumh, firsth, lasth, maxtemp, mintemp -FROM mat_m1 m1 RIGHT JOIN mat_m2 m2 -ON (m1.location = m2.location -AND m1.timec = m2.timec) -ORDER BY m1.location COLLATE "C", m1.timec DESC -LIMIT 10; - location | timec | sumt | sumh | firsth | lasth | maxtemp | mintemp -----------+------------------------------+------+------+--------+-------+---------+--------- - NYC | Sun Dec 02 16:00:00 2018 PST | 110 | 75 | 45 | 30 | 65 | 45 - NYC | Fri Nov 02 17:00:00 2018 PDT | | | | | | - NYC | Thu Nov 01 17:00:00 2018 PDT | 30 | 25 | 10 | | 20 | 10 - NYC | Wed Oct 31 17:00:00 2018 PDT | 325 | 200 | 30 | 50 | 85 | 45 - NYC | Mon Jan 01 16:00:00 2018 PST | 65 | 45 | 45 | 45 | 65 | 65 - SFO | Sun Dec 02 16:00:00 2018 PST | 120 | 90 | 45 | 45 | 65 | 55 - SFO | Mon Jan 01 16:00:00 2018 PST | 65 | 45 | 45 | 45 | 65 | 65 - SFO | Sun Dec 31 16:00:00 2017 PST | 55 | 45 | 45 | 45 | 55 | 55 - por | Mon Jan 01 16:00:00 2018 PST | 100 | 100 | 100 | 100 | 100 | 100 -(9 rows) - --- Show the current watermark -SELECT _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:mat_id)); - to_timestamp ------------------------------- - Sat Nov 03 17:00:00 2018 PDT -(1 row) - --- The watermark should, in this case, be the same as the invalidation --- threshold -SELECT _timescaledb_functions.to_timestamp(watermark) -FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold -WHERE hypertable_id = :raw_id; - to_timestamp ------------------------------- - Sat Nov 03 17:00:00 2018 PDT -(1 row) - --- The watermark is the end of materialization (end of last bucket) --- while the MAX is the start of the last bucket -SELECT max(timec) FROM 
:mat_table; - max ------------------------------- - Fri Nov 02 17:00:00 2018 PDT -(1 row) - --- Drop the most recent chunk -SELECT chunk_name, range_start, range_end -FROM timescaledb_information.chunks -WHERE hypertable_name = :'mat_name'; - chunk_name | range_start | range_end -------------------+------------------------------+------------------------------ - _hyper_2_3_chunk | Wed Nov 29 16:00:00 2017 PST | Wed Feb 07 16:00:00 2018 PST - _hyper_2_4_chunk | Wed Sep 05 17:00:00 2018 PDT | Wed Nov 14 16:00:00 2018 PST -(2 rows) - -SELECT drop_chunks('mat_m1', newer_than=>'2018-01-01'::timestamptz); - drop_chunks ----------------------------------------- - _timescaledb_internal._hyper_2_4_chunk -(1 row) - -SELECT chunk_name, range_start, range_end -FROM timescaledb_information.chunks -WHERE hypertable_name = :'mat_name'; - chunk_name | range_start | range_end -------------------+------------------------------+------------------------------ - _hyper_2_3_chunk | Wed Nov 29 16:00:00 2017 PST | Wed Feb 07 16:00:00 2018 PST -(1 row) - --- The watermark should be updated to reflect the dropped data (i.e., --- the cache should be reset) -SELECT _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:mat_id)); - to_timestamp ------------------------------- - Tue Jan 02 16:00:00 2018 PST -(1 row) - --- Since we removed the last chunk, the invalidation threshold doesn't --- move back, while the watermark does. 
-SELECT _timescaledb_functions.to_timestamp(watermark) -FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold -WHERE hypertable_id = :raw_id; - to_timestamp ------------------------------- - Sat Nov 03 17:00:00 2018 PDT -(1 row) - --- Compare the new watermark to the MAX time in the table -SELECT max(timec) FROM :mat_table; - max ------------------------------- - Mon Jan 01 16:00:00 2018 PST -(1 row) - --- Try a subtransaction -SAVEPOINT clear_cagg; -SELECT m1.location, m1.timec, sumt, sumh, firsth, lasth, maxtemp, mintemp -FROM mat_m1 m1 RIGHT JOIN mat_m2 m2 -ON (m1.location = m2.location -AND m1.timec = m2.timec) -ORDER BY m1.location COLLATE "C", m1.timec DESC -LIMIT 10; - location | timec | sumt | sumh | firsth | lasth | maxtemp | mintemp -----------+------------------------------+------+------+--------+-------+---------+--------- - NYC | Sun Dec 02 16:00:00 2018 PST | 110 | 75 | 45 | 30 | 65 | 45 - NYC | Fri Nov 02 17:00:00 2018 PDT | | | | | | - NYC | Thu Nov 01 17:00:00 2018 PDT | 30 | 25 | 10 | | 20 | 10 - NYC | Wed Oct 31 17:00:00 2018 PDT | 325 | 200 | 30 | 50 | 85 | 45 - NYC | Mon Jan 01 16:00:00 2018 PST | 65 | 45 | 45 | 45 | 65 | 65 - SFO | Sun Dec 02 16:00:00 2018 PST | 120 | 90 | 45 | 45 | 65 | 55 - SFO | Mon Jan 01 16:00:00 2018 PST | 65 | 45 | 45 | 45 | 65 | 65 - SFO | Sun Dec 31 16:00:00 2017 PST | 55 | 45 | 45 | 45 | 55 | 55 - por | Mon Jan 01 16:00:00 2018 PST | 100 | 100 | 100 | 100 | 100 | 100 -(9 rows) - -ALTER MATERIALIZED VIEW mat_m1 SET (timescaledb.materialized_only=true); -SELECT m1.location, m1.timec, sumt, sumh, firsth, lasth, maxtemp, mintemp -FROM mat_m1 m1 RIGHT JOIN mat_m2 m2 -ON (m1.location = m2.location -AND m1.timec = m2.timec) -ORDER BY m1.location COLLATE "C" NULLS LAST, m1.timec DESC NULLS LAST, firsth NULLS LAST, - lasth NULLS LAST, mintemp NULLS LAST, maxtemp NULLS LAST -LIMIT 10; - location | timec | sumt | sumh | firsth | lasth | maxtemp | mintemp 
-----------+------------------------------+------+------+--------+-------+---------+--------- - NYC | Mon Jan 01 16:00:00 2018 PST | 65 | 45 | 45 | 45 | 65 | 65 - SFO | Mon Jan 01 16:00:00 2018 PST | 65 | 45 | 45 | 45 | 65 | 65 - SFO | Sun Dec 31 16:00:00 2017 PST | 55 | 45 | 45 | 45 | 55 | 55 - por | Mon Jan 01 16:00:00 2018 PST | 100 | 100 | 100 | 100 | 100 | 100 - | | | | 10 | | 20 | 10 - | | | | 30 | 50 | 85 | 45 - | | | | 45 | 30 | 65 | 45 - | | | | 45 | 45 | 65 | 55 - | | | | | | | -(9 rows) - -ROLLBACK; diff --git a/tsl/test/expected/cagg_query-16.out b/tsl/test/expected/cagg_query-16.out deleted file mode 100644 index 7b1e7dd6b6f..00000000000 --- a/tsl/test/expected/cagg_query-16.out +++ /dev/null @@ -1,849 +0,0 @@ --- This file and its contents are licensed under the Timescale License. --- Please see the included NOTICE for copyright information and --- LICENSE-TIMESCALE for a copy of the license. -\set TEST_BASE_NAME cagg_query -SELECT - format('%s/results/%s_results_view.out', :'TEST_OUTPUT_DIR', :'TEST_BASE_NAME') as "TEST_RESULTS_VIEW", - format('%s/results/%s_results_view_hashagg.out', :'TEST_OUTPUT_DIR', :'TEST_BASE_NAME') as "TEST_RESULTS_VIEW_HASHAGG", - format('%s/results/%s_results_table.out', :'TEST_OUTPUT_DIR', :'TEST_BASE_NAME') as "TEST_RESULTS_TABLE" -\gset -SELECT format('\! diff %s %s', :'TEST_RESULTS_VIEW', :'TEST_RESULTS_TABLE') as "DIFF_CMD", - format('\! 
diff %s %s', :'TEST_RESULTS_VIEW_HASHAGG', :'TEST_RESULTS_TABLE') as "DIFF_CMD2" -\gset -\set EXPLAIN 'EXPLAIN (VERBOSE, COSTS OFF)' -SET client_min_messages TO NOTICE; -CREATE TABLE conditions ( - timec TIMESTAMPTZ NOT NULL, - location TEXT NOT NULL, - temperature DOUBLE PRECISION NULL, - humidity DOUBLE PRECISION NULL - ); -select table_name from create_hypertable( 'conditions', 'timec'); - table_name ------------- - conditions -(1 row) - -insert into conditions values ( '2018-01-01 09:20:00-08', 'SFO', 55, 45); -insert into conditions values ( '2018-01-02 09:30:00-08', 'por', 100, 100); -insert into conditions values ( '2018-01-02 09:20:00-08', 'SFO', 65, 45); -insert into conditions values ( '2018-01-02 09:10:00-08', 'NYC', 65, 45); -insert into conditions values ( '2018-11-01 09:20:00-08', 'NYC', 45, 30); -insert into conditions values ( '2018-11-01 10:40:00-08', 'NYC', 55, 35); -insert into conditions values ( '2018-11-01 11:50:00-08', 'NYC', 65, 40); -insert into conditions values ( '2018-11-01 12:10:00-08', 'NYC', 75, 45); -insert into conditions values ( '2018-11-01 13:10:00-08', 'NYC', 85, 50); -insert into conditions values ( '2018-11-02 09:20:00-08', 'NYC', 10, 10); -insert into conditions values ( '2018-11-02 10:30:00-08', 'NYC', 20, 15); -insert into conditions values ( '2018-11-02 11:40:00-08', 'NYC', null, null); -insert into conditions values ( '2018-11-03 09:50:00-08', 'NYC', null, null); -create table location_tab( locid integer, locname text ); -insert into location_tab values( 1, 'SFO'); -insert into location_tab values( 2, 'NYC'); -insert into location_tab values( 3, 'por'); -create materialized view mat_m1( location, timec, minl, sumt , sumh) -WITH (timescaledb.continuous, timescaledb.materialized_only=false) -as -select location, time_bucket('1day', timec), min(location), sum(temperature),sum(humidity) -from conditions -group by time_bucket('1day', timec), location WITH NO DATA; ---compute time_bucketted max+bucket_width for the materialized 
view -SELECT time_bucket('1day' , q.timeval+ '1day'::interval) -FROM ( select max(timec)as timeval from conditions ) as q; - time_bucket ------------------------------- - Sat Nov 03 17:00:00 2018 PDT -(1 row) - -CALL refresh_continuous_aggregate('mat_m1', NULL, NULL); ---test first/last -create materialized view mat_m2(location, timec, firsth, lasth, maxtemp, mintemp) -WITH (timescaledb.continuous, timescaledb.materialized_only=false) -as -select location, time_bucket('1day', timec), first(humidity, timec), last(humidity, timec), max(temperature), min(temperature) -from conditions -group by time_bucket('1day', timec), location WITH NO DATA; ---time that refresh assumes as now() for repeatability -SELECT time_bucket('1day' , q.timeval+ '1day'::interval) -FROM ( select max(timec)as timeval from conditions ) as q; - time_bucket ------------------------------- - Sat Nov 03 17:00:00 2018 PDT -(1 row) - -CALL refresh_continuous_aggregate('mat_m2', NULL, NULL); ---normal view -- -create or replace view regview( location, timec, minl, sumt , sumh) -as -select location, time_bucket('1day', timec), min(location), sum(temperature),sum(humidity) -from conditions -group by location, time_bucket('1day', timec); -set enable_hashagg = false; --- NO pushdown cases --- ---when we have addl. 
attrs in order by that are not in the --- group by, we will still need a sort -:EXPLAIN -select * from mat_m1 order by sumh, sumt, minl, timec ; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Sort Key: _materialized_hypertable_2.sumh, _materialized_hypertable_2.sumt, _materialized_hypertable_2.minl, _materialized_hypertable_2.timec - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_2 - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_2_3_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_3_chunk - Output: _hyper_2_3_chunk.location, _hyper_2_3_chunk.timec, _hyper_2_3_chunk.minl, _hyper_2_3_chunk.sumt, _hyper_2_3_chunk.sumh - Index Cond: (_hyper_2_3_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) - -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk - Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: (_hyper_2_4_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) - -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), 
min(conditions.location), sum(conditions.temperature), sum(conditions.humidity) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.temperature, conditions.humidity - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Result - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.temperature, conditions.humidity - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, conditions.timec, conditions.temperature, conditions.humidity - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 1 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity - Index Cond: (_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) -(31 rows) - -:EXPLAIN -select * from regview order by timec desc; - QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _hyper_1_1_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)), (min(_hyper_1_1_chunk.location)), (sum(_hyper_1_1_chunk.temperature)), (sum(_hyper_1_1_chunk.humidity)) - Sort Key: (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)) DESC - -> GroupAggregate - Output: _hyper_1_1_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)), min(_hyper_1_1_chunk.location), sum(_hyper_1_1_chunk.temperature), sum(_hyper_1_1_chunk.humidity) - Group Key: _hyper_1_1_chunk.location, (time_bucket('@ 1 
day'::interval, _hyper_1_1_chunk.timec)) - -> Sort - Output: _hyper_1_1_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)), _hyper_1_1_chunk.temperature, _hyper_1_1_chunk.humidity - Sort Key: _hyper_1_1_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)) - -> Result - Output: _hyper_1_1_chunk.location, time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec), _hyper_1_1_chunk.temperature, _hyper_1_1_chunk.humidity - -> Append - -> Seq Scan on _timescaledb_internal._hyper_1_1_chunk - Output: _hyper_1_1_chunk.location, _hyper_1_1_chunk.timec, _hyper_1_1_chunk.temperature, _hyper_1_1_chunk.humidity - -> Seq Scan on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity -(16 rows) - --- PUSHDOWN cases -- --- all group by elts in order by , reorder group by elts to match --- group by order --- This should prevent an additional sort after GroupAggregate -:EXPLAIN -select * from mat_m1 order by timec desc, location; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Sort Key: _materialized_hypertable_2.timec DESC, _materialized_hypertable_2.location - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_2 - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using 
_hyper_2_3_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_3_chunk - Output: _hyper_2_3_chunk.location, _hyper_2_3_chunk.timec, _hyper_2_3_chunk.minl, _hyper_2_3_chunk.sumt, _hyper_2_3_chunk.sumh - Index Cond: (_hyper_2_3_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) - -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk - Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: (_hyper_2_4_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) - -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), min(conditions.location), sum(conditions.temperature), sum(conditions.humidity) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.temperature, conditions.humidity - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Result - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.temperature, conditions.humidity - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, conditions.timec, conditions.temperature, conditions.humidity - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 1 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity - Index Cond: (_hyper_1_2_chunk.timec >= 
COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) -(31 rows) - -:EXPLAIN -select * from mat_m1 order by location, timec desc; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Sort Key: _materialized_hypertable_2.location, _materialized_hypertable_2.timec DESC - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_2 - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_2_3_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_3_chunk - Output: _hyper_2_3_chunk.location, _hyper_2_3_chunk.timec, _hyper_2_3_chunk.minl, _hyper_2_3_chunk.sumt, _hyper_2_3_chunk.sumh - Index Cond: (_hyper_2_3_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) - -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk - Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: (_hyper_2_4_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) - -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), 
min(conditions.location), sum(conditions.temperature), sum(conditions.humidity) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.temperature, conditions.humidity - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Result - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.temperature, conditions.humidity - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, conditions.timec, conditions.temperature, conditions.humidity - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 1 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity - Index Cond: (_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) -(31 rows) - -:EXPLAIN -select * from mat_m1 order by location, timec asc; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Sort Key: _materialized_hypertable_2.location, _materialized_hypertable_2.timec - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_2 - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - 
Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_2_3_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_3_chunk - Output: _hyper_2_3_chunk.location, _hyper_2_3_chunk.timec, _hyper_2_3_chunk.minl, _hyper_2_3_chunk.sumt, _hyper_2_3_chunk.sumh - Index Cond: (_hyper_2_3_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) - -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk - Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: (_hyper_2_4_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) - -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), min(conditions.location), sum(conditions.temperature), sum(conditions.humidity) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.temperature, conditions.humidity - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Result - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.temperature, conditions.humidity - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, conditions.timec, conditions.temperature, conditions.humidity - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 1 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, 
_hyper_1_2_chunk.humidity - Index Cond: (_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) -(31 rows) - -:EXPLAIN -select * from mat_m1 where timec > '2018-10-01' order by timec desc; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Sort Key: _materialized_hypertable_2.timec DESC - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_2 - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk - Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: ((_hyper_2_4_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) AND (_hyper_2_4_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), min(conditions.location), sum(conditions.temperature), sum(conditions.humidity) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Sort - Output: 
conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.temperature, conditions.humidity - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Result - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.temperature, conditions.humidity - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, conditions.timec, conditions.temperature, conditions.humidity - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity - Index Cond: ((_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) -(29 rows) - --- outer sort is used by mat_m1 for grouping. 
But doesn't avoid a sort after the join --- -:EXPLAIN -select l.locid, mat_m1.* from mat_m1 , location_tab l where timec > '2018-10-01' and l.locname = mat_m1.location order by timec desc; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: l.locid, _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Sort Key: _materialized_hypertable_2.timec DESC - -> Hash Join - Output: l.locid, _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Hash Cond: (l.locname = _materialized_hypertable_2.location) - -> Seq Scan on public.location_tab l - Output: l.locid, l.locname - -> Hash - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_2 - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk - Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: ((_hyper_2_4_chunk.timec < 
COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) AND (_hyper_2_4_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), min(conditions.location), sum(conditions.temperature), sum(conditions.humidity) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.temperature, conditions.humidity - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Result - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.temperature, conditions.humidity - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, conditions.timec, conditions.temperature, conditions.humidity - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity - Index Cond: ((_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) -(36 rows) - -:EXPLAIN -select * from mat_m2 where timec > '2018-10-01' order by timec desc; - QUERY PLAN 
--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Sort Key: _materialized_hypertable_3.timec DESC - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_3 - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_3_6_chunk__materialized_hypertable_3_timec_idx on _timescaledb_internal._hyper_3_6_chunk - Output: _hyper_3_6_chunk.location, _hyper_3_6_chunk.timec, _hyper_3_6_chunk.firsth, _hyper_3_6_chunk.lasth, _hyper_3_6_chunk.maxtemp, _hyper_3_6_chunk.mintemp - Index Cond: ((_hyper_3_6_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) AND (_hyper_3_6_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), first(conditions.humidity, conditions.timec), last(conditions.humidity, conditions.timec), max(conditions.temperature), min(conditions.temperature) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.humidity, conditions.timec, 
conditions.temperature - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Result - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.humidity, conditions.timec, conditions.temperature - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, conditions.timec, conditions.humidity, conditions.temperature - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.temperature - Index Cond: ((_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) -(29 rows) - -:EXPLAIN -select * from (select * from mat_m2 where timec > '2018-10-01' order by timec desc ) as q limit 1; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Limit - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - -> Sort - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, 
_materialized_hypertable_3.mintemp - Sort Key: _materialized_hypertable_3.timec DESC - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_3 - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_3_6_chunk__materialized_hypertable_3_timec_idx on _timescaledb_internal._hyper_3_6_chunk - Output: _hyper_3_6_chunk.location, _hyper_3_6_chunk.timec, _hyper_3_6_chunk.firsth, _hyper_3_6_chunk.lasth, _hyper_3_6_chunk.maxtemp, _hyper_3_6_chunk.mintemp - Index Cond: ((_hyper_3_6_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) AND (_hyper_3_6_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), first(conditions.humidity, conditions.timec), last(conditions.humidity, conditions.timec), max(conditions.temperature), min(conditions.temperature) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.humidity, conditions.timec, conditions.temperature - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Result - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.humidity, conditions.timec, conditions.temperature - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, conditions.timec, conditions.humidity, conditions.temperature - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 
0 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.temperature - Index Cond: ((_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) -(31 rows) - -:EXPLAIN -select * from (select * from mat_m2 where timec > '2018-10-01' order by timec desc , location asc nulls first) as q limit 1; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Limit - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - -> Sort - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Sort Key: _materialized_hypertable_3.timec DESC, _materialized_hypertable_3.location NULLS FIRST - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_3 - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Startup Exclusion: true - Runtime Exclusion: 
false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_3_6_chunk__materialized_hypertable_3_timec_idx on _timescaledb_internal._hyper_3_6_chunk - Output: _hyper_3_6_chunk.location, _hyper_3_6_chunk.timec, _hyper_3_6_chunk.firsth, _hyper_3_6_chunk.lasth, _hyper_3_6_chunk.maxtemp, _hyper_3_6_chunk.mintemp - Index Cond: ((_hyper_3_6_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) AND (_hyper_3_6_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), first(conditions.humidity, conditions.timec), last(conditions.humidity, conditions.timec), max(conditions.temperature), min(conditions.temperature) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.humidity, conditions.timec, conditions.temperature - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Result - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.humidity, conditions.timec, conditions.temperature - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, conditions.timec, conditions.humidity, conditions.temperature - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.temperature - Index Cond: ((_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 
PDT'::timestamp with time zone)) - Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) -(31 rows) - ---plans with CTE -:EXPLAIN -with m1 as ( -Select * from mat_m2 where timec > '2018-10-01' order by timec desc ) -select * from m1; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Sort Key: _materialized_hypertable_3.timec DESC - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_3 - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_3_6_chunk__materialized_hypertable_3_timec_idx on _timescaledb_internal._hyper_3_6_chunk - Output: _hyper_3_6_chunk.location, _hyper_3_6_chunk.timec, _hyper_3_6_chunk.firsth, _hyper_3_6_chunk.lasth, _hyper_3_6_chunk.maxtemp, _hyper_3_6_chunk.mintemp - Index Cond: ((_hyper_3_6_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) AND (_hyper_3_6_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), first(conditions.humidity, conditions.timec), 
last(conditions.humidity, conditions.timec), max(conditions.temperature), min(conditions.temperature) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.humidity, conditions.timec, conditions.temperature - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Result - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.humidity, conditions.timec, conditions.temperature - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, conditions.timec, conditions.humidity, conditions.temperature - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.temperature - Index Cond: ((_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) -(29 rows) - --- should reorder mat_m1 group by only based on mat_m1 order-by -:EXPLAIN -select * from mat_m1, mat_m2 where mat_m1.timec > '2018-10-01' and mat_m1.timec = mat_m2.timec order by mat_m1.timec desc; - QUERY PLAN 
--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh, _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Sort Key: _materialized_hypertable_2.timec DESC - -> Hash Join - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh, _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Hash Cond: (_materialized_hypertable_3.timec = _materialized_hypertable_2.timec) - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_3 - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_3_5_chunk__materialized_hypertable_3_timec_idx on _timescaledb_internal._hyper_3_5_chunk - Output: _hyper_3_5_chunk.location, _hyper_3_5_chunk.timec, _hyper_3_5_chunk.firsth, _hyper_3_5_chunk.lasth, _hyper_3_5_chunk.maxtemp, 
_hyper_3_5_chunk.mintemp - Index Cond: (_hyper_3_5_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) - -> Index Scan using _hyper_3_6_chunk__materialized_hypertable_3_timec_idx on _timescaledb_internal._hyper_3_6_chunk - Output: _hyper_3_6_chunk.location, _hyper_3_6_chunk.timec, _hyper_3_6_chunk.firsth, _hyper_3_6_chunk.lasth, _hyper_3_6_chunk.maxtemp, _hyper_3_6_chunk.mintemp - Index Cond: (_hyper_3_6_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) - -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), first(conditions.humidity, conditions.timec), last(conditions.humidity, conditions.timec), max(conditions.temperature), min(conditions.temperature) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.humidity, conditions.timec, conditions.temperature - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Result - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.humidity, conditions.timec, conditions.temperature - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, conditions.timec, conditions.humidity, conditions.temperature - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 1 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.temperature - Index Cond: (_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time 
zone)) - -> Hash - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_2 - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk - Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: ((_hyper_2_4_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) AND (_hyper_2_4_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - -> GroupAggregate - Output: conditions_1.location, (time_bucket('@ 1 day'::interval, conditions_1.timec)), min(conditions_1.location), sum(conditions_1.temperature), sum(conditions_1.humidity) - Group Key: (time_bucket('@ 1 day'::interval, conditions_1.timec)), conditions_1.location - -> Sort - Output: conditions_1.location, (time_bucket('@ 1 day'::interval, conditions_1.timec)), conditions_1.temperature, conditions_1.humidity - Sort Key: (time_bucket('@ 1 day'::interval, conditions_1.timec)), conditions_1.location - -> Result - Output: conditions_1.location, time_bucket('@ 1 day'::interval, conditions_1.timec), conditions_1.temperature, conditions_1.humidity - -> Custom Scan (ChunkAppend) on public.conditions conditions_1 - Output: conditions_1.location, conditions_1.timec, conditions_1.temperature, conditions_1.humidity - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index 
Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk _hyper_1_2_chunk_1 - Output: _hyper_1_2_chunk_1.location, _hyper_1_2_chunk_1.timec, _hyper_1_2_chunk_1.temperature, _hyper_1_2_chunk_1.humidity - Index Cond: ((_hyper_1_2_chunk_1.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) AND (_hyper_1_2_chunk_1.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk_1.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) -(62 rows) - ---should reorder only for mat_m1. -:EXPLAIN -select * from mat_m1, regview where mat_m1.timec > '2018-10-01' and mat_m1.timec = regview.timec order by mat_m1.timec desc; - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh, _hyper_1_1_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)), (min(_hyper_1_1_chunk.location)), (sum(_hyper_1_1_chunk.temperature)), (sum(_hyper_1_1_chunk.humidity)) - Sort Key: _materialized_hypertable_2.timec DESC - -> Hash Join - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh, _hyper_1_1_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)), (min(_hyper_1_1_chunk.location)), (sum(_hyper_1_1_chunk.temperature)), 
(sum(_hyper_1_1_chunk.humidity)) - Hash Cond: ((time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)) = _materialized_hypertable_2.timec) - -> GroupAggregate - Output: _hyper_1_1_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)), min(_hyper_1_1_chunk.location), sum(_hyper_1_1_chunk.temperature), sum(_hyper_1_1_chunk.humidity) - Group Key: _hyper_1_1_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)) - -> Sort - Output: _hyper_1_1_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)), _hyper_1_1_chunk.temperature, _hyper_1_1_chunk.humidity - Sort Key: _hyper_1_1_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)) - -> Result - Output: _hyper_1_1_chunk.location, time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec), _hyper_1_1_chunk.temperature, _hyper_1_1_chunk.humidity - -> Append - -> Seq Scan on _timescaledb_internal._hyper_1_1_chunk - Output: _hyper_1_1_chunk.location, _hyper_1_1_chunk.timec, _hyper_1_1_chunk.temperature, _hyper_1_1_chunk.humidity - -> Seq Scan on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity - -> Hash - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_2 - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk - Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, 
_hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: ((_hyper_2_4_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) AND (_hyper_2_4_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - -> GroupAggregate - Output: conditions_1.location, (time_bucket('@ 1 day'::interval, conditions_1.timec)), min(conditions_1.location), sum(conditions_1.temperature), sum(conditions_1.humidity) - Group Key: (time_bucket('@ 1 day'::interval, conditions_1.timec)), conditions_1.location - -> Sort - Output: conditions_1.location, (time_bucket('@ 1 day'::interval, conditions_1.timec)), conditions_1.temperature, conditions_1.humidity - Sort Key: (time_bucket('@ 1 day'::interval, conditions_1.timec)), conditions_1.location - -> Result - Output: conditions_1.location, time_bucket('@ 1 day'::interval, conditions_1.timec), conditions_1.temperature, conditions_1.humidity - -> Custom Scan (ChunkAppend) on public.conditions conditions_1 - Output: conditions_1.location, conditions_1.timec, conditions_1.temperature, conditions_1.humidity - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk _hyper_1_2_chunk_1 - Output: _hyper_1_2_chunk_1.location, _hyper_1_2_chunk_1.timec, _hyper_1_2_chunk_1.temperature, _hyper_1_2_chunk_1.humidity - Index Cond: ((_hyper_1_2_chunk_1.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) AND (_hyper_1_2_chunk_1.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) - Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk_1.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) -(47 rows) - -select l.locid, mat_m1.* from mat_m1 , location_tab l where timec > '2018-10-01' and l.locname = mat_m1.location order 
by timec desc; - locid | location | timec | minl | sumt | sumh --------+----------+------------------------------+------+------+------ - 2 | NYC | Fri Nov 02 17:00:00 2018 PDT | NYC | | - 2 | NYC | Thu Nov 01 17:00:00 2018 PDT | NYC | 30 | 25 - 2 | NYC | Wed Oct 31 17:00:00 2018 PDT | NYC | 325 | 200 -(3 rows) - -\set ECHO none ----- Run the same queries with hash agg enabled now -set enable_hashagg = true; -\set ECHO none ---- Run the queries directly on the table now -set enable_hashagg = true; -\set ECHO none --- diff results view select and table select -:DIFF_CMD -:DIFF_CMD2 ---check if the guc works , reordering will not work -set timescaledb.enable_cagg_reorder_groupby = false; -set enable_hashagg = false; -:EXPLAIN -select * from mat_m1 order by timec desc, location; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- - Sort - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Sort Key: _materialized_hypertable_2.timec DESC, _materialized_hypertable_2.location - -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_2 - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_2_3_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_3_chunk - Output: _hyper_2_3_chunk.location, _hyper_2_3_chunk.timec, _hyper_2_3_chunk.minl, _hyper_2_3_chunk.sumt, _hyper_2_3_chunk.sumh - Index Cond: (_hyper_2_3_chunk.timec < 
COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) - -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk - Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: (_hyper_2_4_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) - -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), min(conditions.location), sum(conditions.temperature), sum(conditions.humidity) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.temperature, conditions.humidity - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Result - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.temperature, conditions.humidity - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, conditions.timec, conditions.temperature, conditions.humidity - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 1 - -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk - Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity - Index Cond: (_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) -(31 rows) - ------------------------------------------------------------------------ --- Test the cagg_watermark function. 
The watermark gives the point --- where to UNION raw and materialized data in real-time --- aggregation. Specifically, test that the watermark caching works as --- expected. ------------------------------------------------------------------------ --- Insert some more data so that there is something to UNION in --- real-time aggregation. -insert into conditions values ( '2018-12-02 20:10:00-08', 'SFO', 55, 45); -insert into conditions values ( '2018-12-02 21:20:00-08', 'SFO', 65, 45); -insert into conditions values ( '2018-12-02 20:30:00-08', 'NYC', 65, 45); -insert into conditions values ( '2018-12-02 21:50:00-08', 'NYC', 45, 30); --- Test join of two caggs. Joining two caggs will force the cache to --- reset every time the watermark function is invoked on a different --- cagg in the same query. -SELECT mat_hypertable_id AS mat_id, - raw_hypertable_id AS raw_id, - schema_name AS mat_schema, - table_name AS mat_name, - format('%I.%I', schema_name, table_name) AS mat_table -FROM _timescaledb_catalog.continuous_agg ca, _timescaledb_catalog.hypertable h -WHERE user_view_name='mat_m1' -AND h.id = ca.mat_hypertable_id \gset -BEGIN; --- Query without join -SELECT m1.location, m1.timec, sumt, sumh -FROM mat_m1 m1 -ORDER BY m1.location COLLATE "C", m1.timec DESC -LIMIT 10; - location | timec | sumt | sumh -----------+------------------------------+------+------ - NYC | Sun Dec 02 16:00:00 2018 PST | 110 | 75 - NYC | Fri Nov 02 17:00:00 2018 PDT | | - NYC | Thu Nov 01 17:00:00 2018 PDT | 30 | 25 - NYC | Wed Oct 31 17:00:00 2018 PDT | 325 | 200 - NYC | Mon Jan 01 16:00:00 2018 PST | 65 | 45 - SFO | Sun Dec 02 16:00:00 2018 PST | 120 | 90 - SFO | Mon Jan 01 16:00:00 2018 PST | 65 | 45 - SFO | Sun Dec 31 16:00:00 2017 PST | 55 | 45 - por | Mon Jan 01 16:00:00 2018 PST | 100 | 100 -(9 rows) - --- Query that joins two caggs. This should force the watermark cache --- to reset when the materialized hypertable ID changes. 
A hash join --- could potentially read all values from mat_m1 then all values from --- mat_m2. This would be the optimal situation for cagg_watermark --- caching. We want to avoid it in tests to see that caching doesn't --- do anything wrong in worse situations (e.g., a nested loop join). -SET enable_hashjoin=false; -SELECT m1.location, m1.timec, sumt, sumh, firsth, lasth, maxtemp, mintemp -FROM mat_m1 m1 RIGHT JOIN mat_m2 m2 -ON (m1.location = m2.location -AND m1.timec = m2.timec) -ORDER BY m1.location COLLATE "C", m1.timec DESC -LIMIT 10; - location | timec | sumt | sumh | firsth | lasth | maxtemp | mintemp -----------+------------------------------+------+------+--------+-------+---------+--------- - NYC | Sun Dec 02 16:00:00 2018 PST | 110 | 75 | 45 | 30 | 65 | 45 - NYC | Fri Nov 02 17:00:00 2018 PDT | | | | | | - NYC | Thu Nov 01 17:00:00 2018 PDT | 30 | 25 | 10 | | 20 | 10 - NYC | Wed Oct 31 17:00:00 2018 PDT | 325 | 200 | 30 | 50 | 85 | 45 - NYC | Mon Jan 01 16:00:00 2018 PST | 65 | 45 | 45 | 45 | 65 | 65 - SFO | Sun Dec 02 16:00:00 2018 PST | 120 | 90 | 45 | 45 | 65 | 55 - SFO | Mon Jan 01 16:00:00 2018 PST | 65 | 45 | 45 | 45 | 65 | 65 - SFO | Sun Dec 31 16:00:00 2017 PST | 55 | 45 | 45 | 45 | 55 | 55 - por | Mon Jan 01 16:00:00 2018 PST | 100 | 100 | 100 | 100 | 100 | 100 -(9 rows) - --- Show the current watermark -SELECT _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:mat_id)); - to_timestamp ------------------------------- - Sat Nov 03 17:00:00 2018 PDT -(1 row) - --- The watermark should, in this case, be the same as the invalidation --- threshold -SELECT _timescaledb_functions.to_timestamp(watermark) -FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold -WHERE hypertable_id = :raw_id; - to_timestamp ------------------------------- - Sat Nov 03 17:00:00 2018 PDT -(1 row) - --- The watermark is the end of materialization (end of last bucket) --- while the MAX is the start of the last bucket -SELECT max(timec) FROM 
:mat_table; - max ------------------------------- - Fri Nov 02 17:00:00 2018 PDT -(1 row) - --- Drop the most recent chunk -SELECT chunk_name, range_start, range_end -FROM timescaledb_information.chunks -WHERE hypertable_name = :'mat_name'; - chunk_name | range_start | range_end -------------------+------------------------------+------------------------------ - _hyper_2_3_chunk | Wed Nov 29 16:00:00 2017 PST | Wed Feb 07 16:00:00 2018 PST - _hyper_2_4_chunk | Wed Sep 05 17:00:00 2018 PDT | Wed Nov 14 16:00:00 2018 PST -(2 rows) - -SELECT drop_chunks('mat_m1', newer_than=>'2018-01-01'::timestamptz); - drop_chunks ----------------------------------------- - _timescaledb_internal._hyper_2_4_chunk -(1 row) - -SELECT chunk_name, range_start, range_end -FROM timescaledb_information.chunks -WHERE hypertable_name = :'mat_name'; - chunk_name | range_start | range_end -------------------+------------------------------+------------------------------ - _hyper_2_3_chunk | Wed Nov 29 16:00:00 2017 PST | Wed Feb 07 16:00:00 2018 PST -(1 row) - --- The watermark should be updated to reflect the dropped data (i.e., --- the cache should be reset) -SELECT _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:mat_id)); - to_timestamp ------------------------------- - Tue Jan 02 16:00:00 2018 PST -(1 row) - --- Since we removed the last chunk, the invalidation threshold doesn't --- move back, while the watermark does. 
-SELECT _timescaledb_functions.to_timestamp(watermark) -FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold -WHERE hypertable_id = :raw_id; - to_timestamp ------------------------------- - Sat Nov 03 17:00:00 2018 PDT -(1 row) - --- Compare the new watermark to the MAX time in the table -SELECT max(timec) FROM :mat_table; - max ------------------------------- - Mon Jan 01 16:00:00 2018 PST -(1 row) - --- Try a subtransaction -SAVEPOINT clear_cagg; -SELECT m1.location, m1.timec, sumt, sumh, firsth, lasth, maxtemp, mintemp -FROM mat_m1 m1 RIGHT JOIN mat_m2 m2 -ON (m1.location = m2.location -AND m1.timec = m2.timec) -ORDER BY m1.location COLLATE "C", m1.timec DESC -LIMIT 10; - location | timec | sumt | sumh | firsth | lasth | maxtemp | mintemp -----------+------------------------------+------+------+--------+-------+---------+--------- - NYC | Sun Dec 02 16:00:00 2018 PST | 110 | 75 | 45 | 30 | 65 | 45 - NYC | Fri Nov 02 17:00:00 2018 PDT | | | | | | - NYC | Thu Nov 01 17:00:00 2018 PDT | 30 | 25 | 10 | | 20 | 10 - NYC | Wed Oct 31 17:00:00 2018 PDT | 325 | 200 | 30 | 50 | 85 | 45 - NYC | Mon Jan 01 16:00:00 2018 PST | 65 | 45 | 45 | 45 | 65 | 65 - SFO | Sun Dec 02 16:00:00 2018 PST | 120 | 90 | 45 | 45 | 65 | 55 - SFO | Mon Jan 01 16:00:00 2018 PST | 65 | 45 | 45 | 45 | 65 | 65 - SFO | Sun Dec 31 16:00:00 2017 PST | 55 | 45 | 45 | 45 | 55 | 55 - por | Mon Jan 01 16:00:00 2018 PST | 100 | 100 | 100 | 100 | 100 | 100 -(9 rows) - -ALTER MATERIALIZED VIEW mat_m1 SET (timescaledb.materialized_only=true); -SELECT m1.location, m1.timec, sumt, sumh, firsth, lasth, maxtemp, mintemp -FROM mat_m1 m1 RIGHT JOIN mat_m2 m2 -ON (m1.location = m2.location -AND m1.timec = m2.timec) -ORDER BY m1.location COLLATE "C" NULLS LAST, m1.timec DESC NULLS LAST, firsth NULLS LAST, - lasth NULLS LAST, mintemp NULLS LAST, maxtemp NULLS LAST -LIMIT 10; - location | timec | sumt | sumh | firsth | lasth | maxtemp | mintemp 
-----------+------------------------------+------+------+--------+-------+---------+--------- - NYC | Mon Jan 01 16:00:00 2018 PST | 65 | 45 | 45 | 45 | 65 | 65 - SFO | Mon Jan 01 16:00:00 2018 PST | 65 | 45 | 45 | 45 | 65 | 65 - SFO | Sun Dec 31 16:00:00 2017 PST | 55 | 45 | 45 | 45 | 55 | 55 - por | Mon Jan 01 16:00:00 2018 PST | 100 | 100 | 100 | 100 | 100 | 100 - | | | | 10 | | 20 | 10 - | | | | 30 | 50 | 85 | 45 - | | | | 45 | 30 | 65 | 45 - | | | | 45 | 45 | 65 | 55 - | | | | | | | -(9 rows) - -ROLLBACK; diff --git a/tsl/test/expected/cagg_query-13.out b/tsl/test/expected/cagg_query.out similarity index 51% rename from tsl/test/expected/cagg_query-13.out rename to tsl/test/expected/cagg_query.out index 734cbbc7e47..5983819948b 100644 --- a/tsl/test/expected/cagg_query-13.out +++ b/tsl/test/expected/cagg_query.out @@ -84,38 +84,31 @@ set enable_hashagg = false; -- group by, we will still need a sort :EXPLAIN select * from mat_m1 order by sumh, sumt, minl, timec ; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Sort - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Sort Key: _materialized_hypertable_2.sumh, _materialized_hypertable_2.sumt, _materialized_hypertable_2.minl, _materialized_hypertable_2.timec + Output: _hyper_2_3_chunk.location, _hyper_2_3_chunk.timec, _hyper_2_3_chunk.minl, _hyper_2_3_chunk.sumt, _hyper_2_3_chunk.sumh + Sort Key: _hyper_2_3_chunk.sumh, _hyper_2_3_chunk.sumt, _hyper_2_3_chunk.minl, _hyper_2_3_chunk.timec -> Append - -> 
Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_2 - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 + -> Append -> Index Scan using _hyper_2_3_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_3_chunk Output: _hyper_2_3_chunk.location, _hyper_2_3_chunk.timec, _hyper_2_3_chunk.minl, _hyper_2_3_chunk.sumt, _hyper_2_3_chunk.sumh - Index Cond: (_hyper_2_3_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) + Index Cond: (_hyper_2_3_chunk.timec < 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: (_hyper_2_4_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) + Index Cond: (_hyper_2_4_chunk.timec < 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), min(conditions.location), sum(conditions.temperature), sum(conditions.humidity) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location + Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), min(_hyper_1_2_chunk.location), sum(_hyper_1_2_chunk.temperature), sum(_hyper_1_2_chunk.humidity) + Group Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.location -> Sort - Output: conditions.location, (time_bucket('@ 1 
day'::interval, conditions.timec)), conditions.temperature, conditions.humidity - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.temperature, conditions.humidity - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 1 + Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity + Sort Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.location + -> Result + Output: _hyper_1_2_chunk.location, time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec), _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity - Index Cond: (_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) -(29 rows) + Index Cond: (_hyper_1_2_chunk.timec >= 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) +(22 rows) :EXPLAIN select * from regview order by timec desc; @@ -145,398 +138,314 @@ select * from regview order by timec desc; -- This should prevent an additional sort after GroupAggregate :EXPLAIN select * from mat_m1 order by timec desc, location; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN 
+---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Sort - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Sort Key: _materialized_hypertable_2.timec DESC, _materialized_hypertable_2.location + Output: _hyper_2_3_chunk.location, _hyper_2_3_chunk.timec, _hyper_2_3_chunk.minl, _hyper_2_3_chunk.sumt, _hyper_2_3_chunk.sumh + Sort Key: _hyper_2_3_chunk.timec DESC, _hyper_2_3_chunk.location -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_2 - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 + -> Append -> Index Scan using _hyper_2_3_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_3_chunk Output: _hyper_2_3_chunk.location, _hyper_2_3_chunk.timec, _hyper_2_3_chunk.minl, _hyper_2_3_chunk.sumt, _hyper_2_3_chunk.sumh - Index Cond: (_hyper_2_3_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) + Index Cond: (_hyper_2_3_chunk.timec < 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: (_hyper_2_4_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) + Index Cond: 
(_hyper_2_4_chunk.timec < 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), min(conditions.location), sum(conditions.temperature), sum(conditions.humidity) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location + Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), min(_hyper_1_2_chunk.location), sum(_hyper_1_2_chunk.temperature), sum(_hyper_1_2_chunk.humidity) + Group Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.location -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.temperature, conditions.humidity - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.temperature, conditions.humidity - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 1 + Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity + Sort Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.location + -> Result + Output: _hyper_1_2_chunk.location, time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec), _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity - Index Cond: (_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) -(29 rows) + Index Cond: (_hyper_1_2_chunk.timec >= 'Sat Nov 
03 17:00:00 2018 PDT'::timestamp with time zone) +(22 rows) :EXPLAIN select * from mat_m1 order by location, timec desc; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Sort - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Sort Key: _materialized_hypertable_2.location, _materialized_hypertable_2.timec DESC + Output: _hyper_2_3_chunk.location, _hyper_2_3_chunk.timec, _hyper_2_3_chunk.minl, _hyper_2_3_chunk.sumt, _hyper_2_3_chunk.sumh + Sort Key: _hyper_2_3_chunk.location, _hyper_2_3_chunk.timec DESC -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_2 - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 + -> Append -> Index Scan using _hyper_2_3_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_3_chunk Output: _hyper_2_3_chunk.location, _hyper_2_3_chunk.timec, _hyper_2_3_chunk.minl, _hyper_2_3_chunk.sumt, _hyper_2_3_chunk.sumh - Index Cond: (_hyper_2_3_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) + Index Cond: (_hyper_2_3_chunk.timec < 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on 
_timescaledb_internal._hyper_2_4_chunk Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: (_hyper_2_4_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) + Index Cond: (_hyper_2_4_chunk.timec < 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), min(conditions.location), sum(conditions.temperature), sum(conditions.humidity) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location + Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), min(_hyper_1_2_chunk.location), sum(_hyper_1_2_chunk.temperature), sum(_hyper_1_2_chunk.humidity) + Group Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.location -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.temperature, conditions.humidity - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.temperature, conditions.humidity - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 1 + Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity + Sort Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.location + -> Result + Output: _hyper_1_2_chunk.location, time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec), _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on 
_timescaledb_internal._hyper_1_2_chunk Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity - Index Cond: (_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) -(29 rows) + Index Cond: (_hyper_1_2_chunk.timec >= 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) +(22 rows) :EXPLAIN select * from mat_m1 order by location, timec asc; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Sort - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Sort Key: _materialized_hypertable_2.location, _materialized_hypertable_2.timec + Output: _hyper_2_3_chunk.location, _hyper_2_3_chunk.timec, _hyper_2_3_chunk.minl, _hyper_2_3_chunk.sumt, _hyper_2_3_chunk.sumh + Sort Key: _hyper_2_3_chunk.location, _hyper_2_3_chunk.timec -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_2 - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 + -> Append -> Index Scan using _hyper_2_3_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_3_chunk Output: _hyper_2_3_chunk.location, _hyper_2_3_chunk.timec, _hyper_2_3_chunk.minl, _hyper_2_3_chunk.sumt, 
_hyper_2_3_chunk.sumh - Index Cond: (_hyper_2_3_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) + Index Cond: (_hyper_2_3_chunk.timec < 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: (_hyper_2_4_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) + Index Cond: (_hyper_2_4_chunk.timec < 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), min(conditions.location), sum(conditions.temperature), sum(conditions.humidity) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location + Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), min(_hyper_1_2_chunk.location), sum(_hyper_1_2_chunk.temperature), sum(_hyper_1_2_chunk.humidity) + Group Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.location -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.temperature, conditions.humidity - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.temperature, conditions.humidity - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 1 + Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.temperature, 
_hyper_1_2_chunk.humidity + Sort Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.location + -> Result + Output: _hyper_1_2_chunk.location, time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec), _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity - Index Cond: (_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) -(29 rows) + Index Cond: (_hyper_1_2_chunk.timec >= 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) +(22 rows) :EXPLAIN select * from mat_m1 where timec > '2018-10-01' order by timec desc; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Sort - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Sort Key: _materialized_hypertable_2.timec DESC + Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh + Sort Key: _hyper_2_4_chunk.timec DESC -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_2 - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, 
_materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk - Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: ((_hyper_2_4_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) AND (_hyper_2_4_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) + -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk + Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh + Index Cond: ((_hyper_2_4_chunk.timec < 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) AND (_hyper_2_4_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), min(conditions.location), sum(conditions.temperature), sum(conditions.humidity) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location + Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), min(_hyper_1_2_chunk.location), sum(_hyper_1_2_chunk.temperature), sum(_hyper_1_2_chunk.humidity) + Group Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.location -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.temperature, conditions.humidity - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, 
time_bucket('@ 1 day'::interval, conditions.timec), conditions.temperature, conditions.humidity - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 + Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity + Sort Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.location + -> Result + Output: _hyper_1_2_chunk.location, time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec), _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity - Index Cond: ((_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) + Index Cond: ((_hyper_1_2_chunk.timec >= 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) -(27 rows) +(19 rows) -- outer sort is used by mat_m1 for grouping. 
But doesn't avoid a sort after the join --- :EXPLAIN select l.locid, mat_m1.* from mat_m1 , location_tab l where timec > '2018-10-01' and l.locname = mat_m1.location order by timec desc; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Sort - Output: l.locid, _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Sort Key: _materialized_hypertable_2.timec DESC + Output: l.locid, _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh + Sort Key: _hyper_2_4_chunk.timec DESC -> Hash Join - Output: l.locid, _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Hash Cond: (l.locname = _materialized_hypertable_2.location) + Output: l.locid, _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh + Hash Cond: (l.locname = _hyper_2_4_chunk.location) -> Seq Scan on public.location_tab l Output: l.locid, l.locname -> Hash - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh + Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, 
_hyper_2_4_chunk.sumh -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_2 - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk - Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: ((_hyper_2_4_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) AND (_hyper_2_4_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) + -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk + Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh + Index Cond: ((_hyper_2_4_chunk.timec < 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) AND (_hyper_2_4_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), min(conditions.location), sum(conditions.temperature), sum(conditions.humidity) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location + Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), min(_hyper_1_2_chunk.location), sum(_hyper_1_2_chunk.temperature), sum(_hyper_1_2_chunk.humidity) + Group Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.location -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), 
conditions.temperature, conditions.humidity - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.temperature, conditions.humidity - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 + Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity + Sort Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.location + -> Result + Output: _hyper_1_2_chunk.location, time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec), _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity - Index Cond: ((_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) + Index Cond: ((_hyper_1_2_chunk.timec >= 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) -(34 rows) +(26 rows) :EXPLAIN select * from mat_m2 where timec > '2018-10-01' order by timec desc; - QUERY PLAN 
--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ Sort - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Sort Key: _materialized_hypertable_3.timec DESC + Output: _hyper_3_6_chunk.location, _hyper_3_6_chunk.timec, _hyper_3_6_chunk.firsth, _hyper_3_6_chunk.lasth, _hyper_3_6_chunk.maxtemp, _hyper_3_6_chunk.mintemp + Sort Key: _hyper_3_6_chunk.timec DESC -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_3 - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_3_6_chunk__materialized_hypertable_3_timec_idx on _timescaledb_internal._hyper_3_6_chunk - Output: _hyper_3_6_chunk.location, _hyper_3_6_chunk.timec, _hyper_3_6_chunk.firsth, _hyper_3_6_chunk.lasth, _hyper_3_6_chunk.maxtemp, _hyper_3_6_chunk.mintemp - Index Cond: ((_hyper_3_6_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) AND (_hyper_3_6_chunk.timec > 'Mon Oct 01 00:00:00 2018 
PDT'::timestamp with time zone)) + -> Index Scan using _hyper_3_6_chunk__materialized_hypertable_3_timec_idx on _timescaledb_internal._hyper_3_6_chunk + Output: _hyper_3_6_chunk.location, _hyper_3_6_chunk.timec, _hyper_3_6_chunk.firsth, _hyper_3_6_chunk.lasth, _hyper_3_6_chunk.maxtemp, _hyper_3_6_chunk.mintemp + Index Cond: ((_hyper_3_6_chunk.timec < 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) AND (_hyper_3_6_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), first(conditions.humidity, conditions.timec), last(conditions.humidity, conditions.timec), max(conditions.temperature), min(conditions.temperature) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location + Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), first(_hyper_1_2_chunk.humidity, _hyper_1_2_chunk.timec), last(_hyper_1_2_chunk.humidity, _hyper_1_2_chunk.timec), max(_hyper_1_2_chunk.temperature), min(_hyper_1_2_chunk.temperature) + Group Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.location -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.humidity, conditions.timec, conditions.temperature - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.humidity, conditions.timec, conditions.temperature - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 + Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature + Sort Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), 
_hyper_1_2_chunk.location + -> Result + Output: _hyper_1_2_chunk.location, time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec), _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.temperature - Index Cond: ((_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) + Index Cond: ((_hyper_1_2_chunk.timec >= 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) -(27 rows) +(19 rows) :EXPLAIN select * from (select * from mat_m2 where timec > '2018-10-01' order by timec desc ) as q limit 1; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ Limit - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp + Output: 
_hyper_3_6_chunk.location, _hyper_3_6_chunk.timec, _hyper_3_6_chunk.firsth, _hyper_3_6_chunk.lasth, _hyper_3_6_chunk.maxtemp, _hyper_3_6_chunk.mintemp -> Sort - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Sort Key: _materialized_hypertable_3.timec DESC + Output: _hyper_3_6_chunk.location, _hyper_3_6_chunk.timec, _hyper_3_6_chunk.firsth, _hyper_3_6_chunk.lasth, _hyper_3_6_chunk.maxtemp, _hyper_3_6_chunk.mintemp + Sort Key: _hyper_3_6_chunk.timec DESC -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_3 - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_3_6_chunk__materialized_hypertable_3_timec_idx on _timescaledb_internal._hyper_3_6_chunk - Output: _hyper_3_6_chunk.location, _hyper_3_6_chunk.timec, _hyper_3_6_chunk.firsth, _hyper_3_6_chunk.lasth, _hyper_3_6_chunk.maxtemp, _hyper_3_6_chunk.mintemp - Index Cond: ((_hyper_3_6_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) AND (_hyper_3_6_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) + -> Index Scan using _hyper_3_6_chunk__materialized_hypertable_3_timec_idx on _timescaledb_internal._hyper_3_6_chunk + Output: _hyper_3_6_chunk.location, _hyper_3_6_chunk.timec, _hyper_3_6_chunk.firsth, _hyper_3_6_chunk.lasth, _hyper_3_6_chunk.maxtemp, _hyper_3_6_chunk.mintemp + Index Cond: ((_hyper_3_6_chunk.timec < 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) AND (_hyper_3_6_chunk.timec > 'Mon Oct 01 00:00:00 
2018 PDT'::timestamp with time zone)) -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), first(conditions.humidity, conditions.timec), last(conditions.humidity, conditions.timec), max(conditions.temperature), min(conditions.temperature) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location + Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), first(_hyper_1_2_chunk.humidity, _hyper_1_2_chunk.timec), last(_hyper_1_2_chunk.humidity, _hyper_1_2_chunk.timec), max(_hyper_1_2_chunk.temperature), min(_hyper_1_2_chunk.temperature) + Group Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.location -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.humidity, conditions.timec, conditions.temperature - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.humidity, conditions.timec, conditions.temperature - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 + Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature + Sort Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.location + -> Result + Output: _hyper_1_2_chunk.location, time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec), _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.temperature - Index Cond: ((_hyper_1_2_chunk.timec 
>= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) + Index Cond: ((_hyper_1_2_chunk.timec >= 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) -(29 rows) +(21 rows) :EXPLAIN select * from (select * from mat_m2 where timec > '2018-10-01' order by timec desc , location asc nulls first) as q limit 1; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ Limit - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp + Output: _hyper_3_6_chunk.location, _hyper_3_6_chunk.timec, _hyper_3_6_chunk.firsth, _hyper_3_6_chunk.lasth, _hyper_3_6_chunk.maxtemp, _hyper_3_6_chunk.mintemp -> Sort - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Sort Key: _materialized_hypertable_3.timec DESC, 
_materialized_hypertable_3.location NULLS FIRST + Output: _hyper_3_6_chunk.location, _hyper_3_6_chunk.timec, _hyper_3_6_chunk.firsth, _hyper_3_6_chunk.lasth, _hyper_3_6_chunk.maxtemp, _hyper_3_6_chunk.mintemp + Sort Key: _hyper_3_6_chunk.timec DESC, _hyper_3_6_chunk.location NULLS FIRST -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_3 - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_3_6_chunk__materialized_hypertable_3_timec_idx on _timescaledb_internal._hyper_3_6_chunk - Output: _hyper_3_6_chunk.location, _hyper_3_6_chunk.timec, _hyper_3_6_chunk.firsth, _hyper_3_6_chunk.lasth, _hyper_3_6_chunk.maxtemp, _hyper_3_6_chunk.mintemp - Index Cond: ((_hyper_3_6_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) AND (_hyper_3_6_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) + -> Index Scan using _hyper_3_6_chunk__materialized_hypertable_3_timec_idx on _timescaledb_internal._hyper_3_6_chunk + Output: _hyper_3_6_chunk.location, _hyper_3_6_chunk.timec, _hyper_3_6_chunk.firsth, _hyper_3_6_chunk.lasth, _hyper_3_6_chunk.maxtemp, _hyper_3_6_chunk.mintemp + Index Cond: ((_hyper_3_6_chunk.timec < 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) AND (_hyper_3_6_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), first(conditions.humidity, conditions.timec), last(conditions.humidity, conditions.timec), max(conditions.temperature), min(conditions.temperature) - Group Key: (time_bucket('@ 1 day'::interval, 
conditions.timec)), conditions.location + Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), first(_hyper_1_2_chunk.humidity, _hyper_1_2_chunk.timec), last(_hyper_1_2_chunk.humidity, _hyper_1_2_chunk.timec), max(_hyper_1_2_chunk.temperature), min(_hyper_1_2_chunk.temperature) + Group Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.location -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.humidity, conditions.timec, conditions.temperature - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.humidity, conditions.timec, conditions.temperature - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 + Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature + Sort Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.location + -> Result + Output: _hyper_1_2_chunk.location, time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec), _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.temperature - Index Cond: ((_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) + Index Cond: ((_hyper_1_2_chunk.timec >= 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) AND 
(_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) -(29 rows) +(21 rows) --plans with CTE :EXPLAIN with m1 as ( Select * from mat_m2 where timec > '2018-10-01' order by timec desc ) select * from m1; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ Sort - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Sort Key: _materialized_hypertable_3.timec DESC + Output: _hyper_3_6_chunk.location, _hyper_3_6_chunk.timec, _hyper_3_6_chunk.firsth, _hyper_3_6_chunk.lasth, _hyper_3_6_chunk.maxtemp, _hyper_3_6_chunk.mintemp + Sort Key: _hyper_3_6_chunk.timec DESC -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_3 - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_3_6_chunk__materialized_hypertable_3_timec_idx on _timescaledb_internal._hyper_3_6_chunk - Output: 
_hyper_3_6_chunk.location, _hyper_3_6_chunk.timec, _hyper_3_6_chunk.firsth, _hyper_3_6_chunk.lasth, _hyper_3_6_chunk.maxtemp, _hyper_3_6_chunk.mintemp - Index Cond: ((_hyper_3_6_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) AND (_hyper_3_6_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) + -> Index Scan using _hyper_3_6_chunk__materialized_hypertable_3_timec_idx on _timescaledb_internal._hyper_3_6_chunk + Output: _hyper_3_6_chunk.location, _hyper_3_6_chunk.timec, _hyper_3_6_chunk.firsth, _hyper_3_6_chunk.lasth, _hyper_3_6_chunk.maxtemp, _hyper_3_6_chunk.mintemp + Index Cond: ((_hyper_3_6_chunk.timec < 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) AND (_hyper_3_6_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), first(conditions.humidity, conditions.timec), last(conditions.humidity, conditions.timec), max(conditions.temperature), min(conditions.temperature) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location + Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), first(_hyper_1_2_chunk.humidity, _hyper_1_2_chunk.timec), last(_hyper_1_2_chunk.humidity, _hyper_1_2_chunk.timec), max(_hyper_1_2_chunk.temperature), min(_hyper_1_2_chunk.temperature) + Group Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.location -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.humidity, conditions.timec, conditions.temperature - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.humidity, 
conditions.timec, conditions.temperature - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 + Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature + Sort Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.location + -> Result + Output: _hyper_1_2_chunk.location, time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec), _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.temperature - Index Cond: ((_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) + Index Cond: ((_hyper_1_2_chunk.timec >= 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) AND (_hyper_1_2_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) -(27 rows) +(19 rows) -- should reorder mat_m1 group by only based on mat_m1 order-by :EXPLAIN select * from mat_m1, mat_m2 where mat_m1.timec > '2018-10-01' and mat_m1.timec = mat_m2.timec order by mat_m1.timec desc; - QUERY PLAN 
--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ Sort - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh, _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Sort Key: _materialized_hypertable_2.timec DESC + Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh, _hyper_3_5_chunk.location, _hyper_3_5_chunk.timec, _hyper_3_5_chunk.firsth, _hyper_3_5_chunk.lasth, _hyper_3_5_chunk.maxtemp, _hyper_3_5_chunk.mintemp + Sort Key: _hyper_2_4_chunk.timec DESC -> Hash Join - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh, _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Hash Cond: (_materialized_hypertable_3.timec = _materialized_hypertable_2.timec) + Output: 
_hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh, _hyper_3_5_chunk.location, _hyper_3_5_chunk.timec, _hyper_3_5_chunk.firsth, _hyper_3_5_chunk.lasth, _hyper_3_5_chunk.maxtemp, _hyper_3_5_chunk.mintemp + Hash Cond: (_hyper_3_5_chunk.timec = _hyper_2_4_chunk.timec) -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_3 - Output: _materialized_hypertable_3.location, _materialized_hypertable_3.timec, _materialized_hypertable_3.firsth, _materialized_hypertable_3.lasth, _materialized_hypertable_3.maxtemp, _materialized_hypertable_3.mintemp - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 + -> Append -> Index Scan using _hyper_3_5_chunk__materialized_hypertable_3_timec_idx on _timescaledb_internal._hyper_3_5_chunk Output: _hyper_3_5_chunk.location, _hyper_3_5_chunk.timec, _hyper_3_5_chunk.firsth, _hyper_3_5_chunk.lasth, _hyper_3_5_chunk.maxtemp, _hyper_3_5_chunk.mintemp - Index Cond: (_hyper_3_5_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) + Index Cond: (_hyper_3_5_chunk.timec < 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) -> Index Scan using _hyper_3_6_chunk__materialized_hypertable_3_timec_idx on _timescaledb_internal._hyper_3_6_chunk Output: _hyper_3_6_chunk.location, _hyper_3_6_chunk.timec, _hyper_3_6_chunk.firsth, _hyper_3_6_chunk.lasth, _hyper_3_6_chunk.maxtemp, _hyper_3_6_chunk.mintemp - Index Cond: (_hyper_3_6_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) + Index Cond: (_hyper_3_6_chunk.timec < 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), first(conditions.humidity, conditions.timec), 
last(conditions.humidity, conditions.timec), max(conditions.temperature), min(conditions.temperature) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location + Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), first(_hyper_1_2_chunk.humidity, _hyper_1_2_chunk.timec), last(_hyper_1_2_chunk.humidity, _hyper_1_2_chunk.timec), max(_hyper_1_2_chunk.temperature), min(_hyper_1_2_chunk.temperature) + Group Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.location -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.humidity, conditions.timec, conditions.temperature - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.humidity, conditions.timec, conditions.temperature - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 1 + Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature + Sort Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.location + -> Result + Output: _hyper_1_2_chunk.location, time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec), _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.humidity, _hyper_1_2_chunk.temperature - Index Cond: (_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(3)), '-infinity'::timestamp with time zone)) + Index Cond: (_hyper_1_2_chunk.timec >= 'Sat Nov 03 
17:00:00 2018 PDT'::timestamp with time zone) -> Hash - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh + Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_2 - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk - Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: ((_hyper_2_4_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) AND (_hyper_2_4_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) + -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk + Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh + Index Cond: ((_hyper_2_4_chunk.timec < 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) AND (_hyper_2_4_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) -> GroupAggregate - Output: conditions_1.location, (time_bucket('@ 1 day'::interval, conditions_1.timec)), min(conditions_1.location), sum(conditions_1.temperature), sum(conditions_1.humidity) - Group Key: (time_bucket('@ 1 day'::interval, conditions_1.timec)), conditions_1.location + Output: _hyper_1_2_chunk_1.location, 
(time_bucket('@ 1 day'::interval, _hyper_1_2_chunk_1.timec)), min(_hyper_1_2_chunk_1.location), sum(_hyper_1_2_chunk_1.temperature), sum(_hyper_1_2_chunk_1.humidity) + Group Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk_1.timec)), _hyper_1_2_chunk_1.location -> Sort - Output: conditions_1.location, (time_bucket('@ 1 day'::interval, conditions_1.timec)), conditions_1.temperature, conditions_1.humidity - Sort Key: (time_bucket('@ 1 day'::interval, conditions_1.timec)), conditions_1.location - -> Custom Scan (ChunkAppend) on public.conditions conditions_1 - Output: conditions_1.location, time_bucket('@ 1 day'::interval, conditions_1.timec), conditions_1.temperature, conditions_1.humidity - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 + Output: _hyper_1_2_chunk_1.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk_1.timec)), _hyper_1_2_chunk_1.temperature, _hyper_1_2_chunk_1.humidity + Sort Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk_1.timec)), _hyper_1_2_chunk_1.location + -> Result + Output: _hyper_1_2_chunk_1.location, time_bucket('@ 1 day'::interval, _hyper_1_2_chunk_1.timec), _hyper_1_2_chunk_1.temperature, _hyper_1_2_chunk_1.humidity -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk _hyper_1_2_chunk_1 Output: _hyper_1_2_chunk_1.location, _hyper_1_2_chunk_1.timec, _hyper_1_2_chunk_1.temperature, _hyper_1_2_chunk_1.humidity - Index Cond: ((_hyper_1_2_chunk_1.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) AND (_hyper_1_2_chunk_1.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) + Index Cond: ((_hyper_1_2_chunk_1.timec >= 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) AND (_hyper_1_2_chunk_1.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk_1.timec) > 'Mon 
Oct 01 00:00:00 2018 PDT'::timestamp with time zone) -(58 rows) +(43 rows) --should reorder only for mat_m1. :EXPLAIN select * from mat_m1, regview where mat_m1.timec > '2018-10-01' and mat_m1.timec = regview.timec order by mat_m1.timec desc; - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Sort - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh, _hyper_1_1_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)), (min(_hyper_1_1_chunk.location)), (sum(_hyper_1_1_chunk.temperature)), (sum(_hyper_1_1_chunk.humidity)) - Sort Key: _materialized_hypertable_2.timec DESC + Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh, _hyper_1_1_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)), (min(_hyper_1_1_chunk.location)), (sum(_hyper_1_1_chunk.temperature)), (sum(_hyper_1_1_chunk.humidity)) + Sort Key: _hyper_2_4_chunk.timec DESC -> Hash Join - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh, _hyper_1_1_chunk.location, 
(time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)), (min(_hyper_1_1_chunk.location)), (sum(_hyper_1_1_chunk.temperature)), (sum(_hyper_1_1_chunk.humidity)) - Hash Cond: ((time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)) = _materialized_hypertable_2.timec) + Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh, _hyper_1_1_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)), (min(_hyper_1_1_chunk.location)), (sum(_hyper_1_1_chunk.temperature)), (sum(_hyper_1_1_chunk.humidity)) + Hash Cond: ((time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)) = _hyper_2_4_chunk.timec) -> GroupAggregate Output: _hyper_1_1_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)), min(_hyper_1_1_chunk.location), sum(_hyper_1_1_chunk.temperature), sum(_hyper_1_1_chunk.humidity) Group Key: _hyper_1_1_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_1_chunk.timec)) @@ -551,32 +460,24 @@ select * from mat_m1, regview where mat_m1.timec > '2018-10-01' and mat_m1.timec -> Seq Scan on _timescaledb_internal._hyper_1_2_chunk Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity -> Hash - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh + Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_2 - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using 
_hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk - Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: ((_hyper_2_4_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) AND (_hyper_2_4_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) + -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk + Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh + Index Cond: ((_hyper_2_4_chunk.timec < 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) AND (_hyper_2_4_chunk.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) -> GroupAggregate - Output: conditions_1.location, (time_bucket('@ 1 day'::interval, conditions_1.timec)), min(conditions_1.location), sum(conditions_1.temperature), sum(conditions_1.humidity) - Group Key: (time_bucket('@ 1 day'::interval, conditions_1.timec)), conditions_1.location + Output: _hyper_1_2_chunk_1.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk_1.timec)), min(_hyper_1_2_chunk_1.location), sum(_hyper_1_2_chunk_1.temperature), sum(_hyper_1_2_chunk_1.humidity) + Group Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk_1.timec)), _hyper_1_2_chunk_1.location -> Sort - Output: conditions_1.location, (time_bucket('@ 1 day'::interval, conditions_1.timec)), conditions_1.temperature, conditions_1.humidity - Sort Key: (time_bucket('@ 1 day'::interval, conditions_1.timec)), conditions_1.location - -> Custom Scan (ChunkAppend) on public.conditions conditions_1 - Output: conditions_1.location, time_bucket('@ 1 day'::interval, conditions_1.timec), conditions_1.temperature, conditions_1.humidity - Startup Exclusion: true - Runtime Exclusion: false - 
Chunks excluded during startup: 0 + Output: _hyper_1_2_chunk_1.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk_1.timec)), _hyper_1_2_chunk_1.temperature, _hyper_1_2_chunk_1.humidity + Sort Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk_1.timec)), _hyper_1_2_chunk_1.location + -> Result + Output: _hyper_1_2_chunk_1.location, time_bucket('@ 1 day'::interval, _hyper_1_2_chunk_1.timec), _hyper_1_2_chunk_1.temperature, _hyper_1_2_chunk_1.humidity -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk _hyper_1_2_chunk_1 Output: _hyper_1_2_chunk_1.location, _hyper_1_2_chunk_1.timec, _hyper_1_2_chunk_1.temperature, _hyper_1_2_chunk_1.humidity - Index Cond: ((_hyper_1_2_chunk_1.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) AND (_hyper_1_2_chunk_1.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) + Index Cond: ((_hyper_1_2_chunk_1.timec >= 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) AND (_hyper_1_2_chunk_1.timec > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone)) Filter: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk_1.timec) > 'Mon Oct 01 00:00:00 2018 PDT'::timestamp with time zone) -(45 rows) +(37 rows) select l.locid, mat_m1.* from mat_m1 , location_tab l where timec > '2018-10-01' and l.locname = mat_m1.location order by timec desc; locid | location | timec | minl | sumt | sumh @@ -601,38 +502,31 @@ set timescaledb.enable_cagg_reorder_groupby = false; set enable_hashagg = false; :EXPLAIN select * from mat_m1 order by timec desc, location; - QUERY PLAN --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN 
+---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Sort - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Sort Key: _materialized_hypertable_2.timec DESC, _materialized_hypertable_2.location + Output: _hyper_2_3_chunk.location, _hyper_2_3_chunk.timec, _hyper_2_3_chunk.minl, _hyper_2_3_chunk.sumt, _hyper_2_3_chunk.sumh + Sort Key: _hyper_2_3_chunk.timec DESC, _hyper_2_3_chunk.location -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_2 - Output: _materialized_hypertable_2.location, _materialized_hypertable_2.timec, _materialized_hypertable_2.minl, _materialized_hypertable_2.sumt, _materialized_hypertable_2.sumh - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 + -> Append -> Index Scan using _hyper_2_3_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_3_chunk Output: _hyper_2_3_chunk.location, _hyper_2_3_chunk.timec, _hyper_2_3_chunk.minl, _hyper_2_3_chunk.sumt, _hyper_2_3_chunk.sumh - Index Cond: (_hyper_2_3_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) + Index Cond: (_hyper_2_3_chunk.timec < 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) -> Index Scan using _hyper_2_4_chunk__materialized_hypertable_2_timec_idx on _timescaledb_internal._hyper_2_4_chunk Output: _hyper_2_4_chunk.location, _hyper_2_4_chunk.timec, _hyper_2_4_chunk.minl, _hyper_2_4_chunk.sumt, _hyper_2_4_chunk.sumh - Index Cond: (_hyper_2_4_chunk.timec < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) + Index Cond: 
(_hyper_2_4_chunk.timec < 'Sat Nov 03 17:00:00 2018 PDT'::timestamp with time zone) -> GroupAggregate - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), min(conditions.location), sum(conditions.temperature), sum(conditions.humidity) - Group Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location + Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), min(_hyper_1_2_chunk.location), sum(_hyper_1_2_chunk.temperature), sum(_hyper_1_2_chunk.humidity) + Group Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.location -> Sort - Output: conditions.location, (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.temperature, conditions.humidity - Sort Key: (time_bucket('@ 1 day'::interval, conditions.timec)), conditions.location - -> Custom Scan (ChunkAppend) on public.conditions - Output: conditions.location, time_bucket('@ 1 day'::interval, conditions.timec), conditions.temperature, conditions.humidity - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 1 + Output: _hyper_1_2_chunk.location, (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity + Sort Key: (time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec)), _hyper_1_2_chunk.location + -> Result + Output: _hyper_1_2_chunk.location, time_bucket('@ 1 day'::interval, _hyper_1_2_chunk.timec), _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity -> Index Scan using _hyper_1_2_chunk_conditions_timec_idx on _timescaledb_internal._hyper_1_2_chunk Output: _hyper_1_2_chunk.location, _hyper_1_2_chunk.timec, _hyper_1_2_chunk.temperature, _hyper_1_2_chunk.humidity - Index Cond: (_hyper_1_2_chunk.timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(2)), '-infinity'::timestamp with time zone)) -(29 rows) + Index Cond: (_hyper_1_2_chunk.timec >= 'Sat Nov 
03 17:00:00 2018 PDT'::timestamp with time zone) +(22 rows) ----------------------------------------------------------------------- -- Test the cagg_watermark function. The watermark gives the point diff --git a/tsl/test/expected/cagg_watermark-13.out b/tsl/test/expected/cagg_watermark-13.out new file mode 100644 index 00000000000..5cc58c7d1da --- /dev/null +++ b/tsl/test/expected/cagg_watermark-13.out @@ -0,0 +1,1271 @@ +-- This file and its contents are licensed under the Timescale License. +-- Please see the included NOTICE for copyright information and +-- LICENSE-TIMESCALE for a copy of the license. +\set EXPLAIN_ANALYZE 'EXPLAIN (analyze,costs off,timing off,summary off)' +CREATE TABLE continuous_agg_test(time int, data int); +select create_hypertable('continuous_agg_test', 'time', chunk_time_interval=> 10); +NOTICE: adding not-null constraint to column "time" + create_hypertable +---------------------------------- + (1,public,continuous_agg_test,t) +(1 row) + +CREATE OR REPLACE FUNCTION integer_now_test1() returns int LANGUAGE SQL STABLE as $$ SELECT coalesce(max(time), 0) FROM continuous_agg_test $$; +SELECT set_integer_now_func('continuous_agg_test', 'integer_now_test1'); + set_integer_now_func +---------------------- + +(1 row) + +-- watermark tabels start out empty +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- +(0 rows) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- +(0 rows) + +-- inserting into a table that does not have continuous_agg_insert_trigger doesn't change the watermark +INSERT INTO continuous_agg_test VALUES (10, 1), (11, 2), (21, 3), (22, 4); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- +(0 rows) + 
+SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- +(0 rows) + +\c :TEST_DBNAME :ROLE_SUPERUSER +CREATE TABLE continuous_agg_test_mat(time int); +select create_hypertable('continuous_agg_test_mat', 'time', chunk_time_interval=> 10); +NOTICE: adding not-null constraint to column "time" + create_hypertable +-------------------------------------- + (2,public,continuous_agg_test_mat,t) +(1 row) + +INSERT INTO _timescaledb_catalog.continuous_agg VALUES (2, 1, NULL, '','','','',0,'',''); +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER +-- create the trigger +CREATE TRIGGER continuous_agg_insert_trigger + AFTER INSERT ON continuous_agg_test + FOR EACH ROW EXECUTE FUNCTION _timescaledb_functions.continuous_agg_invalidation_trigger(1); +-- inserting into the table still doesn't change the watermark since there's no +-- continuous_aggs_invalidation_threshold. We treat that case as a invalidation_watermark of +-- BIG_INT_MIN, since the first run of the aggregation will need to scan the +-- entire table anyway. 
+INSERT INTO continuous_agg_test VALUES (10, 1), (11, 2), (21, 3), (22, 4); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- +(0 rows) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- +(0 rows) + +-- set the continuous_aggs_invalidation_threshold to 15, any insertions below that value need an invalidation +\c :TEST_DBNAME :ROLE_SUPERUSER +INSERT INTO _timescaledb_catalog.continuous_aggs_invalidation_threshold VALUES (1, 15); +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER +INSERT INTO continuous_agg_test VALUES (10, 1), (11, 2), (21, 3), (22, 4); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 1 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 1 | 10 | 22 +(1 row) + +-- INSERTs only above the continuous_aggs_invalidation_threshold won't change the continuous_aggs_hypertable_invalidation_log +INSERT INTO continuous_agg_test VALUES (21, 3), (22, 4); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 1 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 1 | 10 | 22 +(1 row) + +-- INSERTs only below the continuous_aggs_invalidation_threshold will change the continuous_aggs_hypertable_invalidation_log +INSERT INTO continuous_agg_test VALUES (10, 1), (11, 2); +SELECT * FROM 
_timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 1 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 1 | 10 | 22 + 1 | 10 | 11 +(2 rows) + +-- test INSERTing other values +INSERT INTO continuous_agg_test VALUES (1, 7), (12, 6), (24, 5), (51, 4); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 1 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 1 | 10 | 22 + 1 | 10 | 11 + 1 | 1 | 51 +(3 rows) + +-- INSERT after dropping a COLUMN +ALTER TABLE continuous_agg_test DROP COLUMN data; +INSERT INTO continuous_agg_test VALUES (-1), (-2), (-3), (-4); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 1 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 1 | 10 | 22 + 1 | 10 | 11 + 1 | 1 | 51 + 1 | -4 | -1 +(4 rows) + +INSERT INTO continuous_agg_test VALUES (100); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 1 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 1 | 10 | 22 + 1 | 10 | 11 + 1 | 1 | 51 + 1 | -4 | -1 +(4 rows) + +-- INSERT 
after adding a COLUMN +ALTER TABLE continuous_agg_test ADD COLUMN d BOOLEAN; +INSERT INTO continuous_agg_test VALUES (-6, true), (-7, false), (-3, true), (-4, false); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 1 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 1 | 10 | 22 + 1 | 10 | 11 + 1 | 1 | 51 + 1 | -4 | -1 + 1 | -7 | -3 +(5 rows) + +INSERT INTO continuous_agg_test VALUES (120, false), (200, true); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 1 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 1 | 10 | 22 + 1 | 10 | 11 + 1 | 1 | 51 + 1 | -4 | -1 + 1 | -7 | -3 +(5 rows) + +\c :TEST_DBNAME :ROLE_SUPERUSER +DELETE FROM _timescaledb_catalog.continuous_agg where mat_hypertable_id = 2; +DELETE FROM _timescaledb_config.bgw_job WHERE id = 2; +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER +DROP TABLE continuous_agg_test CASCADE; +\c :TEST_DBNAME :ROLE_SUPERUSER +TRUNCATE _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; +TRUNCATE _timescaledb_catalog.continuous_aggs_invalidation_threshold; +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER +-- CREATE VIEW creates the invalidation trigger correctly +CREATE TABLE ca_inval_test(time int); +SELECT create_hypertable('ca_inval_test', 'time', chunk_time_interval=> 10); +NOTICE: adding not-null constraint to column "time" + create_hypertable +---------------------------- + (3,public,ca_inval_test,t) +(1 row) + +CREATE OR REPLACE FUNCTION integer_now_test2() returns int LANGUAGE SQL STABLE 
as $$ SELECT coalesce(max(time), 0) FROM ca_inval_test $$; +SELECT set_integer_now_func('ca_inval_test', 'integer_now_test2'); + set_integer_now_func +---------------------- + +(1 row) + +CREATE MATERIALIZED VIEW cit_view + WITH (timescaledb.continuous, timescaledb.materialized_only=false) + AS SELECT time_bucket('5', time), COUNT(time) + FROM ca_inval_test + GROUP BY 1 WITH NO DATA; +INSERT INTO ca_inval_test SELECT generate_series(0, 5); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+------------- + 3 | -2147483648 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- +(0 rows) + +\c :TEST_DBNAME :ROLE_SUPERUSER +UPDATE _timescaledb_catalog.continuous_aggs_invalidation_threshold +SET watermark = 15 +WHERE hypertable_id = 3; +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER +INSERT INTO ca_inval_test SELECT generate_series(5, 15); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 3 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 3 | 5 | 15 +(1 row) + +INSERT INTO ca_inval_test SELECT generate_series(16, 20); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 3 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 3 | 5 | 15 +(1 row) + +\c :TEST_DBNAME :ROLE_SUPERUSER +TRUNCATE 
_timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER +-- updates below the threshold update both the old and new values +UPDATE ca_inval_test SET time = 5 WHERE time = 6; +UPDATE ca_inval_test SET time = 7 WHERE time = 5; +UPDATE ca_inval_test SET time = 17 WHERE time = 14; +UPDATE ca_inval_test SET time = 12 WHERE time = 16; +-- updates purely above the threshold are not logged +UPDATE ca_inval_test SET time = 19 WHERE time = 18; +UPDATE ca_inval_test SET time = 17 WHERE time = 19; +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 3 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 3 | 5 | 6 + 3 | 5 | 7 + 3 | 14 | 17 + 3 | 12 | 16 +(4 rows) + +DROP TABLE ca_inval_test CASCADE; +NOTICE: drop cascades to 3 other objects +\c :TEST_DBNAME :ROLE_SUPERUSER +TRUNCATE _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; +TRUNCATE _timescaledb_catalog.continuous_aggs_invalidation_threshold; +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER +-- invalidation trigger is created correctly on chunks that existed before +-- the view was created +CREATE TABLE ts_continuous_test(time INTEGER, location INTEGER); + SELECT create_hypertable('ts_continuous_test', 'time', chunk_time_interval => 10); +NOTICE: adding not-null constraint to column "time" + create_hypertable +--------------------------------- + (5,public,ts_continuous_test,t) +(1 row) + +CREATE OR REPLACE FUNCTION integer_now_test3() returns int LANGUAGE SQL STABLE as $$ SELECT coalesce(max(time), 0) FROM ts_continuous_test $$; +SELECT set_integer_now_func('ts_continuous_test', 'integer_now_test3'); + set_integer_now_func +---------------------- + +(1 row) + +INSERT INTO ts_continuous_test SELECT i, i 
FROM + (SELECT generate_series(0, 29) AS i) AS i; +CREATE MATERIALIZED VIEW continuous_view + WITH (timescaledb.continuous, timescaledb.materialized_only=false) + AS SELECT time_bucket('5', time), COUNT(location) + FROM ts_continuous_test + GROUP BY 1 WITH NO DATA; +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+------------- + 5 | -2147483648 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- +(0 rows) + +\c :TEST_DBNAME :ROLE_SUPERUSER +UPDATE _timescaledb_catalog.continuous_aggs_invalidation_threshold +SET watermark = 2 +WHERE hypertable_id = 5; +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER +INSERT INTO ts_continuous_test VALUES (1, 1); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 5 | 2 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 5 | 1 | 1 +(1 row) + +-- aborts don't get written +BEGIN; + INSERT INTO ts_continuous_test VALUES (-20, -20); +ABORT; +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 5 | 2 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 5 | 1 | 1 +(1 row) + +DROP TABLE ts_continuous_test CASCADE; +NOTICE: drop cascades to 3 other objects +---- +-- Test watermark invalidation and chunk exclusion with prepared and ad-hoc queries +---- +CREATE TABLE chunks(time timestamptz, device int, 
value float); +SELECT FROM create_hypertable('chunks','time',chunk_time_interval:='1d'::interval); +NOTICE: adding not-null constraint to column "time" +-- +(1 row) + +CREATE MATERIALIZED VIEW chunks_1h WITH (timescaledb.continuous) + AS SELECT time_bucket('1 hour', time) AS bucket, device, max(value) AS max FROM chunks GROUP BY 1, 2; +NOTICE: continuous aggregate "chunks_1h" is already up-to-date +ALTER MATERIALIZED VIEW chunks_1h set (timescaledb.materialized_only = false); +-- Get id fg the materialization hypertable +SELECT id AS "MAT_HT_ID_1H" FROM _timescaledb_catalog.hypertable + WHERE table_name=( + SELECT materialization_hypertable_name + FROM timescaledb_information.continuous_aggregates + WHERE view_name='chunks_1h' + ) \gset +SELECT materialization_hypertable_schema || '.' || materialization_hypertable_name AS "MAT_HT_NAME_1H" + FROM timescaledb_information.continuous_aggregates + WHERE view_name='chunks_1h' +\gset +-- Prepared scan on hypertable (identical to the query of a real-time CAgg) +PREPARE ht_scan_realtime_1h AS + SELECT bucket, device, max + FROM :MAT_HT_NAME_1H + WHERE bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)), '-infinity'::timestamp with time zone) +UNION ALL + SELECT time_bucket('01:00:00'::interval, chunks."time") AS bucket, + chunks.device, + max(chunks.value) AS max + FROM chunks + WHERE chunks."time" >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)), '-infinity'::timestamp with time zone) + GROUP BY (time_bucket('01:00:00'::interval, chunks."time")), chunks.device; +PREPARE cagg_scan_1h AS SELECT * FROM chunks_1h; +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +------------------------------------------------------------------------------ + HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, chunks."time"), chunks.device + Batches: 1 + -> Result (actual rows=0 loops=1) + 
One-Time Filter: false +(5 rows) + +INSERT INTO chunks VALUES ('1901-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); + to_timestamp +------------------------------ + Wed Jul 31 17:00:00 1901 PST +(1 row) + +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=1 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_16_chunk."time"), _hyper_7_16_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_16_chunk_chunks_time_idx on _hyper_7_16_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) +(9 rows) + +-- Compare prepared statement with ad-hoc query +EXECUTE cagg_scan_1h; + bucket | device | max +------------------------------+--------+----- + Wed Jul 31 16:00:00 1901 PST | 1 | 2 +(1 row) + +SELECT * FROM chunks_1h; + bucket | device | max +------------------------------+--------+----- + Wed Jul 31 16:00:00 1901 PST | 1 | 2 +(1 row) + +-- Add new chunks to the non materialized part of the CAgg +INSERT INTO chunks VALUES ('1910-08-01 01:01:01+01', 1, 2); +:EXPLAIN_ANALYZE EXECUTE cagg_scan_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=2 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket 
< 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> HashAggregate (actual rows=1 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_16_chunk."time"), _hyper_7_16_chunk.device + Batches: 1 + -> Result (actual rows=1 loops=1) + -> Append (actual rows=1 loops=1) + -> Index Scan using _hyper_7_16_chunk_chunks_time_idx on _hyper_7_16_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> Index Scan using _hyper_7_18_chunk_chunks_time_idx on _hyper_7_18_chunk (actual rows=1 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) +(12 rows) + +:EXPLAIN_ANALYZE SELECT * FROM chunks_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=2 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> HashAggregate (actual rows=1 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_16_chunk."time"), _hyper_7_16_chunk.device + Batches: 1 + -> Result (actual rows=1 loops=1) + -> Append (actual rows=1 loops=1) + -> Index Scan using _hyper_7_16_chunk_chunks_time_idx on _hyper_7_16_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> Index Scan using _hyper_7_18_chunk_chunks_time_idx on _hyper_7_18_chunk (actual rows=1 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) +(12 rows) + +INSERT INTO chunks VALUES ('1911-08-01 01:01:01+01', 1, 2); +:EXPLAIN_ANALYZE EXECUTE cagg_scan_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=3 loops=1) + -> Index Scan using 
_hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> HashAggregate (actual rows=2 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_16_chunk."time"), _hyper_7_16_chunk.device + Batches: 1 + -> Result (actual rows=2 loops=1) + -> Append (actual rows=2 loops=1) + -> Index Scan using _hyper_7_16_chunk_chunks_time_idx on _hyper_7_16_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> Index Scan using _hyper_7_18_chunk_chunks_time_idx on _hyper_7_18_chunk (actual rows=1 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> Index Scan using _hyper_7_19_chunk_chunks_time_idx on _hyper_7_19_chunk (actual rows=1 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) +(14 rows) + +:EXPLAIN_ANALYZE SELECT * FROM chunks_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=3 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> HashAggregate (actual rows=2 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_16_chunk."time"), _hyper_7_16_chunk.device + Batches: 1 + -> Result (actual rows=2 loops=1) + -> Append (actual rows=2 loops=1) + -> Index Scan using _hyper_7_16_chunk_chunks_time_idx on _hyper_7_16_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> Index Scan using _hyper_7_18_chunk_chunks_time_idx on _hyper_7_18_chunk (actual rows=1 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> 
Index Scan using _hyper_7_19_chunk_chunks_time_idx on _hyper_7_19_chunk (actual rows=1 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) +(14 rows) + +-- Materialize CAgg and check for plan time chunk exclusion +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +:EXPLAIN_ANALYZE EXECUTE cagg_scan_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=3 loops=1) + -> Append (actual rows=3 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_19_chunk."time"), _hyper_7_19_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_19_chunk_chunks_time_idx on _hyper_7_19_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) +(14 rows) + +:EXPLAIN_ANALYZE SELECT * FROM chunks_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=3 loops=1) + -> Append (actual rows=3 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon 
Jul 31 17:00:00 1911 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_19_chunk."time"), _hyper_7_19_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_19_chunk_chunks_time_idx on _hyper_7_19_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) +(14 rows) + +-- Check plan when chunk_append and constraint_aware_append cannot be used +-- There should be no plans for scans of chunks that are materialized in the CAgg +-- on the underlying hypertable +SET timescaledb.enable_chunk_append = OFF; +SET timescaledb.enable_constraint_aware_append = OFF; +:EXPLAIN_ANALYZE SELECT * FROM chunks_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=3 loops=1) + -> Append (actual rows=3 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1911 PST'::timestamp 
with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_19_chunk."time"), _hyper_7_19_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_19_chunk_chunks_time_idx on _hyper_7_19_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) +(14 rows) + +RESET timescaledb.enable_chunk_append; +RESET timescaledb.enable_constraint_aware_append; +-- Insert new values and check watermark changes +INSERT INTO chunks VALUES ('1920-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); + to_timestamp +------------------------------ + Sat Jul 31 17:00:00 1920 PST +(1 row) + +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=4 loops=1) + -> Append (actual rows=4 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sat Jul 31 17:00:00 1920 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sat Jul 31 17:00:00 1920 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sat Jul 31 17:00:00 1920 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sat Jul 31 17:00:00 1920 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 
loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_22_chunk."time"), _hyper_7_22_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_22_chunk_chunks_time_idx on _hyper_7_22_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Sat Jul 31 17:00:00 1920 PST'::timestamp with time zone) +(16 rows) + +-- Compare prepared statement with ad-hoc query +EXECUTE cagg_scan_1h; + bucket | device | max +------------------------------+--------+----- + Wed Jul 31 16:00:00 1901 PST | 1 | 2 + Sun Jul 31 16:00:00 1910 PST | 1 | 2 + Mon Jul 31 16:00:00 1911 PST | 1 | 2 + Sat Jul 31 16:00:00 1920 PST | 1 | 2 +(4 rows) + +SELECT * FROM chunks_1h; + bucket | device | max +------------------------------+--------+----- + Wed Jul 31 16:00:00 1901 PST | 1 | 2 + Sun Jul 31 16:00:00 1910 PST | 1 | 2 + Mon Jul 31 16:00:00 1911 PST | 1 | 2 + Sat Jul 31 16:00:00 1920 PST | 1 | 2 +(4 rows) + +INSERT INTO chunks VALUES ('1930-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); + to_timestamp +------------------------------ + Thu Jul 31 17:00:00 1930 PST +(1 row) + +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=5 loops=1) + -> Append (actual rows=5 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Thu Jul 31 17:00:00 1930 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Thu Jul 31 17:00:00 1930 PST'::timestamp with time zone) + -> Index Scan using 
_hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Thu Jul 31 17:00:00 1930 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Thu Jul 31 17:00:00 1930 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_25_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_25_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Thu Jul 31 17:00:00 1930 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_24_chunk."time"), _hyper_7_24_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_24_chunk_chunks_time_idx on _hyper_7_24_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Thu Jul 31 17:00:00 1930 PST'::timestamp with time zone) +(18 rows) + +-- Two invalidations without prepared statement execution between +INSERT INTO chunks VALUES ('1931-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +INSERT INTO chunks VALUES ('1932-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); + to_timestamp +------------------------------ + Sun Jul 31 17:00:00 1932 PST +(1 row) + +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=7 loops=1) + -> Append (actual rows=7 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index 
Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_25_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_25_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_27_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_27_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_29_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_29_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_28_chunk."time"), _hyper_7_28_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_28_chunk_chunks_time_idx on _hyper_7_28_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) +(22 rows) + +-- Multiple prepared statement executions followed by one invalidation +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=7 loops=1) + -> Append (actual rows=7 loops=1) + -> Index Scan using 
_hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_25_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_25_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_27_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_27_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_29_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_29_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_28_chunk."time"), _hyper_7_28_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_28_chunk_chunks_time_idx on _hyper_7_28_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) +(22 rows) + +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + 
Append (actual rows=7 loops=1) + -> Append (actual rows=7 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_25_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_25_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_27_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_27_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_29_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_29_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_28_chunk."time"), _hyper_7_28_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_28_chunk_chunks_time_idx on _hyper_7_28_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) +(22 rows) + +INSERT INTO chunks VALUES ('1940-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', 
'1900-01-01', '2021-06-01'); +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=8 loops=1) + -> Append (actual rows=8 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_25_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_25_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_27_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_27_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_29_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_29_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_31_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_31_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: 
time_bucket('@ 1 hour'::interval, _hyper_7_30_chunk."time"), _hyper_7_30_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_30_chunk_chunks_time_idx on _hyper_7_30_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) +(24 rows) + +-- Compare prepared statement with ad-hoc query +EXECUTE cagg_scan_1h; + bucket | device | max +------------------------------+--------+----- + Wed Jul 31 16:00:00 1901 PST | 1 | 2 + Sun Jul 31 16:00:00 1910 PST | 1 | 2 + Mon Jul 31 16:00:00 1911 PST | 1 | 2 + Sat Jul 31 16:00:00 1920 PST | 1 | 2 + Thu Jul 31 16:00:00 1930 PST | 1 | 2 + Fri Jul 31 16:00:00 1931 PST | 1 | 2 + Sun Jul 31 16:00:00 1932 PST | 1 | 2 + Wed Jul 31 16:00:00 1940 PST | 1 | 2 +(8 rows) + +SELECT * FROM chunks_1h; + bucket | device | max +------------------------------+--------+----- + Wed Jul 31 16:00:00 1901 PST | 1 | 2 + Sun Jul 31 16:00:00 1910 PST | 1 | 2 + Mon Jul 31 16:00:00 1911 PST | 1 | 2 + Sat Jul 31 16:00:00 1920 PST | 1 | 2 + Thu Jul 31 16:00:00 1930 PST | 1 | 2 + Fri Jul 31 16:00:00 1931 PST | 1 | 2 + Sun Jul 31 16:00:00 1932 PST | 1 | 2 + Wed Jul 31 16:00:00 1940 PST | 1 | 2 +(8 rows) + +-- Delete data from hypertable - data is only present in cagg after this point. If the watermark in the prepared +-- statement is not moved to the most-recent watermark, we would see an empty result. 
+TRUNCATE chunks; +EXECUTE cagg_scan_1h; + bucket | device | max +------------------------------+--------+----- + Wed Jul 31 16:00:00 1901 PST | 1 | 2 + Sun Jul 31 16:00:00 1910 PST | 1 | 2 + Mon Jul 31 16:00:00 1911 PST | 1 | 2 + Sat Jul 31 16:00:00 1920 PST | 1 | 2 + Thu Jul 31 16:00:00 1930 PST | 1 | 2 + Fri Jul 31 16:00:00 1931 PST | 1 | 2 + Sun Jul 31 16:00:00 1932 PST | 1 | 2 + Wed Jul 31 16:00:00 1940 PST | 1 | 2 +(8 rows) + +SELECT * FROM chunks_1h; + bucket | device | max +------------------------------+--------+----- + Wed Jul 31 16:00:00 1901 PST | 1 | 2 + Sun Jul 31 16:00:00 1910 PST | 1 | 2 + Mon Jul 31 16:00:00 1911 PST | 1 | 2 + Sat Jul 31 16:00:00 1920 PST | 1 | 2 + Thu Jul 31 16:00:00 1930 PST | 1 | 2 + Fri Jul 31 16:00:00 1931 PST | 1 | 2 + Sun Jul 31 16:00:00 1932 PST | 1 | 2 + Wed Jul 31 16:00:00 1940 PST | 1 | 2 +(8 rows) + +-- Refresh the CAgg +CALL refresh_continuous_aggregate('chunks_1h', NULL, NULL); +EXECUTE cagg_scan_1h; + bucket | device | max +--------+--------+----- +(0 rows) + +SELECT * FROM chunks_1h; + bucket | device | max +--------+--------+----- +(0 rows) + +-- Check new watermark +SELECT * FROM _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); + to_timestamp +------------------------------ + Wed Jul 31 17:00:00 1940 PST +(1 row) + +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=0 loops=1) + -> Append (actual rows=0 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time 
zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_25_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_25_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_27_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_27_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_29_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_29_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_31_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_31_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, "time"), device + Batches: 1 + -> Result (actual rows=0 loops=1) + One-Time Filter: false +(23 rows) + +-- Update after truncate +INSERT INTO chunks VALUES ('1950-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); + to_timestamp +------------------------------ + Mon Jul 31 17:00:00 1950 PST +(1 row) + +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual 
rows=1 loops=1) + -> Append (actual rows=1 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_25_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_25_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_27_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_27_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_29_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_29_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_31_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_31_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_33_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_33_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, 
_hyper_7_32_chunk."time"), _hyper_7_32_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_32_chunk_chunks_time_idx on _hyper_7_32_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) +(26 rows) + +-- Test with CAgg on CAgg +CREATE MATERIALIZED VIEW chunks_1d WITH (timescaledb.continuous) + AS SELECT time_bucket('1 days', bucket) AS bucket, device, max(max) AS max FROM chunks_1h GROUP BY 1, 2; +NOTICE: refreshing continuous aggregate "chunks_1d" +ALTER MATERIALIZED VIEW chunks_1d set (timescaledb.materialized_only = false); +SELECT id AS "MAT_HT_ID_1D" FROM _timescaledb_catalog.hypertable + WHERE table_name=( + SELECT materialization_hypertable_name + FROM timescaledb_information.continuous_aggregates + WHERE view_name='chunks_1d' + ) \gset +SELECT materialization_hypertable_schema || '.' || materialization_hypertable_name AS "MAT_HT_NAME_1D" + FROM timescaledb_information.continuous_aggregates + WHERE view_name='chunks_1d' +\gset +-- Prepared scan on hypertable (identical to the query of a real-time CAgg) +PREPARE ht_scan_realtime_1d AS + SELECT bucket, device, max + FROM :MAT_HT_NAME_1D + WHERE bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1D)), '-infinity'::timestamp with time zone) +UNION ALL + SELECT time_bucket('@ 1 day'::interval, chunks_1h.bucket) AS bucket, + chunks_1h.device, + max(chunks_1h.max) AS max + FROM chunks_1h + WHERE chunks_1h.bucket >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1D)), '-infinity'::timestamp with time zone) + GROUP BY (time_bucket('@ 1 day'::interval, chunks_1h.bucket)), chunks_1h.device; +PREPARE cagg_scan_1d AS SELECT * FROM chunks_1d; +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1d; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------- + 
Append (actual rows=1 loops=1) + -> Index Scan using _hyper_9_34_chunk__materialized_hypertable_9_bucket_idx on _hyper_9_34_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Tue Aug 01 16:00:00 1950 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 day'::interval, (time_bucket('@ 1 hour'::interval, "time"))), device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, "time"), device + Batches: 1 + -> Result (actual rows=0 loops=1) + One-Time Filter: false +(12 rows) + +INSERT INTO chunks VALUES ('2000-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +CALL refresh_continuous_aggregate('chunks_1d', '1900-01-01', '2021-06-01'); +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1d; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=2 loops=1) + -> Append (actual rows=2 loops=1) + -> Index Scan using _hyper_9_34_chunk__materialized_hypertable_9_bucket_idx on _hyper_9_34_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Tue Aug 01 17:00:00 2000 PDT'::timestamp with time zone) + -> Index Scan using _hyper_9_37_chunk__materialized_hypertable_9_bucket_idx on _hyper_9_37_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Tue Aug 01 17:00:00 2000 PDT'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 day'::interval, (time_bucket('@ 1 hour'::interval, "time"))), device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, "time"), device + Batches: 1 + -> Result (actual rows=0 loops=1) + One-Time Filter: false +(15 rows) + +INSERT INTO chunks VALUES ('2010-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', 
'1900-01-01', '2021-06-01'); +CALL refresh_continuous_aggregate('chunks_1d', '1900-01-01', '2021-06-01'); +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1d; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=3 loops=1) + -> Append (actual rows=3 loops=1) + -> Index Scan using _hyper_9_34_chunk__materialized_hypertable_9_bucket_idx on _hyper_9_34_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Aug 01 17:00:00 2010 PDT'::timestamp with time zone) + -> Index Scan using _hyper_9_37_chunk__materialized_hypertable_9_bucket_idx on _hyper_9_37_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Aug 01 17:00:00 2010 PDT'::timestamp with time zone) + -> Index Scan using _hyper_9_40_chunk__materialized_hypertable_9_bucket_idx on _hyper_9_40_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Aug 01 17:00:00 2010 PDT'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 day'::interval, (time_bucket('@ 1 hour'::interval, "time"))), device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, "time"), device + Batches: 1 + -> Result (actual rows=0 loops=1) + One-Time Filter: false +(17 rows) + +-- Stored procedure - watermark +CREATE FUNCTION cur_watermark_plsql(mat_table int) RETURNS timestamptz +AS $$ +DECLARE +cur_watermark_value timestamptz; +BEGIN + SELECT _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(mat_table)) INTO cur_watermark_value; + RETURN cur_watermark_value; +END$$ LANGUAGE plpgsql; +SELECT * FROM cur_watermark_plsql(:MAT_HT_ID_1H); + cur_watermark_plsql +------------------------------ + Sat Jul 31 18:00:00 2010 PDT +(1 row) + +INSERT INTO chunks VALUES ('2011-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * 
FROM cur_watermark_plsql(:MAT_HT_ID_1H); + cur_watermark_plsql +------------------------------ + Sun Jul 31 18:00:00 2011 PDT +(1 row) + +INSERT INTO chunks VALUES ('2012-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM cur_watermark_plsql(:MAT_HT_ID_1H); + cur_watermark_plsql +------------------------------ + Tue Jul 31 18:00:00 2012 PDT +(1 row) + +-- Stored procedure - result +CREATE FUNCTION cur_cagg_result_count() RETURNS int +AS $$ +DECLARE +count_value int; +BEGIN + SELECT count(*) FROM chunks_1h INTO count_value; + RETURN count_value; +END$$ LANGUAGE plpgsql; +-- Cache function value +SELECT * FROM cur_cagg_result_count(); + cur_cagg_result_count +----------------------- + 5 +(1 row) + +-- Add to non-materialized part +INSERT INTO chunks VALUES ('2013-08-01 01:01:01+01', 1, 2); +SELECT * FROM cur_cagg_result_count(); + cur_cagg_result_count +----------------------- + 6 +(1 row) + +-- Materialize +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM cur_cagg_result_count(); + cur_cagg_result_count +----------------------- + 6 +(1 row) + +-- Ensure all elements are materialized (i.e., watermark is moved properly) +TRUNCATE chunks; +SELECT * FROM cur_cagg_result_count(); + cur_cagg_result_count +----------------------- + 6 +(1 row) + +SELECT count(*) FROM chunks_1h; + count +------- + 6 +(1 row) + +-- Test watermark call directly +PREPARE watermark_query AS + SELECT _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); +SELECT _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); + to_timestamp +------------------------------ + Wed Jul 31 18:00:00 2013 PDT +(1 row) + +EXECUTE watermark_query; + to_timestamp +------------------------------ + Wed Jul 31 18:00:00 2013 PDT +(1 row) + +INSERT INTO chunks VALUES ('2013-09-01 01:01:01+01', 1, 2); +CALL 
refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); + to_timestamp +------------------------------ + Sat Aug 31 18:00:00 2013 PDT +(1 row) + +EXECUTE watermark_query; + to_timestamp +------------------------------ + Sat Aug 31 18:00:00 2013 PDT +(1 row) + +-- Disable constification of watermark values +SET timescaledb.enable_cagg_watermark_constify = OFF; +INSERT INTO chunks VALUES ('2014-01-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +---------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=2 loops=1) + -> Custom Scan (ChunkAppend) on _materialized_hypertable_8 (actual rows=2 loops=1) + Chunks excluded during startup: 0 + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 
COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_25_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_25_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_27_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_27_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_29_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_29_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_31_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_31_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_33_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_33_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_36_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_36_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_39_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_39_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 
COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_42_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_42_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_44_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_44_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_46_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_46_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_48_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_48_chunk (actual rows=1 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_50_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_50_chunk (actual rows=1 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, chunks."time"), chunks.device + Batches: 1 + -> Custom Scan (ChunkAppend) on chunks (actual rows=0 loops=1) + Chunks excluded during startup: 1 + -> Index Scan using _hyper_7_49_chunk_chunks_time_idx on _hyper_7_49_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) +(42 
rows) + +RESET timescaledb.enable_cagg_watermark_constify; +-- Select with projection +INSERT INTO chunks VALUES ('2015-01-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +:EXPLAIN_ANALYZE SELECT device FROM chunks_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=3 loops=1) + -> Subquery Scan on "*SELECT* 1" (actual rows=3 loops=1) + -> Result (actual rows=3 loops=1) + -> Append (actual rows=3 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_25_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_25_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_27_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_27_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_29_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_29_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 
PST'::timestamp with time zone) + -> Index Scan using _hyper_8_31_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_31_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_33_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_33_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_36_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_36_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_39_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_39_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_42_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_42_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_44_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_44_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_46_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_46_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_48_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_48_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_50_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_50_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_52_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_52_chunk (actual rows=1 loops=1) 
+ Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Subquery Scan on "*SELECT* 2" (actual rows=0 loops=1) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_51_chunk."time"), _hyper_7_51_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_51_chunk_chunks_time_idx on _hyper_7_51_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) +(45 rows) + +-- Watermark function use other tables in WHERE condition (should not be constified) +CREATE TABLE continuous_agg_test(time int, data int); +:EXPLAIN_ANALYZE SELECT * FROM continuous_agg_test AS t1, continuous_agg_test AS t2 WHERE COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)), '-infinity'::timestamp with time zone) IS NOT NULL; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------------------------- + Result (actual rows=0 loops=1) + One-Time Filter: (COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone) IS NOT NULL) + -> Nested Loop (actual rows=0 loops=1) + -> Seq Scan on continuous_agg_test t1 (actual rows=0 loops=1) + -> Materialize (never executed) + -> Seq Scan on continuous_agg_test t2 (never executed) +(6 rows) + +\c :TEST_DBNAME :ROLE_SUPERUSER +TRUNCATE _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; +TRUNCATE _timescaledb_catalog.continuous_aggs_invalidation_threshold; +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER diff --git a/tsl/test/expected/cagg_watermark-14.out b/tsl/test/expected/cagg_watermark-14.out new file mode 100644 index 00000000000..5cc58c7d1da --- /dev/null +++ b/tsl/test/expected/cagg_watermark-14.out @@ -0,0 +1,1271 @@ +-- This file and its contents are licensed under the 
Timescale License. +-- Please see the included NOTICE for copyright information and +-- LICENSE-TIMESCALE for a copy of the license. +\set EXPLAIN_ANALYZE 'EXPLAIN (analyze,costs off,timing off,summary off)' +CREATE TABLE continuous_agg_test(time int, data int); +select create_hypertable('continuous_agg_test', 'time', chunk_time_interval=> 10); +NOTICE: adding not-null constraint to column "time" + create_hypertable +---------------------------------- + (1,public,continuous_agg_test,t) +(1 row) + +CREATE OR REPLACE FUNCTION integer_now_test1() returns int LANGUAGE SQL STABLE as $$ SELECT coalesce(max(time), 0) FROM continuous_agg_test $$; +SELECT set_integer_now_func('continuous_agg_test', 'integer_now_test1'); + set_integer_now_func +---------------------- + +(1 row) + +-- watermark tabels start out empty +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- +(0 rows) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- +(0 rows) + +-- inserting into a table that does not have continuous_agg_insert_trigger doesn't change the watermark +INSERT INTO continuous_agg_test VALUES (10, 1), (11, 2), (21, 3), (22, 4); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- +(0 rows) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- +(0 rows) + +\c :TEST_DBNAME :ROLE_SUPERUSER +CREATE TABLE continuous_agg_test_mat(time int); +select create_hypertable('continuous_agg_test_mat', 'time', chunk_time_interval=> 10); +NOTICE: adding not-null constraint to column "time" + create_hypertable 
+-------------------------------------- + (2,public,continuous_agg_test_mat,t) +(1 row) + +INSERT INTO _timescaledb_catalog.continuous_agg VALUES (2, 1, NULL, '','','','',0,'',''); +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER +-- create the trigger +CREATE TRIGGER continuous_agg_insert_trigger + AFTER INSERT ON continuous_agg_test + FOR EACH ROW EXECUTE FUNCTION _timescaledb_functions.continuous_agg_invalidation_trigger(1); +-- inserting into the table still doesn't change the watermark since there's no +-- continuous_aggs_invalidation_threshold. We treat that case as a invalidation_watermark of +-- BIG_INT_MIN, since the first run of the aggregation will need to scan the +-- entire table anyway. +INSERT INTO continuous_agg_test VALUES (10, 1), (11, 2), (21, 3), (22, 4); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- +(0 rows) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- +(0 rows) + +-- set the continuous_aggs_invalidation_threshold to 15, any insertions below that value need an invalidation +\c :TEST_DBNAME :ROLE_SUPERUSER +INSERT INTO _timescaledb_catalog.continuous_aggs_invalidation_threshold VALUES (1, 15); +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER +INSERT INTO continuous_agg_test VALUES (10, 1), (11, 2), (21, 3), (22, 4); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 1 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 1 | 10 | 22 +(1 row) + +-- INSERTs only above the continuous_aggs_invalidation_threshold won't change the 
continuous_aggs_hypertable_invalidation_log +INSERT INTO continuous_agg_test VALUES (21, 3), (22, 4); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 1 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 1 | 10 | 22 +(1 row) + +-- INSERTs only below the continuous_aggs_invalidation_threshold will change the continuous_aggs_hypertable_invalidation_log +INSERT INTO continuous_agg_test VALUES (10, 1), (11, 2); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 1 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 1 | 10 | 22 + 1 | 10 | 11 +(2 rows) + +-- test INSERTing other values +INSERT INTO continuous_agg_test VALUES (1, 7), (12, 6), (24, 5), (51, 4); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 1 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 1 | 10 | 22 + 1 | 10 | 11 + 1 | 1 | 51 +(3 rows) + +-- INSERT after dropping a COLUMN +ALTER TABLE continuous_agg_test DROP COLUMN data; +INSERT INTO continuous_agg_test VALUES (-1), (-2), (-3), (-4); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 1 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + 
hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 1 | 10 | 22 + 1 | 10 | 11 + 1 | 1 | 51 + 1 | -4 | -1 +(4 rows) + +INSERT INTO continuous_agg_test VALUES (100); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 1 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 1 | 10 | 22 + 1 | 10 | 11 + 1 | 1 | 51 + 1 | -4 | -1 +(4 rows) + +-- INSERT after adding a COLUMN +ALTER TABLE continuous_agg_test ADD COLUMN d BOOLEAN; +INSERT INTO continuous_agg_test VALUES (-6, true), (-7, false), (-3, true), (-4, false); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 1 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 1 | 10 | 22 + 1 | 10 | 11 + 1 | 1 | 51 + 1 | -4 | -1 + 1 | -7 | -3 +(5 rows) + +INSERT INTO continuous_agg_test VALUES (120, false), (200, true); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 1 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 1 | 10 | 22 + 1 | 10 | 11 + 1 | 1 | 51 + 1 | -4 | -1 + 1 | -7 | -3 +(5 rows) + +\c :TEST_DBNAME :ROLE_SUPERUSER +DELETE FROM _timescaledb_catalog.continuous_agg where mat_hypertable_id = 2; +DELETE FROM _timescaledb_config.bgw_job WHERE id = 2; +\c 
:TEST_DBNAME :ROLE_DEFAULT_PERM_USER +DROP TABLE continuous_agg_test CASCADE; +\c :TEST_DBNAME :ROLE_SUPERUSER +TRUNCATE _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; +TRUNCATE _timescaledb_catalog.continuous_aggs_invalidation_threshold; +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER +-- CREATE VIEW creates the invalidation trigger correctly +CREATE TABLE ca_inval_test(time int); +SELECT create_hypertable('ca_inval_test', 'time', chunk_time_interval=> 10); +NOTICE: adding not-null constraint to column "time" + create_hypertable +---------------------------- + (3,public,ca_inval_test,t) +(1 row) + +CREATE OR REPLACE FUNCTION integer_now_test2() returns int LANGUAGE SQL STABLE as $$ SELECT coalesce(max(time), 0) FROM ca_inval_test $$; +SELECT set_integer_now_func('ca_inval_test', 'integer_now_test2'); + set_integer_now_func +---------------------- + +(1 row) + +CREATE MATERIALIZED VIEW cit_view + WITH (timescaledb.continuous, timescaledb.materialized_only=false) + AS SELECT time_bucket('5', time), COUNT(time) + FROM ca_inval_test + GROUP BY 1 WITH NO DATA; +INSERT INTO ca_inval_test SELECT generate_series(0, 5); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+------------- + 3 | -2147483648 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- +(0 rows) + +\c :TEST_DBNAME :ROLE_SUPERUSER +UPDATE _timescaledb_catalog.continuous_aggs_invalidation_threshold +SET watermark = 15 +WHERE hypertable_id = 3; +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER +INSERT INTO ca_inval_test SELECT generate_series(5, 15); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 3 | 15 +(1 row) + +SELECT * from 
_timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 3 | 5 | 15 +(1 row) + +INSERT INTO ca_inval_test SELECT generate_series(16, 20); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 3 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 3 | 5 | 15 +(1 row) + +\c :TEST_DBNAME :ROLE_SUPERUSER +TRUNCATE _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER +-- updates below the threshold update both the old and new values +UPDATE ca_inval_test SET time = 5 WHERE time = 6; +UPDATE ca_inval_test SET time = 7 WHERE time = 5; +UPDATE ca_inval_test SET time = 17 WHERE time = 14; +UPDATE ca_inval_test SET time = 12 WHERE time = 16; +-- updates purely above the threshold are not logged +UPDATE ca_inval_test SET time = 19 WHERE time = 18; +UPDATE ca_inval_test SET time = 17 WHERE time = 19; +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 3 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 3 | 5 | 6 + 3 | 5 | 7 + 3 | 14 | 17 + 3 | 12 | 16 +(4 rows) + +DROP TABLE ca_inval_test CASCADE; +NOTICE: drop cascades to 3 other objects +\c :TEST_DBNAME :ROLE_SUPERUSER +TRUNCATE _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; +TRUNCATE _timescaledb_catalog.continuous_aggs_invalidation_threshold; +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER +-- 
invalidation trigger is created correctly on chunks that existed before +-- the view was created +CREATE TABLE ts_continuous_test(time INTEGER, location INTEGER); + SELECT create_hypertable('ts_continuous_test', 'time', chunk_time_interval => 10); +NOTICE: adding not-null constraint to column "time" + create_hypertable +--------------------------------- + (5,public,ts_continuous_test,t) +(1 row) + +CREATE OR REPLACE FUNCTION integer_now_test3() returns int LANGUAGE SQL STABLE as $$ SELECT coalesce(max(time), 0) FROM ts_continuous_test $$; +SELECT set_integer_now_func('ts_continuous_test', 'integer_now_test3'); + set_integer_now_func +---------------------- + +(1 row) + +INSERT INTO ts_continuous_test SELECT i, i FROM + (SELECT generate_series(0, 29) AS i) AS i; +CREATE MATERIALIZED VIEW continuous_view + WITH (timescaledb.continuous, timescaledb.materialized_only=false) + AS SELECT time_bucket('5', time), COUNT(location) + FROM ts_continuous_test + GROUP BY 1 WITH NO DATA; +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+------------- + 5 | -2147483648 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- +(0 rows) + +\c :TEST_DBNAME :ROLE_SUPERUSER +UPDATE _timescaledb_catalog.continuous_aggs_invalidation_threshold +SET watermark = 2 +WHERE hypertable_id = 5; +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER +INSERT INTO ts_continuous_test VALUES (1, 1); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 5 | 2 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 5 | 1 | 1 +(1 row) 
+ +-- aborts don't get written +BEGIN; + INSERT INTO ts_continuous_test VALUES (-20, -20); +ABORT; +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 5 | 2 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 5 | 1 | 1 +(1 row) + +DROP TABLE ts_continuous_test CASCADE; +NOTICE: drop cascades to 3 other objects +---- +-- Test watermark invalidation and chunk exclusion with prepared and ad-hoc queries +---- +CREATE TABLE chunks(time timestamptz, device int, value float); +SELECT FROM create_hypertable('chunks','time',chunk_time_interval:='1d'::interval); +NOTICE: adding not-null constraint to column "time" +-- +(1 row) + +CREATE MATERIALIZED VIEW chunks_1h WITH (timescaledb.continuous) + AS SELECT time_bucket('1 hour', time) AS bucket, device, max(value) AS max FROM chunks GROUP BY 1, 2; +NOTICE: continuous aggregate "chunks_1h" is already up-to-date +ALTER MATERIALIZED VIEW chunks_1h set (timescaledb.materialized_only = false); +-- Get id fg the materialization hypertable +SELECT id AS "MAT_HT_ID_1H" FROM _timescaledb_catalog.hypertable + WHERE table_name=( + SELECT materialization_hypertable_name + FROM timescaledb_information.continuous_aggregates + WHERE view_name='chunks_1h' + ) \gset +SELECT materialization_hypertable_schema || '.' 
|| materialization_hypertable_name AS "MAT_HT_NAME_1H" + FROM timescaledb_information.continuous_aggregates + WHERE view_name='chunks_1h' +\gset +-- Prepared scan on hypertable (identical to the query of a real-time CAgg) +PREPARE ht_scan_realtime_1h AS + SELECT bucket, device, max + FROM :MAT_HT_NAME_1H + WHERE bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)), '-infinity'::timestamp with time zone) +UNION ALL + SELECT time_bucket('01:00:00'::interval, chunks."time") AS bucket, + chunks.device, + max(chunks.value) AS max + FROM chunks + WHERE chunks."time" >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)), '-infinity'::timestamp with time zone) + GROUP BY (time_bucket('01:00:00'::interval, chunks."time")), chunks.device; +PREPARE cagg_scan_1h AS SELECT * FROM chunks_1h; +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +------------------------------------------------------------------------------ + HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, chunks."time"), chunks.device + Batches: 1 + -> Result (actual rows=0 loops=1) + One-Time Filter: false +(5 rows) + +INSERT INTO chunks VALUES ('1901-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); + to_timestamp +------------------------------ + Wed Jul 31 17:00:00 1901 PST +(1 row) + +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=1 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> 
HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_16_chunk."time"), _hyper_7_16_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_16_chunk_chunks_time_idx on _hyper_7_16_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) +(9 rows) + +-- Compare prepared statement with ad-hoc query +EXECUTE cagg_scan_1h; + bucket | device | max +------------------------------+--------+----- + Wed Jul 31 16:00:00 1901 PST | 1 | 2 +(1 row) + +SELECT * FROM chunks_1h; + bucket | device | max +------------------------------+--------+----- + Wed Jul 31 16:00:00 1901 PST | 1 | 2 +(1 row) + +-- Add new chunks to the non materialized part of the CAgg +INSERT INTO chunks VALUES ('1910-08-01 01:01:01+01', 1, 2); +:EXPLAIN_ANALYZE EXECUTE cagg_scan_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=2 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> HashAggregate (actual rows=1 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_16_chunk."time"), _hyper_7_16_chunk.device + Batches: 1 + -> Result (actual rows=1 loops=1) + -> Append (actual rows=1 loops=1) + -> Index Scan using _hyper_7_16_chunk_chunks_time_idx on _hyper_7_16_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> Index Scan using _hyper_7_18_chunk_chunks_time_idx on _hyper_7_18_chunk (actual rows=1 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) +(12 rows) + +:EXPLAIN_ANALYZE SELECT * FROM chunks_1h; + QUERY PLAN 
+----------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=2 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> HashAggregate (actual rows=1 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_16_chunk."time"), _hyper_7_16_chunk.device + Batches: 1 + -> Result (actual rows=1 loops=1) + -> Append (actual rows=1 loops=1) + -> Index Scan using _hyper_7_16_chunk_chunks_time_idx on _hyper_7_16_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> Index Scan using _hyper_7_18_chunk_chunks_time_idx on _hyper_7_18_chunk (actual rows=1 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) +(12 rows) + +INSERT INTO chunks VALUES ('1911-08-01 01:01:01+01', 1, 2); +:EXPLAIN_ANALYZE EXECUTE cagg_scan_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=3 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> HashAggregate (actual rows=2 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_16_chunk."time"), _hyper_7_16_chunk.device + Batches: 1 + -> Result (actual rows=2 loops=1) + -> Append (actual rows=2 loops=1) + -> Index Scan using _hyper_7_16_chunk_chunks_time_idx on _hyper_7_16_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> Index Scan using _hyper_7_18_chunk_chunks_time_idx on _hyper_7_18_chunk (actual rows=1 loops=1) + Index Cond: ("time" >= 'Wed Jul 
31 17:00:00 1901 PST'::timestamp with time zone) + -> Index Scan using _hyper_7_19_chunk_chunks_time_idx on _hyper_7_19_chunk (actual rows=1 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) +(14 rows) + +:EXPLAIN_ANALYZE SELECT * FROM chunks_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=3 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> HashAggregate (actual rows=2 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_16_chunk."time"), _hyper_7_16_chunk.device + Batches: 1 + -> Result (actual rows=2 loops=1) + -> Append (actual rows=2 loops=1) + -> Index Scan using _hyper_7_16_chunk_chunks_time_idx on _hyper_7_16_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> Index Scan using _hyper_7_18_chunk_chunks_time_idx on _hyper_7_18_chunk (actual rows=1 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> Index Scan using _hyper_7_19_chunk_chunks_time_idx on _hyper_7_19_chunk (actual rows=1 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) +(14 rows) + +-- Materialize CAgg and check for plan time chunk exclusion +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +:EXPLAIN_ANALYZE EXECUTE cagg_scan_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=3 loops=1) + -> Append (actual rows=3 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 
'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_19_chunk."time"), _hyper_7_19_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_19_chunk_chunks_time_idx on _hyper_7_19_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) +(14 rows) + +:EXPLAIN_ANALYZE SELECT * FROM chunks_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=3 loops=1) + -> Append (actual rows=3 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_19_chunk."time"), _hyper_7_19_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_19_chunk_chunks_time_idx on _hyper_7_19_chunk 
(actual rows=0 loops=1) + Index Cond: ("time" >= 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) +(14 rows) + +-- Check plan when chunk_append and constraint_aware_append cannot be used +-- There should be no plans for scans of chunks that are materialized in the CAgg +-- on the underlying hypertable +SET timescaledb.enable_chunk_append = OFF; +SET timescaledb.enable_constraint_aware_append = OFF; +:EXPLAIN_ANALYZE SELECT * FROM chunks_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=3 loops=1) + -> Append (actual rows=3 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_19_chunk."time"), _hyper_7_19_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_19_chunk_chunks_time_idx on _hyper_7_19_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) +(14 rows) + +RESET timescaledb.enable_chunk_append; +RESET timescaledb.enable_constraint_aware_append; +-- Insert new values and check watermark changes +INSERT INTO chunks VALUES ('1920-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM 
_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); + to_timestamp +------------------------------ + Sat Jul 31 17:00:00 1920 PST +(1 row) + +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=4 loops=1) + -> Append (actual rows=4 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sat Jul 31 17:00:00 1920 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sat Jul 31 17:00:00 1920 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sat Jul 31 17:00:00 1920 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sat Jul 31 17:00:00 1920 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_22_chunk."time"), _hyper_7_22_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_22_chunk_chunks_time_idx on _hyper_7_22_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Sat Jul 31 17:00:00 1920 PST'::timestamp with time zone) +(16 rows) + +-- Compare prepared statement with ad-hoc query +EXECUTE cagg_scan_1h; + bucket | device | max +------------------------------+--------+----- + Wed Jul 31 16:00:00 1901 PST | 1 | 2 + Sun Jul 31 16:00:00 1910 PST | 1 | 2 + Mon Jul 31 16:00:00 1911 PST | 1 | 2 + Sat Jul 31 16:00:00 1920 PST | 1 | 2 +(4 rows) + +SELECT * FROM chunks_1h; + bucket | 
device | max +------------------------------+--------+----- + Wed Jul 31 16:00:00 1901 PST | 1 | 2 + Sun Jul 31 16:00:00 1910 PST | 1 | 2 + Mon Jul 31 16:00:00 1911 PST | 1 | 2 + Sat Jul 31 16:00:00 1920 PST | 1 | 2 +(4 rows) + +INSERT INTO chunks VALUES ('1930-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); + to_timestamp +------------------------------ + Thu Jul 31 17:00:00 1930 PST +(1 row) + +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=5 loops=1) + -> Append (actual rows=5 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Thu Jul 31 17:00:00 1930 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Thu Jul 31 17:00:00 1930 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Thu Jul 31 17:00:00 1930 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Thu Jul 31 17:00:00 1930 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_25_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_25_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Thu Jul 31 17:00:00 1930 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_24_chunk."time"), _hyper_7_24_chunk.device + 
Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_24_chunk_chunks_time_idx on _hyper_7_24_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Thu Jul 31 17:00:00 1930 PST'::timestamp with time zone) +(18 rows) + +-- Two invalidations without prepared statement execution between +INSERT INTO chunks VALUES ('1931-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +INSERT INTO chunks VALUES ('1932-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); + to_timestamp +------------------------------ + Sun Jul 31 17:00:00 1932 PST +(1 row) + +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=7 loops=1) + -> Append (actual rows=7 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_25_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_25_chunk (actual rows=1 loops=1) + Index 
Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_27_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_27_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_29_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_29_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_28_chunk."time"), _hyper_7_28_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_28_chunk_chunks_time_idx on _hyper_7_28_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) +(22 rows) + +-- Multiple prepared statement executions followed by one invalidation +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=7 loops=1) + -> Append (actual rows=7 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 
PST'::timestamp with time zone) + -> Index Scan using _hyper_8_25_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_25_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_27_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_27_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_29_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_29_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_28_chunk."time"), _hyper_7_28_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_28_chunk_chunks_time_idx on _hyper_7_28_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) +(22 rows) + +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=7 loops=1) + -> Append (actual rows=7 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on 
_hyper_8_23_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_25_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_25_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_27_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_27_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_29_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_29_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_28_chunk."time"), _hyper_7_28_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_28_chunk_chunks_time_idx on _hyper_7_28_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) +(22 rows) + +INSERT INTO chunks VALUES ('1940-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=8 loops=1) + -> Append (actual rows=8 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using 
_hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_25_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_25_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_27_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_27_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_29_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_29_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_31_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_31_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_30_chunk."time"), _hyper_7_30_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_30_chunk_chunks_time_idx on _hyper_7_30_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) +(24 rows) + +-- Compare prepared statement with ad-hoc query +EXECUTE cagg_scan_1h; + bucket | device | max +------------------------------+--------+----- + Wed Jul 31 16:00:00 1901 PST | 1 | 2 + Sun Jul 31 16:00:00 1910 PST | 1 | 2 + Mon Jul 31 16:00:00 1911 PST | 1 | 2 + Sat Jul 31 16:00:00 1920 PST | 1 | 2 + Thu Jul 31 16:00:00 1930 PST | 1 | 2 + Fri Jul 31 16:00:00 1931 PST | 1 | 2 + Sun Jul 31 16:00:00 1932 
PST | 1 | 2 + Wed Jul 31 16:00:00 1940 PST | 1 | 2 +(8 rows) + +SELECT * FROM chunks_1h; + bucket | device | max +------------------------------+--------+----- + Wed Jul 31 16:00:00 1901 PST | 1 | 2 + Sun Jul 31 16:00:00 1910 PST | 1 | 2 + Mon Jul 31 16:00:00 1911 PST | 1 | 2 + Sat Jul 31 16:00:00 1920 PST | 1 | 2 + Thu Jul 31 16:00:00 1930 PST | 1 | 2 + Fri Jul 31 16:00:00 1931 PST | 1 | 2 + Sun Jul 31 16:00:00 1932 PST | 1 | 2 + Wed Jul 31 16:00:00 1940 PST | 1 | 2 +(8 rows) + +-- Delete data from hypertable - data is only present in cagg after this point. If the watermark in the prepared +-- statement is not moved to the most-recent watermark, we would see an empty result. +TRUNCATE chunks; +EXECUTE cagg_scan_1h; + bucket | device | max +------------------------------+--------+----- + Wed Jul 31 16:00:00 1901 PST | 1 | 2 + Sun Jul 31 16:00:00 1910 PST | 1 | 2 + Mon Jul 31 16:00:00 1911 PST | 1 | 2 + Sat Jul 31 16:00:00 1920 PST | 1 | 2 + Thu Jul 31 16:00:00 1930 PST | 1 | 2 + Fri Jul 31 16:00:00 1931 PST | 1 | 2 + Sun Jul 31 16:00:00 1932 PST | 1 | 2 + Wed Jul 31 16:00:00 1940 PST | 1 | 2 +(8 rows) + +SELECT * FROM chunks_1h; + bucket | device | max +------------------------------+--------+----- + Wed Jul 31 16:00:00 1901 PST | 1 | 2 + Sun Jul 31 16:00:00 1910 PST | 1 | 2 + Mon Jul 31 16:00:00 1911 PST | 1 | 2 + Sat Jul 31 16:00:00 1920 PST | 1 | 2 + Thu Jul 31 16:00:00 1930 PST | 1 | 2 + Fri Jul 31 16:00:00 1931 PST | 1 | 2 + Sun Jul 31 16:00:00 1932 PST | 1 | 2 + Wed Jul 31 16:00:00 1940 PST | 1 | 2 +(8 rows) + +-- Refresh the CAgg +CALL refresh_continuous_aggregate('chunks_1h', NULL, NULL); +EXECUTE cagg_scan_1h; + bucket | device | max +--------+--------+----- +(0 rows) + +SELECT * FROM chunks_1h; + bucket | device | max +--------+--------+----- +(0 rows) + +-- Check new watermark +SELECT * FROM _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); + to_timestamp +------------------------------ + Wed Jul 31 17:00:00 1940 
PST +(1 row) + +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=0 loops=1) + -> Append (actual rows=0 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_25_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_25_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_27_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_27_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_29_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_29_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_31_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_31_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 
hour'::interval, "time"), device + Batches: 1 + -> Result (actual rows=0 loops=1) + One-Time Filter: false +(23 rows) + +-- Update after truncate +INSERT INTO chunks VALUES ('1950-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); + to_timestamp +------------------------------ + Mon Jul 31 17:00:00 1950 PST +(1 row) + +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=1 loops=1) + -> Append (actual rows=1 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_25_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_25_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_27_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_27_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) + -> Index Scan using 
_hyper_8_29_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_29_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_31_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_31_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_33_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_33_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_32_chunk."time"), _hyper_7_32_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_32_chunk_chunks_time_idx on _hyper_7_32_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) +(26 rows) + +-- Test with CAgg on CAgg +CREATE MATERIALIZED VIEW chunks_1d WITH (timescaledb.continuous) + AS SELECT time_bucket('1 days', bucket) AS bucket, device, max(max) AS max FROM chunks_1h GROUP BY 1, 2; +NOTICE: refreshing continuous aggregate "chunks_1d" +ALTER MATERIALIZED VIEW chunks_1d set (timescaledb.materialized_only = false); +SELECT id AS "MAT_HT_ID_1D" FROM _timescaledb_catalog.hypertable + WHERE table_name=( + SELECT materialization_hypertable_name + FROM timescaledb_information.continuous_aggregates + WHERE view_name='chunks_1d' + ) \gset +SELECT materialization_hypertable_schema || '.' 
|| materialization_hypertable_name AS "MAT_HT_NAME_1D" + FROM timescaledb_information.continuous_aggregates + WHERE view_name='chunks_1d' +\gset +-- Prepared scan on hypertable (identical to the query of a real-time CAgg) +PREPARE ht_scan_realtime_1d AS + SELECT bucket, device, max + FROM :MAT_HT_NAME_1D + WHERE bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1D)), '-infinity'::timestamp with time zone) +UNION ALL + SELECT time_bucket('@ 1 day'::interval, chunks_1h.bucket) AS bucket, + chunks_1h.device, + max(chunks_1h.max) AS max + FROM chunks_1h + WHERE chunks_1h.bucket >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1D)), '-infinity'::timestamp with time zone) + GROUP BY (time_bucket('@ 1 day'::interval, chunks_1h.bucket)), chunks_1h.device; +PREPARE cagg_scan_1d AS SELECT * FROM chunks_1d; +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1d; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=1 loops=1) + -> Index Scan using _hyper_9_34_chunk__materialized_hypertable_9_bucket_idx on _hyper_9_34_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Tue Aug 01 16:00:00 1950 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 day'::interval, (time_bucket('@ 1 hour'::interval, "time"))), device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, "time"), device + Batches: 1 + -> Result (actual rows=0 loops=1) + One-Time Filter: false +(12 rows) + +INSERT INTO chunks VALUES ('2000-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +CALL refresh_continuous_aggregate('chunks_1d', '1900-01-01', '2021-06-01'); +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1d; + QUERY PLAN 
+----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=2 loops=1) + -> Append (actual rows=2 loops=1) + -> Index Scan using _hyper_9_34_chunk__materialized_hypertable_9_bucket_idx on _hyper_9_34_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Tue Aug 01 17:00:00 2000 PDT'::timestamp with time zone) + -> Index Scan using _hyper_9_37_chunk__materialized_hypertable_9_bucket_idx on _hyper_9_37_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Tue Aug 01 17:00:00 2000 PDT'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 day'::interval, (time_bucket('@ 1 hour'::interval, "time"))), device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, "time"), device + Batches: 1 + -> Result (actual rows=0 loops=1) + One-Time Filter: false +(15 rows) + +INSERT INTO chunks VALUES ('2010-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +CALL refresh_continuous_aggregate('chunks_1d', '1900-01-01', '2021-06-01'); +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1d; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=3 loops=1) + -> Append (actual rows=3 loops=1) + -> Index Scan using _hyper_9_34_chunk__materialized_hypertable_9_bucket_idx on _hyper_9_34_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Aug 01 17:00:00 2010 PDT'::timestamp with time zone) + -> Index Scan using _hyper_9_37_chunk__materialized_hypertable_9_bucket_idx on _hyper_9_37_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Aug 01 17:00:00 2010 PDT'::timestamp with time zone) + -> Index Scan using _hyper_9_40_chunk__materialized_hypertable_9_bucket_idx on _hyper_9_40_chunk (actual rows=1 
loops=1) + Index Cond: (bucket < 'Sun Aug 01 17:00:00 2010 PDT'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 day'::interval, (time_bucket('@ 1 hour'::interval, "time"))), device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, "time"), device + Batches: 1 + -> Result (actual rows=0 loops=1) + One-Time Filter: false +(17 rows) + +-- Stored procedure - watermark +CREATE FUNCTION cur_watermark_plsql(mat_table int) RETURNS timestamptz +AS $$ +DECLARE +cur_watermark_value timestamptz; +BEGIN + SELECT _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(mat_table)) INTO cur_watermark_value; + RETURN cur_watermark_value; +END$$ LANGUAGE plpgsql; +SELECT * FROM cur_watermark_plsql(:MAT_HT_ID_1H); + cur_watermark_plsql +------------------------------ + Sat Jul 31 18:00:00 2010 PDT +(1 row) + +INSERT INTO chunks VALUES ('2011-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM cur_watermark_plsql(:MAT_HT_ID_1H); + cur_watermark_plsql +------------------------------ + Sun Jul 31 18:00:00 2011 PDT +(1 row) + +INSERT INTO chunks VALUES ('2012-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM cur_watermark_plsql(:MAT_HT_ID_1H); + cur_watermark_plsql +------------------------------ + Tue Jul 31 18:00:00 2012 PDT +(1 row) + +-- Stored procedure - result +CREATE FUNCTION cur_cagg_result_count() RETURNS int +AS $$ +DECLARE +count_value int; +BEGIN + SELECT count(*) FROM chunks_1h INTO count_value; + RETURN count_value; +END$$ LANGUAGE plpgsql; +-- Cache function value +SELECT * FROM cur_cagg_result_count(); + cur_cagg_result_count +----------------------- + 5 +(1 row) + +-- Add to non-materialized part +INSERT INTO chunks VALUES ('2013-08-01 01:01:01+01', 1, 2); +SELECT * FROM 
cur_cagg_result_count(); + cur_cagg_result_count +----------------------- + 6 +(1 row) + +-- Materialize +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM cur_cagg_result_count(); + cur_cagg_result_count +----------------------- + 6 +(1 row) + +-- Ensure all elements are materialized (i.e., watermark is moved properly) +TRUNCATE chunks; +SELECT * FROM cur_cagg_result_count(); + cur_cagg_result_count +----------------------- + 6 +(1 row) + +SELECT count(*) FROM chunks_1h; + count +------- + 6 +(1 row) + +-- Test watermark call directly +PREPARE watermark_query AS + SELECT _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); +SELECT _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); + to_timestamp +------------------------------ + Wed Jul 31 18:00:00 2013 PDT +(1 row) + +EXECUTE watermark_query; + to_timestamp +------------------------------ + Wed Jul 31 18:00:00 2013 PDT +(1 row) + +INSERT INTO chunks VALUES ('2013-09-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); + to_timestamp +------------------------------ + Sat Aug 31 18:00:00 2013 PDT +(1 row) + +EXECUTE watermark_query; + to_timestamp +------------------------------ + Sat Aug 31 18:00:00 2013 PDT +(1 row) + +-- Disable constification of watermark values +SET timescaledb.enable_cagg_watermark_constify = OFF; +INSERT INTO chunks VALUES ('2014-01-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +---------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=2 loops=1) + -> Custom Scan (ChunkAppend) on 
_materialized_hypertable_8 (actual rows=2 loops=1) + Chunks excluded during startup: 0 + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_25_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_25_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_27_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_27_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_29_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_29_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with 
time zone)) + -> Index Scan using _hyper_8_31_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_31_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_33_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_33_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_36_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_36_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_39_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_39_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_42_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_42_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_44_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_44_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_46_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_46_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using 
_hyper_8_48_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_48_chunk (actual rows=1 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_50_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_50_chunk (actual rows=1 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, chunks."time"), chunks.device + Batches: 1 + -> Custom Scan (ChunkAppend) on chunks (actual rows=0 loops=1) + Chunks excluded during startup: 1 + -> Index Scan using _hyper_7_49_chunk_chunks_time_idx on _hyper_7_49_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) +(42 rows) + +RESET timescaledb.enable_cagg_watermark_constify; +-- Select with projection +INSERT INTO chunks VALUES ('2015-01-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +:EXPLAIN_ANALYZE SELECT device FROM chunks_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=3 loops=1) + -> Subquery Scan on "*SELECT* 1" (actual rows=3 loops=1) + -> Result (actual rows=3 loops=1) + -> Append (actual rows=3 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 
PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_25_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_25_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_27_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_27_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_29_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_29_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_31_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_31_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_33_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_33_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_36_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_36_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_39_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_39_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_42_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_42_chunk (actual rows=0 loops=1) 
+ Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_44_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_44_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_46_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_46_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_48_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_48_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_50_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_50_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_52_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_52_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Subquery Scan on "*SELECT* 2" (actual rows=0 loops=1) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_51_chunk."time"), _hyper_7_51_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_51_chunk_chunks_time_idx on _hyper_7_51_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) +(45 rows) + +-- Watermark function use other tables in WHERE condition (should not be constified) +CREATE TABLE continuous_agg_test(time int, data int); +:EXPLAIN_ANALYZE SELECT * FROM continuous_agg_test AS t1, continuous_agg_test AS t2 WHERE COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)), '-infinity'::timestamp with time zone) IS NOT NULL; + QUERY PLAN 
+----------------------------------------------------------------------------------------------------------------------------------------------------------------- + Result (actual rows=0 loops=1) + One-Time Filter: (COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone) IS NOT NULL) + -> Nested Loop (actual rows=0 loops=1) + -> Seq Scan on continuous_agg_test t1 (actual rows=0 loops=1) + -> Materialize (never executed) + -> Seq Scan on continuous_agg_test t2 (never executed) +(6 rows) + +\c :TEST_DBNAME :ROLE_SUPERUSER +TRUNCATE _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; +TRUNCATE _timescaledb_catalog.continuous_aggs_invalidation_threshold; +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER diff --git a/tsl/test/expected/cagg_watermark-15.out b/tsl/test/expected/cagg_watermark-15.out new file mode 100644 index 00000000000..44d2ec004e5 --- /dev/null +++ b/tsl/test/expected/cagg_watermark-15.out @@ -0,0 +1,1272 @@ +-- This file and its contents are licensed under the Timescale License. +-- Please see the included NOTICE for copyright information and +-- LICENSE-TIMESCALE for a copy of the license. 
+\set EXPLAIN_ANALYZE 'EXPLAIN (analyze,costs off,timing off,summary off)' +CREATE TABLE continuous_agg_test(time int, data int); +select create_hypertable('continuous_agg_test', 'time', chunk_time_interval=> 10); +NOTICE: adding not-null constraint to column "time" + create_hypertable +---------------------------------- + (1,public,continuous_agg_test,t) +(1 row) + +CREATE OR REPLACE FUNCTION integer_now_test1() returns int LANGUAGE SQL STABLE as $$ SELECT coalesce(max(time), 0) FROM continuous_agg_test $$; +SELECT set_integer_now_func('continuous_agg_test', 'integer_now_test1'); + set_integer_now_func +---------------------- + +(1 row) + +-- watermark tabels start out empty +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- +(0 rows) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- +(0 rows) + +-- inserting into a table that does not have continuous_agg_insert_trigger doesn't change the watermark +INSERT INTO continuous_agg_test VALUES (10, 1), (11, 2), (21, 3), (22, 4); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- +(0 rows) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- +(0 rows) + +\c :TEST_DBNAME :ROLE_SUPERUSER +CREATE TABLE continuous_agg_test_mat(time int); +select create_hypertable('continuous_agg_test_mat', 'time', chunk_time_interval=> 10); +NOTICE: adding not-null constraint to column "time" + create_hypertable +-------------------------------------- + (2,public,continuous_agg_test_mat,t) +(1 row) + +INSERT INTO _timescaledb_catalog.continuous_agg VALUES 
(2, 1, NULL, '','','','',0,'',''); +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER +-- create the trigger +CREATE TRIGGER continuous_agg_insert_trigger + AFTER INSERT ON continuous_agg_test + FOR EACH ROW EXECUTE FUNCTION _timescaledb_functions.continuous_agg_invalidation_trigger(1); +-- inserting into the table still doesn't change the watermark since there's no +-- continuous_aggs_invalidation_threshold. We treat that case as a invalidation_watermark of +-- BIG_INT_MIN, since the first run of the aggregation will need to scan the +-- entire table anyway. +INSERT INTO continuous_agg_test VALUES (10, 1), (11, 2), (21, 3), (22, 4); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- +(0 rows) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- +(0 rows) + +-- set the continuous_aggs_invalidation_threshold to 15, any insertions below that value need an invalidation +\c :TEST_DBNAME :ROLE_SUPERUSER +INSERT INTO _timescaledb_catalog.continuous_aggs_invalidation_threshold VALUES (1, 15); +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER +INSERT INTO continuous_agg_test VALUES (10, 1), (11, 2), (21, 3), (22, 4); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 1 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 1 | 10 | 22 +(1 row) + +-- INSERTs only above the continuous_aggs_invalidation_threshold won't change the continuous_aggs_hypertable_invalidation_log +INSERT INTO continuous_agg_test VALUES (21, 3), (22, 4); +SELECT * FROM 
_timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 1 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 1 | 10 | 22 +(1 row) + +-- INSERTs only below the continuous_aggs_invalidation_threshold will change the continuous_aggs_hypertable_invalidation_log +INSERT INTO continuous_agg_test VALUES (10, 1), (11, 2); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 1 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 1 | 10 | 22 + 1 | 10 | 11 +(2 rows) + +-- test INSERTing other values +INSERT INTO continuous_agg_test VALUES (1, 7), (12, 6), (24, 5), (51, 4); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 1 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 1 | 10 | 22 + 1 | 10 | 11 + 1 | 1 | 51 +(3 rows) + +-- INSERT after dropping a COLUMN +ALTER TABLE continuous_agg_test DROP COLUMN data; +INSERT INTO continuous_agg_test VALUES (-1), (-2), (-3), (-4); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 1 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value 
+---------------+-----------------------+------------------------- + 1 | 10 | 22 + 1 | 10 | 11 + 1 | 1 | 51 + 1 | -4 | -1 +(4 rows) + +INSERT INTO continuous_agg_test VALUES (100); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 1 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 1 | 10 | 22 + 1 | 10 | 11 + 1 | 1 | 51 + 1 | -4 | -1 +(4 rows) + +-- INSERT after adding a COLUMN +ALTER TABLE continuous_agg_test ADD COLUMN d BOOLEAN; +INSERT INTO continuous_agg_test VALUES (-6, true), (-7, false), (-3, true), (-4, false); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 1 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 1 | 10 | 22 + 1 | 10 | 11 + 1 | 1 | 51 + 1 | -4 | -1 + 1 | -7 | -3 +(5 rows) + +INSERT INTO continuous_agg_test VALUES (120, false), (200, true); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 1 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 1 | 10 | 22 + 1 | 10 | 11 + 1 | 1 | 51 + 1 | -4 | -1 + 1 | -7 | -3 +(5 rows) + +\c :TEST_DBNAME :ROLE_SUPERUSER +DELETE FROM _timescaledb_catalog.continuous_agg where mat_hypertable_id = 2; +DELETE FROM _timescaledb_config.bgw_job WHERE id = 2; +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER +DROP TABLE continuous_agg_test 
CASCADE; +\c :TEST_DBNAME :ROLE_SUPERUSER +TRUNCATE _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; +TRUNCATE _timescaledb_catalog.continuous_aggs_invalidation_threshold; +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER +-- CREATE VIEW creates the invalidation trigger correctly +CREATE TABLE ca_inval_test(time int); +SELECT create_hypertable('ca_inval_test', 'time', chunk_time_interval=> 10); +NOTICE: adding not-null constraint to column "time" + create_hypertable +---------------------------- + (3,public,ca_inval_test,t) +(1 row) + +CREATE OR REPLACE FUNCTION integer_now_test2() returns int LANGUAGE SQL STABLE as $$ SELECT coalesce(max(time), 0) FROM ca_inval_test $$; +SELECT set_integer_now_func('ca_inval_test', 'integer_now_test2'); + set_integer_now_func +---------------------- + +(1 row) + +CREATE MATERIALIZED VIEW cit_view + WITH (timescaledb.continuous, timescaledb.materialized_only=false) + AS SELECT time_bucket('5', time), COUNT(time) + FROM ca_inval_test + GROUP BY 1 WITH NO DATA; +INSERT INTO ca_inval_test SELECT generate_series(0, 5); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+------------- + 3 | -2147483648 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- +(0 rows) + +\c :TEST_DBNAME :ROLE_SUPERUSER +UPDATE _timescaledb_catalog.continuous_aggs_invalidation_threshold +SET watermark = 15 +WHERE hypertable_id = 3; +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER +INSERT INTO ca_inval_test SELECT generate_series(5, 15); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 3 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | 
greatest_modified_value +---------------+-----------------------+------------------------- + 3 | 5 | 15 +(1 row) + +INSERT INTO ca_inval_test SELECT generate_series(16, 20); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 3 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 3 | 5 | 15 +(1 row) + +\c :TEST_DBNAME :ROLE_SUPERUSER +TRUNCATE _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER +-- updates below the threshold update both the old and new values +UPDATE ca_inval_test SET time = 5 WHERE time = 6; +UPDATE ca_inval_test SET time = 7 WHERE time = 5; +UPDATE ca_inval_test SET time = 17 WHERE time = 14; +UPDATE ca_inval_test SET time = 12 WHERE time = 16; +-- updates purely above the threshold are not logged +UPDATE ca_inval_test SET time = 19 WHERE time = 18; +UPDATE ca_inval_test SET time = 17 WHERE time = 19; +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 3 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 3 | 5 | 6 + 3 | 5 | 7 + 3 | 14 | 17 + 3 | 12 | 16 +(4 rows) + +DROP TABLE ca_inval_test CASCADE; +NOTICE: drop cascades to 3 other objects +\c :TEST_DBNAME :ROLE_SUPERUSER +TRUNCATE _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; +TRUNCATE _timescaledb_catalog.continuous_aggs_invalidation_threshold; +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER +-- invalidation trigger is created correctly on chunks that existed before +-- the view was created +CREATE TABLE 
ts_continuous_test(time INTEGER, location INTEGER); + SELECT create_hypertable('ts_continuous_test', 'time', chunk_time_interval => 10); +NOTICE: adding not-null constraint to column "time" + create_hypertable +--------------------------------- + (5,public,ts_continuous_test,t) +(1 row) + +CREATE OR REPLACE FUNCTION integer_now_test3() returns int LANGUAGE SQL STABLE as $$ SELECT coalesce(max(time), 0) FROM ts_continuous_test $$; +SELECT set_integer_now_func('ts_continuous_test', 'integer_now_test3'); + set_integer_now_func +---------------------- + +(1 row) + +INSERT INTO ts_continuous_test SELECT i, i FROM + (SELECT generate_series(0, 29) AS i) AS i; +CREATE MATERIALIZED VIEW continuous_view + WITH (timescaledb.continuous, timescaledb.materialized_only=false) + AS SELECT time_bucket('5', time), COUNT(location) + FROM ts_continuous_test + GROUP BY 1 WITH NO DATA; +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+------------- + 5 | -2147483648 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- +(0 rows) + +\c :TEST_DBNAME :ROLE_SUPERUSER +UPDATE _timescaledb_catalog.continuous_aggs_invalidation_threshold +SET watermark = 2 +WHERE hypertable_id = 5; +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER +INSERT INTO ts_continuous_test VALUES (1, 1); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 5 | 2 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 5 | 1 | 1 +(1 row) + +-- aborts don't get written +BEGIN; + INSERT INTO ts_continuous_test VALUES (-20, -20); +ABORT; +SELECT * 
FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 5 | 2 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 5 | 1 | 1 +(1 row) + +DROP TABLE ts_continuous_test CASCADE; +NOTICE: drop cascades to 3 other objects +---- +-- Test watermark invalidation and chunk exclusion with prepared and ad-hoc queries +---- +CREATE TABLE chunks(time timestamptz, device int, value float); +SELECT FROM create_hypertable('chunks','time',chunk_time_interval:='1d'::interval); +NOTICE: adding not-null constraint to column "time" +-- +(1 row) + +CREATE MATERIALIZED VIEW chunks_1h WITH (timescaledb.continuous) + AS SELECT time_bucket('1 hour', time) AS bucket, device, max(value) AS max FROM chunks GROUP BY 1, 2; +NOTICE: continuous aggregate "chunks_1h" is already up-to-date +ALTER MATERIALIZED VIEW chunks_1h set (timescaledb.materialized_only = false); +-- Get id fg the materialization hypertable +SELECT id AS "MAT_HT_ID_1H" FROM _timescaledb_catalog.hypertable + WHERE table_name=( + SELECT materialization_hypertable_name + FROM timescaledb_information.continuous_aggregates + WHERE view_name='chunks_1h' + ) \gset +SELECT materialization_hypertable_schema || '.' 
|| materialization_hypertable_name AS "MAT_HT_NAME_1H" + FROM timescaledb_information.continuous_aggregates + WHERE view_name='chunks_1h' +\gset +-- Prepared scan on hypertable (identical to the query of a real-time CAgg) +PREPARE ht_scan_realtime_1h AS + SELECT bucket, device, max + FROM :MAT_HT_NAME_1H + WHERE bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)), '-infinity'::timestamp with time zone) +UNION ALL + SELECT time_bucket('01:00:00'::interval, chunks."time") AS bucket, + chunks.device, + max(chunks.value) AS max + FROM chunks + WHERE chunks."time" >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)), '-infinity'::timestamp with time zone) + GROUP BY (time_bucket('01:00:00'::interval, chunks."time")), chunks.device; +PREPARE cagg_scan_1h AS SELECT * FROM chunks_1h; +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +------------------------------------------------------------------------------ + HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, chunks."time"), chunks.device + Batches: 1 + -> Result (actual rows=0 loops=1) + One-Time Filter: false +(5 rows) + +INSERT INTO chunks VALUES ('1901-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); + to_timestamp +------------------------------ + Wed Jul 31 17:00:00 1901 PST +(1 row) + +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=1 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> 
HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_16_chunk."time"), _hyper_7_16_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_16_chunk_chunks_time_idx on _hyper_7_16_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) +(9 rows) + +-- Compare prepared statement with ad-hoc query +EXECUTE cagg_scan_1h; + bucket | device | max +------------------------------+--------+----- + Wed Jul 31 16:00:00 1901 PST | 1 | 2 +(1 row) + +SELECT * FROM chunks_1h; + bucket | device | max +------------------------------+--------+----- + Wed Jul 31 16:00:00 1901 PST | 1 | 2 +(1 row) + +-- Add new chunks to the non materialized part of the CAgg +INSERT INTO chunks VALUES ('1910-08-01 01:01:01+01', 1, 2); +:EXPLAIN_ANALYZE EXECUTE cagg_scan_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=2 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> HashAggregate (actual rows=1 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_16_chunk."time"), _hyper_7_16_chunk.device + Batches: 1 + -> Result (actual rows=1 loops=1) + -> Append (actual rows=1 loops=1) + -> Index Scan using _hyper_7_16_chunk_chunks_time_idx on _hyper_7_16_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> Index Scan using _hyper_7_18_chunk_chunks_time_idx on _hyper_7_18_chunk (actual rows=1 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) +(12 rows) + +:EXPLAIN_ANALYZE SELECT * FROM chunks_1h; + QUERY PLAN 
+----------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=2 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> HashAggregate (actual rows=1 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_16_chunk."time"), _hyper_7_16_chunk.device + Batches: 1 + -> Result (actual rows=1 loops=1) + -> Append (actual rows=1 loops=1) + -> Index Scan using _hyper_7_16_chunk_chunks_time_idx on _hyper_7_16_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> Index Scan using _hyper_7_18_chunk_chunks_time_idx on _hyper_7_18_chunk (actual rows=1 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) +(12 rows) + +INSERT INTO chunks VALUES ('1911-08-01 01:01:01+01', 1, 2); +:EXPLAIN_ANALYZE EXECUTE cagg_scan_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=3 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> HashAggregate (actual rows=2 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_16_chunk."time"), _hyper_7_16_chunk.device + Batches: 1 + -> Result (actual rows=2 loops=1) + -> Append (actual rows=2 loops=1) + -> Index Scan using _hyper_7_16_chunk_chunks_time_idx on _hyper_7_16_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> Index Scan using _hyper_7_18_chunk_chunks_time_idx on _hyper_7_18_chunk (actual rows=1 loops=1) + Index Cond: ("time" >= 'Wed Jul 
31 17:00:00 1901 PST'::timestamp with time zone) + -> Index Scan using _hyper_7_19_chunk_chunks_time_idx on _hyper_7_19_chunk (actual rows=1 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) +(14 rows) + +:EXPLAIN_ANALYZE SELECT * FROM chunks_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=3 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> HashAggregate (actual rows=2 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_16_chunk."time"), _hyper_7_16_chunk.device + Batches: 1 + -> Result (actual rows=2 loops=1) + -> Append (actual rows=2 loops=1) + -> Index Scan using _hyper_7_16_chunk_chunks_time_idx on _hyper_7_16_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> Index Scan using _hyper_7_18_chunk_chunks_time_idx on _hyper_7_18_chunk (actual rows=1 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> Index Scan using _hyper_7_19_chunk_chunks_time_idx on _hyper_7_19_chunk (actual rows=1 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) +(14 rows) + +-- Materialize CAgg and check for plan time chunk exclusion +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +:EXPLAIN_ANALYZE EXECUTE cagg_scan_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=3 loops=1) + -> Append (actual rows=3 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 
'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_19_chunk."time"), _hyper_7_19_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_19_chunk_chunks_time_idx on _hyper_7_19_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) +(14 rows) + +:EXPLAIN_ANALYZE SELECT * FROM chunks_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=3 loops=1) + -> Append (actual rows=3 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_19_chunk."time"), _hyper_7_19_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_19_chunk_chunks_time_idx on _hyper_7_19_chunk 
(actual rows=0 loops=1) + Index Cond: ("time" >= 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) +(14 rows) + +-- Check plan when chunk_append and constraint_aware_append cannot be used +-- There should be no plans for scans of chunks that are materialized in the CAgg +-- on the underlying hypertable +SET timescaledb.enable_chunk_append = OFF; +SET timescaledb.enable_constraint_aware_append = OFF; +:EXPLAIN_ANALYZE SELECT * FROM chunks_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=3 loops=1) + -> Append (actual rows=3 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_19_chunk."time"), _hyper_7_19_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_19_chunk_chunks_time_idx on _hyper_7_19_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) +(14 rows) + +RESET timescaledb.enable_chunk_append; +RESET timescaledb.enable_constraint_aware_append; +-- Insert new values and check watermark changes +INSERT INTO chunks VALUES ('1920-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM 
_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); + to_timestamp +------------------------------ + Sat Jul 31 17:00:00 1920 PST +(1 row) + +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=4 loops=1) + -> Append (actual rows=4 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sat Jul 31 17:00:00 1920 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sat Jul 31 17:00:00 1920 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sat Jul 31 17:00:00 1920 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sat Jul 31 17:00:00 1920 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_22_chunk."time"), _hyper_7_22_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_22_chunk_chunks_time_idx on _hyper_7_22_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Sat Jul 31 17:00:00 1920 PST'::timestamp with time zone) +(16 rows) + +-- Compare prepared statement with ad-hoc query +EXECUTE cagg_scan_1h; + bucket | device | max +------------------------------+--------+----- + Wed Jul 31 16:00:00 1901 PST | 1 | 2 + Sun Jul 31 16:00:00 1910 PST | 1 | 2 + Mon Jul 31 16:00:00 1911 PST | 1 | 2 + Sat Jul 31 16:00:00 1920 PST | 1 | 2 +(4 rows) + +SELECT * FROM chunks_1h; + bucket | 
device | max +------------------------------+--------+----- + Wed Jul 31 16:00:00 1901 PST | 1 | 2 + Sun Jul 31 16:00:00 1910 PST | 1 | 2 + Mon Jul 31 16:00:00 1911 PST | 1 | 2 + Sat Jul 31 16:00:00 1920 PST | 1 | 2 +(4 rows) + +INSERT INTO chunks VALUES ('1930-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); + to_timestamp +------------------------------ + Thu Jul 31 17:00:00 1930 PST +(1 row) + +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=5 loops=1) + -> Append (actual rows=5 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Thu Jul 31 17:00:00 1930 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Thu Jul 31 17:00:00 1930 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Thu Jul 31 17:00:00 1930 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Thu Jul 31 17:00:00 1930 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_25_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_25_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Thu Jul 31 17:00:00 1930 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_24_chunk."time"), _hyper_7_24_chunk.device + 
Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_24_chunk_chunks_time_idx on _hyper_7_24_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Thu Jul 31 17:00:00 1930 PST'::timestamp with time zone) +(18 rows) + +-- Two invalidations without prepared statement execution between +INSERT INTO chunks VALUES ('1931-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +INSERT INTO chunks VALUES ('1932-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); + to_timestamp +------------------------------ + Sun Jul 31 17:00:00 1932 PST +(1 row) + +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=7 loops=1) + -> Append (actual rows=7 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_25_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_25_chunk (actual rows=1 loops=1) + Index 
Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_27_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_27_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_29_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_29_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_28_chunk."time"), _hyper_7_28_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_28_chunk_chunks_time_idx on _hyper_7_28_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) +(22 rows) + +-- Multiple prepared statement executions followed by one invalidation +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=7 loops=1) + -> Append (actual rows=7 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 
PST'::timestamp with time zone) + -> Index Scan using _hyper_8_25_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_25_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_27_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_27_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_29_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_29_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_28_chunk."time"), _hyper_7_28_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_28_chunk_chunks_time_idx on _hyper_7_28_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) +(22 rows) + +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=7 loops=1) + -> Append (actual rows=7 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on 
_hyper_8_23_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_25_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_25_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_27_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_27_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_29_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_29_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_28_chunk."time"), _hyper_7_28_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_28_chunk_chunks_time_idx on _hyper_7_28_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) +(22 rows) + +INSERT INTO chunks VALUES ('1940-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=8 loops=1) + -> Append (actual rows=8 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using 
_hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_25_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_25_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_27_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_27_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_29_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_29_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_31_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_31_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_30_chunk."time"), _hyper_7_30_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_30_chunk_chunks_time_idx on _hyper_7_30_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) +(24 rows) + +-- Compare prepared statement with ad-hoc query +EXECUTE cagg_scan_1h; + bucket | device | max +------------------------------+--------+----- + Wed Jul 31 16:00:00 1901 PST | 1 | 2 + Sun Jul 31 16:00:00 1910 PST | 1 | 2 + Mon Jul 31 16:00:00 1911 PST | 1 | 2 + Sat Jul 31 16:00:00 1920 PST | 1 | 2 + Thu Jul 31 16:00:00 1930 PST | 1 | 2 + Fri Jul 31 16:00:00 1931 PST | 1 | 2 + Sun Jul 31 16:00:00 1932 
PST | 1 | 2 + Wed Jul 31 16:00:00 1940 PST | 1 | 2 +(8 rows) + +SELECT * FROM chunks_1h; + bucket | device | max +------------------------------+--------+----- + Wed Jul 31 16:00:00 1901 PST | 1 | 2 + Sun Jul 31 16:00:00 1910 PST | 1 | 2 + Mon Jul 31 16:00:00 1911 PST | 1 | 2 + Sat Jul 31 16:00:00 1920 PST | 1 | 2 + Thu Jul 31 16:00:00 1930 PST | 1 | 2 + Fri Jul 31 16:00:00 1931 PST | 1 | 2 + Sun Jul 31 16:00:00 1932 PST | 1 | 2 + Wed Jul 31 16:00:00 1940 PST | 1 | 2 +(8 rows) + +-- Delete data from hypertable - data is only present in cagg after this point. If the watermark in the prepared +-- statement is not moved to the most-recent watermark, we would see an empty result. +TRUNCATE chunks; +EXECUTE cagg_scan_1h; + bucket | device | max +------------------------------+--------+----- + Wed Jul 31 16:00:00 1901 PST | 1 | 2 + Sun Jul 31 16:00:00 1910 PST | 1 | 2 + Mon Jul 31 16:00:00 1911 PST | 1 | 2 + Sat Jul 31 16:00:00 1920 PST | 1 | 2 + Thu Jul 31 16:00:00 1930 PST | 1 | 2 + Fri Jul 31 16:00:00 1931 PST | 1 | 2 + Sun Jul 31 16:00:00 1932 PST | 1 | 2 + Wed Jul 31 16:00:00 1940 PST | 1 | 2 +(8 rows) + +SELECT * FROM chunks_1h; + bucket | device | max +------------------------------+--------+----- + Wed Jul 31 16:00:00 1901 PST | 1 | 2 + Sun Jul 31 16:00:00 1910 PST | 1 | 2 + Mon Jul 31 16:00:00 1911 PST | 1 | 2 + Sat Jul 31 16:00:00 1920 PST | 1 | 2 + Thu Jul 31 16:00:00 1930 PST | 1 | 2 + Fri Jul 31 16:00:00 1931 PST | 1 | 2 + Sun Jul 31 16:00:00 1932 PST | 1 | 2 + Wed Jul 31 16:00:00 1940 PST | 1 | 2 +(8 rows) + +-- Refresh the CAgg +CALL refresh_continuous_aggregate('chunks_1h', NULL, NULL); +EXECUTE cagg_scan_1h; + bucket | device | max +--------+--------+----- +(0 rows) + +SELECT * FROM chunks_1h; + bucket | device | max +--------+--------+----- +(0 rows) + +-- Check new watermark +SELECT * FROM _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); + to_timestamp +------------------------------ + Wed Jul 31 17:00:00 1940 
PST +(1 row) + +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=0 loops=1) + -> Append (actual rows=0 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_25_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_25_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_27_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_27_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_29_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_29_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_31_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_31_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 
hour'::interval, "time"), device + Batches: 1 + -> Result (actual rows=0 loops=1) + One-Time Filter: false +(23 rows) + +-- Update after truncate +INSERT INTO chunks VALUES ('1950-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); + to_timestamp +------------------------------ + Mon Jul 31 17:00:00 1950 PST +(1 row) + +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=1 loops=1) + -> Append (actual rows=1 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_25_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_25_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_27_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_27_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) + -> Index Scan using 
_hyper_8_29_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_29_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_31_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_31_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_33_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_33_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_32_chunk."time"), _hyper_7_32_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_32_chunk_chunks_time_idx on _hyper_7_32_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) +(26 rows) + +-- Test with CAgg on CAgg +CREATE MATERIALIZED VIEW chunks_1d WITH (timescaledb.continuous) + AS SELECT time_bucket('1 days', bucket) AS bucket, device, max(max) AS max FROM chunks_1h GROUP BY 1, 2; +NOTICE: refreshing continuous aggregate "chunks_1d" +ALTER MATERIALIZED VIEW chunks_1d set (timescaledb.materialized_only = false); +SELECT id AS "MAT_HT_ID_1D" FROM _timescaledb_catalog.hypertable + WHERE table_name=( + SELECT materialization_hypertable_name + FROM timescaledb_information.continuous_aggregates + WHERE view_name='chunks_1d' + ) \gset +SELECT materialization_hypertable_schema || '.' 
|| materialization_hypertable_name AS "MAT_HT_NAME_1D" + FROM timescaledb_information.continuous_aggregates + WHERE view_name='chunks_1d' +\gset +-- Prepared scan on hypertable (identical to the query of a real-time CAgg) +PREPARE ht_scan_realtime_1d AS + SELECT bucket, device, max + FROM :MAT_HT_NAME_1D + WHERE bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1D)), '-infinity'::timestamp with time zone) +UNION ALL + SELECT time_bucket('@ 1 day'::interval, chunks_1h.bucket) AS bucket, + chunks_1h.device, + max(chunks_1h.max) AS max + FROM chunks_1h + WHERE chunks_1h.bucket >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1D)), '-infinity'::timestamp with time zone) + GROUP BY (time_bucket('@ 1 day'::interval, chunks_1h.bucket)), chunks_1h.device; +PREPARE cagg_scan_1d AS SELECT * FROM chunks_1d; +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1d; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=1 loops=1) + -> Index Scan using _hyper_9_34_chunk__materialized_hypertable_9_bucket_idx on _hyper_9_34_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Tue Aug 01 16:00:00 1950 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 day'::interval, (time_bucket('@ 1 hour'::interval, "time"))), device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, "time"), device + Batches: 1 + -> Result (actual rows=0 loops=1) + One-Time Filter: false +(12 rows) + +INSERT INTO chunks VALUES ('2000-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +CALL refresh_continuous_aggregate('chunks_1d', '1900-01-01', '2021-06-01'); +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1d; + QUERY PLAN 
+----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=2 loops=1) + -> Append (actual rows=2 loops=1) + -> Index Scan using _hyper_9_34_chunk__materialized_hypertable_9_bucket_idx on _hyper_9_34_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Tue Aug 01 17:00:00 2000 PDT'::timestamp with time zone) + -> Index Scan using _hyper_9_37_chunk__materialized_hypertable_9_bucket_idx on _hyper_9_37_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Tue Aug 01 17:00:00 2000 PDT'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 day'::interval, (time_bucket('@ 1 hour'::interval, "time"))), device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, "time"), device + Batches: 1 + -> Result (actual rows=0 loops=1) + One-Time Filter: false +(15 rows) + +INSERT INTO chunks VALUES ('2010-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +CALL refresh_continuous_aggregate('chunks_1d', '1900-01-01', '2021-06-01'); +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1d; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=3 loops=1) + -> Append (actual rows=3 loops=1) + -> Index Scan using _hyper_9_34_chunk__materialized_hypertable_9_bucket_idx on _hyper_9_34_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Aug 01 17:00:00 2010 PDT'::timestamp with time zone) + -> Index Scan using _hyper_9_37_chunk__materialized_hypertable_9_bucket_idx on _hyper_9_37_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Aug 01 17:00:00 2010 PDT'::timestamp with time zone) + -> Index Scan using _hyper_9_40_chunk__materialized_hypertable_9_bucket_idx on _hyper_9_40_chunk (actual rows=1 
loops=1) + Index Cond: (bucket < 'Sun Aug 01 17:00:00 2010 PDT'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 day'::interval, (time_bucket('@ 1 hour'::interval, "time"))), device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, "time"), device + Batches: 1 + -> Result (actual rows=0 loops=1) + One-Time Filter: false +(17 rows) + +-- Stored procedure - watermark +CREATE FUNCTION cur_watermark_plsql(mat_table int) RETURNS timestamptz +AS $$ +DECLARE +cur_watermark_value timestamptz; +BEGIN + SELECT _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(mat_table)) INTO cur_watermark_value; + RETURN cur_watermark_value; +END$$ LANGUAGE plpgsql; +SELECT * FROM cur_watermark_plsql(:MAT_HT_ID_1H); + cur_watermark_plsql +------------------------------ + Sat Jul 31 18:00:00 2010 PDT +(1 row) + +INSERT INTO chunks VALUES ('2011-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM cur_watermark_plsql(:MAT_HT_ID_1H); + cur_watermark_plsql +------------------------------ + Sun Jul 31 18:00:00 2011 PDT +(1 row) + +INSERT INTO chunks VALUES ('2012-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM cur_watermark_plsql(:MAT_HT_ID_1H); + cur_watermark_plsql +------------------------------ + Tue Jul 31 18:00:00 2012 PDT +(1 row) + +-- Stored procedure - result +CREATE FUNCTION cur_cagg_result_count() RETURNS int +AS $$ +DECLARE +count_value int; +BEGIN + SELECT count(*) FROM chunks_1h INTO count_value; + RETURN count_value; +END$$ LANGUAGE plpgsql; +-- Cache function value +SELECT * FROM cur_cagg_result_count(); + cur_cagg_result_count +----------------------- + 5 +(1 row) + +-- Add to non-materialized part +INSERT INTO chunks VALUES ('2013-08-01 01:01:01+01', 1, 2); +SELECT * FROM 
cur_cagg_result_count(); + cur_cagg_result_count +----------------------- + 6 +(1 row) + +-- Materialize +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM cur_cagg_result_count(); + cur_cagg_result_count +----------------------- + 6 +(1 row) + +-- Ensure all elements are materialized (i.e., watermark is moved properly) +TRUNCATE chunks; +SELECT * FROM cur_cagg_result_count(); + cur_cagg_result_count +----------------------- + 6 +(1 row) + +SELECT count(*) FROM chunks_1h; + count +------- + 6 +(1 row) + +-- Test watermark call directly +PREPARE watermark_query AS + SELECT _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); +SELECT _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); + to_timestamp +------------------------------ + Wed Jul 31 18:00:00 2013 PDT +(1 row) + +EXECUTE watermark_query; + to_timestamp +------------------------------ + Wed Jul 31 18:00:00 2013 PDT +(1 row) + +INSERT INTO chunks VALUES ('2013-09-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); + to_timestamp +------------------------------ + Sat Aug 31 18:00:00 2013 PDT +(1 row) + +EXECUTE watermark_query; + to_timestamp +------------------------------ + Sat Aug 31 18:00:00 2013 PDT +(1 row) + +-- Disable constification of watermark values +SET timescaledb.enable_cagg_watermark_constify = OFF; +INSERT INTO chunks VALUES ('2014-01-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=2 loops=1) + -> Custom Scan (ChunkAppend) 
on _materialized_hypertable_8 (actual rows=2 loops=1) + Chunks excluded during startup: 0 + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_25_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_25_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_27_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_27_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_29_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_29_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp 
with time zone)) + -> Index Scan using _hyper_8_31_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_31_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_33_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_33_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_36_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_36_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_39_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_39_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_42_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_42_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_44_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_44_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_46_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_46_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using 
_hyper_8_48_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_48_chunk (actual rows=1 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_50_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_50_chunk (actual rows=1 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, chunks."time"), chunks.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Custom Scan (ChunkAppend) on chunks (actual rows=0 loops=1) + Chunks excluded during startup: 1 + -> Index Scan using _hyper_7_49_chunk_chunks_time_idx on _hyper_7_49_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) +(43 rows) + +RESET timescaledb.enable_cagg_watermark_constify; +-- Select with projection +INSERT INTO chunks VALUES ('2015-01-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +:EXPLAIN_ANALYZE SELECT device FROM chunks_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=3 loops=1) + -> Subquery Scan on "*SELECT* 1" (actual rows=3 loops=1) + -> Result (actual rows=3 loops=1) + -> Append (actual rows=3 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=0 loops=1) + Index Cond: 
(bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_25_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_25_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_27_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_27_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_29_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_29_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_31_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_31_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_33_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_33_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_36_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_36_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_39_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_39_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_42_chunk__materialized_hypertable_8_bucket_idx on 
_hyper_8_42_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_44_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_44_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_46_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_46_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_48_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_48_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_50_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_50_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_52_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_52_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Subquery Scan on "*SELECT* 2" (actual rows=0 loops=1) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_51_chunk."time"), _hyper_7_51_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_51_chunk_chunks_time_idx on _hyper_7_51_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) +(45 rows) + +-- Watermark function use other tables in WHERE condition (should not be constified) +CREATE TABLE continuous_agg_test(time int, data int); +:EXPLAIN_ANALYZE SELECT * FROM continuous_agg_test AS t1, continuous_agg_test AS t2 WHERE COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)), '-infinity'::timestamp with time zone) IS NOT NULL; + QUERY PLAN 
+----------------------------------------------------------------------------------------------------------------------------------------------------------------- + Result (actual rows=0 loops=1) + One-Time Filter: (COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone) IS NOT NULL) + -> Nested Loop (actual rows=0 loops=1) + -> Seq Scan on continuous_agg_test t1 (actual rows=0 loops=1) + -> Materialize (never executed) + -> Seq Scan on continuous_agg_test t2 (never executed) +(6 rows) + +\c :TEST_DBNAME :ROLE_SUPERUSER +TRUNCATE _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; +TRUNCATE _timescaledb_catalog.continuous_aggs_invalidation_threshold; +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER diff --git a/tsl/test/expected/cagg_watermark-16.out b/tsl/test/expected/cagg_watermark-16.out new file mode 100644 index 00000000000..44d2ec004e5 --- /dev/null +++ b/tsl/test/expected/cagg_watermark-16.out @@ -0,0 +1,1272 @@ +-- This file and its contents are licensed under the Timescale License. +-- Please see the included NOTICE for copyright information and +-- LICENSE-TIMESCALE for a copy of the license. 
+\set EXPLAIN_ANALYZE 'EXPLAIN (analyze,costs off,timing off,summary off)' +CREATE TABLE continuous_agg_test(time int, data int); +select create_hypertable('continuous_agg_test', 'time', chunk_time_interval=> 10); +NOTICE: adding not-null constraint to column "time" + create_hypertable +---------------------------------- + (1,public,continuous_agg_test,t) +(1 row) + +CREATE OR REPLACE FUNCTION integer_now_test1() returns int LANGUAGE SQL STABLE as $$ SELECT coalesce(max(time), 0) FROM continuous_agg_test $$; +SELECT set_integer_now_func('continuous_agg_test', 'integer_now_test1'); + set_integer_now_func +---------------------- + +(1 row) + +-- watermark tabels start out empty +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- +(0 rows) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- +(0 rows) + +-- inserting into a table that does not have continuous_agg_insert_trigger doesn't change the watermark +INSERT INTO continuous_agg_test VALUES (10, 1), (11, 2), (21, 3), (22, 4); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- +(0 rows) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- +(0 rows) + +\c :TEST_DBNAME :ROLE_SUPERUSER +CREATE TABLE continuous_agg_test_mat(time int); +select create_hypertable('continuous_agg_test_mat', 'time', chunk_time_interval=> 10); +NOTICE: adding not-null constraint to column "time" + create_hypertable +-------------------------------------- + (2,public,continuous_agg_test_mat,t) +(1 row) + +INSERT INTO _timescaledb_catalog.continuous_agg VALUES 
(2, 1, NULL, '','','','',0,'',''); +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER +-- create the trigger +CREATE TRIGGER continuous_agg_insert_trigger + AFTER INSERT ON continuous_agg_test + FOR EACH ROW EXECUTE FUNCTION _timescaledb_functions.continuous_agg_invalidation_trigger(1); +-- inserting into the table still doesn't change the watermark since there's no +-- continuous_aggs_invalidation_threshold. We treat that case as a invalidation_watermark of +-- BIG_INT_MIN, since the first run of the aggregation will need to scan the +-- entire table anyway. +INSERT INTO continuous_agg_test VALUES (10, 1), (11, 2), (21, 3), (22, 4); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- +(0 rows) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- +(0 rows) + +-- set the continuous_aggs_invalidation_threshold to 15, any insertions below that value need an invalidation +\c :TEST_DBNAME :ROLE_SUPERUSER +INSERT INTO _timescaledb_catalog.continuous_aggs_invalidation_threshold VALUES (1, 15); +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER +INSERT INTO continuous_agg_test VALUES (10, 1), (11, 2), (21, 3), (22, 4); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 1 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 1 | 10 | 22 +(1 row) + +-- INSERTs only above the continuous_aggs_invalidation_threshold won't change the continuous_aggs_hypertable_invalidation_log +INSERT INTO continuous_agg_test VALUES (21, 3), (22, 4); +SELECT * FROM 
_timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 1 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 1 | 10 | 22 +(1 row) + +-- INSERTs only below the continuous_aggs_invalidation_threshold will change the continuous_aggs_hypertable_invalidation_log +INSERT INTO continuous_agg_test VALUES (10, 1), (11, 2); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 1 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 1 | 10 | 22 + 1 | 10 | 11 +(2 rows) + +-- test INSERTing other values +INSERT INTO continuous_agg_test VALUES (1, 7), (12, 6), (24, 5), (51, 4); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 1 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 1 | 10 | 22 + 1 | 10 | 11 + 1 | 1 | 51 +(3 rows) + +-- INSERT after dropping a COLUMN +ALTER TABLE continuous_agg_test DROP COLUMN data; +INSERT INTO continuous_agg_test VALUES (-1), (-2), (-3), (-4); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 1 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value 
+---------------+-----------------------+------------------------- + 1 | 10 | 22 + 1 | 10 | 11 + 1 | 1 | 51 + 1 | -4 | -1 +(4 rows) + +INSERT INTO continuous_agg_test VALUES (100); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 1 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 1 | 10 | 22 + 1 | 10 | 11 + 1 | 1 | 51 + 1 | -4 | -1 +(4 rows) + +-- INSERT after adding a COLUMN +ALTER TABLE continuous_agg_test ADD COLUMN d BOOLEAN; +INSERT INTO continuous_agg_test VALUES (-6, true), (-7, false), (-3, true), (-4, false); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 1 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 1 | 10 | 22 + 1 | 10 | 11 + 1 | 1 | 51 + 1 | -4 | -1 + 1 | -7 | -3 +(5 rows) + +INSERT INTO continuous_agg_test VALUES (120, false), (200, true); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 1 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 1 | 10 | 22 + 1 | 10 | 11 + 1 | 1 | 51 + 1 | -4 | -1 + 1 | -7 | -3 +(5 rows) + +\c :TEST_DBNAME :ROLE_SUPERUSER +DELETE FROM _timescaledb_catalog.continuous_agg where mat_hypertable_id = 2; +DELETE FROM _timescaledb_config.bgw_job WHERE id = 2; +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER +DROP TABLE continuous_agg_test 
CASCADE; +\c :TEST_DBNAME :ROLE_SUPERUSER +TRUNCATE _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; +TRUNCATE _timescaledb_catalog.continuous_aggs_invalidation_threshold; +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER +-- CREATE VIEW creates the invalidation trigger correctly +CREATE TABLE ca_inval_test(time int); +SELECT create_hypertable('ca_inval_test', 'time', chunk_time_interval=> 10); +NOTICE: adding not-null constraint to column "time" + create_hypertable +---------------------------- + (3,public,ca_inval_test,t) +(1 row) + +CREATE OR REPLACE FUNCTION integer_now_test2() returns int LANGUAGE SQL STABLE as $$ SELECT coalesce(max(time), 0) FROM ca_inval_test $$; +SELECT set_integer_now_func('ca_inval_test', 'integer_now_test2'); + set_integer_now_func +---------------------- + +(1 row) + +CREATE MATERIALIZED VIEW cit_view + WITH (timescaledb.continuous, timescaledb.materialized_only=false) + AS SELECT time_bucket('5', time), COUNT(time) + FROM ca_inval_test + GROUP BY 1 WITH NO DATA; +INSERT INTO ca_inval_test SELECT generate_series(0, 5); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+------------- + 3 | -2147483648 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- +(0 rows) + +\c :TEST_DBNAME :ROLE_SUPERUSER +UPDATE _timescaledb_catalog.continuous_aggs_invalidation_threshold +SET watermark = 15 +WHERE hypertable_id = 3; +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER +INSERT INTO ca_inval_test SELECT generate_series(5, 15); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 3 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | 
greatest_modified_value +---------------+-----------------------+------------------------- + 3 | 5 | 15 +(1 row) + +INSERT INTO ca_inval_test SELECT generate_series(16, 20); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 3 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 3 | 5 | 15 +(1 row) + +\c :TEST_DBNAME :ROLE_SUPERUSER +TRUNCATE _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER +-- updates below the threshold update both the old and new values +UPDATE ca_inval_test SET time = 5 WHERE time = 6; +UPDATE ca_inval_test SET time = 7 WHERE time = 5; +UPDATE ca_inval_test SET time = 17 WHERE time = 14; +UPDATE ca_inval_test SET time = 12 WHERE time = 16; +-- updates purely above the threshold are not logged +UPDATE ca_inval_test SET time = 19 WHERE time = 18; +UPDATE ca_inval_test SET time = 17 WHERE time = 19; +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 3 | 15 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 3 | 5 | 6 + 3 | 5 | 7 + 3 | 14 | 17 + 3 | 12 | 16 +(4 rows) + +DROP TABLE ca_inval_test CASCADE; +NOTICE: drop cascades to 3 other objects +\c :TEST_DBNAME :ROLE_SUPERUSER +TRUNCATE _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; +TRUNCATE _timescaledb_catalog.continuous_aggs_invalidation_threshold; +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER +-- invalidation trigger is created correctly on chunks that existed before +-- the view was created +CREATE TABLE 
ts_continuous_test(time INTEGER, location INTEGER); + SELECT create_hypertable('ts_continuous_test', 'time', chunk_time_interval => 10); +NOTICE: adding not-null constraint to column "time" + create_hypertable +--------------------------------- + (5,public,ts_continuous_test,t) +(1 row) + +CREATE OR REPLACE FUNCTION integer_now_test3() returns int LANGUAGE SQL STABLE as $$ SELECT coalesce(max(time), 0) FROM ts_continuous_test $$; +SELECT set_integer_now_func('ts_continuous_test', 'integer_now_test3'); + set_integer_now_func +---------------------- + +(1 row) + +INSERT INTO ts_continuous_test SELECT i, i FROM + (SELECT generate_series(0, 29) AS i) AS i; +CREATE MATERIALIZED VIEW continuous_view + WITH (timescaledb.continuous, timescaledb.materialized_only=false) + AS SELECT time_bucket('5', time), COUNT(location) + FROM ts_continuous_test + GROUP BY 1 WITH NO DATA; +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+------------- + 5 | -2147483648 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- +(0 rows) + +\c :TEST_DBNAME :ROLE_SUPERUSER +UPDATE _timescaledb_catalog.continuous_aggs_invalidation_threshold +SET watermark = 2 +WHERE hypertable_id = 5; +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER +INSERT INTO ts_continuous_test VALUES (1, 1); +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 5 | 2 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 5 | 1 | 1 +(1 row) + +-- aborts don't get written +BEGIN; + INSERT INTO ts_continuous_test VALUES (-20, -20); +ABORT; +SELECT * 
FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; + hypertable_id | watermark +---------------+----------- + 5 | 2 +(1 row) + +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + hypertable_id | lowest_modified_value | greatest_modified_value +---------------+-----------------------+------------------------- + 5 | 1 | 1 +(1 row) + +DROP TABLE ts_continuous_test CASCADE; +NOTICE: drop cascades to 3 other objects +---- +-- Test watermark invalidation and chunk exclusion with prepared and ad-hoc queries +---- +CREATE TABLE chunks(time timestamptz, device int, value float); +SELECT FROM create_hypertable('chunks','time',chunk_time_interval:='1d'::interval); +NOTICE: adding not-null constraint to column "time" +-- +(1 row) + +CREATE MATERIALIZED VIEW chunks_1h WITH (timescaledb.continuous) + AS SELECT time_bucket('1 hour', time) AS bucket, device, max(value) AS max FROM chunks GROUP BY 1, 2; +NOTICE: continuous aggregate "chunks_1h" is already up-to-date +ALTER MATERIALIZED VIEW chunks_1h set (timescaledb.materialized_only = false); +-- Get id of the materialization hypertable +SELECT id AS "MAT_HT_ID_1H" FROM _timescaledb_catalog.hypertable + WHERE table_name=( + SELECT materialization_hypertable_name + FROM timescaledb_information.continuous_aggregates + WHERE view_name='chunks_1h' + ) \gset +SELECT materialization_hypertable_schema || '.' 
|| materialization_hypertable_name AS "MAT_HT_NAME_1H" + FROM timescaledb_information.continuous_aggregates + WHERE view_name='chunks_1h' +\gset +-- Prepared scan on hypertable (identical to the query of a real-time CAgg) +PREPARE ht_scan_realtime_1h AS + SELECT bucket, device, max + FROM :MAT_HT_NAME_1H + WHERE bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)), '-infinity'::timestamp with time zone) +UNION ALL + SELECT time_bucket('01:00:00'::interval, chunks."time") AS bucket, + chunks.device, + max(chunks.value) AS max + FROM chunks + WHERE chunks."time" >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)), '-infinity'::timestamp with time zone) + GROUP BY (time_bucket('01:00:00'::interval, chunks."time")), chunks.device; +PREPARE cagg_scan_1h AS SELECT * FROM chunks_1h; +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +------------------------------------------------------------------------------ + HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, chunks."time"), chunks.device + Batches: 1 + -> Result (actual rows=0 loops=1) + One-Time Filter: false +(5 rows) + +INSERT INTO chunks VALUES ('1901-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); + to_timestamp +------------------------------ + Wed Jul 31 17:00:00 1901 PST +(1 row) + +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=1 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> 
HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_16_chunk."time"), _hyper_7_16_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_16_chunk_chunks_time_idx on _hyper_7_16_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) +(9 rows) + +-- Compare prepared statement with ad-hoc query +EXECUTE cagg_scan_1h; + bucket | device | max +------------------------------+--------+----- + Wed Jul 31 16:00:00 1901 PST | 1 | 2 +(1 row) + +SELECT * FROM chunks_1h; + bucket | device | max +------------------------------+--------+----- + Wed Jul 31 16:00:00 1901 PST | 1 | 2 +(1 row) + +-- Add new chunks to the non materialized part of the CAgg +INSERT INTO chunks VALUES ('1910-08-01 01:01:01+01', 1, 2); +:EXPLAIN_ANALYZE EXECUTE cagg_scan_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=2 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> HashAggregate (actual rows=1 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_16_chunk."time"), _hyper_7_16_chunk.device + Batches: 1 + -> Result (actual rows=1 loops=1) + -> Append (actual rows=1 loops=1) + -> Index Scan using _hyper_7_16_chunk_chunks_time_idx on _hyper_7_16_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> Index Scan using _hyper_7_18_chunk_chunks_time_idx on _hyper_7_18_chunk (actual rows=1 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) +(12 rows) + +:EXPLAIN_ANALYZE SELECT * FROM chunks_1h; + QUERY PLAN 
+----------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=2 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> HashAggregate (actual rows=1 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_16_chunk."time"), _hyper_7_16_chunk.device + Batches: 1 + -> Result (actual rows=1 loops=1) + -> Append (actual rows=1 loops=1) + -> Index Scan using _hyper_7_16_chunk_chunks_time_idx on _hyper_7_16_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> Index Scan using _hyper_7_18_chunk_chunks_time_idx on _hyper_7_18_chunk (actual rows=1 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) +(12 rows) + +INSERT INTO chunks VALUES ('1911-08-01 01:01:01+01', 1, 2); +:EXPLAIN_ANALYZE EXECUTE cagg_scan_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=3 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> HashAggregate (actual rows=2 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_16_chunk."time"), _hyper_7_16_chunk.device + Batches: 1 + -> Result (actual rows=2 loops=1) + -> Append (actual rows=2 loops=1) + -> Index Scan using _hyper_7_16_chunk_chunks_time_idx on _hyper_7_16_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> Index Scan using _hyper_7_18_chunk_chunks_time_idx on _hyper_7_18_chunk (actual rows=1 loops=1) + Index Cond: ("time" >= 'Wed Jul 
31 17:00:00 1901 PST'::timestamp with time zone) + -> Index Scan using _hyper_7_19_chunk_chunks_time_idx on _hyper_7_19_chunk (actual rows=1 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) +(14 rows) + +:EXPLAIN_ANALYZE SELECT * FROM chunks_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=3 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> HashAggregate (actual rows=2 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_16_chunk."time"), _hyper_7_16_chunk.device + Batches: 1 + -> Result (actual rows=2 loops=1) + -> Append (actual rows=2 loops=1) + -> Index Scan using _hyper_7_16_chunk_chunks_time_idx on _hyper_7_16_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> Index Scan using _hyper_7_18_chunk_chunks_time_idx on _hyper_7_18_chunk (actual rows=1 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) + -> Index Scan using _hyper_7_19_chunk_chunks_time_idx on _hyper_7_19_chunk (actual rows=1 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1901 PST'::timestamp with time zone) +(14 rows) + +-- Materialize CAgg and check for plan time chunk exclusion +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +:EXPLAIN_ANALYZE EXECUTE cagg_scan_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=3 loops=1) + -> Append (actual rows=3 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 
'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_19_chunk."time"), _hyper_7_19_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_19_chunk_chunks_time_idx on _hyper_7_19_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) +(14 rows) + +:EXPLAIN_ANALYZE SELECT * FROM chunks_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=3 loops=1) + -> Append (actual rows=3 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_19_chunk."time"), _hyper_7_19_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_19_chunk_chunks_time_idx on _hyper_7_19_chunk 
(actual rows=0 loops=1) + Index Cond: ("time" >= 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) +(14 rows) + +-- Check plan when chunk_append and constraint_aware_append cannot be used +-- There should be no plans for scans of chunks that are materialized in the CAgg +-- on the underlying hypertable +SET timescaledb.enable_chunk_append = OFF; +SET timescaledb.enable_constraint_aware_append = OFF; +:EXPLAIN_ANALYZE SELECT * FROM chunks_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=3 loops=1) + -> Append (actual rows=3 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_19_chunk."time"), _hyper_7_19_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_19_chunk_chunks_time_idx on _hyper_7_19_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Mon Jul 31 17:00:00 1911 PST'::timestamp with time zone) +(14 rows) + +RESET timescaledb.enable_chunk_append; +RESET timescaledb.enable_constraint_aware_append; +-- Insert new values and check watermark changes +INSERT INTO chunks VALUES ('1920-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM 
_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); + to_timestamp +------------------------------ + Sat Jul 31 17:00:00 1920 PST +(1 row) + +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=4 loops=1) + -> Append (actual rows=4 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sat Jul 31 17:00:00 1920 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sat Jul 31 17:00:00 1920 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sat Jul 31 17:00:00 1920 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sat Jul 31 17:00:00 1920 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_22_chunk."time"), _hyper_7_22_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_22_chunk_chunks_time_idx on _hyper_7_22_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Sat Jul 31 17:00:00 1920 PST'::timestamp with time zone) +(16 rows) + +-- Compare prepared statement with ad-hoc query +EXECUTE cagg_scan_1h; + bucket | device | max +------------------------------+--------+----- + Wed Jul 31 16:00:00 1901 PST | 1 | 2 + Sun Jul 31 16:00:00 1910 PST | 1 | 2 + Mon Jul 31 16:00:00 1911 PST | 1 | 2 + Sat Jul 31 16:00:00 1920 PST | 1 | 2 +(4 rows) + +SELECT * FROM chunks_1h; + bucket | 
device | max +------------------------------+--------+----- + Wed Jul 31 16:00:00 1901 PST | 1 | 2 + Sun Jul 31 16:00:00 1910 PST | 1 | 2 + Mon Jul 31 16:00:00 1911 PST | 1 | 2 + Sat Jul 31 16:00:00 1920 PST | 1 | 2 +(4 rows) + +INSERT INTO chunks VALUES ('1930-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); + to_timestamp +------------------------------ + Thu Jul 31 17:00:00 1930 PST +(1 row) + +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=5 loops=1) + -> Append (actual rows=5 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Thu Jul 31 17:00:00 1930 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Thu Jul 31 17:00:00 1930 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Thu Jul 31 17:00:00 1930 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Thu Jul 31 17:00:00 1930 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_25_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_25_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Thu Jul 31 17:00:00 1930 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_24_chunk."time"), _hyper_7_24_chunk.device + 
Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_24_chunk_chunks_time_idx on _hyper_7_24_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Thu Jul 31 17:00:00 1930 PST'::timestamp with time zone) +(18 rows) + +-- Two invalidations without prepared statement execution between +INSERT INTO chunks VALUES ('1931-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +INSERT INTO chunks VALUES ('1932-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); + to_timestamp +------------------------------ + Sun Jul 31 17:00:00 1932 PST +(1 row) + +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=7 loops=1) + -> Append (actual rows=7 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_25_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_25_chunk (actual rows=1 loops=1) + Index 
Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_27_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_27_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_29_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_29_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_28_chunk."time"), _hyper_7_28_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_28_chunk_chunks_time_idx on _hyper_7_28_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) +(22 rows) + +-- Multiple prepared statement executions followed by one invalidation +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=7 loops=1) + -> Append (actual rows=7 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 
PST'::timestamp with time zone) + -> Index Scan using _hyper_8_25_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_25_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_27_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_27_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_29_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_29_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_28_chunk."time"), _hyper_7_28_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_28_chunk_chunks_time_idx on _hyper_7_28_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) +(22 rows) + +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=7 loops=1) + -> Append (actual rows=7 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on 
_hyper_8_23_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_25_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_25_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_27_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_27_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_29_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_29_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_28_chunk."time"), _hyper_7_28_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_28_chunk_chunks_time_idx on _hyper_7_28_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Sun Jul 31 17:00:00 1932 PST'::timestamp with time zone) +(22 rows) + +INSERT INTO chunks VALUES ('1940-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=8 loops=1) + -> Append (actual rows=8 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using 
_hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_25_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_25_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_27_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_27_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_29_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_29_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_31_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_31_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_30_chunk."time"), _hyper_7_30_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_30_chunk_chunks_time_idx on _hyper_7_30_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) +(24 rows) + +-- Compare prepared statement with ad-hoc query +EXECUTE cagg_scan_1h; + bucket | device | max +------------------------------+--------+----- + Wed Jul 31 16:00:00 1901 PST | 1 | 2 + Sun Jul 31 16:00:00 1910 PST | 1 | 2 + Mon Jul 31 16:00:00 1911 PST | 1 | 2 + Sat Jul 31 16:00:00 1920 PST | 1 | 2 + Thu Jul 31 16:00:00 1930 PST | 1 | 2 + Fri Jul 31 16:00:00 1931 PST | 1 | 2 + Sun Jul 31 16:00:00 1932 
PST | 1 | 2 + Wed Jul 31 16:00:00 1940 PST | 1 | 2 +(8 rows) + +SELECT * FROM chunks_1h; + bucket | device | max +------------------------------+--------+----- + Wed Jul 31 16:00:00 1901 PST | 1 | 2 + Sun Jul 31 16:00:00 1910 PST | 1 | 2 + Mon Jul 31 16:00:00 1911 PST | 1 | 2 + Sat Jul 31 16:00:00 1920 PST | 1 | 2 + Thu Jul 31 16:00:00 1930 PST | 1 | 2 + Fri Jul 31 16:00:00 1931 PST | 1 | 2 + Sun Jul 31 16:00:00 1932 PST | 1 | 2 + Wed Jul 31 16:00:00 1940 PST | 1 | 2 +(8 rows) + +-- Delete data from hypertable - data is only present in cagg after this point. If the watermark in the prepared +-- statement is not moved to the most-recent watermark, we would see an empty result. +TRUNCATE chunks; +EXECUTE cagg_scan_1h; + bucket | device | max +------------------------------+--------+----- + Wed Jul 31 16:00:00 1901 PST | 1 | 2 + Sun Jul 31 16:00:00 1910 PST | 1 | 2 + Mon Jul 31 16:00:00 1911 PST | 1 | 2 + Sat Jul 31 16:00:00 1920 PST | 1 | 2 + Thu Jul 31 16:00:00 1930 PST | 1 | 2 + Fri Jul 31 16:00:00 1931 PST | 1 | 2 + Sun Jul 31 16:00:00 1932 PST | 1 | 2 + Wed Jul 31 16:00:00 1940 PST | 1 | 2 +(8 rows) + +SELECT * FROM chunks_1h; + bucket | device | max +------------------------------+--------+----- + Wed Jul 31 16:00:00 1901 PST | 1 | 2 + Sun Jul 31 16:00:00 1910 PST | 1 | 2 + Mon Jul 31 16:00:00 1911 PST | 1 | 2 + Sat Jul 31 16:00:00 1920 PST | 1 | 2 + Thu Jul 31 16:00:00 1930 PST | 1 | 2 + Fri Jul 31 16:00:00 1931 PST | 1 | 2 + Sun Jul 31 16:00:00 1932 PST | 1 | 2 + Wed Jul 31 16:00:00 1940 PST | 1 | 2 +(8 rows) + +-- Refresh the CAgg +CALL refresh_continuous_aggregate('chunks_1h', NULL, NULL); +EXECUTE cagg_scan_1h; + bucket | device | max +--------+--------+----- +(0 rows) + +SELECT * FROM chunks_1h; + bucket | device | max +--------+--------+----- +(0 rows) + +-- Check new watermark +SELECT * FROM _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); + to_timestamp +------------------------------ + Wed Jul 31 17:00:00 1940 
PST +(1 row) + +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=0 loops=1) + -> Append (actual rows=0 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_25_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_25_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_27_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_27_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_29_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_29_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_31_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_31_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Jul 31 17:00:00 1940 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 
hour'::interval, "time"), device + Batches: 1 + -> Result (actual rows=0 loops=1) + One-Time Filter: false +(23 rows) + +-- Update after truncate +INSERT INTO chunks VALUES ('1950-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); + to_timestamp +------------------------------ + Mon Jul 31 17:00:00 1950 PST +(1 row) + +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=1 loops=1) + -> Append (actual rows=1 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_25_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_25_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_27_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_27_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) + -> Index Scan using 
_hyper_8_29_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_29_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_31_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_31_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_33_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_33_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_32_chunk."time"), _hyper_7_32_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_32_chunk_chunks_time_idx on _hyper_7_32_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Mon Jul 31 17:00:00 1950 PST'::timestamp with time zone) +(26 rows) + +-- Test with CAgg on CAgg +CREATE MATERIALIZED VIEW chunks_1d WITH (timescaledb.continuous) + AS SELECT time_bucket('1 days', bucket) AS bucket, device, max(max) AS max FROM chunks_1h GROUP BY 1, 2; +NOTICE: refreshing continuous aggregate "chunks_1d" +ALTER MATERIALIZED VIEW chunks_1d set (timescaledb.materialized_only = false); +SELECT id AS "MAT_HT_ID_1D" FROM _timescaledb_catalog.hypertable + WHERE table_name=( + SELECT materialization_hypertable_name + FROM timescaledb_information.continuous_aggregates + WHERE view_name='chunks_1d' + ) \gset +SELECT materialization_hypertable_schema || '.' 
|| materialization_hypertable_name AS "MAT_HT_NAME_1D" + FROM timescaledb_information.continuous_aggregates + WHERE view_name='chunks_1d' +\gset +-- Prepared scan on hypertable (identical to the query of a real-time CAgg) +PREPARE ht_scan_realtime_1d AS + SELECT bucket, device, max + FROM :MAT_HT_NAME_1D + WHERE bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1D)), '-infinity'::timestamp with time zone) +UNION ALL + SELECT time_bucket('@ 1 day'::interval, chunks_1h.bucket) AS bucket, + chunks_1h.device, + max(chunks_1h.max) AS max + FROM chunks_1h + WHERE chunks_1h.bucket >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1D)), '-infinity'::timestamp with time zone) + GROUP BY (time_bucket('@ 1 day'::interval, chunks_1h.bucket)), chunks_1h.device; +PREPARE cagg_scan_1d AS SELECT * FROM chunks_1d; +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1d; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=1 loops=1) + -> Index Scan using _hyper_9_34_chunk__materialized_hypertable_9_bucket_idx on _hyper_9_34_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Tue Aug 01 16:00:00 1950 PST'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 day'::interval, (time_bucket('@ 1 hour'::interval, "time"))), device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, "time"), device + Batches: 1 + -> Result (actual rows=0 loops=1) + One-Time Filter: false +(12 rows) + +INSERT INTO chunks VALUES ('2000-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +CALL refresh_continuous_aggregate('chunks_1d', '1900-01-01', '2021-06-01'); +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1d; + QUERY PLAN 
+----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=2 loops=1) + -> Append (actual rows=2 loops=1) + -> Index Scan using _hyper_9_34_chunk__materialized_hypertable_9_bucket_idx on _hyper_9_34_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Tue Aug 01 17:00:00 2000 PDT'::timestamp with time zone) + -> Index Scan using _hyper_9_37_chunk__materialized_hypertable_9_bucket_idx on _hyper_9_37_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Tue Aug 01 17:00:00 2000 PDT'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 day'::interval, (time_bucket('@ 1 hour'::interval, "time"))), device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, "time"), device + Batches: 1 + -> Result (actual rows=0 loops=1) + One-Time Filter: false +(15 rows) + +INSERT INTO chunks VALUES ('2010-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +CALL refresh_continuous_aggregate('chunks_1d', '1900-01-01', '2021-06-01'); +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1d; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=3 loops=1) + -> Append (actual rows=3 loops=1) + -> Index Scan using _hyper_9_34_chunk__materialized_hypertable_9_bucket_idx on _hyper_9_34_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Aug 01 17:00:00 2010 PDT'::timestamp with time zone) + -> Index Scan using _hyper_9_37_chunk__materialized_hypertable_9_bucket_idx on _hyper_9_37_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Sun Aug 01 17:00:00 2010 PDT'::timestamp with time zone) + -> Index Scan using _hyper_9_40_chunk__materialized_hypertable_9_bucket_idx on _hyper_9_40_chunk (actual rows=1 
loops=1) + Index Cond: (bucket < 'Sun Aug 01 17:00:00 2010 PDT'::timestamp with time zone) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 day'::interval, (time_bucket('@ 1 hour'::interval, "time"))), device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, "time"), device + Batches: 1 + -> Result (actual rows=0 loops=1) + One-Time Filter: false +(17 rows) + +-- Stored procedure - watermark +CREATE FUNCTION cur_watermark_plsql(mat_table int) RETURNS timestamptz +AS $$ +DECLARE +cur_watermark_value timestamptz; +BEGIN + SELECT _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(mat_table)) INTO cur_watermark_value; + RETURN cur_watermark_value; +END$$ LANGUAGE plpgsql; +SELECT * FROM cur_watermark_plsql(:MAT_HT_ID_1H); + cur_watermark_plsql +------------------------------ + Sat Jul 31 18:00:00 2010 PDT +(1 row) + +INSERT INTO chunks VALUES ('2011-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM cur_watermark_plsql(:MAT_HT_ID_1H); + cur_watermark_plsql +------------------------------ + Sun Jul 31 18:00:00 2011 PDT +(1 row) + +INSERT INTO chunks VALUES ('2012-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM cur_watermark_plsql(:MAT_HT_ID_1H); + cur_watermark_plsql +------------------------------ + Tue Jul 31 18:00:00 2012 PDT +(1 row) + +-- Stored procedure - result +CREATE FUNCTION cur_cagg_result_count() RETURNS int +AS $$ +DECLARE +count_value int; +BEGIN + SELECT count(*) FROM chunks_1h INTO count_value; + RETURN count_value; +END$$ LANGUAGE plpgsql; +-- Cache function value +SELECT * FROM cur_cagg_result_count(); + cur_cagg_result_count +----------------------- + 5 +(1 row) + +-- Add to non-materialized part +INSERT INTO chunks VALUES ('2013-08-01 01:01:01+01', 1, 2); +SELECT * FROM 
cur_cagg_result_count(); + cur_cagg_result_count +----------------------- + 6 +(1 row) + +-- Materialize +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM cur_cagg_result_count(); + cur_cagg_result_count +----------------------- + 6 +(1 row) + +-- Ensure all elements are materialized (i.e., watermark is moved properly) +TRUNCATE chunks; +SELECT * FROM cur_cagg_result_count(); + cur_cagg_result_count +----------------------- + 6 +(1 row) + +SELECT count(*) FROM chunks_1h; + count +------- + 6 +(1 row) + +-- Test watermark call directly +PREPARE watermark_query AS + SELECT _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); +SELECT _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); + to_timestamp +------------------------------ + Wed Jul 31 18:00:00 2013 PDT +(1 row) + +EXECUTE watermark_query; + to_timestamp +------------------------------ + Wed Jul 31 18:00:00 2013 PDT +(1 row) + +INSERT INTO chunks VALUES ('2013-09-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); + to_timestamp +------------------------------ + Sat Aug 31 18:00:00 2013 PDT +(1 row) + +EXECUTE watermark_query; + to_timestamp +------------------------------ + Sat Aug 31 18:00:00 2013 PDT +(1 row) + +-- Disable constification of watermark values +SET timescaledb.enable_cagg_watermark_constify = OFF; +INSERT INTO chunks VALUES ('2014-01-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + QUERY PLAN +---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=2 loops=1) + -> Custom Scan (ChunkAppend) 
on _materialized_hypertable_8 (actual rows=2 loops=1) + Chunks excluded during startup: 0 + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_25_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_25_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_27_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_27_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_29_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_29_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp 
with time zone)) + -> Index Scan using _hyper_8_31_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_31_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_33_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_33_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_36_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_36_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_39_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_39_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_42_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_42_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_44_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_44_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_46_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_46_chunk (actual rows=0 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using 
_hyper_8_48_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_48_chunk (actual rows=1 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> Index Scan using _hyper_8_50_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_50_chunk (actual rows=1 loops=1) + Index Cond: (bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, chunks."time"), chunks.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Custom Scan (ChunkAppend) on chunks (actual rows=0 loops=1) + Chunks excluded during startup: 1 + -> Index Scan using _hyper_7_49_chunk_chunks_time_idx on _hyper_7_49_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone)) +(43 rows) + +RESET timescaledb.enable_cagg_watermark_constify; +-- Select with projection +INSERT INTO chunks VALUES ('2015-01-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +:EXPLAIN_ANALYZE SELECT device FROM chunks_1h; + QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------------------------- + Append (actual rows=3 loops=1) + -> Subquery Scan on "*SELECT* 1" (actual rows=3 loops=1) + -> Result (actual rows=3 loops=1) + -> Append (actual rows=3 loops=1) + -> Index Scan using _hyper_8_17_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_17_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_20_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_20_chunk (actual rows=0 loops=1) + Index Cond: 
(bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_21_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_21_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_23_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_23_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_25_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_25_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_27_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_27_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_29_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_29_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_31_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_31_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_33_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_33_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_36_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_36_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_39_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_39_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_42_chunk__materialized_hypertable_8_bucket_idx on 
_hyper_8_42_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_44_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_44_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_46_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_46_chunk (actual rows=0 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_48_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_48_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_50_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_50_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Index Scan using _hyper_8_52_chunk__materialized_hypertable_8_bucket_idx on _hyper_8_52_chunk (actual rows=1 loops=1) + Index Cond: (bucket < 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) + -> Subquery Scan on "*SELECT* 2" (actual rows=0 loops=1) + -> HashAggregate (actual rows=0 loops=1) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_7_51_chunk."time"), _hyper_7_51_chunk.device + Batches: 1 + -> Result (actual rows=0 loops=1) + -> Index Scan using _hyper_7_51_chunk_chunks_time_idx on _hyper_7_51_chunk (actual rows=0 loops=1) + Index Cond: ("time" >= 'Wed Dec 31 17:00:00 2014 PST'::timestamp with time zone) +(45 rows) + +-- Watermark function use other tables in WHERE condition (should not be constified) +CREATE TABLE continuous_agg_test(time int, data int); +:EXPLAIN_ANALYZE SELECT * FROM continuous_agg_test AS t1, continuous_agg_test AS t2 WHERE COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)), '-infinity'::timestamp with time zone) IS NOT NULL; + QUERY PLAN 
+----------------------------------------------------------------------------------------------------------------------------------------------------------------- + Result (actual rows=0 loops=1) + One-Time Filter: (COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(8)), '-infinity'::timestamp with time zone) IS NOT NULL) + -> Nested Loop (actual rows=0 loops=1) + -> Seq Scan on continuous_agg_test t1 (actual rows=0 loops=1) + -> Materialize (never executed) + -> Seq Scan on continuous_agg_test t2 (never executed) +(6 rows) + +\c :TEST_DBNAME :ROLE_SUPERUSER +TRUNCATE _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; +TRUNCATE _timescaledb_catalog.continuous_aggs_invalidation_threshold; +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER diff --git a/tsl/test/expected/cagg_watermark.out b/tsl/test/expected/cagg_watermark.out deleted file mode 100644 index 3ad3f5c39cf..00000000000 --- a/tsl/test/expected/cagg_watermark.out +++ /dev/null @@ -1,383 +0,0 @@ --- This file and its contents are licensed under the Timescale License. --- Please see the included NOTICE for copyright information and --- LICENSE-TIMESCALE for a copy of the license. 
-CREATE TABLE continuous_agg_test(time int, data int); -select create_hypertable('continuous_agg_test', 'time', chunk_time_interval=> 10); -NOTICE: adding not-null constraint to column "time" - create_hypertable ----------------------------------- - (1,public,continuous_agg_test,t) -(1 row) - -CREATE OR REPLACE FUNCTION integer_now_test1() returns int LANGUAGE SQL STABLE as $$ SELECT coalesce(max(time), 0) FROM continuous_agg_test $$; -SELECT set_integer_now_func('continuous_agg_test', 'integer_now_test1'); - set_integer_now_func ----------------------- - -(1 row) - --- watermark tabels start out empty -SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; - hypertable_id | watermark ----------------+----------- -(0 rows) - -SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; - hypertable_id | lowest_modified_value | greatest_modified_value ----------------+-----------------------+------------------------- -(0 rows) - --- inserting into a table that does not have continuous_agg_insert_trigger doesn't change the watermark -INSERT INTO continuous_agg_test VALUES (10, 1), (11, 2), (21, 3), (22, 4); -SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; - hypertable_id | watermark ----------------+----------- -(0 rows) - -SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; - hypertable_id | lowest_modified_value | greatest_modified_value ----------------+-----------------------+------------------------- -(0 rows) - -\c :TEST_DBNAME :ROLE_SUPERUSER -CREATE TABLE continuous_agg_test_mat(time int); -select create_hypertable('continuous_agg_test_mat', 'time', chunk_time_interval=> 10); -NOTICE: adding not-null constraint to column "time" - create_hypertable --------------------------------------- - (2,public,continuous_agg_test_mat,t) -(1 row) - -INSERT INTO _timescaledb_catalog.continuous_agg VALUES (2, 1, NULL, '','','','',0,'',''); -\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER 
--- create the trigger -CREATE TRIGGER continuous_agg_insert_trigger - AFTER INSERT ON continuous_agg_test - FOR EACH ROW EXECUTE FUNCTION _timescaledb_functions.continuous_agg_invalidation_trigger(1); --- inserting into the table still doesn't change the watermark since there's no --- continuous_aggs_invalidation_threshold. We treat that case as a invalidation_watermark of --- BIG_INT_MIN, since the first run of the aggregation will need to scan the --- entire table anyway. -INSERT INTO continuous_agg_test VALUES (10, 1), (11, 2), (21, 3), (22, 4); -SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; - hypertable_id | watermark ----------------+----------- -(0 rows) - -SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; - hypertable_id | lowest_modified_value | greatest_modified_value ----------------+-----------------------+------------------------- -(0 rows) - --- set the continuous_aggs_invalidation_threshold to 15, any insertions below that value need an invalidation -\c :TEST_DBNAME :ROLE_SUPERUSER -INSERT INTO _timescaledb_catalog.continuous_aggs_invalidation_threshold VALUES (1, 15); -\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER -INSERT INTO continuous_agg_test VALUES (10, 1), (11, 2), (21, 3), (22, 4); -SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; - hypertable_id | watermark ----------------+----------- - 1 | 15 -(1 row) - -SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; - hypertable_id | lowest_modified_value | greatest_modified_value ----------------+-----------------------+------------------------- - 1 | 10 | 22 -(1 row) - --- INSERTs only above the continuous_aggs_invalidation_threshold won't change the continuous_aggs_hypertable_invalidation_log -INSERT INTO continuous_agg_test VALUES (21, 3), (22, 4); -SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; - hypertable_id | watermark ----------------+----------- - 1 | 15 -(1 
row) - -SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; - hypertable_id | lowest_modified_value | greatest_modified_value ----------------+-----------------------+------------------------- - 1 | 10 | 22 -(1 row) - --- INSERTs only below the continuous_aggs_invalidation_threshold will change the continuous_aggs_hypertable_invalidation_log -INSERT INTO continuous_agg_test VALUES (10, 1), (11, 2); -SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; - hypertable_id | watermark ----------------+----------- - 1 | 15 -(1 row) - -SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; - hypertable_id | lowest_modified_value | greatest_modified_value ----------------+-----------------------+------------------------- - 1 | 10 | 22 - 1 | 10 | 11 -(2 rows) - --- test INSERTing other values -INSERT INTO continuous_agg_test VALUES (1, 7), (12, 6), (24, 5), (51, 4); -SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; - hypertable_id | watermark ----------------+----------- - 1 | 15 -(1 row) - -SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; - hypertable_id | lowest_modified_value | greatest_modified_value ----------------+-----------------------+------------------------- - 1 | 10 | 22 - 1 | 10 | 11 - 1 | 1 | 51 -(3 rows) - --- INSERT after dropping a COLUMN -ALTER TABLE continuous_agg_test DROP COLUMN data; -INSERT INTO continuous_agg_test VALUES (-1), (-2), (-3), (-4); -SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; - hypertable_id | watermark ----------------+----------- - 1 | 15 -(1 row) - -SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; - hypertable_id | lowest_modified_value | greatest_modified_value ----------------+-----------------------+------------------------- - 1 | 10 | 22 - 1 | 10 | 11 - 1 | 1 | 51 - 1 | -4 | -1 -(4 rows) - -INSERT INTO continuous_agg_test VALUES (100); 
-SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; - hypertable_id | watermark ----------------+----------- - 1 | 15 -(1 row) - -SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; - hypertable_id | lowest_modified_value | greatest_modified_value ----------------+-----------------------+------------------------- - 1 | 10 | 22 - 1 | 10 | 11 - 1 | 1 | 51 - 1 | -4 | -1 -(4 rows) - --- INSERT after adding a COLUMN -ALTER TABLE continuous_agg_test ADD COLUMN d BOOLEAN; -INSERT INTO continuous_agg_test VALUES (-6, true), (-7, false), (-3, true), (-4, false); -SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; - hypertable_id | watermark ----------------+----------- - 1 | 15 -(1 row) - -SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; - hypertable_id | lowest_modified_value | greatest_modified_value ----------------+-----------------------+------------------------- - 1 | 10 | 22 - 1 | 10 | 11 - 1 | 1 | 51 - 1 | -4 | -1 - 1 | -7 | -3 -(5 rows) - -INSERT INTO continuous_agg_test VALUES (120, false), (200, true); -SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; - hypertable_id | watermark ----------------+----------- - 1 | 15 -(1 row) - -SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; - hypertable_id | lowest_modified_value | greatest_modified_value ----------------+-----------------------+------------------------- - 1 | 10 | 22 - 1 | 10 | 11 - 1 | 1 | 51 - 1 | -4 | -1 - 1 | -7 | -3 -(5 rows) - -\c :TEST_DBNAME :ROLE_SUPERUSER -DELETE FROM _timescaledb_catalog.continuous_agg where mat_hypertable_id = 2; -DELETE FROM _timescaledb_config.bgw_job WHERE id = 2; -\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER -DROP TABLE continuous_agg_test CASCADE; -\c :TEST_DBNAME :ROLE_SUPERUSER -TRUNCATE _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; -TRUNCATE 
_timescaledb_catalog.continuous_aggs_invalidation_threshold; -\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER --- CREATE VIEW creates the invalidation trigger correctly -CREATE TABLE ca_inval_test(time int); -SELECT create_hypertable('ca_inval_test', 'time', chunk_time_interval=> 10); -NOTICE: adding not-null constraint to column "time" - create_hypertable ----------------------------- - (3,public,ca_inval_test,t) -(1 row) - -CREATE OR REPLACE FUNCTION integer_now_test2() returns int LANGUAGE SQL STABLE as $$ SELECT coalesce(max(time), 0) FROM ca_inval_test $$; -SELECT set_integer_now_func('ca_inval_test', 'integer_now_test2'); - set_integer_now_func ----------------------- - -(1 row) - -CREATE MATERIALIZED VIEW cit_view - WITH (timescaledb.continuous, timescaledb.materialized_only=false) - AS SELECT time_bucket('5', time), COUNT(time) - FROM ca_inval_test - GROUP BY 1 WITH NO DATA; -INSERT INTO ca_inval_test SELECT generate_series(0, 5); -SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; - hypertable_id | watermark ----------------+------------- - 3 | -2147483648 -(1 row) - -SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; - hypertable_id | lowest_modified_value | greatest_modified_value ----------------+-----------------------+------------------------- -(0 rows) - -\c :TEST_DBNAME :ROLE_SUPERUSER -UPDATE _timescaledb_catalog.continuous_aggs_invalidation_threshold -SET watermark = 15 -WHERE hypertable_id = 3; -\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER -INSERT INTO ca_inval_test SELECT generate_series(5, 15); -SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; - hypertable_id | watermark ----------------+----------- - 3 | 15 -(1 row) - -SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; - hypertable_id | lowest_modified_value | greatest_modified_value ----------------+-----------------------+------------------------- - 3 | 5 | 15 -(1 row) - -INSERT INTO ca_inval_test 
SELECT generate_series(16, 20); -SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; - hypertable_id | watermark ----------------+----------- - 3 | 15 -(1 row) - -SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; - hypertable_id | lowest_modified_value | greatest_modified_value ----------------+-----------------------+------------------------- - 3 | 5 | 15 -(1 row) - -\c :TEST_DBNAME :ROLE_SUPERUSER -TRUNCATE _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; -\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER --- updates below the threshold update both the old and new values -UPDATE ca_inval_test SET time = 5 WHERE time = 6; -UPDATE ca_inval_test SET time = 7 WHERE time = 5; -UPDATE ca_inval_test SET time = 17 WHERE time = 14; -UPDATE ca_inval_test SET time = 12 WHERE time = 16; --- updates purely above the threshold are not logged -UPDATE ca_inval_test SET time = 19 WHERE time = 18; -UPDATE ca_inval_test SET time = 17 WHERE time = 19; -SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; - hypertable_id | watermark ----------------+----------- - 3 | 15 -(1 row) - -SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; - hypertable_id | lowest_modified_value | greatest_modified_value ----------------+-----------------------+------------------------- - 3 | 5 | 6 - 3 | 5 | 7 - 3 | 14 | 17 - 3 | 12 | 16 -(4 rows) - -DROP TABLE ca_inval_test CASCADE; -NOTICE: drop cascades to 3 other objects -\c :TEST_DBNAME :ROLE_SUPERUSER -TRUNCATE _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; -TRUNCATE _timescaledb_catalog.continuous_aggs_invalidation_threshold; -\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER --- invalidation trigger is created correctly on chunks that existed before --- the view was created -CREATE TABLE ts_continuous_test(time INTEGER, location INTEGER); - SELECT create_hypertable('ts_continuous_test', 'time', chunk_time_interval => 10); -NOTICE: 
adding not-null constraint to column "time" - create_hypertable ---------------------------------- - (5,public,ts_continuous_test,t) -(1 row) - -CREATE OR REPLACE FUNCTION integer_now_test3() returns int LANGUAGE SQL STABLE as $$ SELECT coalesce(max(time), 0) FROM ts_continuous_test $$; -SELECT set_integer_now_func('ts_continuous_test', 'integer_now_test3'); - set_integer_now_func ----------------------- - -(1 row) - -INSERT INTO ts_continuous_test SELECT i, i FROM - (SELECT generate_series(0, 29) AS i) AS i; -CREATE MATERIALIZED VIEW continuous_view - WITH (timescaledb.continuous, timescaledb.materialized_only=false) - AS SELECT time_bucket('5', time), COUNT(location) - FROM ts_continuous_test - GROUP BY 1 WITH NO DATA; -SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; - hypertable_id | watermark ----------------+------------- - 5 | -2147483648 -(1 row) - -SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; - hypertable_id | lowest_modified_value | greatest_modified_value ----------------+-----------------------+------------------------- -(0 rows) - -\c :TEST_DBNAME :ROLE_SUPERUSER -UPDATE _timescaledb_catalog.continuous_aggs_invalidation_threshold -SET watermark = 2 -WHERE hypertable_id = 5; -\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER -INSERT INTO ts_continuous_test VALUES (1, 1); -SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; - hypertable_id | watermark ----------------+----------- - 5 | 2 -(1 row) - -SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; - hypertable_id | lowest_modified_value | greatest_modified_value ----------------+-----------------------+------------------------- - 5 | 1 | 1 -(1 row) - --- aborts don't get written -BEGIN; - INSERT INTO ts_continuous_test VALUES (-20, -20); -ABORT; -SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; - hypertable_id | watermark ----------------+----------- - 5 | 2 -(1 row) - 
-SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; - hypertable_id | lowest_modified_value | greatest_modified_value ----------------+-----------------------+------------------------- - 5 | 1 | 1 -(1 row) - -DROP TABLE ts_continuous_test CASCADE; -NOTICE: drop cascades to 3 other objects -\c :TEST_DBNAME :ROLE_SUPERUSER -TRUNCATE _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; -TRUNCATE _timescaledb_catalog.continuous_aggs_invalidation_threshold; -\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER diff --git a/tsl/test/expected/continuous_aggs-13.out b/tsl/test/expected/continuous_aggs-13.out index 8bd346f2aaf..3e969d9d18e 100644 --- a/tsl/test/expected/continuous_aggs-13.out +++ b/tsl/test/expected/continuous_aggs-13.out @@ -2034,30 +2034,27 @@ SELECT * FROM mat_m1; -- Merge Append EXPLAIN (COSTS OFF) SELECT * FROM mat_m1; - QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------ Merge Append - Sort Key: _materialized_hypertable_59.sum DESC - -> Custom Scan (ConstraintAwareAppend) - Hypertable: _materialized_hypertable_59 - Chunks excluded during startup: 0 - -> Merge Append - Sort Key: _hyper_59_123_chunk.sum DESC - -> Index Scan Backward using _hyper_59_123_chunk__materialized_hypertable_59_sum_time_bucket on _hyper_59_123_chunk - Index Cond: (time_bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(59)), '-infinity'::timestamp with time zone)) - -> Index Scan Backward using _hyper_59_124_chunk__materialized_hypertable_59_sum_time_bucket on _hyper_59_124_chunk - Index Cond: (time_bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(59)), '-infinity'::timestamp with time zone)) 
+ Sort Key: _hyper_59_123_chunk.sum DESC + -> Merge Append + Sort Key: _hyper_59_123_chunk.sum DESC + -> Index Scan Backward using _hyper_59_123_chunk__materialized_hypertable_59_sum_time_bucket on _hyper_59_123_chunk + Index Cond: (time_bucket < 'Sun Nov 04 16:00:00 2018 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_59_124_chunk__materialized_hypertable_59_sum_time_bucket on _hyper_59_124_chunk + Index Cond: (time_bucket < 'Sun Nov 04 16:00:00 2018 PST'::timestamp with time zone) -> Sort - Sort Key: (sum(conditions.temperature)) DESC + Sort Key: (sum(_hyper_52_111_chunk.temperature)) DESC -> HashAggregate - Group Key: time_bucket('@ 7 days'::interval, conditions.timec) - -> Custom Scan (ChunkAppend) on conditions - Chunks excluded during startup: 1 - -> Index Scan Backward using _hyper_52_111_chunk_conditions_timec_idx on _hyper_52_111_chunk - Index Cond: (timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(59)), '-infinity'::timestamp with time zone)) - -> Index Scan Backward using _hyper_52_125_chunk_conditions_timec_idx on _hyper_52_125_chunk - Index Cond: (timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(59)), '-infinity'::timestamp with time zone)) -(21 rows) + Group Key: time_bucket('@ 7 days'::interval, _hyper_52_111_chunk.timec) + -> Result + -> Append + -> Index Scan Backward using _hyper_52_111_chunk_conditions_timec_idx on _hyper_52_111_chunk + Index Cond: (timec >= 'Sun Nov 04 16:00:00 2018 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_52_125_chunk_conditions_timec_idx on _hyper_52_125_chunk + Index Cond: (timec >= 'Sun Nov 04 16:00:00 2018 PST'::timestamp with time zone) +(18 rows) -- Ordering by another column SELECT * FROM mat_m1 ORDER BY count; @@ -2070,32 +2067,29 @@ SELECT * FROM mat_m1 ORDER BY count; (4 rows) EXPLAIN (COSTS OFF) SELECT * FROM mat_m1 ORDER BY count; - QUERY PLAN 
----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------ Sort - Sort Key: _materialized_hypertable_59.count + Sort Key: _hyper_59_123_chunk.count -> Merge Append - Sort Key: _materialized_hypertable_59.sum DESC - -> Custom Scan (ConstraintAwareAppend) - Hypertable: _materialized_hypertable_59 - Chunks excluded during startup: 0 - -> Merge Append - Sort Key: _hyper_59_123_chunk.sum DESC - -> Index Scan Backward using _hyper_59_123_chunk__materialized_hypertable_59_sum_time_bucket on _hyper_59_123_chunk - Index Cond: (time_bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(59)), '-infinity'::timestamp with time zone)) - -> Index Scan Backward using _hyper_59_124_chunk__materialized_hypertable_59_sum_time_bucket on _hyper_59_124_chunk - Index Cond: (time_bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(59)), '-infinity'::timestamp with time zone)) + Sort Key: _hyper_59_123_chunk.sum DESC + -> Merge Append + Sort Key: _hyper_59_123_chunk.sum DESC + -> Index Scan Backward using _hyper_59_123_chunk__materialized_hypertable_59_sum_time_bucket on _hyper_59_123_chunk + Index Cond: (time_bucket < 'Sun Nov 04 16:00:00 2018 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_59_124_chunk__materialized_hypertable_59_sum_time_bucket on _hyper_59_124_chunk + Index Cond: (time_bucket < 'Sun Nov 04 16:00:00 2018 PST'::timestamp with time zone) -> Sort - Sort Key: (sum(conditions.temperature)) DESC + Sort Key: (sum(_hyper_52_111_chunk.temperature)) DESC -> HashAggregate - Group Key: time_bucket('@ 7 days'::interval, conditions.timec) - -> Custom Scan (ChunkAppend) on conditions - Chunks excluded during startup: 
1 - -> Index Scan Backward using _hyper_52_111_chunk_conditions_timec_idx on _hyper_52_111_chunk - Index Cond: (timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(59)), '-infinity'::timestamp with time zone)) - -> Index Scan Backward using _hyper_52_125_chunk_conditions_timec_idx on _hyper_52_125_chunk - Index Cond: (timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(59)), '-infinity'::timestamp with time zone)) -(23 rows) + Group Key: time_bucket('@ 7 days'::interval, _hyper_52_111_chunk.timec) + -> Result + -> Append + -> Index Scan Backward using _hyper_52_111_chunk_conditions_timec_idx on _hyper_52_111_chunk + Index Cond: (timec >= 'Sun Nov 04 16:00:00 2018 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_52_125_chunk_conditions_timec_idx on _hyper_52_125_chunk + Index Cond: (timec >= 'Sun Nov 04 16:00:00 2018 PST'::timestamp with time zone) +(20 rows) -- Change the type of cagg ALTER MATERIALIZED VIEW mat_m1 SET (timescaledb.materialized_only=true); @@ -2280,28 +2274,30 @@ SET parallel_setup_cost = 0; SET parallel_tuple_cost = 0; -- Parallel planning EXPLAIN (COSTS OFF, TIMING OFF) SELECT * FROM conditions_daily WHERE time_bucket >= '2023-07-01'; - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ Merge Append - Sort Key: _materialized_hypertable_63.sum DESC - -> Sort - Sort Key: _materialized_hypertable_63.sum DESC - -> Custom Scan (ChunkAppend) on _materialized_hypertable_63 - Chunks excluded during startup: 0 - -> Index Scan using 
_hyper_63_183_chunk__materialized_hypertable_63_time_bucket_idx on _hyper_63_183_chunk - Index Cond: ((time_bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(63)), '-infinity'::timestamp with time zone)) AND (time_bucket >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone)) - -> Index Scan using _hyper_63_187_chunk__materialized_hypertable_63_time_bucket_idx on _hyper_63_187_chunk - Index Cond: ((time_bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(63)), '-infinity'::timestamp with time zone)) AND (time_bucket >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone)) - -> Index Scan using _hyper_63_188_chunk__materialized_hypertable_63_time_bucket_idx on _hyper_63_188_chunk - Index Cond: ((time_bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(63)), '-infinity'::timestamp with time zone)) AND (time_bucket >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone)) + Sort Key: _hyper_63_183_chunk.sum DESC + -> Gather Merge + Workers Planned: 2 + -> Sort + Sort Key: _hyper_63_183_chunk.sum DESC + -> Parallel Append + -> Parallel Index Scan using _hyper_63_183_chunk__materialized_hypertable_63_time_bucket_idx on _hyper_63_183_chunk + Index Cond: ((time_bucket < 'Mon Jan 01 16:00:00 2024 PST'::timestamp with time zone) AND (time_bucket >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone)) + -> Parallel Index Scan using _hyper_63_187_chunk__materialized_hypertable_63_time_bucket_idx on _hyper_63_187_chunk + Index Cond: ((time_bucket < 'Mon Jan 01 16:00:00 2024 PST'::timestamp with time zone) AND (time_bucket >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone)) + -> Parallel Index Scan using _hyper_63_188_chunk__materialized_hypertable_63_time_bucket_idx on _hyper_63_188_chunk + Index Cond: ((time_bucket < 'Mon Jan 01 16:00:00 2024 PST'::timestamp with time zone) AND (time_bucket >= 'Sat Jul 01 00:00:00 2023 
PDT'::timestamp with time zone)) -> Sort - Sort Key: (sum(conditions.temperature)) DESC + Sort Key: (sum(_hyper_62_182_chunk.temperature)) DESC -> HashAggregate - Group Key: time_bucket('@ 1 day'::interval, conditions.timec) - -> Custom Scan (ChunkAppend) on conditions - Chunks excluded during startup: 26 - -> Index Scan Backward using _hyper_62_182_chunk_conditions_timec_idx on _hyper_62_182_chunk - Index Cond: ((timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(63)), '-infinity'::timestamp with time zone)) AND (timec >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone)) - Filter: (time_bucket('@ 1 day'::interval, timec) >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone) -(21 rows) + Group Key: (time_bucket('@ 1 day'::interval, _hyper_62_182_chunk.timec)) + -> Gather + Workers Planned: 1 + -> Result + -> Parallel Index Scan Backward using _hyper_62_182_chunk_conditions_timec_idx on _hyper_62_182_chunk + Index Cond: ((timec >= 'Mon Jan 01 16:00:00 2024 PST'::timestamp with time zone) AND (timec >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone)) + Filter: (time_bucket('@ 1 day'::interval, timec) >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone) +(23 rows) diff --git a/tsl/test/expected/continuous_aggs-14.out b/tsl/test/expected/continuous_aggs-14.out index e43ea781b98..d7746b6a231 100644 --- a/tsl/test/expected/continuous_aggs-14.out +++ b/tsl/test/expected/continuous_aggs-14.out @@ -2033,30 +2033,27 @@ SELECT * FROM mat_m1; -- Merge Append EXPLAIN (COSTS OFF) SELECT * FROM mat_m1; - QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------ Merge Append - Sort Key: _materialized_hypertable_59.sum DESC - -> Custom 
Scan (ConstraintAwareAppend) - Hypertable: _materialized_hypertable_59 - Chunks excluded during startup: 0 - -> Merge Append - Sort Key: _hyper_59_123_chunk.sum DESC - -> Index Scan Backward using _hyper_59_123_chunk__materialized_hypertable_59_sum_time_bucket on _hyper_59_123_chunk - Index Cond: (time_bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(59)), '-infinity'::timestamp with time zone)) - -> Index Scan Backward using _hyper_59_124_chunk__materialized_hypertable_59_sum_time_bucket on _hyper_59_124_chunk - Index Cond: (time_bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(59)), '-infinity'::timestamp with time zone)) + Sort Key: _hyper_59_123_chunk.sum DESC + -> Merge Append + Sort Key: _hyper_59_123_chunk.sum DESC + -> Index Scan Backward using _hyper_59_123_chunk__materialized_hypertable_59_sum_time_bucket on _hyper_59_123_chunk + Index Cond: (time_bucket < 'Sun Nov 04 16:00:00 2018 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_59_124_chunk__materialized_hypertable_59_sum_time_bucket on _hyper_59_124_chunk + Index Cond: (time_bucket < 'Sun Nov 04 16:00:00 2018 PST'::timestamp with time zone) -> Sort - Sort Key: (sum(conditions.temperature)) DESC + Sort Key: (sum(_hyper_52_111_chunk.temperature)) DESC -> HashAggregate - Group Key: time_bucket('@ 7 days'::interval, conditions.timec) - -> Custom Scan (ChunkAppend) on conditions - Chunks excluded during startup: 1 - -> Index Scan Backward using _hyper_52_111_chunk_conditions_timec_idx on _hyper_52_111_chunk - Index Cond: (timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(59)), '-infinity'::timestamp with time zone)) - -> Index Scan Backward using _hyper_52_125_chunk_conditions_timec_idx on _hyper_52_125_chunk - Index Cond: (timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(59)), '-infinity'::timestamp with time zone)) 
-(21 rows) + Group Key: time_bucket('@ 7 days'::interval, _hyper_52_111_chunk.timec) + -> Result + -> Append + -> Index Scan Backward using _hyper_52_111_chunk_conditions_timec_idx on _hyper_52_111_chunk + Index Cond: (timec >= 'Sun Nov 04 16:00:00 2018 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_52_125_chunk_conditions_timec_idx on _hyper_52_125_chunk + Index Cond: (timec >= 'Sun Nov 04 16:00:00 2018 PST'::timestamp with time zone) +(18 rows) -- Ordering by another column SELECT * FROM mat_m1 ORDER BY count; @@ -2069,32 +2066,29 @@ SELECT * FROM mat_m1 ORDER BY count; (4 rows) EXPLAIN (COSTS OFF) SELECT * FROM mat_m1 ORDER BY count; - QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------ Sort - Sort Key: _materialized_hypertable_59.count + Sort Key: _hyper_59_123_chunk.count -> Merge Append - Sort Key: _materialized_hypertable_59.sum DESC - -> Custom Scan (ConstraintAwareAppend) - Hypertable: _materialized_hypertable_59 - Chunks excluded during startup: 0 - -> Merge Append - Sort Key: _hyper_59_123_chunk.sum DESC - -> Index Scan Backward using _hyper_59_123_chunk__materialized_hypertable_59_sum_time_bucket on _hyper_59_123_chunk - Index Cond: (time_bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(59)), '-infinity'::timestamp with time zone)) - -> Index Scan Backward using _hyper_59_124_chunk__materialized_hypertable_59_sum_time_bucket on _hyper_59_124_chunk - Index Cond: (time_bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(59)), '-infinity'::timestamp with time zone)) + Sort Key: _hyper_59_123_chunk.sum DESC + -> Merge Append + Sort Key: _hyper_59_123_chunk.sum DESC + 
-> Index Scan Backward using _hyper_59_123_chunk__materialized_hypertable_59_sum_time_bucket on _hyper_59_123_chunk + Index Cond: (time_bucket < 'Sun Nov 04 16:00:00 2018 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_59_124_chunk__materialized_hypertable_59_sum_time_bucket on _hyper_59_124_chunk + Index Cond: (time_bucket < 'Sun Nov 04 16:00:00 2018 PST'::timestamp with time zone) -> Sort - Sort Key: (sum(conditions.temperature)) DESC + Sort Key: (sum(_hyper_52_111_chunk.temperature)) DESC -> HashAggregate - Group Key: time_bucket('@ 7 days'::interval, conditions.timec) - -> Custom Scan (ChunkAppend) on conditions - Chunks excluded during startup: 1 - -> Index Scan Backward using _hyper_52_111_chunk_conditions_timec_idx on _hyper_52_111_chunk - Index Cond: (timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(59)), '-infinity'::timestamp with time zone)) - -> Index Scan Backward using _hyper_52_125_chunk_conditions_timec_idx on _hyper_52_125_chunk - Index Cond: (timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(59)), '-infinity'::timestamp with time zone)) -(23 rows) + Group Key: time_bucket('@ 7 days'::interval, _hyper_52_111_chunk.timec) + -> Result + -> Append + -> Index Scan Backward using _hyper_52_111_chunk_conditions_timec_idx on _hyper_52_111_chunk + Index Cond: (timec >= 'Sun Nov 04 16:00:00 2018 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_52_125_chunk_conditions_timec_idx on _hyper_52_125_chunk + Index Cond: (timec >= 'Sun Nov 04 16:00:00 2018 PST'::timestamp with time zone) +(20 rows) -- Change the type of cagg ALTER MATERIALIZED VIEW mat_m1 SET (timescaledb.materialized_only=true); @@ -2279,28 +2273,30 @@ SET parallel_setup_cost = 0; SET parallel_tuple_cost = 0; -- Parallel planning EXPLAIN (COSTS OFF, TIMING OFF) SELECT * FROM conditions_daily WHERE time_bucket >= '2023-07-01'; - QUERY PLAN 
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ Merge Append - Sort Key: _materialized_hypertable_63.sum DESC - -> Sort - Sort Key: _materialized_hypertable_63.sum DESC - -> Custom Scan (ChunkAppend) on _materialized_hypertable_63 - Chunks excluded during startup: 0 - -> Index Scan using _hyper_63_183_chunk__materialized_hypertable_63_time_bucket_idx on _hyper_63_183_chunk - Index Cond: ((time_bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(63)), '-infinity'::timestamp with time zone)) AND (time_bucket >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone)) - -> Index Scan using _hyper_63_187_chunk__materialized_hypertable_63_time_bucket_idx on _hyper_63_187_chunk - Index Cond: ((time_bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(63)), '-infinity'::timestamp with time zone)) AND (time_bucket >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone)) - -> Index Scan using _hyper_63_188_chunk__materialized_hypertable_63_time_bucket_idx on _hyper_63_188_chunk - Index Cond: ((time_bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(63)), '-infinity'::timestamp with time zone)) AND (time_bucket >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone)) + Sort Key: _hyper_63_183_chunk.sum DESC + -> Gather Merge + Workers Planned: 2 + -> Sort + Sort Key: _hyper_63_183_chunk.sum DESC + -> Parallel Append + -> Parallel Index Scan using _hyper_63_183_chunk__materialized_hypertable_63_time_bucket_idx on _hyper_63_183_chunk + 
Index Cond: ((time_bucket < 'Mon Jan 01 16:00:00 2024 PST'::timestamp with time zone) AND (time_bucket >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone)) + -> Parallel Index Scan using _hyper_63_187_chunk__materialized_hypertable_63_time_bucket_idx on _hyper_63_187_chunk + Index Cond: ((time_bucket < 'Mon Jan 01 16:00:00 2024 PST'::timestamp with time zone) AND (time_bucket >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone)) + -> Parallel Index Scan using _hyper_63_188_chunk__materialized_hypertable_63_time_bucket_idx on _hyper_63_188_chunk + Index Cond: ((time_bucket < 'Mon Jan 01 16:00:00 2024 PST'::timestamp with time zone) AND (time_bucket >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone)) -> Sort - Sort Key: (sum(conditions.temperature)) DESC + Sort Key: (sum(_hyper_62_182_chunk.temperature)) DESC -> HashAggregate - Group Key: time_bucket('@ 1 day'::interval, conditions.timec) - -> Custom Scan (ChunkAppend) on conditions - Chunks excluded during startup: 26 - -> Index Scan Backward using _hyper_62_182_chunk_conditions_timec_idx on _hyper_62_182_chunk - Index Cond: ((timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(63)), '-infinity'::timestamp with time zone)) AND (timec >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone)) - Filter: (time_bucket('@ 1 day'::interval, timec) >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone) -(21 rows) + Group Key: (time_bucket('@ 1 day'::interval, _hyper_62_182_chunk.timec)) + -> Gather + Workers Planned: 1 + -> Result + -> Parallel Index Scan Backward using _hyper_62_182_chunk_conditions_timec_idx on _hyper_62_182_chunk + Index Cond: ((timec >= 'Mon Jan 01 16:00:00 2024 PST'::timestamp with time zone) AND (timec >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone)) + Filter: (time_bucket('@ 1 day'::interval, timec) >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone) +(23 rows) diff --git 
a/tsl/test/expected/continuous_aggs-15.out b/tsl/test/expected/continuous_aggs-15.out index f196a4ad743..d7746b6a231 100644 --- a/tsl/test/expected/continuous_aggs-15.out +++ b/tsl/test/expected/continuous_aggs-15.out @@ -2033,31 +2033,27 @@ SELECT * FROM mat_m1; -- Merge Append EXPLAIN (COSTS OFF) SELECT * FROM mat_m1; - QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------ Merge Append - Sort Key: _materialized_hypertable_59.sum DESC - -> Custom Scan (ConstraintAwareAppend) - Hypertable: _materialized_hypertable_59 - Chunks excluded during startup: 0 - -> Merge Append - Sort Key: _hyper_59_123_chunk.sum DESC - -> Index Scan Backward using _hyper_59_123_chunk__materialized_hypertable_59_sum_time_bucket on _hyper_59_123_chunk - Index Cond: (time_bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(59)), '-infinity'::timestamp with time zone)) - -> Index Scan Backward using _hyper_59_124_chunk__materialized_hypertable_59_sum_time_bucket on _hyper_59_124_chunk - Index Cond: (time_bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(59)), '-infinity'::timestamp with time zone)) + Sort Key: _hyper_59_123_chunk.sum DESC + -> Merge Append + Sort Key: _hyper_59_123_chunk.sum DESC + -> Index Scan Backward using _hyper_59_123_chunk__materialized_hypertable_59_sum_time_bucket on _hyper_59_123_chunk + Index Cond: (time_bucket < 'Sun Nov 04 16:00:00 2018 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_59_124_chunk__materialized_hypertable_59_sum_time_bucket on _hyper_59_124_chunk + Index Cond: (time_bucket < 'Sun Nov 04 16:00:00 2018 PST'::timestamp with time zone) -> Sort - Sort Key: 
(sum(conditions.temperature)) DESC + Sort Key: (sum(_hyper_52_111_chunk.temperature)) DESC -> HashAggregate - Group Key: time_bucket('@ 7 days'::interval, conditions.timec) + Group Key: time_bucket('@ 7 days'::interval, _hyper_52_111_chunk.timec) -> Result - -> Custom Scan (ChunkAppend) on conditions - Chunks excluded during startup: 1 + -> Append -> Index Scan Backward using _hyper_52_111_chunk_conditions_timec_idx on _hyper_52_111_chunk - Index Cond: (timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(59)), '-infinity'::timestamp with time zone)) + Index Cond: (timec >= 'Sun Nov 04 16:00:00 2018 PST'::timestamp with time zone) -> Index Scan Backward using _hyper_52_125_chunk_conditions_timec_idx on _hyper_52_125_chunk - Index Cond: (timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(59)), '-infinity'::timestamp with time zone)) -(22 rows) + Index Cond: (timec >= 'Sun Nov 04 16:00:00 2018 PST'::timestamp with time zone) +(18 rows) -- Ordering by another column SELECT * FROM mat_m1 ORDER BY count; @@ -2070,33 +2066,29 @@ SELECT * FROM mat_m1 ORDER BY count; (4 rows) EXPLAIN (COSTS OFF) SELECT * FROM mat_m1 ORDER BY count; - QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------ Sort - Sort Key: _materialized_hypertable_59.count + Sort Key: _hyper_59_123_chunk.count -> Merge Append - Sort Key: _materialized_hypertable_59.sum DESC - -> Custom Scan (ConstraintAwareAppend) - Hypertable: _materialized_hypertable_59 - Chunks excluded during startup: 0 - -> Merge Append - Sort Key: _hyper_59_123_chunk.sum DESC - -> Index Scan Backward using 
_hyper_59_123_chunk__materialized_hypertable_59_sum_time_bucket on _hyper_59_123_chunk - Index Cond: (time_bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(59)), '-infinity'::timestamp with time zone)) - -> Index Scan Backward using _hyper_59_124_chunk__materialized_hypertable_59_sum_time_bucket on _hyper_59_124_chunk - Index Cond: (time_bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(59)), '-infinity'::timestamp with time zone)) + Sort Key: _hyper_59_123_chunk.sum DESC + -> Merge Append + Sort Key: _hyper_59_123_chunk.sum DESC + -> Index Scan Backward using _hyper_59_123_chunk__materialized_hypertable_59_sum_time_bucket on _hyper_59_123_chunk + Index Cond: (time_bucket < 'Sun Nov 04 16:00:00 2018 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_59_124_chunk__materialized_hypertable_59_sum_time_bucket on _hyper_59_124_chunk + Index Cond: (time_bucket < 'Sun Nov 04 16:00:00 2018 PST'::timestamp with time zone) -> Sort - Sort Key: (sum(conditions.temperature)) DESC + Sort Key: (sum(_hyper_52_111_chunk.temperature)) DESC -> HashAggregate - Group Key: time_bucket('@ 7 days'::interval, conditions.timec) + Group Key: time_bucket('@ 7 days'::interval, _hyper_52_111_chunk.timec) -> Result - -> Custom Scan (ChunkAppend) on conditions - Chunks excluded during startup: 1 + -> Append -> Index Scan Backward using _hyper_52_111_chunk_conditions_timec_idx on _hyper_52_111_chunk - Index Cond: (timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(59)), '-infinity'::timestamp with time zone)) + Index Cond: (timec >= 'Sun Nov 04 16:00:00 2018 PST'::timestamp with time zone) -> Index Scan Backward using _hyper_52_125_chunk_conditions_timec_idx on _hyper_52_125_chunk - Index Cond: (timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(59)), '-infinity'::timestamp with time zone)) -(24 rows) + Index Cond: 
(timec >= 'Sun Nov 04 16:00:00 2018 PST'::timestamp with time zone) +(20 rows) -- Change the type of cagg ALTER MATERIALIZED VIEW mat_m1 SET (timescaledb.materialized_only=true); @@ -2281,29 +2273,30 @@ SET parallel_setup_cost = 0; SET parallel_tuple_cost = 0; -- Parallel planning EXPLAIN (COSTS OFF, TIMING OFF) SELECT * FROM conditions_daily WHERE time_bucket >= '2023-07-01'; - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ Merge Append - Sort Key: _materialized_hypertable_63.sum DESC - -> Sort - Sort Key: _materialized_hypertable_63.sum DESC - -> Custom Scan (ChunkAppend) on _materialized_hypertable_63 - Chunks excluded during startup: 0 - -> Index Scan using _hyper_63_183_chunk__materialized_hypertable_63_time_bucket_idx on _hyper_63_183_chunk - Index Cond: ((time_bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(63)), '-infinity'::timestamp with time zone)) AND (time_bucket >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone)) - -> Index Scan using _hyper_63_187_chunk__materialized_hypertable_63_time_bucket_idx on _hyper_63_187_chunk - Index Cond: ((time_bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(63)), '-infinity'::timestamp with time zone)) AND (time_bucket >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone)) - -> Index Scan using _hyper_63_188_chunk__materialized_hypertable_63_time_bucket_idx on _hyper_63_188_chunk - Index Cond: ((time_bucket < 
COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(63)), '-infinity'::timestamp with time zone)) AND (time_bucket >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone)) + Sort Key: _hyper_63_183_chunk.sum DESC + -> Gather Merge + Workers Planned: 2 + -> Sort + Sort Key: _hyper_63_183_chunk.sum DESC + -> Parallel Append + -> Parallel Index Scan using _hyper_63_183_chunk__materialized_hypertable_63_time_bucket_idx on _hyper_63_183_chunk + Index Cond: ((time_bucket < 'Mon Jan 01 16:00:00 2024 PST'::timestamp with time zone) AND (time_bucket >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone)) + -> Parallel Index Scan using _hyper_63_187_chunk__materialized_hypertable_63_time_bucket_idx on _hyper_63_187_chunk + Index Cond: ((time_bucket < 'Mon Jan 01 16:00:00 2024 PST'::timestamp with time zone) AND (time_bucket >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone)) + -> Parallel Index Scan using _hyper_63_188_chunk__materialized_hypertable_63_time_bucket_idx on _hyper_63_188_chunk + Index Cond: ((time_bucket < 'Mon Jan 01 16:00:00 2024 PST'::timestamp with time zone) AND (time_bucket >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone)) -> Sort - Sort Key: (sum(conditions.temperature)) DESC + Sort Key: (sum(_hyper_62_182_chunk.temperature)) DESC -> HashAggregate - Group Key: time_bucket('@ 1 day'::interval, conditions.timec) - -> Result - -> Custom Scan (ChunkAppend) on conditions - Chunks excluded during startup: 26 - -> Index Scan Backward using _hyper_62_182_chunk_conditions_timec_idx on _hyper_62_182_chunk - Index Cond: ((timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(63)), '-infinity'::timestamp with time zone)) AND (timec >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone)) + Group Key: (time_bucket('@ 1 day'::interval, _hyper_62_182_chunk.timec)) + -> Gather + Workers Planned: 1 + -> Result + -> Parallel Index Scan Backward using 
_hyper_62_182_chunk_conditions_timec_idx on _hyper_62_182_chunk + Index Cond: ((timec >= 'Mon Jan 01 16:00:00 2024 PST'::timestamp with time zone) AND (timec >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone)) Filter: (time_bucket('@ 1 day'::interval, timec) >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone) -(22 rows) +(23 rows) diff --git a/tsl/test/expected/continuous_aggs-16.out b/tsl/test/expected/continuous_aggs-16.out index 642ba04adea..097ee3f081c 100644 --- a/tsl/test/expected/continuous_aggs-16.out +++ b/tsl/test/expected/continuous_aggs-16.out @@ -2033,31 +2033,27 @@ SELECT * FROM mat_m1; -- Merge Append EXPLAIN (COSTS OFF) SELECT * FROM mat_m1; - QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------ Merge Append - Sort Key: _materialized_hypertable_59.sum DESC - -> Custom Scan (ConstraintAwareAppend) - Hypertable: _materialized_hypertable_59 - Chunks excluded during startup: 0 - -> Merge Append - Sort Key: _hyper_59_123_chunk.sum DESC - -> Index Scan Backward using _hyper_59_123_chunk__materialized_hypertable_59_sum_time_bucket on _hyper_59_123_chunk - Index Cond: (time_bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(59)), '-infinity'::timestamp with time zone)) - -> Index Scan Backward using _hyper_59_124_chunk__materialized_hypertable_59_sum_time_bucket on _hyper_59_124_chunk - Index Cond: (time_bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(59)), '-infinity'::timestamp with time zone)) + Sort Key: _hyper_59_123_chunk.sum DESC + -> Merge Append + Sort Key: _hyper_59_123_chunk.sum DESC + -> Index Scan Backward using 
_hyper_59_123_chunk__materialized_hypertable_59_sum_time_bucket on _hyper_59_123_chunk + Index Cond: (time_bucket < 'Sun Nov 04 16:00:00 2018 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_59_124_chunk__materialized_hypertable_59_sum_time_bucket on _hyper_59_124_chunk + Index Cond: (time_bucket < 'Sun Nov 04 16:00:00 2018 PST'::timestamp with time zone) -> Sort - Sort Key: (sum(conditions.temperature)) DESC + Sort Key: (sum(_hyper_52_111_chunk.temperature)) DESC -> HashAggregate - Group Key: time_bucket('@ 7 days'::interval, conditions.timec) + Group Key: time_bucket('@ 7 days'::interval, _hyper_52_111_chunk.timec) -> Result - -> Custom Scan (ChunkAppend) on conditions - Chunks excluded during startup: 1 + -> Append -> Index Scan Backward using _hyper_52_111_chunk_conditions_timec_idx on _hyper_52_111_chunk - Index Cond: (timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(59)), '-infinity'::timestamp with time zone)) + Index Cond: (timec >= 'Sun Nov 04 16:00:00 2018 PST'::timestamp with time zone) -> Index Scan Backward using _hyper_52_125_chunk_conditions_timec_idx on _hyper_52_125_chunk - Index Cond: (timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(59)), '-infinity'::timestamp with time zone)) -(22 rows) + Index Cond: (timec >= 'Sun Nov 04 16:00:00 2018 PST'::timestamp with time zone) +(18 rows) -- Ordering by another column SELECT * FROM mat_m1 ORDER BY count; @@ -2070,33 +2066,29 @@ SELECT * FROM mat_m1 ORDER BY count; (4 rows) EXPLAIN (COSTS OFF) SELECT * FROM mat_m1 ORDER BY count; - QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------ Sort - Sort Key: 
_materialized_hypertable_59.count + Sort Key: _hyper_59_123_chunk.count -> Merge Append - Sort Key: _materialized_hypertable_59.sum DESC - -> Custom Scan (ConstraintAwareAppend) - Hypertable: _materialized_hypertable_59 - Chunks excluded during startup: 0 - -> Merge Append - Sort Key: _hyper_59_123_chunk.sum DESC - -> Index Scan Backward using _hyper_59_123_chunk__materialized_hypertable_59_sum_time_bucket on _hyper_59_123_chunk - Index Cond: (time_bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(59)), '-infinity'::timestamp with time zone)) - -> Index Scan Backward using _hyper_59_124_chunk__materialized_hypertable_59_sum_time_bucket on _hyper_59_124_chunk - Index Cond: (time_bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(59)), '-infinity'::timestamp with time zone)) + Sort Key: _hyper_59_123_chunk.sum DESC + -> Merge Append + Sort Key: _hyper_59_123_chunk.sum DESC + -> Index Scan Backward using _hyper_59_123_chunk__materialized_hypertable_59_sum_time_bucket on _hyper_59_123_chunk + Index Cond: (time_bucket < 'Sun Nov 04 16:00:00 2018 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_59_124_chunk__materialized_hypertable_59_sum_time_bucket on _hyper_59_124_chunk + Index Cond: (time_bucket < 'Sun Nov 04 16:00:00 2018 PST'::timestamp with time zone) -> Sort - Sort Key: (sum(conditions.temperature)) DESC + Sort Key: (sum(_hyper_52_111_chunk.temperature)) DESC -> HashAggregate - Group Key: time_bucket('@ 7 days'::interval, conditions.timec) + Group Key: time_bucket('@ 7 days'::interval, _hyper_52_111_chunk.timec) -> Result - -> Custom Scan (ChunkAppend) on conditions - Chunks excluded during startup: 1 + -> Append -> Index Scan Backward using _hyper_52_111_chunk_conditions_timec_idx on _hyper_52_111_chunk - Index Cond: (timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(59)), '-infinity'::timestamp with time zone)) + 
Index Cond: (timec >= 'Sun Nov 04 16:00:00 2018 PST'::timestamp with time zone) -> Index Scan Backward using _hyper_52_125_chunk_conditions_timec_idx on _hyper_52_125_chunk - Index Cond: (timec >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(59)), '-infinity'::timestamp with time zone)) -(24 rows) + Index Cond: (timec >= 'Sun Nov 04 16:00:00 2018 PST'::timestamp with time zone) +(20 rows) -- Change the type of cagg ALTER MATERIALIZED VIEW mat_m1 SET (timescaledb.materialized_only=true); @@ -2281,29 +2273,30 @@ SET parallel_setup_cost = 0; SET parallel_tuple_cost = 0; -- Parallel planning EXPLAIN (COSTS OFF, TIMING OFF) SELECT * FROM conditions_daily WHERE time_bucket >= '2023-07-01'; - QUERY PLAN ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- + QUERY PLAN +------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ Merge Append - Sort Key: _materialized_hypertable_63.sum DESC - -> Sort - Sort Key: _materialized_hypertable_63.sum DESC - -> Custom Scan (ChunkAppend) on _materialized_hypertable_63 - Chunks excluded during startup: 0 - -> Index Scan using _hyper_63_183_chunk__materialized_hypertable_63_time_bucket_idx on _hyper_63_183_chunk - Index Cond: ((time_bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(63)), '-infinity'::timestamp with time zone)) AND (time_bucket >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone)) - -> Index Scan using _hyper_63_187_chunk__materialized_hypertable_63_time_bucket_idx on _hyper_63_187_chunk - Index Cond: ((time_bucket < 
COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(63)), '-infinity'::timestamp with time zone)) AND (time_bucket >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone)) - -> Index Scan using _hyper_63_188_chunk__materialized_hypertable_63_time_bucket_idx on _hyper_63_188_chunk - Index Cond: ((time_bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(63)), '-infinity'::timestamp with time zone)) AND (time_bucket >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone)) + Sort Key: _hyper_63_183_chunk.sum DESC + -> Gather Merge + Workers Planned: 2 + -> Sort + Sort Key: _hyper_63_183_chunk.sum DESC + -> Parallel Append + -> Parallel Index Scan using _hyper_63_183_chunk__materialized_hypertable_63_time_bucket_idx on _hyper_63_183_chunk + Index Cond: ((time_bucket < 'Mon Jan 01 16:00:00 2024 PST'::timestamp with time zone) AND (time_bucket >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone)) + -> Parallel Index Scan using _hyper_63_187_chunk__materialized_hypertable_63_time_bucket_idx on _hyper_63_187_chunk + Index Cond: ((time_bucket < 'Mon Jan 01 16:00:00 2024 PST'::timestamp with time zone) AND (time_bucket >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone)) + -> Parallel Index Scan using _hyper_63_188_chunk__materialized_hypertable_63_time_bucket_idx on _hyper_63_188_chunk + Index Cond: ((time_bucket < 'Mon Jan 01 16:00:00 2024 PST'::timestamp with time zone) AND (time_bucket >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone)) -> Sort - Sort Key: (sum(conditions.temperature)) DESC + Sort Key: (sum(_hyper_62_182_chunk.temperature)) DESC -> HashAggregate - Group Key: time_bucket('@ 1 day'::interval, conditions.timec) - -> Result - -> Custom Scan (ChunkAppend) on conditions - Chunks excluded during startup: 26 - -> Index Scan Backward using _hyper_62_182_chunk_conditions_timec_idx on _hyper_62_182_chunk - Index Cond: ((timec >= 
COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(63)), '-infinity'::timestamp with time zone)) AND (timec >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone)) + Group Key: (time_bucket('@ 1 day'::interval, _hyper_62_182_chunk.timec)) + -> Gather + Workers Planned: 1 + -> Result + -> Parallel Index Scan Backward using _hyper_62_182_chunk_conditions_timec_idx on _hyper_62_182_chunk + Index Cond: ((timec >= 'Mon Jan 01 16:00:00 2024 PST'::timestamp with time zone) AND (timec >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone)) Filter: (time_bucket('@ 1 day'::interval, timec) >= 'Sat Jul 01 00:00:00 2023 PDT'::timestamp with time zone) -(22 rows) +(23 rows) diff --git a/tsl/test/expected/jit-13.out b/tsl/test/expected/jit-13.out index 2e75aeaa4f4..22056739fe6 100644 --- a/tsl/test/expected/jit-13.out +++ b/tsl/test/expected/jit-13.out @@ -187,33 +187,25 @@ SELECT * FROM jit_device_summary WHERE metric_spread = 1800 ORDER BY bucket DESC QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Limit - Output: _materialized_hypertable_4.bucket, _materialized_hypertable_4.device_id, _materialized_hypertable_4.metric_avg, _materialized_hypertable_4.metric_spread + Output: _hyper_4_6_chunk.bucket, _hyper_4_6_chunk.device_id, _hyper_4_6_chunk.metric_avg, _hyper_4_6_chunk.metric_spread -> Sort - Output: _materialized_hypertable_4.bucket, _materialized_hypertable_4.device_id, _materialized_hypertable_4.metric_avg, _materialized_hypertable_4.metric_spread - Sort Key: _materialized_hypertable_4.bucket DESC, _materialized_hypertable_4.device_id + Output: _hyper_4_6_chunk.bucket, _hyper_4_6_chunk.device_id, _hyper_4_6_chunk.metric_avg, _hyper_4_6_chunk.metric_spread + Sort Key: _hyper_4_6_chunk.bucket DESC, _hyper_4_6_chunk.device_id -> Append - -> Custom 
Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_4 - Output: _materialized_hypertable_4.bucket, _materialized_hypertable_4.device_id, _materialized_hypertable_4.metric_avg, _materialized_hypertable_4.metric_spread - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_4_6_chunk__materialized_hypertable_4_bucket_idx on _timescaledb_internal._hyper_4_6_chunk - Output: _hyper_4_6_chunk.bucket, _hyper_4_6_chunk.device_id, _hyper_4_6_chunk.metric_avg, _hyper_4_6_chunk.metric_spread - Index Cond: (_hyper_4_6_chunk.bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(4)), '-infinity'::timestamp with time zone)) - Filter: (_hyper_4_6_chunk.metric_spread = '1800'::double precision) + -> Index Scan using _hyper_4_6_chunk__materialized_hypertable_4_bucket_idx on _timescaledb_internal._hyper_4_6_chunk + Output: _hyper_4_6_chunk.bucket, _hyper_4_6_chunk.device_id, _hyper_4_6_chunk.metric_avg, _hyper_4_6_chunk.metric_spread + Index Cond: (_hyper_4_6_chunk.bucket < 'Mon Dec 31 01:00:00 2018 PST'::timestamp with time zone) + Filter: (_hyper_4_6_chunk.metric_spread = '1800'::double precision) -> HashAggregate - Output: (time_bucket('@ 1 hour'::interval, jit_test_contagg.observation_time)), jit_test_contagg.device_id, avg(jit_test_contagg.metric), (max(jit_test_contagg.metric) - min(jit_test_contagg.metric)) - Group Key: time_bucket('@ 1 hour'::interval, jit_test_contagg.observation_time), jit_test_contagg.device_id - Filter: ((max(jit_test_contagg.metric) - min(jit_test_contagg.metric)) = '1800'::double precision) - -> Custom Scan (ChunkAppend) on public.jit_test_contagg - Output: time_bucket('@ 1 hour'::interval, jit_test_contagg.observation_time), jit_test_contagg.device_id, jit_test_contagg.metric - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 4 + Output: (time_bucket('@ 1 hour'::interval, 
_hyper_3_5_chunk.observation_time)), _hyper_3_5_chunk.device_id, avg(_hyper_3_5_chunk.metric), (max(_hyper_3_5_chunk.metric) - min(_hyper_3_5_chunk.metric)) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_3_5_chunk.observation_time), _hyper_3_5_chunk.device_id + Filter: ((max(_hyper_3_5_chunk.metric) - min(_hyper_3_5_chunk.metric)) = '1800'::double precision) + -> Result + Output: time_bucket('@ 1 hour'::interval, _hyper_3_5_chunk.observation_time), _hyper_3_5_chunk.device_id, _hyper_3_5_chunk.metric -> Index Scan using _hyper_3_5_chunk_jit_test_contagg_observation_time_idx on _timescaledb_internal._hyper_3_5_chunk Output: _hyper_3_5_chunk.observation_time, _hyper_3_5_chunk.device_id, _hyper_3_5_chunk.metric - Index Cond: (_hyper_3_5_chunk.observation_time >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(4)), '-infinity'::timestamp with time zone)) -(27 rows) + Index Cond: (_hyper_3_5_chunk.observation_time >= 'Mon Dec 31 01:00:00 2018 PST'::timestamp with time zone) +(19 rows) -- generate the results into two different files \set ECHO errors diff --git a/tsl/test/expected/jit-14.out b/tsl/test/expected/jit-14.out index 3748531bd4a..05cf87671f1 100644 --- a/tsl/test/expected/jit-14.out +++ b/tsl/test/expected/jit-14.out @@ -188,33 +188,25 @@ SELECT * FROM jit_device_summary WHERE metric_spread = 1800 ORDER BY bucket DESC QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Limit - Output: _materialized_hypertable_4.bucket, _materialized_hypertable_4.device_id, _materialized_hypertable_4.metric_avg, _materialized_hypertable_4.metric_spread + Output: _hyper_4_6_chunk.bucket, _hyper_4_6_chunk.device_id, _hyper_4_6_chunk.metric_avg, _hyper_4_6_chunk.metric_spread -> Sort - Output: _materialized_hypertable_4.bucket, 
_materialized_hypertable_4.device_id, _materialized_hypertable_4.metric_avg, _materialized_hypertable_4.metric_spread - Sort Key: _materialized_hypertable_4.bucket DESC, _materialized_hypertable_4.device_id + Output: _hyper_4_6_chunk.bucket, _hyper_4_6_chunk.device_id, _hyper_4_6_chunk.metric_avg, _hyper_4_6_chunk.metric_spread + Sort Key: _hyper_4_6_chunk.bucket DESC, _hyper_4_6_chunk.device_id -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_4 - Output: _materialized_hypertable_4.bucket, _materialized_hypertable_4.device_id, _materialized_hypertable_4.metric_avg, _materialized_hypertable_4.metric_spread - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_4_6_chunk__materialized_hypertable_4_bucket_idx on _timescaledb_internal._hyper_4_6_chunk - Output: _hyper_4_6_chunk.bucket, _hyper_4_6_chunk.device_id, _hyper_4_6_chunk.metric_avg, _hyper_4_6_chunk.metric_spread - Index Cond: (_hyper_4_6_chunk.bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(4)), '-infinity'::timestamp with time zone)) - Filter: (_hyper_4_6_chunk.metric_spread = '1800'::double precision) + -> Index Scan using _hyper_4_6_chunk__materialized_hypertable_4_bucket_idx on _timescaledb_internal._hyper_4_6_chunk + Output: _hyper_4_6_chunk.bucket, _hyper_4_6_chunk.device_id, _hyper_4_6_chunk.metric_avg, _hyper_4_6_chunk.metric_spread + Index Cond: (_hyper_4_6_chunk.bucket < 'Mon Dec 31 01:00:00 2018 PST'::timestamp with time zone) + Filter: (_hyper_4_6_chunk.metric_spread = '1800'::double precision) -> HashAggregate - Output: (time_bucket('@ 1 hour'::interval, jit_test_contagg.observation_time)), jit_test_contagg.device_id, avg(jit_test_contagg.metric), (max(jit_test_contagg.metric) - min(jit_test_contagg.metric)) - Group Key: time_bucket('@ 1 hour'::interval, jit_test_contagg.observation_time), jit_test_contagg.device_id - Filter: 
((max(jit_test_contagg.metric) - min(jit_test_contagg.metric)) = '1800'::double precision) - -> Custom Scan (ChunkAppend) on public.jit_test_contagg - Output: time_bucket('@ 1 hour'::interval, jit_test_contagg.observation_time), jit_test_contagg.device_id, jit_test_contagg.metric - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 4 + Output: (time_bucket('@ 1 hour'::interval, _hyper_3_5_chunk.observation_time)), _hyper_3_5_chunk.device_id, avg(_hyper_3_5_chunk.metric), (max(_hyper_3_5_chunk.metric) - min(_hyper_3_5_chunk.metric)) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_3_5_chunk.observation_time), _hyper_3_5_chunk.device_id + Filter: ((max(_hyper_3_5_chunk.metric) - min(_hyper_3_5_chunk.metric)) = '1800'::double precision) + -> Result + Output: time_bucket('@ 1 hour'::interval, _hyper_3_5_chunk.observation_time), _hyper_3_5_chunk.device_id, _hyper_3_5_chunk.metric -> Index Scan using _hyper_3_5_chunk_jit_test_contagg_observation_time_idx on _timescaledb_internal._hyper_3_5_chunk Output: _hyper_3_5_chunk.observation_time, _hyper_3_5_chunk.device_id, _hyper_3_5_chunk.metric - Index Cond: (_hyper_3_5_chunk.observation_time >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(4)), '-infinity'::timestamp with time zone)) -(27 rows) + Index Cond: (_hyper_3_5_chunk.observation_time >= 'Mon Dec 31 01:00:00 2018 PST'::timestamp with time zone) +(19 rows) -- generate the results into two different files \set ECHO errors diff --git a/tsl/test/expected/jit-15.out b/tsl/test/expected/jit-15.out index 516294a5496..05cf87671f1 100644 --- a/tsl/test/expected/jit-15.out +++ b/tsl/test/expected/jit-15.out @@ -188,35 +188,25 @@ SELECT * FROM jit_device_summary WHERE metric_spread = 1800 ORDER BY bucket DESC QUERY PLAN 
----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Limit - Output: _materialized_hypertable_4.bucket, _materialized_hypertable_4.device_id, _materialized_hypertable_4.metric_avg, _materialized_hypertable_4.metric_spread + Output: _hyper_4_6_chunk.bucket, _hyper_4_6_chunk.device_id, _hyper_4_6_chunk.metric_avg, _hyper_4_6_chunk.metric_spread -> Sort - Output: _materialized_hypertable_4.bucket, _materialized_hypertable_4.device_id, _materialized_hypertable_4.metric_avg, _materialized_hypertable_4.metric_spread - Sort Key: _materialized_hypertable_4.bucket DESC, _materialized_hypertable_4.device_id + Output: _hyper_4_6_chunk.bucket, _hyper_4_6_chunk.device_id, _hyper_4_6_chunk.metric_avg, _hyper_4_6_chunk.metric_spread + Sort Key: _hyper_4_6_chunk.bucket DESC, _hyper_4_6_chunk.device_id -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_4 - Output: _materialized_hypertable_4.bucket, _materialized_hypertable_4.device_id, _materialized_hypertable_4.metric_avg, _materialized_hypertable_4.metric_spread - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 0 - -> Index Scan using _hyper_4_6_chunk__materialized_hypertable_4_bucket_idx on _timescaledb_internal._hyper_4_6_chunk - Output: _hyper_4_6_chunk.bucket, _hyper_4_6_chunk.device_id, _hyper_4_6_chunk.metric_avg, _hyper_4_6_chunk.metric_spread - Index Cond: (_hyper_4_6_chunk.bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(4)), '-infinity'::timestamp with time zone)) - Filter: (_hyper_4_6_chunk.metric_spread = '1800'::double precision) + -> Index Scan using _hyper_4_6_chunk__materialized_hypertable_4_bucket_idx on _timescaledb_internal._hyper_4_6_chunk + Output: _hyper_4_6_chunk.bucket, _hyper_4_6_chunk.device_id, 
_hyper_4_6_chunk.metric_avg, _hyper_4_6_chunk.metric_spread + Index Cond: (_hyper_4_6_chunk.bucket < 'Mon Dec 31 01:00:00 2018 PST'::timestamp with time zone) + Filter: (_hyper_4_6_chunk.metric_spread = '1800'::double precision) -> HashAggregate - Output: (time_bucket('@ 1 hour'::interval, jit_test_contagg.observation_time)), jit_test_contagg.device_id, avg(jit_test_contagg.metric), (max(jit_test_contagg.metric) - min(jit_test_contagg.metric)) - Group Key: time_bucket('@ 1 hour'::interval, jit_test_contagg.observation_time), jit_test_contagg.device_id - Filter: ((max(jit_test_contagg.metric) - min(jit_test_contagg.metric)) = '1800'::double precision) + Output: (time_bucket('@ 1 hour'::interval, _hyper_3_5_chunk.observation_time)), _hyper_3_5_chunk.device_id, avg(_hyper_3_5_chunk.metric), (max(_hyper_3_5_chunk.metric) - min(_hyper_3_5_chunk.metric)) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_3_5_chunk.observation_time), _hyper_3_5_chunk.device_id + Filter: ((max(_hyper_3_5_chunk.metric) - min(_hyper_3_5_chunk.metric)) = '1800'::double precision) -> Result - Output: time_bucket('@ 1 hour'::interval, jit_test_contagg.observation_time), jit_test_contagg.device_id, jit_test_contagg.metric - -> Custom Scan (ChunkAppend) on public.jit_test_contagg - Output: jit_test_contagg.observation_time, jit_test_contagg.device_id, jit_test_contagg.metric - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 4 - -> Index Scan using _hyper_3_5_chunk_jit_test_contagg_observation_time_idx on _timescaledb_internal._hyper_3_5_chunk - Output: _hyper_3_5_chunk.observation_time, _hyper_3_5_chunk.device_id, _hyper_3_5_chunk.metric - Index Cond: (_hyper_3_5_chunk.observation_time >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(4)), '-infinity'::timestamp with time zone)) -(29 rows) + Output: time_bucket('@ 1 hour'::interval, _hyper_3_5_chunk.observation_time), _hyper_3_5_chunk.device_id, _hyper_3_5_chunk.metric + 
-> Index Scan using _hyper_3_5_chunk_jit_test_contagg_observation_time_idx on _timescaledb_internal._hyper_3_5_chunk + Output: _hyper_3_5_chunk.observation_time, _hyper_3_5_chunk.device_id, _hyper_3_5_chunk.metric + Index Cond: (_hyper_3_5_chunk.observation_time >= 'Mon Dec 31 01:00:00 2018 PST'::timestamp with time zone) +(19 rows) -- generate the results into two different files \set ECHO errors diff --git a/tsl/test/expected/jit-16.out b/tsl/test/expected/jit-16.out index 516294a5496..05cf87671f1 100644 --- a/tsl/test/expected/jit-16.out +++ b/tsl/test/expected/jit-16.out @@ -188,35 +188,25 @@ SELECT * FROM jit_device_summary WHERE metric_spread = 1800 ORDER BY bucket DESC QUERY PLAN ----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- Limit - Output: _materialized_hypertable_4.bucket, _materialized_hypertable_4.device_id, _materialized_hypertable_4.metric_avg, _materialized_hypertable_4.metric_spread + Output: _hyper_4_6_chunk.bucket, _hyper_4_6_chunk.device_id, _hyper_4_6_chunk.metric_avg, _hyper_4_6_chunk.metric_spread -> Sort - Output: _materialized_hypertable_4.bucket, _materialized_hypertable_4.device_id, _materialized_hypertable_4.metric_avg, _materialized_hypertable_4.metric_spread - Sort Key: _materialized_hypertable_4.bucket DESC, _materialized_hypertable_4.device_id + Output: _hyper_4_6_chunk.bucket, _hyper_4_6_chunk.device_id, _hyper_4_6_chunk.metric_avg, _hyper_4_6_chunk.metric_spread + Sort Key: _hyper_4_6_chunk.bucket DESC, _hyper_4_6_chunk.device_id -> Append - -> Custom Scan (ChunkAppend) on _timescaledb_internal._materialized_hypertable_4 - Output: _materialized_hypertable_4.bucket, _materialized_hypertable_4.device_id, _materialized_hypertable_4.metric_avg, _materialized_hypertable_4.metric_spread - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during 
startup: 0 - -> Index Scan using _hyper_4_6_chunk__materialized_hypertable_4_bucket_idx on _timescaledb_internal._hyper_4_6_chunk - Output: _hyper_4_6_chunk.bucket, _hyper_4_6_chunk.device_id, _hyper_4_6_chunk.metric_avg, _hyper_4_6_chunk.metric_spread - Index Cond: (_hyper_4_6_chunk.bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(4)), '-infinity'::timestamp with time zone)) - Filter: (_hyper_4_6_chunk.metric_spread = '1800'::double precision) + -> Index Scan using _hyper_4_6_chunk__materialized_hypertable_4_bucket_idx on _timescaledb_internal._hyper_4_6_chunk + Output: _hyper_4_6_chunk.bucket, _hyper_4_6_chunk.device_id, _hyper_4_6_chunk.metric_avg, _hyper_4_6_chunk.metric_spread + Index Cond: (_hyper_4_6_chunk.bucket < 'Mon Dec 31 01:00:00 2018 PST'::timestamp with time zone) + Filter: (_hyper_4_6_chunk.metric_spread = '1800'::double precision) -> HashAggregate - Output: (time_bucket('@ 1 hour'::interval, jit_test_contagg.observation_time)), jit_test_contagg.device_id, avg(jit_test_contagg.metric), (max(jit_test_contagg.metric) - min(jit_test_contagg.metric)) - Group Key: time_bucket('@ 1 hour'::interval, jit_test_contagg.observation_time), jit_test_contagg.device_id - Filter: ((max(jit_test_contagg.metric) - min(jit_test_contagg.metric)) = '1800'::double precision) + Output: (time_bucket('@ 1 hour'::interval, _hyper_3_5_chunk.observation_time)), _hyper_3_5_chunk.device_id, avg(_hyper_3_5_chunk.metric), (max(_hyper_3_5_chunk.metric) - min(_hyper_3_5_chunk.metric)) + Group Key: time_bucket('@ 1 hour'::interval, _hyper_3_5_chunk.observation_time), _hyper_3_5_chunk.device_id + Filter: ((max(_hyper_3_5_chunk.metric) - min(_hyper_3_5_chunk.metric)) = '1800'::double precision) -> Result - Output: time_bucket('@ 1 hour'::interval, jit_test_contagg.observation_time), jit_test_contagg.device_id, jit_test_contagg.metric - -> Custom Scan (ChunkAppend) on public.jit_test_contagg - Output: jit_test_contagg.observation_time, 
jit_test_contagg.device_id, jit_test_contagg.metric - Startup Exclusion: true - Runtime Exclusion: false - Chunks excluded during startup: 4 - -> Index Scan using _hyper_3_5_chunk_jit_test_contagg_observation_time_idx on _timescaledb_internal._hyper_3_5_chunk - Output: _hyper_3_5_chunk.observation_time, _hyper_3_5_chunk.device_id, _hyper_3_5_chunk.metric - Index Cond: (_hyper_3_5_chunk.observation_time >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(4)), '-infinity'::timestamp with time zone)) -(29 rows) + Output: time_bucket('@ 1 hour'::interval, _hyper_3_5_chunk.observation_time), _hyper_3_5_chunk.device_id, _hyper_3_5_chunk.metric + -> Index Scan using _hyper_3_5_chunk_jit_test_contagg_observation_time_idx on _timescaledb_internal._hyper_3_5_chunk + Output: _hyper_3_5_chunk.observation_time, _hyper_3_5_chunk.device_id, _hyper_3_5_chunk.metric + Index Cond: (_hyper_3_5_chunk.observation_time >= 'Mon Dec 31 01:00:00 2018 PST'::timestamp with time zone) +(19 rows) -- generate the results into two different files \set ECHO errors diff --git a/tsl/test/isolation/expected/cagg_watermark_concurrent_update.out b/tsl/test/isolation/expected/cagg_watermark_concurrent_update.out new file mode 100644 index 00000000000..26befebc537 --- /dev/null +++ b/tsl/test/isolation/expected/cagg_watermark_concurrent_update.out @@ -0,0 +1,230 @@ +Parsed test spec with 3 sessions + +starting permutation: s3_lock_invalidation s2_select s1_run_update s2_select s3_release_invalidation s2_select s1_select +step s3_lock_invalidation: + SELECT debug_waitpoint_enable('cagg_watermark_update_internal_before_refresh'); + +debug_waitpoint_enable +---------------------- + +(1 row) + +step s2_select: + EXPLAIN (COSTS OFF) SELECT * FROM cagg; + +QUERY PLAN +----------------------------------------------------------------------------------------------------------------------- +Merge Append + Sort Key: _hyper_X_X_chunk.time_bucket + -> Index Scan Backward using 
_hyper_X_X_chunk__materialized_hypertable_140_time_bucket_i on _hyper_X_X_chunk + Index Cond: (time_bucket < 'Sat Jan 01 16:00:00 2000 PST'::timestamp with time zone) + -> GroupAggregate + Group Key: (time_bucket('@ 4 hours'::interval, temperature."time")) + -> Result + -> Custom Scan (ChunkAppend) on temperature + Order: time_bucket('@ 4 hours'::interval, temperature."time") + -> Index Scan Backward using _hyper_X_X_chunk_temperature_time_idx on _hyper_X_X_chunk + Index Cond: ("time" >= 'Sat Jan 01 16:00:00 2000 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_X_X_chunk_temperature_time_idx on _hyper_X_X_chunk + Index Cond: ("time" >= 'Sat Jan 01 16:00:00 2000 PST'::timestamp with time zone) +(13 rows) + +step s1_run_update: + CALL refresh_continuous_aggregate('cagg', '2020-01-01 00:00:00', '2021-01-01 00:00:00'); + +step s2_select: + EXPLAIN (COSTS OFF) SELECT * FROM cagg; + +QUERY PLAN +----------------------------------------------------------------------------------------------------------------------- +Merge Append + Sort Key: _hyper_X_X_chunk.time_bucket + -> Index Scan Backward using _hyper_X_X_chunk__materialized_hypertable_140_time_bucket_i on _hyper_X_X_chunk + Index Cond: (time_bucket < 'Sat Jan 01 16:00:00 2000 PST'::timestamp with time zone) + -> GroupAggregate + Group Key: (time_bucket('@ 4 hours'::interval, temperature."time")) + -> Result + -> Custom Scan (ChunkAppend) on temperature + Order: time_bucket('@ 4 hours'::interval, temperature."time") + -> Index Scan Backward using _hyper_X_X_chunk_temperature_time_idx on _hyper_X_X_chunk + Index Cond: ("time" >= 'Sat Jan 01 16:00:00 2000 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_X_X_chunk_temperature_time_idx on _hyper_X_X_chunk + Index Cond: ("time" >= 'Sat Jan 01 16:00:00 2000 PST'::timestamp with time zone) +(13 rows) + +step s3_release_invalidation: + SELECT debug_waitpoint_release('cagg_watermark_update_internal_before_refresh'); + 
+debug_waitpoint_release +----------------------- + +(1 row) + +step s1_run_update: <... completed> +step s2_select: + EXPLAIN (COSTS OFF) SELECT * FROM cagg; + +QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------- +Merge Append + Sort Key: _materialized_hypertable_140.time_bucket + -> Custom Scan (ChunkAppend) on _materialized_hypertable_140 + Order: _materialized_hypertable_140.time_bucket + -> Index Scan Backward using _hyper_X_X_chunk__materialized_hypertable_140_time_bucket_i on _hyper_X_X_chunk + Index Cond: (time_bucket < 'Wed Jan 01 16:00:00 2020 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_X_X_chunk__materialized_hypertable_140_time_bucket_i on _hyper_X_X_chunk + Index Cond: (time_bucket < 'Wed Jan 01 16:00:00 2020 PST'::timestamp with time zone) + -> GroupAggregate + Group Key: (time_bucket('@ 4 hours'::interval, "time")) + -> Sort + Sort Key: (time_bucket('@ 4 hours'::interval, "time")) + -> Result + One-Time Filter: false +(14 rows) + +step s1_select: + EXPLAIN (COSTS OFF) SELECT * FROM cagg; + +QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------- +Merge Append + Sort Key: _materialized_hypertable_140.time_bucket + -> Custom Scan (ChunkAppend) on _materialized_hypertable_140 + Order: _materialized_hypertable_140.time_bucket + -> Index Scan Backward using _hyper_X_X_chunk__materialized_hypertable_140_time_bucket_i on _hyper_X_X_chunk + Index Cond: (time_bucket < 'Wed Jan 01 16:00:00 2020 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_X_X_chunk__materialized_hypertable_140_time_bucket_i on _hyper_X_X_chunk + Index Cond: (time_bucket < 'Wed Jan 01 16:00:00 2020 PST'::timestamp with time zone) + -> GroupAggregate + Group Key: (time_bucket('@ 4 hours'::interval, "time")) + -> Sort + Sort Key: (time_bucket('@ 4 hours'::interval, "time")) + -> 
Result + One-Time Filter: false +(14 rows) + + +starting permutation: s3_lock_invalidation s2_select s1_run_update s2_select s3_release_invalidation s3_lock_invalidation s1_insert_more_data s1_run_update s2_select s3_release_invalidation s2_select +step s3_lock_invalidation: + SELECT debug_waitpoint_enable('cagg_watermark_update_internal_before_refresh'); + +debug_waitpoint_enable +---------------------- + +(1 row) + +step s2_select: + EXPLAIN (COSTS OFF) SELECT * FROM cagg; + +QUERY PLAN +----------------------------------------------------------------------------------------------------------------------- +Merge Append + Sort Key: _hyper_X_X_chunk.time_bucket + -> Index Scan Backward using _hyper_X_X_chunk__materialized_hypertable_142_time_bucket_i on _hyper_X_X_chunk + Index Cond: (time_bucket < 'Sat Jan 01 16:00:00 2000 PST'::timestamp with time zone) + -> GroupAggregate + Group Key: (time_bucket('@ 4 hours'::interval, temperature."time")) + -> Result + -> Custom Scan (ChunkAppend) on temperature + Order: time_bucket('@ 4 hours'::interval, temperature."time") + -> Index Scan Backward using _hyper_X_X_chunk_temperature_time_idx on _hyper_X_X_chunk + Index Cond: ("time" >= 'Sat Jan 01 16:00:00 2000 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_X_X_chunk_temperature_time_idx on _hyper_X_X_chunk + Index Cond: ("time" >= 'Sat Jan 01 16:00:00 2000 PST'::timestamp with time zone) +(13 rows) + +step s1_run_update: + CALL refresh_continuous_aggregate('cagg', '2020-01-01 00:00:00', '2021-01-01 00:00:00'); + +step s2_select: + EXPLAIN (COSTS OFF) SELECT * FROM cagg; + +QUERY PLAN +----------------------------------------------------------------------------------------------------------------------- +Merge Append + Sort Key: _hyper_X_X_chunk.time_bucket + -> Index Scan Backward using _hyper_X_X_chunk__materialized_hypertable_142_time_bucket_i on _hyper_X_X_chunk + Index Cond: (time_bucket < 'Sat Jan 01 16:00:00 2000 PST'::timestamp with time zone) + 
-> GroupAggregate + Group Key: (time_bucket('@ 4 hours'::interval, temperature."time")) + -> Result + -> Custom Scan (ChunkAppend) on temperature + Order: time_bucket('@ 4 hours'::interval, temperature."time") + -> Index Scan Backward using _hyper_X_X_chunk_temperature_time_idx on _hyper_X_X_chunk + Index Cond: ("time" >= 'Sat Jan 01 16:00:00 2000 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_X_X_chunk_temperature_time_idx on _hyper_X_X_chunk + Index Cond: ("time" >= 'Sat Jan 01 16:00:00 2000 PST'::timestamp with time zone) +(13 rows) + +step s3_release_invalidation: + SELECT debug_waitpoint_release('cagg_watermark_update_internal_before_refresh'); + +debug_waitpoint_release +----------------------- + +(1 row) + +step s1_run_update: <... completed> +step s3_lock_invalidation: + SELECT debug_waitpoint_enable('cagg_watermark_update_internal_before_refresh'); + +debug_waitpoint_enable +---------------------- + +(1 row) + +step s1_insert_more_data: + INSERT INTO temperature VALUES('2020-01-02 23:59:59+0', 22); + +step s1_run_update: + CALL refresh_continuous_aggregate('cagg', '2020-01-01 00:00:00', '2021-01-01 00:00:00'); + +step s2_select: + EXPLAIN (COSTS OFF) SELECT * FROM cagg; + +QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------- +Merge Append + Sort Key: _materialized_hypertable_142.time_bucket + -> Custom Scan (ChunkAppend) on _materialized_hypertable_142 + Order: _materialized_hypertable_142.time_bucket + -> Index Scan Backward using _hyper_X_X_chunk__materialized_hypertable_142_time_bucket_i on _hyper_X_X_chunk + Index Cond: (time_bucket < 'Wed Jan 01 16:00:00 2020 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_X_X_chunk__materialized_hypertable_142_time_bucket_i on _hyper_X_X_chunk + Index Cond: (time_bucket < 'Wed Jan 01 16:00:00 2020 PST'::timestamp with time zone) + -> GroupAggregate + Group Key: (time_bucket('@ 4 
hours'::interval, _hyper_X_X_chunk."time")) + -> Result + -> Index Scan Backward using _hyper_X_X_chunk_temperature_time_idx on _hyper_X_X_chunk + Index Cond: ("time" >= 'Wed Jan 01 16:00:00 2020 PST'::timestamp with time zone) +(13 rows) + +step s3_release_invalidation: + SELECT debug_waitpoint_release('cagg_watermark_update_internal_before_refresh'); + +debug_waitpoint_release +----------------------- + +(1 row) + +step s1_run_update: <... completed> +step s2_select: + EXPLAIN (COSTS OFF) SELECT * FROM cagg; + +QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------- +Merge Append + Sort Key: _materialized_hypertable_142.time_bucket + -> Custom Scan (ChunkAppend) on _materialized_hypertable_142 + Order: _materialized_hypertable_142.time_bucket + -> Index Scan Backward using _hyper_X_X_chunk__materialized_hypertable_142_time_bucket_i on _hyper_X_X_chunk + Index Cond: (time_bucket < 'Thu Jan 02 16:00:00 2020 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_X_X_chunk__materialized_hypertable_142_time_bucket_i on _hyper_X_X_chunk + Index Cond: (time_bucket < 'Thu Jan 02 16:00:00 2020 PST'::timestamp with time zone) + -> GroupAggregate + Group Key: (time_bucket('@ 4 hours'::interval, _hyper_X_X_chunk."time")) + -> Result + -> Index Scan Backward using _hyper_X_X_chunk_temperature_time_idx on _hyper_X_X_chunk + Index Cond: ("time" >= 'Thu Jan 02 16:00:00 2020 PST'::timestamp with time zone) +(13 rows) + diff --git a/tsl/test/isolation/expected/cagg_watermark_concurrent_update_1.out b/tsl/test/isolation/expected/cagg_watermark_concurrent_update_1.out new file mode 100644 index 00000000000..7dfeb76c726 --- /dev/null +++ b/tsl/test/isolation/expected/cagg_watermark_concurrent_update_1.out @@ -0,0 +1,226 @@ +Parsed test spec with 3 sessions + +starting permutation: s3_lock_invalidation s2_select s1_run_update s2_select s3_release_invalidation s2_select s1_select +step 
s3_lock_invalidation: + SELECT debug_waitpoint_enable('cagg_watermark_update_internal_before_refresh'); + +debug_waitpoint_enable +---------------------- + +(1 row) + +step s2_select: + EXPLAIN (COSTS OFF) SELECT * FROM cagg; + +QUERY PLAN +----------------------------------------------------------------------------------------------------------------------- +Merge Append + Sort Key: _hyper_X_X_chunk.time_bucket + -> Index Scan Backward using _hyper_X_X_chunk__materialized_hypertable_140_time_bucket_i on _hyper_X_X_chunk + Index Cond: (time_bucket < 'Sat Jan 01 16:00:00 2000 PST'::timestamp with time zone) + -> GroupAggregate + Group Key: (time_bucket('@ 4 hours'::interval, temperature."time")) + -> Custom Scan (ChunkAppend) on temperature + Order: time_bucket('@ 4 hours'::interval, temperature."time") + -> Index Scan Backward using _hyper_X_X_chunk_temperature_time_idx on _hyper_X_X_chunk + Index Cond: ("time" >= 'Sat Jan 01 16:00:00 2000 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_X_X_chunk_temperature_time_idx on _hyper_X_X_chunk + Index Cond: ("time" >= 'Sat Jan 01 16:00:00 2000 PST'::timestamp with time zone) +(12 rows) + +step s1_run_update: + CALL refresh_continuous_aggregate('cagg', '2020-01-01 00:00:00', '2021-01-01 00:00:00'); + +step s2_select: + EXPLAIN (COSTS OFF) SELECT * FROM cagg; + +QUERY PLAN +----------------------------------------------------------------------------------------------------------------------- +Merge Append + Sort Key: _hyper_X_X_chunk.time_bucket + -> Index Scan Backward using _hyper_X_X_chunk__materialized_hypertable_140_time_bucket_i on _hyper_X_X_chunk + Index Cond: (time_bucket < 'Sat Jan 01 16:00:00 2000 PST'::timestamp with time zone) + -> GroupAggregate + Group Key: (time_bucket('@ 4 hours'::interval, temperature."time")) + -> Custom Scan (ChunkAppend) on temperature + Order: time_bucket('@ 4 hours'::interval, temperature."time") + -> Index Scan Backward using 
_hyper_X_X_chunk_temperature_time_idx on _hyper_X_X_chunk + Index Cond: ("time" >= 'Sat Jan 01 16:00:00 2000 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_X_X_chunk_temperature_time_idx on _hyper_X_X_chunk + Index Cond: ("time" >= 'Sat Jan 01 16:00:00 2000 PST'::timestamp with time zone) +(12 rows) + +step s3_release_invalidation: + SELECT debug_waitpoint_release('cagg_watermark_update_internal_before_refresh'); + +debug_waitpoint_release +----------------------- + +(1 row) + +step s1_run_update: <... completed> +step s2_select: + EXPLAIN (COSTS OFF) SELECT * FROM cagg; + +QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------- +Merge Append + Sort Key: _materialized_hypertable_140.time_bucket + -> Custom Scan (ChunkAppend) on _materialized_hypertable_140 + Order: _materialized_hypertable_140.time_bucket + -> Index Scan Backward using _hyper_X_X_chunk__materialized_hypertable_140_time_bucket_i on _hyper_X_X_chunk + Index Cond: (time_bucket < 'Wed Jan 01 16:00:00 2020 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_X_X_chunk__materialized_hypertable_140_time_bucket_i on _hyper_X_X_chunk + Index Cond: (time_bucket < 'Wed Jan 01 16:00:00 2020 PST'::timestamp with time zone) + -> GroupAggregate + Group Key: (time_bucket('@ 4 hours'::interval, "time")) + -> Sort + Sort Key: (time_bucket('@ 4 hours'::interval, "time")) + -> Result + One-Time Filter: false +(14 rows) + +step s1_select: + EXPLAIN (COSTS OFF) SELECT * FROM cagg; + +QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------- +Merge Append + Sort Key: _materialized_hypertable_140.time_bucket + -> Custom Scan (ChunkAppend) on _materialized_hypertable_140 + Order: _materialized_hypertable_140.time_bucket + -> Index Scan Backward using _hyper_X_X_chunk__materialized_hypertable_140_time_bucket_i on 
_hyper_X_X_chunk + Index Cond: (time_bucket < 'Wed Jan 01 16:00:00 2020 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_X_X_chunk__materialized_hypertable_140_time_bucket_i on _hyper_X_X_chunk + Index Cond: (time_bucket < 'Wed Jan 01 16:00:00 2020 PST'::timestamp with time zone) + -> GroupAggregate + Group Key: (time_bucket('@ 4 hours'::interval, "time")) + -> Sort + Sort Key: (time_bucket('@ 4 hours'::interval, "time")) + -> Result + One-Time Filter: false +(14 rows) + + +starting permutation: s3_lock_invalidation s2_select s1_run_update s2_select s3_release_invalidation s3_lock_invalidation s1_insert_more_data s1_run_update s2_select s3_release_invalidation s2_select +step s3_lock_invalidation: + SELECT debug_waitpoint_enable('cagg_watermark_update_internal_before_refresh'); + +debug_waitpoint_enable +---------------------- + +(1 row) + +step s2_select: + EXPLAIN (COSTS OFF) SELECT * FROM cagg; + +QUERY PLAN +----------------------------------------------------------------------------------------------------------------------- +Merge Append + Sort Key: _hyper_X_X_chunk.time_bucket + -> Index Scan Backward using _hyper_X_X_chunk__materialized_hypertable_142_time_bucket_i on _hyper_X_X_chunk + Index Cond: (time_bucket < 'Sat Jan 01 16:00:00 2000 PST'::timestamp with time zone) + -> GroupAggregate + Group Key: (time_bucket('@ 4 hours'::interval, temperature."time")) + -> Custom Scan (ChunkAppend) on temperature + Order: time_bucket('@ 4 hours'::interval, temperature."time") + -> Index Scan Backward using _hyper_X_X_chunk_temperature_time_idx on _hyper_X_X_chunk + Index Cond: ("time" >= 'Sat Jan 01 16:00:00 2000 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_X_X_chunk_temperature_time_idx on _hyper_X_X_chunk + Index Cond: ("time" >= 'Sat Jan 01 16:00:00 2000 PST'::timestamp with time zone) +(12 rows) + +step s1_run_update: + CALL refresh_continuous_aggregate('cagg', '2020-01-01 00:00:00', '2021-01-01 00:00:00'); + +step 
s2_select: + EXPLAIN (COSTS OFF) SELECT * FROM cagg; + +QUERY PLAN +----------------------------------------------------------------------------------------------------------------------- +Merge Append + Sort Key: _hyper_X_X_chunk.time_bucket + -> Index Scan Backward using _hyper_X_X_chunk__materialized_hypertable_142_time_bucket_i on _hyper_X_X_chunk + Index Cond: (time_bucket < 'Sat Jan 01 16:00:00 2000 PST'::timestamp with time zone) + -> GroupAggregate + Group Key: (time_bucket('@ 4 hours'::interval, temperature."time")) + -> Custom Scan (ChunkAppend) on temperature + Order: time_bucket('@ 4 hours'::interval, temperature."time") + -> Index Scan Backward using _hyper_X_X_chunk_temperature_time_idx on _hyper_X_X_chunk + Index Cond: ("time" >= 'Sat Jan 01 16:00:00 2000 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_X_X_chunk_temperature_time_idx on _hyper_X_X_chunk + Index Cond: ("time" >= 'Sat Jan 01 16:00:00 2000 PST'::timestamp with time zone) +(12 rows) + +step s3_release_invalidation: + SELECT debug_waitpoint_release('cagg_watermark_update_internal_before_refresh'); + +debug_waitpoint_release +----------------------- + +(1 row) + +step s1_run_update: <... 
completed> +step s3_lock_invalidation: + SELECT debug_waitpoint_enable('cagg_watermark_update_internal_before_refresh'); + +debug_waitpoint_enable +---------------------- + +(1 row) + +step s1_insert_more_data: + INSERT INTO temperature VALUES('2020-01-02 23:59:59+0', 22); + +step s1_run_update: + CALL refresh_continuous_aggregate('cagg', '2020-01-01 00:00:00', '2021-01-01 00:00:00'); + +step s2_select: + EXPLAIN (COSTS OFF) SELECT * FROM cagg; + +QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------- +Merge Append + Sort Key: _materialized_hypertable_142.time_bucket + -> Custom Scan (ChunkAppend) on _materialized_hypertable_142 + Order: _materialized_hypertable_142.time_bucket + -> Index Scan Backward using _hyper_X_X_chunk__materialized_hypertable_142_time_bucket_i on _hyper_X_X_chunk + Index Cond: (time_bucket < 'Wed Jan 01 16:00:00 2020 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_X_X_chunk__materialized_hypertable_142_time_bucket_i on _hyper_X_X_chunk + Index Cond: (time_bucket < 'Wed Jan 01 16:00:00 2020 PST'::timestamp with time zone) + -> GroupAggregate + Group Key: (time_bucket('@ 4 hours'::interval, _hyper_X_X_chunk."time")) + -> Result + -> Index Scan Backward using _hyper_X_X_chunk_temperature_time_idx on _hyper_X_X_chunk + Index Cond: ("time" >= 'Wed Jan 01 16:00:00 2020 PST'::timestamp with time zone) +(13 rows) + +step s3_release_invalidation: + SELECT debug_waitpoint_release('cagg_watermark_update_internal_before_refresh'); + +debug_waitpoint_release +----------------------- + +(1 row) + +step s1_run_update: <... 
completed> +step s2_select: + EXPLAIN (COSTS OFF) SELECT * FROM cagg; + +QUERY PLAN +----------------------------------------------------------------------------------------------------------------------------- +Merge Append + Sort Key: _materialized_hypertable_142.time_bucket + -> Custom Scan (ChunkAppend) on _materialized_hypertable_142 + Order: _materialized_hypertable_142.time_bucket + -> Index Scan Backward using _hyper_X_X_chunk__materialized_hypertable_142_time_bucket_i on _hyper_X_X_chunk + Index Cond: (time_bucket < 'Thu Jan 02 16:00:00 2020 PST'::timestamp with time zone) + -> Index Scan Backward using _hyper_X_X_chunk__materialized_hypertable_142_time_bucket_i on _hyper_X_X_chunk + Index Cond: (time_bucket < 'Thu Jan 02 16:00:00 2020 PST'::timestamp with time zone) + -> GroupAggregate + Group Key: (time_bucket('@ 4 hours'::interval, _hyper_X_X_chunk."time")) + -> Result + -> Index Scan Backward using _hyper_X_X_chunk_temperature_time_idx on _hyper_X_X_chunk + Index Cond: ("time" >= 'Thu Jan 02 16:00:00 2020 PST'::timestamp with time zone) +(13 rows) + diff --git a/tsl/test/isolation/specs/CMakeLists.txt b/tsl/test/isolation/specs/CMakeLists.txt index 50c33e24ddb..5fe05ed60cb 100644 --- a/tsl/test/isolation/specs/CMakeLists.txt +++ b/tsl/test/isolation/specs/CMakeLists.txt @@ -28,6 +28,7 @@ if(CMAKE_BUILD_TYPE MATCHES Debug) APPEND TEST_FILES cagg_concurrent_invalidation.spec + cagg_watermark_concurrent_update.spec compression_chunk_race.spec compression_freeze.spec compression_merge_race.spec diff --git a/tsl/test/isolation/specs/cagg_watermark_concurrent_update.spec b/tsl/test/isolation/specs/cagg_watermark_concurrent_update.spec new file mode 100644 index 00000000000..7a845460d63 --- /dev/null +++ b/tsl/test/isolation/specs/cagg_watermark_concurrent_update.spec @@ -0,0 +1,85 @@ +# This file and its contents are licensed under the Timescale License. 
+# Please see the included NOTICE for copyright information and +# LICENSE-TIMESCALE for a copy of the license. + + +# +# Test concurrent CAgg watermark updates and reads. This isolation test +# checks that queries still see the old watermark while a concurrent refresh is updating it, +# and pick up the new watermark (via replanning) once the update has been committed. +# +setup +{ + CREATE TABLE temperature ( + time timestamptz NOT NULL, + value float + ); + + SELECT create_hypertable('temperature', 'time'); + + INSERT INTO temperature + SELECT time, ceil(random() * 100)::int + FROM generate_series('2000-01-01 0:00:00+0'::timestamptz, + '2000-01-01 23:59:59+0','1m') time; + + CREATE MATERIALIZED VIEW cagg + WITH (timescaledb.continuous, timescaledb.materialized_only = false) AS + SELECT time_bucket('4 hour', time), avg(value) + FROM temperature + GROUP BY 1 ORDER BY 1 + WITH NO DATA; +} + +# Refresh CAgg in separate transaction +setup +{ + CALL refresh_continuous_aggregate('cagg', NULL, NULL); +} + +# Add new data to hypertable. This time in the year 2020 instead of 2000 as we +# did for the setup of the CAgg.
+setup +{ + INSERT INTO temperature + SELECT time, ceil(random() * 100)::int + FROM generate_series('2020-01-01 0:00:00+0'::timestamptz, + '2020-01-01 23:59:59+0','1m') time; +} + +teardown { + DROP TABLE temperature CASCADE; +} + +session "S1" +step "s1_run_update" { + CALL refresh_continuous_aggregate('cagg', '2020-01-01 00:00:00', '2021-01-01 00:00:00'); +} + +step "s1_insert_more_data" +{ + INSERT INTO temperature VALUES('2020-01-02 23:59:59+0', 22); +} + +step "s1_select" { + EXPLAIN (COSTS OFF) SELECT * FROM cagg; +} + +session "S2" +step "s2_select" { + EXPLAIN (COSTS OFF) SELECT * FROM cagg; +} + +session "S3" +step "s3_lock_invalidation" { + SELECT debug_waitpoint_enable('cagg_watermark_update_internal_before_refresh'); +} + +step "s3_release_invalidation" { + SELECT debug_waitpoint_release('cagg_watermark_update_internal_before_refresh'); +} + +# Updated watermark (01-01-2020) should be seen by s2_select after s3_release_invalidation completes +permutation "s3_lock_invalidation" "s2_select" "s1_run_update" "s2_select" "s3_release_invalidation" "s2_select" "s1_select" + +# Updated watermark (02-01-2020) should be seen by s2_select after second s3_release_invalidation completes +permutation "s3_lock_invalidation" "s2_select" "s1_run_update" "s2_select" "s3_release_invalidation" "s3_lock_invalidation"("s1_run_update") "s1_insert_more_data" "s1_run_update" "s2_select" "s3_release_invalidation" "s2_select" diff --git a/tsl/test/sql/.gitignore b/tsl/test/sql/.gitignore index 2c2657e78fb..987d9c075eb 100644 --- a/tsl/test/sql/.gitignore +++ b/tsl/test/sql/.gitignore @@ -4,10 +4,10 @@ /cagg_ddl-*.sql /cagg_errors_deprecated-*.sql /cagg_permissions-*.sql -/cagg_query-*.sql /cagg_repair-*.sql /cagg_union_view-*.sql /cagg_usage-*.sql +/cagg_watermark-*.sql /compression_bgw-*.sql /compression_errors-*.sql /compression_sorted_merge-*.sql diff --git a/tsl/test/sql/CMakeLists.txt b/tsl/test/sql/CMakeLists.txt index aa7ca7f527e..92a76bd783d 100644 --- 
a/tsl/test/sql/CMakeLists.txt +++ b/tsl/test/sql/CMakeLists.txt @@ -10,9 +10,9 @@ set(TEST_FILES cagg_invalidation.sql cagg_permissions.sql cagg_policy.sql + cagg_query.sql cagg_refresh.sql cagg_utils.sql - cagg_watermark.sql compress_default.sql compressed_detoaster.sql compressed_collation.sql @@ -124,6 +124,7 @@ set(SOLO_TESTS set(TEST_TEMPLATES bgw_custom.sql.in + cagg_watermark.sql.in compression_bgw.sql.in compression_sorted_merge.sql.in cagg_union_view.sql.in @@ -144,7 +145,6 @@ if(CMAKE_BUILD_TYPE MATCHES Debug) TEST_TEMPLATES cagg_bgw.sql.in cagg_ddl.sql.in - cagg_query.sql.in cagg_repair.sql.in cagg_usage.sql.in compression_errors.sql.in diff --git a/tsl/test/sql/cagg_query.sql.in b/tsl/test/sql/cagg_query.sql similarity index 100% rename from tsl/test/sql/cagg_query.sql.in rename to tsl/test/sql/cagg_query.sql diff --git a/tsl/test/sql/cagg_watermark.sql b/tsl/test/sql/cagg_watermark.sql deleted file mode 100644 index 88125de72cd..00000000000 --- a/tsl/test/sql/cagg_watermark.sql +++ /dev/null @@ -1,201 +0,0 @@ --- This file and its contents are licensed under the Timescale License. --- Please see the included NOTICE for copyright information and --- LICENSE-TIMESCALE for a copy of the license. 
- -CREATE TABLE continuous_agg_test(time int, data int); -select create_hypertable('continuous_agg_test', 'time', chunk_time_interval=> 10); -CREATE OR REPLACE FUNCTION integer_now_test1() returns int LANGUAGE SQL STABLE as $$ SELECT coalesce(max(time), 0) FROM continuous_agg_test $$; -SELECT set_integer_now_func('continuous_agg_test', 'integer_now_test1'); - --- watermark tabels start out empty -SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; -SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; - --- inserting into a table that does not have continuous_agg_insert_trigger doesn't change the watermark -INSERT INTO continuous_agg_test VALUES (10, 1), (11, 2), (21, 3), (22, 4); - -SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; -SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; - -\c :TEST_DBNAME :ROLE_SUPERUSER -CREATE TABLE continuous_agg_test_mat(time int); -select create_hypertable('continuous_agg_test_mat', 'time', chunk_time_interval=> 10); -INSERT INTO _timescaledb_catalog.continuous_agg VALUES (2, 1, NULL, '','','','',0,'',''); -\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER - --- create the trigger -CREATE TRIGGER continuous_agg_insert_trigger - AFTER INSERT ON continuous_agg_test - FOR EACH ROW EXECUTE FUNCTION _timescaledb_functions.continuous_agg_invalidation_trigger(1); - --- inserting into the table still doesn't change the watermark since there's no --- continuous_aggs_invalidation_threshold. We treat that case as a invalidation_watermark of --- BIG_INT_MIN, since the first run of the aggregation will need to scan the --- entire table anyway. 
-INSERT INTO continuous_agg_test VALUES (10, 1), (11, 2), (21, 3), (22, 4); - -SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; -SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; - --- set the continuous_aggs_invalidation_threshold to 15, any insertions below that value need an invalidation -\c :TEST_DBNAME :ROLE_SUPERUSER -INSERT INTO _timescaledb_catalog.continuous_aggs_invalidation_threshold VALUES (1, 15); -\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER - -INSERT INTO continuous_agg_test VALUES (10, 1), (11, 2), (21, 3), (22, 4); - -SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; -SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; - --- INSERTs only above the continuous_aggs_invalidation_threshold won't change the continuous_aggs_hypertable_invalidation_log -INSERT INTO continuous_agg_test VALUES (21, 3), (22, 4); - -SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; -SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; - --- INSERTs only below the continuous_aggs_invalidation_threshold will change the continuous_aggs_hypertable_invalidation_log -INSERT INTO continuous_agg_test VALUES (10, 1), (11, 2); - -SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; -SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; - --- test INSERTing other values -INSERT INTO continuous_agg_test VALUES (1, 7), (12, 6), (24, 5), (51, 4); - -SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; -SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; - --- INSERT after dropping a COLUMN -ALTER TABLE continuous_agg_test DROP COLUMN data; - -INSERT INTO continuous_agg_test VALUES (-1), (-2), (-3), (-4); - -SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; -SELECT * from 
_timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; - -INSERT INTO continuous_agg_test VALUES (100); - -SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; -SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; - --- INSERT after adding a COLUMN -ALTER TABLE continuous_agg_test ADD COLUMN d BOOLEAN; - -INSERT INTO continuous_agg_test VALUES (-6, true), (-7, false), (-3, true), (-4, false); - -SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; -SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; - -INSERT INTO continuous_agg_test VALUES (120, false), (200, true); - -SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; -SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; - -\c :TEST_DBNAME :ROLE_SUPERUSER -DELETE FROM _timescaledb_catalog.continuous_agg where mat_hypertable_id = 2; -DELETE FROM _timescaledb_config.bgw_job WHERE id = 2; -\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER - -DROP TABLE continuous_agg_test CASCADE; -\c :TEST_DBNAME :ROLE_SUPERUSER -TRUNCATE _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; -TRUNCATE _timescaledb_catalog.continuous_aggs_invalidation_threshold; -\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER - --- CREATE VIEW creates the invalidation trigger correctly -CREATE TABLE ca_inval_test(time int); -SELECT create_hypertable('ca_inval_test', 'time', chunk_time_interval=> 10); -CREATE OR REPLACE FUNCTION integer_now_test2() returns int LANGUAGE SQL STABLE as $$ SELECT coalesce(max(time), 0) FROM ca_inval_test $$; -SELECT set_integer_now_func('ca_inval_test', 'integer_now_test2'); - -CREATE MATERIALIZED VIEW cit_view - WITH (timescaledb.continuous, timescaledb.materialized_only=false) - AS SELECT time_bucket('5', time), COUNT(time) - FROM ca_inval_test - GROUP BY 1 WITH NO DATA; - -INSERT INTO ca_inval_test SELECT generate_series(0, 5); - -SELECT * FROM 
_timescaledb_catalog.continuous_aggs_invalidation_threshold; -SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; - -\c :TEST_DBNAME :ROLE_SUPERUSER -UPDATE _timescaledb_catalog.continuous_aggs_invalidation_threshold -SET watermark = 15 -WHERE hypertable_id = 3; -\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER - -INSERT INTO ca_inval_test SELECT generate_series(5, 15); - -SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; -SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; - -INSERT INTO ca_inval_test SELECT generate_series(16, 20); - -SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; -SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; - -\c :TEST_DBNAME :ROLE_SUPERUSER -TRUNCATE _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; -\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER - --- updates below the threshold update both the old and new values -UPDATE ca_inval_test SET time = 5 WHERE time = 6; -UPDATE ca_inval_test SET time = 7 WHERE time = 5; -UPDATE ca_inval_test SET time = 17 WHERE time = 14; -UPDATE ca_inval_test SET time = 12 WHERE time = 16; - --- updates purely above the threshold are not logged -UPDATE ca_inval_test SET time = 19 WHERE time = 18; -UPDATE ca_inval_test SET time = 17 WHERE time = 19; - -SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; -SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; - -DROP TABLE ca_inval_test CASCADE; -\c :TEST_DBNAME :ROLE_SUPERUSER -TRUNCATE _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; -TRUNCATE _timescaledb_catalog.continuous_aggs_invalidation_threshold; -\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER - --- invalidation trigger is created correctly on chunks that existed before --- the view was created -CREATE TABLE ts_continuous_test(time INTEGER, location INTEGER); - SELECT create_hypertable('ts_continuous_test', 
'time', chunk_time_interval => 10); -CREATE OR REPLACE FUNCTION integer_now_test3() returns int LANGUAGE SQL STABLE as $$ SELECT coalesce(max(time), 0) FROM ts_continuous_test $$; -SELECT set_integer_now_func('ts_continuous_test', 'integer_now_test3'); -INSERT INTO ts_continuous_test SELECT i, i FROM - (SELECT generate_series(0, 29) AS i) AS i; -CREATE MATERIALIZED VIEW continuous_view - WITH (timescaledb.continuous, timescaledb.materialized_only=false) - AS SELECT time_bucket('5', time), COUNT(location) - FROM ts_continuous_test - GROUP BY 1 WITH NO DATA; - -SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; -SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; - -\c :TEST_DBNAME :ROLE_SUPERUSER -UPDATE _timescaledb_catalog.continuous_aggs_invalidation_threshold -SET watermark = 2 -WHERE hypertable_id = 5; -\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER - -INSERT INTO ts_continuous_test VALUES (1, 1); - -SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; -SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; - --- aborts don't get written -BEGIN; - INSERT INTO ts_continuous_test VALUES (-20, -20); -ABORT; - -SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; -SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; - -DROP TABLE ts_continuous_test CASCADE; -\c :TEST_DBNAME :ROLE_SUPERUSER -TRUNCATE _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; -TRUNCATE _timescaledb_catalog.continuous_aggs_invalidation_threshold; -\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER diff --git a/tsl/test/sql/cagg_watermark.sql.in b/tsl/test/sql/cagg_watermark.sql.in new file mode 100644 index 00000000000..d60a9fb2254 --- /dev/null +++ b/tsl/test/sql/cagg_watermark.sql.in @@ -0,0 +1,456 @@ +-- This file and its contents are licensed under the Timescale License. 
+-- Please see the included NOTICE for copyright information and +-- LICENSE-TIMESCALE for a copy of the license. + +\set EXPLAIN_ANALYZE 'EXPLAIN (analyze,costs off,timing off,summary off)' + +CREATE TABLE continuous_agg_test(time int, data int); +select create_hypertable('continuous_agg_test', 'time', chunk_time_interval=> 10); +CREATE OR REPLACE FUNCTION integer_now_test1() returns int LANGUAGE SQL STABLE as $$ SELECT coalesce(max(time), 0) FROM continuous_agg_test $$; +SELECT set_integer_now_func('continuous_agg_test', 'integer_now_test1'); + +-- watermark tabels start out empty +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + +-- inserting into a table that does not have continuous_agg_insert_trigger doesn't change the watermark +INSERT INTO continuous_agg_test VALUES (10, 1), (11, 2), (21, 3), (22, 4); + +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + +\c :TEST_DBNAME :ROLE_SUPERUSER +CREATE TABLE continuous_agg_test_mat(time int); +select create_hypertable('continuous_agg_test_mat', 'time', chunk_time_interval=> 10); +INSERT INTO _timescaledb_catalog.continuous_agg VALUES (2, 1, NULL, '','','','',0,'',''); +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER + +-- create the trigger +CREATE TRIGGER continuous_agg_insert_trigger + AFTER INSERT ON continuous_agg_test + FOR EACH ROW EXECUTE FUNCTION _timescaledb_functions.continuous_agg_invalidation_trigger(1); + +-- inserting into the table still doesn't change the watermark since there's no +-- continuous_aggs_invalidation_threshold. We treat that case as a invalidation_watermark of +-- BIG_INT_MIN, since the first run of the aggregation will need to scan the +-- entire table anyway. 
+INSERT INTO continuous_agg_test VALUES (10, 1), (11, 2), (21, 3), (22, 4); + +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + +-- set the continuous_aggs_invalidation_threshold to 15, any insertions below that value need an invalidation +\c :TEST_DBNAME :ROLE_SUPERUSER +INSERT INTO _timescaledb_catalog.continuous_aggs_invalidation_threshold VALUES (1, 15); +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER + +INSERT INTO continuous_agg_test VALUES (10, 1), (11, 2), (21, 3), (22, 4); + +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + +-- INSERTs only above the continuous_aggs_invalidation_threshold won't change the continuous_aggs_hypertable_invalidation_log +INSERT INTO continuous_agg_test VALUES (21, 3), (22, 4); + +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + +-- INSERTs only below the continuous_aggs_invalidation_threshold will change the continuous_aggs_hypertable_invalidation_log +INSERT INTO continuous_agg_test VALUES (10, 1), (11, 2); + +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + +-- test INSERTing other values +INSERT INTO continuous_agg_test VALUES (1, 7), (12, 6), (24, 5), (51, 4); + +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + +-- INSERT after dropping a COLUMN +ALTER TABLE continuous_agg_test DROP COLUMN data; + +INSERT INTO continuous_agg_test VALUES (-1), (-2), (-3), (-4); + +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; +SELECT * from 
_timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + +INSERT INTO continuous_agg_test VALUES (100); + +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + +-- INSERT after adding a COLUMN +ALTER TABLE continuous_agg_test ADD COLUMN d BOOLEAN; + +INSERT INTO continuous_agg_test VALUES (-6, true), (-7, false), (-3, true), (-4, false); + +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + +INSERT INTO continuous_agg_test VALUES (120, false), (200, true); + +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + +\c :TEST_DBNAME :ROLE_SUPERUSER +DELETE FROM _timescaledb_catalog.continuous_agg where mat_hypertable_id = 2; +DELETE FROM _timescaledb_config.bgw_job WHERE id = 2; +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER + +DROP TABLE continuous_agg_test CASCADE; +\c :TEST_DBNAME :ROLE_SUPERUSER +TRUNCATE _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; +TRUNCATE _timescaledb_catalog.continuous_aggs_invalidation_threshold; +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER + +-- CREATE VIEW creates the invalidation trigger correctly +CREATE TABLE ca_inval_test(time int); +SELECT create_hypertable('ca_inval_test', 'time', chunk_time_interval=> 10); +CREATE OR REPLACE FUNCTION integer_now_test2() returns int LANGUAGE SQL STABLE as $$ SELECT coalesce(max(time), 0) FROM ca_inval_test $$; +SELECT set_integer_now_func('ca_inval_test', 'integer_now_test2'); + +CREATE MATERIALIZED VIEW cit_view + WITH (timescaledb.continuous, timescaledb.materialized_only=false) + AS SELECT time_bucket('5', time), COUNT(time) + FROM ca_inval_test + GROUP BY 1 WITH NO DATA; + +INSERT INTO ca_inval_test SELECT generate_series(0, 5); + +SELECT * FROM 
_timescaledb_catalog.continuous_aggs_invalidation_threshold; +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + +\c :TEST_DBNAME :ROLE_SUPERUSER +UPDATE _timescaledb_catalog.continuous_aggs_invalidation_threshold +SET watermark = 15 +WHERE hypertable_id = 3; +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER + +INSERT INTO ca_inval_test SELECT generate_series(5, 15); + +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + +INSERT INTO ca_inval_test SELECT generate_series(16, 20); + +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + +\c :TEST_DBNAME :ROLE_SUPERUSER +TRUNCATE _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER + +-- updates below the threshold update both the old and new values +UPDATE ca_inval_test SET time = 5 WHERE time = 6; +UPDATE ca_inval_test SET time = 7 WHERE time = 5; +UPDATE ca_inval_test SET time = 17 WHERE time = 14; +UPDATE ca_inval_test SET time = 12 WHERE time = 16; + +-- updates purely above the threshold are not logged +UPDATE ca_inval_test SET time = 19 WHERE time = 18; +UPDATE ca_inval_test SET time = 17 WHERE time = 19; + +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + +DROP TABLE ca_inval_test CASCADE; +\c :TEST_DBNAME :ROLE_SUPERUSER +TRUNCATE _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; +TRUNCATE _timescaledb_catalog.continuous_aggs_invalidation_threshold; +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER + +-- invalidation trigger is created correctly on chunks that existed before +-- the view was created +CREATE TABLE ts_continuous_test(time INTEGER, location INTEGER); + SELECT create_hypertable('ts_continuous_test', 
'time', chunk_time_interval => 10); +CREATE OR REPLACE FUNCTION integer_now_test3() returns int LANGUAGE SQL STABLE as $$ SELECT coalesce(max(time), 0) FROM ts_continuous_test $$; +SELECT set_integer_now_func('ts_continuous_test', 'integer_now_test3'); +INSERT INTO ts_continuous_test SELECT i, i FROM + (SELECT generate_series(0, 29) AS i) AS i; +CREATE MATERIALIZED VIEW continuous_view + WITH (timescaledb.continuous, timescaledb.materialized_only=false) + AS SELECT time_bucket('5', time), COUNT(location) + FROM ts_continuous_test + GROUP BY 1 WITH NO DATA; + +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + +\c :TEST_DBNAME :ROLE_SUPERUSER +UPDATE _timescaledb_catalog.continuous_aggs_invalidation_threshold +SET watermark = 2 +WHERE hypertable_id = 5; +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER + +INSERT INTO ts_continuous_test VALUES (1, 1); + +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + +-- aborts don't get written +BEGIN; + INSERT INTO ts_continuous_test VALUES (-20, -20); +ABORT; + +SELECT * FROM _timescaledb_catalog.continuous_aggs_invalidation_threshold; +SELECT * from _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; + +DROP TABLE ts_continuous_test CASCADE; + +---- +-- Test watermark invalidation and chunk exclusion with prepared and ad-hoc queries +---- +CREATE TABLE chunks(time timestamptz, device int, value float); +SELECT FROM create_hypertable('chunks','time',chunk_time_interval:='1d'::interval); + +CREATE MATERIALIZED VIEW chunks_1h WITH (timescaledb.continuous) + AS SELECT time_bucket('1 hour', time) AS bucket, device, max(value) AS max FROM chunks GROUP BY 1, 2; + +ALTER MATERIALIZED VIEW chunks_1h set (timescaledb.materialized_only = false); + +-- Get id fg the materialization hypertable +SELECT id AS "MAT_HT_ID_1H" 
FROM _timescaledb_catalog.hypertable + WHERE table_name=( + SELECT materialization_hypertable_name + FROM timescaledb_information.continuous_aggregates + WHERE view_name='chunks_1h' + ) \gset + + +SELECT materialization_hypertable_schema || '.' || materialization_hypertable_name AS "MAT_HT_NAME_1H" + FROM timescaledb_information.continuous_aggregates + WHERE view_name='chunks_1h' +\gset + +-- Prepared scan on hypertable (identical to the query of a real-time CAgg) +PREPARE ht_scan_realtime_1h AS + SELECT bucket, device, max + FROM :MAT_HT_NAME_1H + WHERE bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)), '-infinity'::timestamp with time zone) +UNION ALL + SELECT time_bucket('01:00:00'::interval, chunks."time") AS bucket, + chunks.device, + max(chunks.value) AS max + FROM chunks + WHERE chunks."time" >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)), '-infinity'::timestamp with time zone) + GROUP BY (time_bucket('01:00:00'::interval, chunks."time")), chunks.device; + +PREPARE cagg_scan_1h AS SELECT * FROM chunks_1h; + +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + +INSERT INTO chunks VALUES ('1901-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + +-- Compare prepared statement with ad-hoc query +EXECUTE cagg_scan_1h; +SELECT * FROM chunks_1h; + +-- Add new chunks to the non materialized part of the CAgg +INSERT INTO chunks VALUES ('1910-08-01 01:01:01+01', 1, 2); +:EXPLAIN_ANALYZE EXECUTE cagg_scan_1h; +:EXPLAIN_ANALYZE SELECT * FROM chunks_1h; + +INSERT INTO chunks VALUES ('1911-08-01 01:01:01+01', 1, 2); +:EXPLAIN_ANALYZE EXECUTE cagg_scan_1h; +:EXPLAIN_ANALYZE SELECT * FROM chunks_1h; + +-- Materialize CAgg and check for plan time chunk exclusion 
+CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +:EXPLAIN_ANALYZE EXECUTE cagg_scan_1h; +:EXPLAIN_ANALYZE SELECT * FROM chunks_1h; + +-- Check plan when chunk_append and constraint_aware_append cannot be used +-- There should be no plans for scans of chunks that are materialized in the CAgg +-- on the underlying hypertable +SET timescaledb.enable_chunk_append = OFF; +SET timescaledb.enable_constraint_aware_append = OFF; +:EXPLAIN_ANALYZE SELECT * FROM chunks_1h; +RESET timescaledb.enable_chunk_append; +RESET timescaledb.enable_constraint_aware_append; + +-- Insert new values and check watermark changes +INSERT INTO chunks VALUES ('1920-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + +-- Compare prepared statement with ad-hoc query +EXECUTE cagg_scan_1h; +SELECT * FROM chunks_1h; + +INSERT INTO chunks VALUES ('1930-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + +-- Two invalidations without prepared statement execution between +INSERT INTO chunks VALUES ('1931-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +INSERT INTO chunks VALUES ('1932-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + +-- Multiple prepared statement executions followed by one invalidation +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; +INSERT 
INTO chunks VALUES ('1940-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + +-- Compare prepared statement with ad-hoc query +EXECUTE cagg_scan_1h; +SELECT * FROM chunks_1h; + +-- Delete data from hypertable - data is only present in cagg after this point. If the watermark in the prepared +-- statement is not moved to the most-recent watermark, we would see an empty result. +TRUNCATE chunks; + +EXECUTE cagg_scan_1h; +SELECT * FROM chunks_1h; + +-- Refresh the CAgg +CALL refresh_continuous_aggregate('chunks_1h', NULL, NULL); +EXECUTE cagg_scan_1h; +SELECT * FROM chunks_1h; + +-- Check new watermark +SELECT * FROM _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + +-- Update after truncate +INSERT INTO chunks VALUES ('1950-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; + +-- Test with CAgg on CAgg +CREATE MATERIALIZED VIEW chunks_1d WITH (timescaledb.continuous) + AS SELECT time_bucket('1 days', bucket) AS bucket, device, max(max) AS max FROM chunks_1h GROUP BY 1, 2; + +ALTER MATERIALIZED VIEW chunks_1d set (timescaledb.materialized_only = false); + +SELECT id AS "MAT_HT_ID_1D" FROM _timescaledb_catalog.hypertable + WHERE table_name=( + SELECT materialization_hypertable_name + FROM timescaledb_information.continuous_aggregates + WHERE view_name='chunks_1d' + ) \gset + + +SELECT materialization_hypertable_schema || '.' 
|| materialization_hypertable_name AS "MAT_HT_NAME_1D" + FROM timescaledb_information.continuous_aggregates + WHERE view_name='chunks_1d' +\gset + +-- Prepared scan on hypertable (identical to the query of a real-time CAgg) +PREPARE ht_scan_realtime_1d AS + SELECT bucket, device, max + FROM :MAT_HT_NAME_1D + WHERE bucket < COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1D)), '-infinity'::timestamp with time zone) +UNION ALL + SELECT time_bucket('@ 1 day'::interval, chunks_1h.bucket) AS bucket, + chunks_1h.device, + max(chunks_1h.max) AS max + FROM chunks_1h + WHERE chunks_1h.bucket >= COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1D)), '-infinity'::timestamp with time zone) + GROUP BY (time_bucket('@ 1 day'::interval, chunks_1h.bucket)), chunks_1h.device; + + +PREPARE cagg_scan_1d AS SELECT * FROM chunks_1d; + +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1d; + +INSERT INTO chunks VALUES ('2000-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +CALL refresh_continuous_aggregate('chunks_1d', '1900-01-01', '2021-06-01'); +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1d; + +INSERT INTO chunks VALUES ('2010-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +CALL refresh_continuous_aggregate('chunks_1d', '1900-01-01', '2021-06-01'); +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1d; + +-- Stored procedure - watermark +CREATE FUNCTION cur_watermark_plsql(mat_table int) RETURNS timestamptz +AS $$ +DECLARE +cur_watermark_value timestamptz; +BEGIN + SELECT _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(mat_table)) INTO cur_watermark_value; + RETURN cur_watermark_value; +END$$ LANGUAGE plpgsql; + +SELECT * FROM cur_watermark_plsql(:MAT_HT_ID_1H); + +INSERT INTO chunks VALUES ('2011-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', 
'1900-01-01', '2021-06-01'); +SELECT * FROM cur_watermark_plsql(:MAT_HT_ID_1H); + +INSERT INTO chunks VALUES ('2012-08-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM cur_watermark_plsql(:MAT_HT_ID_1H); + +-- Stored procedure - result +CREATE FUNCTION cur_cagg_result_count() RETURNS int +AS $$ +DECLARE +count_value int; +BEGIN + SELECT count(*) FROM chunks_1h INTO count_value; + RETURN count_value; +END$$ LANGUAGE plpgsql; + +-- Cache function value +SELECT * FROM cur_cagg_result_count(); + +-- Add to non-materialized part +INSERT INTO chunks VALUES ('2013-08-01 01:01:01+01', 1, 2); +SELECT * FROM cur_cagg_result_count(); + +-- Materialize +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +SELECT * FROM cur_cagg_result_count(); + +-- Ensure all elements are materialized (i.e., watermark is moved properly) +TRUNCATE chunks; +SELECT * FROM cur_cagg_result_count(); +SELECT count(*) FROM chunks_1h; + +-- Test watermark call directly +PREPARE watermark_query AS + SELECT _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); + +SELECT _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); +EXECUTE watermark_query; + +INSERT INTO chunks VALUES ('2013-09-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); + +SELECT _timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)); +EXECUTE watermark_query; + +-- Disable constification of watermark values +SET timescaledb.enable_cagg_watermark_constify = OFF; +INSERT INTO chunks VALUES ('2014-01-01 01:01:01+01', 1, 2); +CALL refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +:EXPLAIN_ANALYZE EXECUTE ht_scan_realtime_1h; +RESET timescaledb.enable_cagg_watermark_constify; + +-- Select with projection +INSERT INTO chunks VALUES ('2015-01-01 01:01:01+01', 1, 2); +CALL 
refresh_continuous_aggregate('chunks_1h', '1900-01-01', '2021-06-01'); +:EXPLAIN_ANALYZE SELECT device FROM chunks_1h; + +-- Watermark function use other tables in WHERE condition (should not be constified) +CREATE TABLE continuous_agg_test(time int, data int); +:EXPLAIN_ANALYZE SELECT * FROM continuous_agg_test AS t1, continuous_agg_test AS t2 WHERE COALESCE(_timescaledb_functions.to_timestamp(_timescaledb_functions.cagg_watermark(:MAT_HT_ID_1H)), '-infinity'::timestamp with time zone) IS NOT NULL; + +\c :TEST_DBNAME :ROLE_SUPERUSER +TRUNCATE _timescaledb_catalog.continuous_aggs_hypertable_invalidation_log; +TRUNCATE _timescaledb_catalog.continuous_aggs_invalidation_threshold; +\c :TEST_DBNAME :ROLE_DEFAULT_PERM_USER