Fix check when dropping compressed column (#7102) #7195

Merged: 1 commit, Aug 19, 2024
1 change: 1 addition & 0 deletions .unreleased/pr_7195
@@ -0,0 +1 @@
Fixes: #7195 Fix segmentby/orderby check when dropping a column from a compressed hypertable
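
The note covers the case where a hypertable's segmentby/orderby settings are changed after some chunks have already been compressed: the dropped column is then gone from the hypertable-level settings but still recorded in the settings of previously compressed chunks. A condensed sketch of the scenario, adapted from the regression test added later in this PR (table and column names come from that test; the inserts are simplified, and exact chunk boundaries depend on the default chunk interval):

CREATE TABLE compression_drop(time timestamptz NOT NULL, v0 int, v1 int);
SELECT create_hypertable('compression_drop', 'time');
ALTER TABLE compression_drop SET (timescaledb.compress,
    timescaledb.compress_orderby = 'time DESC',
    timescaledb.compress_segmentby = 'v0');

-- compress a first chunk with segmentby = 'v0'
INSERT INTO compression_drop VALUES ('2000-01-01', 1, 2);
SELECT compress_chunk(show_chunks('compression_drop'), true);

-- switch the segmentby column; the already-compressed chunk keeps 'v0'
ALTER TABLE compression_drop SET (timescaledb.compress_segmentby = 'v1');
INSERT INTO compression_drop VALUES ('2000-02-01', 1, 2);
SELECT compress_chunk(show_chunks('compression_drop'), true);

-- 'v0' is no longer in the hypertable-level settings, but the first chunk
-- still segments by it, so the DROP has to be rejected:
ALTER TABLE compression_drop DROP COLUMN v0;
-- ERROR:  cannot drop orderby or segmentby column from a chunk with compression enabled
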
16 changes: 14 additions & 2 deletions tsl/src/compression/create.c
@@ -1192,10 +1192,22 @@ tsl_process_compress_table_drop_column(Hypertable *ht, char *name)
errmsg("cannot drop orderby or segmentby column from a hypertable with "
"compression enabled")));

List *chunks = ts_chunk_get_by_hypertable_id(ht->fd.compressed_hypertable_id);
ListCell *lc;
foreach (lc, chunks)
{
Chunk *chunk = lfirst(lc);
CompressionSettings *settings = ts_compression_settings_get(chunk->table_id);
if (ts_array_is_member(settings->fd.segmentby, name) ||
ts_array_is_member(settings->fd.orderby, name))
ereport(ERROR,
(errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
errmsg("cannot drop orderby or segmentby column from a chunk with "
"compression enabled")));
}

if (TS_HYPERTABLE_HAS_COMPRESSION_TABLE(ht))
{
List *chunks = ts_chunk_get_by_hypertable_id(ht->fd.compressed_hypertable_id);
ListCell *lc;
foreach (lc, chunks)
{
Chunk *chunk = lfirst(lc);
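
The added check walks every chunk of the internal compressed hypertable and consults that chunk's own CompressionSettings instead of only the hypertable-level settings, because a chunk keeps the segmentby/orderby it was compressed with even after the hypertable's settings are changed. For illustration, a hedged sketch of how those per-chunk settings can be inspected from SQL; it assumes the timescaledb_information.chunk_compression_settings view available in recent TimescaleDB releases, which is not part of this change:

-- one row per compressed chunk; segmentby/orderby reflect what the chunk was
-- actually compressed with and may differ from the current hypertable settings
SELECT chunk, segmentby, orderby
FROM timescaledb_information.chunk_compression_settings
WHERE hypertable = 'compression_drop'::regclass;
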
37 changes: 37 additions & 0 deletions tsl/test/expected/compression_ddl.out
@@ -2511,3 +2511,40 @@ select compress_chunk(show_chunks('hyper_unique_deferred'));
begin; insert INTO hyper_unique_deferred values (1257987700000000000, 'dev1', 1); abort;
ERROR: duplicate key value violates unique constraint "146_2_hyper_unique_deferred_time_key"
\set ON_ERROR_STOP 1
-- tests chunks being compressed using different segmentby settings
-- github issue #7102
CREATE TABLE compression_drop(time timestamptz NOT NULL, v0 int, v1 int);
CREATE INDEX ON compression_drop(time);
CREATE INDEX ON compression_drop(v0,time);
SELECT create_hypertable('compression_drop','time',create_default_indexes:=false);
create_hypertable
--------------------------------
(42,public,compression_drop,t)
(1 row)

ALTER TABLE compression_drop SET (timescaledb.compress, timescaledb.compress_orderby='time DESC', timescaledb.compress_segmentby='v0');
-- insert data and compress chunk
INSERT INTO compression_drop(time, v0, v1)
SELECT time, v0, v0+1
FROM generate_series('2000-01-01 0:00:00+0'::timestamptz,'2000-01-03 23:55:00+0','2m') gtime(time), generate_series(1,5,1) gv0(v0);
SELECT compress_chunk(ch, true) AS "CHUNK_NAME" FROM show_chunks('compression_drop') ch ORDER BY ch DESC \gset
-- change segmentby column
ALTER TABLE compression_drop SET (timescaledb.compress_segmentby='v1');
-- insert more data and compress next chunk
INSERT INTO compression_drop(time, v0, v1)
SELECT time, v0, v0+1
FROM generate_series('2000-01-07 0:00:00+0'::timestamptz,'2000-01-09 23:55:00+0','2m') gtime(time), generate_series(1,5,1) gv0(v0);
SELECT compress_chunk(format('%I.%I',chunk_schema,chunk_name)) AS "CHUNK_NAME"
FROM timescaledb_information.chunks
WHERE hypertable_name = 'compression_drop' AND NOT is_compressed;
CHUNK_NAME
-------------------------------------------
_timescaledb_internal._hyper_42_151_chunk
(1 row)

-- try dropping column v0, should fail
\set ON_ERROR_STOP 0
ALTER TABLE compression_drop DROP COLUMN v0;
ERROR: cannot drop orderby or segmentby column from a chunk with compression enabled
\set ON_ERROR_STOP 1
DROP TABLE compression_drop;
33 changes: 33 additions & 0 deletions tsl/test/sql/compression_ddl.sql
@@ -1003,3 +1003,36 @@ select compress_chunk(show_chunks('hyper_unique_deferred'));
\set ON_ERROR_STOP 0
begin; insert INTO hyper_unique_deferred values (1257987700000000000, 'dev1', 1); abort;
\set ON_ERROR_STOP 1
-- tests chunks being compressed using different segmentby settings
-- github issue #7102
CREATE TABLE compression_drop(time timestamptz NOT NULL, v0 int, v1 int);
CREATE INDEX ON compression_drop(time);
CREATE INDEX ON compression_drop(v0,time);
Comment on lines +1009 to +1010

Member: Why are you adding those indexes?

Contributor Author: I tried to follow a format similar to another test that also involves dropping a column - link

SELECT create_hypertable('compression_drop','time',create_default_indexes:=false);
ALTER TABLE compression_drop SET (timescaledb.compress, timescaledb.compress_orderby='time DESC', timescaledb.compress_segmentby='v0');

-- insert data and compress chunk
INSERT INTO compression_drop(time, v0, v1)
SELECT time, v0, v0+1
FROM generate_series('2000-01-01 0:00:00+0'::timestamptz,'2000-01-03 23:55:00+0','2m') gtime(time), generate_series(1,5,1) gv0(v0);

SELECT compress_chunk(ch, true) AS "CHUNK_NAME" FROM show_chunks('compression_drop') ch ORDER BY ch DESC \gset

-- change segmentby column
ALTER TABLE compression_drop SET (timescaledb.compress_segmentby='v1');

-- insert more data and compress next chunk
INSERT INTO compression_drop(time, v0, v1)
SELECT time, v0, v0+1
FROM generate_series('2000-01-07 0:00:00+0'::timestamptz,'2000-01-09 23:55:00+0','2m') gtime(time), generate_series(1,5,1) gv0(v0);

SELECT compress_chunk(format('%I.%I',chunk_schema,chunk_name)) AS "CHUNK_NAME"
FROM timescaledb_information.chunks
WHERE hypertable_name = 'compression_drop' AND NOT is_compressed;

-- try dropping column v0, should fail
\set ON_ERROR_STOP 0
ALTER TABLE compression_drop DROP COLUMN v0;
\set ON_ERROR_STOP 1

DROP TABLE compression_drop;