Skip to content

Commit

Permalink
Fix check when dropping compressed column (#7102)
Browse the repository at this point in the history
Fixes segmentby/orderby check when dropping a column from a compressed
hypertable.
kpan2034 committed Aug 13, 2024
1 parent a60d305 commit 0027d7c
Show file tree
Hide file tree
Showing 4 changed files with 85 additions and 2 deletions.
1 change: 1 addition & 0 deletions .unreleased/pr_7195
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Fixes: #7102 Fix segmentby/orderby check when dropping a column from a compressed hypertable
16 changes: 14 additions & 2 deletions tsl/src/compression/create.c
Original file line number Diff line number Diff line change
Expand Up @@ -1192,10 +1192,22 @@ tsl_process_compress_table_drop_column(Hypertable *ht, char *name)
errmsg("cannot drop orderby or segmentby column from a hypertable with "
"compression enabled")));

List *chunks = ts_chunk_get_by_hypertable_id(ht->fd.compressed_hypertable_id);
ListCell *lc;
foreach (lc, chunks)
{
Chunk *chunk = lfirst(lc);
CompressionSettings *settings = ts_compression_settings_get(chunk->table_id);
if (ts_array_is_member(settings->fd.segmentby, name) ||
ts_array_is_member(settings->fd.orderby, name))
ereport(ERROR,
(errcode(ERRCODE_FEATURE_NOT_SUPPORTED),
errmsg("cannot drop orderby or segmentby column from a chunk with "
"compression enabled")));
}

if (TS_HYPERTABLE_HAS_COMPRESSION_TABLE(ht))
{
List *chunks = ts_chunk_get_by_hypertable_id(ht->fd.compressed_hypertable_id);
ListCell *lc;
foreach (lc, chunks)
{
Chunk *chunk = lfirst(lc);
Expand Down
37 changes: 37 additions & 0 deletions tsl/test/expected/compression_ddl.out
Original file line number Diff line number Diff line change
Expand Up @@ -2511,3 +2511,40 @@ select compress_chunk(show_chunks('hyper_unique_deferred'));
begin; insert INTO hyper_unique_deferred values (1257987700000000000, 'dev1', 1); abort;
ERROR: duplicate key value violates unique constraint "146_2_hyper_unique_deferred_time_key"
\set ON_ERROR_STOP 1
-- tests chunks being compressed using different segmentby settings
-- github issue #7102
CREATE TABLE compression_drop(time timestamptz NOT NULL, v0 int, v1 int);
CREATE INDEX ON compression_drop(time);
CREATE INDEX ON compression_drop(v0,time);
SELECT create_hypertable('compression_drop','time',create_default_indexes:=false);
create_hypertable
--------------------------------
(42,public,compression_drop,t)
(1 row)

ALTER TABLE compression_drop SET (timescaledb.compress, timescaledb.compress_orderby='time DESC', timescaledb.compress_segmentby='v0');
-- insert data and compress chunk
INSERT INTO compression_drop(time, v0, v1)
SELECT time, v0, v0+1
FROM generate_series('2000-01-01 0:00:00+0'::timestamptz,'2000-01-03 23:55:00+0','2m') gtime(time), generate_series(1,5,1) gv0(v0);
SELECT compress_chunk(ch, true) AS "CHUNK_NAME" FROM show_chunks('compression_drop') ch ORDER BY ch DESC \gset
-- change segmentby column
ALTER TABLE compression_drop SET (timescaledb.compress_segmentby='v1');
-- insert more data and compress next chunk
INSERT INTO compression_drop(time, v0, v1)
SELECT time, v0, v0+1
FROM generate_series('2000-01-07 0:00:00+0'::timestamptz,'2000-01-09 23:55:00+0','2m') gtime(time), generate_series(1,5,1) gv0(v0);
SELECT compress_chunk(format('%I.%I',chunk_schema,chunk_name)) AS "CHUNK_NAME"
FROM timescaledb_information.chunks
WHERE hypertable_name = 'compression_drop' AND NOT is_compressed;
CHUNK_NAME
-------------------------------------------
_timescaledb_internal._hyper_42_151_chunk
(1 row)

-- try dropping column v0, should fail
\set ON_ERROR_STOP 0
ALTER TABLE compression_drop DROP COLUMN v0;
ERROR: cannot drop orderby or segmentby column from a chunk with compression enabled
\set ON_ERROR_STOP 1
DROP TABLE compression_drop;
33 changes: 33 additions & 0 deletions tsl/test/sql/compression_ddl.sql
Original file line number Diff line number Diff line change
Expand Up @@ -1003,3 +1003,36 @@ select compress_chunk(show_chunks('hyper_unique_deferred'));
\set ON_ERROR_STOP 0
begin; insert INTO hyper_unique_deferred values (1257987700000000000, 'dev1', 1); abort;
\set ON_ERROR_STOP 1
-- tests chunks being compressed using different segmentby settings
-- github issue #7102
CREATE TABLE compression_drop(time timestamptz NOT NULL, v0 int, v1 int);
CREATE INDEX ON compression_drop(time);
CREATE INDEX ON compression_drop(v0,time);
SELECT create_hypertable('compression_drop','time',create_default_indexes:=false);
ALTER TABLE compression_drop SET (timescaledb.compress, timescaledb.compress_orderby='time DESC', timescaledb.compress_segmentby='v0');

-- insert data and compress chunk
INSERT INTO compression_drop(time, v0, v1)
SELECT time, v0, v0+1
FROM generate_series('2000-01-01 0:00:00+0'::timestamptz,'2000-01-03 23:55:00+0','2m') gtime(time), generate_series(1,5,1) gv0(v0);

SELECT compress_chunk(ch, true) AS "CHUNK_NAME" FROM show_chunks('compression_drop') ch ORDER BY ch DESC \gset

-- change segmentby column
ALTER TABLE compression_drop SET (timescaledb.compress_segmentby='v1');

-- insert more data and compress next chunk
INSERT INTO compression_drop(time, v0, v1)
SELECT time, v0, v0+1
FROM generate_series('2000-01-07 0:00:00+0'::timestamptz,'2000-01-09 23:55:00+0','2m') gtime(time), generate_series(1,5,1) gv0(v0);

SELECT compress_chunk(format('%I.%I',chunk_schema,chunk_name)) AS "CHUNK_NAME"
FROM timescaledb_information.chunks
WHERE hypertable_name = 'compression_drop' AND NOT is_compressed;

-- try dropping column v0, should fail
\set ON_ERROR_STOP 0
ALTER TABLE compression_drop DROP COLUMN v0;
\set ON_ERROR_STOP 1

DROP TABLE compression_drop;

0 comments on commit 0027d7c

Please sign in to comment.