Skip to content

Commit

Permalink
Move test for error reporting to compression_errors.sql
Browse files — browse the repository at this point in the history
compression_insert.sql has tests related to inserts.
The test being moved verifies an error message, so it belongs in compression_errors.sql.
  • Loading branch information
gayyappan committed Oct 20, 2021
1 parent acc6abe commit c2e4f31
Show file tree
Hide file tree
Showing 6 changed files with 34 additions and 86 deletions.
22 changes: 22 additions & 0 deletions tsl/test/expected/compression_errors.out
Original file line number Diff line number Diff line change
Expand Up @@ -495,3 +495,25 @@ SELECT config FROM _timescaledb_config.bgw_job WHERE id = :compressjob_id;
CALL run_job(:compressjob_id);
ERROR: job 1001 has null config
CONTEXT: PL/pgSQL function _timescaledb_internal.policy_compression(integer,jsonb) line 16 at RAISE
-- Create a hypertable and add a rogue inherited table to it.
CREATE TABLE i165 (time timestamptz PRIMARY KEY);
SELECT create_hypertable('i165','time');
create_hypertable
--------------------
(23,public,i165,t)
(1 row)

ALTER TABLE i165 SET (timescaledb.compress);
SELECT compress_chunk(show_chunks('i165'));
compress_chunk
----------------
(0 rows)

CREATE TABLE extras (more_magic bool) INHERITS (i165);
INSERT INTO i165 (time) VALUES
(generate_series(TIMESTAMP '2019-08-01', TIMESTAMP '2019-08-10', INTERVAL '10 minutes'));
\set VERBOSITY default
SELECT * FROM i165;
ERROR: chunk not found
DETAIL: schema_name: public, table_name: extras
\set VERBOSITY terse
24 changes: 0 additions & 24 deletions tsl/test/expected/compression_insert-12.out
Original file line number Diff line number Diff line change
Expand Up @@ -856,27 +856,3 @@ NOTICE: chunk "_hyper_13_20_chunk" is already compressed
-> Seq Scan on compress_hyper_14_23_chunk
(10 rows)

-- Create a hypertable and add a rogue inherited table to it.
CREATE TABLE i165 (time timestamptz PRIMARY KEY);
SELECT create_hypertable('i165','time');
create_hypertable
--------------------
(15,public,i165,t)
(1 row)

ALTER TABLE i165 SET (timescaledb.compress);
SELECT compress_chunk(show_chunks('i165'));
compress_chunk
----------------
(0 rows)

CREATE TABLE extras (more_magic bool) INHERITS (i165);
INSERT INTO i165 (time) VALUES
(generate_series(TIMESTAMP '2019-08-01', TIMESTAMP '2019-08-10', INTERVAL '10 minutes'));
\set ON_ERROR_STOP 0
\set VERBOSITY default
SELECT * FROM i165;
ERROR: chunk not found
DETAIL: schema_name: public, table_name: extras
\set VERBOSITY terse
\set ON_ERROR_STOP 1
24 changes: 0 additions & 24 deletions tsl/test/expected/compression_insert-13.out
Original file line number Diff line number Diff line change
Expand Up @@ -856,27 +856,3 @@ NOTICE: chunk "_hyper_13_20_chunk" is already compressed
-> Seq Scan on compress_hyper_14_23_chunk
(10 rows)

-- Create a hypertable and add a rogue inherited table to it.
CREATE TABLE i165 (time timestamptz PRIMARY KEY);
SELECT create_hypertable('i165','time');
create_hypertable
--------------------
(15,public,i165,t)
(1 row)

ALTER TABLE i165 SET (timescaledb.compress);
SELECT compress_chunk(show_chunks('i165'));
compress_chunk
----------------
(0 rows)

CREATE TABLE extras (more_magic bool) INHERITS (i165);
INSERT INTO i165 (time) VALUES
(generate_series(TIMESTAMP '2019-08-01', TIMESTAMP '2019-08-10', INTERVAL '10 minutes'));
\set ON_ERROR_STOP 0
\set VERBOSITY default
SELECT * FROM i165;
ERROR: chunk not found
DETAIL: schema_name: public, table_name: extras
\set VERBOSITY terse
\set ON_ERROR_STOP 1
24 changes: 0 additions & 24 deletions tsl/test/expected/compression_insert-14.out
Original file line number Diff line number Diff line change
Expand Up @@ -856,27 +856,3 @@ NOTICE: chunk "_hyper_13_20_chunk" is already compressed
-> Seq Scan on compress_hyper_14_23_chunk
(10 rows)

-- Create a hypertable and add a rogue inherited table to it.
CREATE TABLE i165 (time timestamptz PRIMARY KEY);
SELECT create_hypertable('i165','time');
create_hypertable
--------------------
(15,public,i165,t)
(1 row)

ALTER TABLE i165 SET (timescaledb.compress);
SELECT compress_chunk(show_chunks('i165'));
compress_chunk
----------------
(0 rows)

CREATE TABLE extras (more_magic bool) INHERITS (i165);
INSERT INTO i165 (time) VALUES
(generate_series(TIMESTAMP '2019-08-01', TIMESTAMP '2019-08-10', INTERVAL '10 minutes'));
\set ON_ERROR_STOP 0
\set VERBOSITY default
SELECT * FROM i165;
ERROR: chunk not found
DETAIL: schema_name: public, table_name: extras
\set VERBOSITY terse
\set ON_ERROR_STOP 1
12 changes: 12 additions & 0 deletions tsl/test/sql/compression_errors.sql
Original file line number Diff line number Diff line change
Expand Up @@ -283,3 +283,15 @@ SELECT config FROM _timescaledb_config.bgw_job WHERE id = :compressjob_id;

--should fail
CALL run_job(:compressjob_id);

-- Create a hypertable and add a rogue inherited table to it.
CREATE TABLE i165 (time timestamptz PRIMARY KEY);
SELECT create_hypertable('i165','time');
ALTER TABLE i165 SET (timescaledb.compress);
SELECT compress_chunk(show_chunks('i165'));
CREATE TABLE extras (more_magic bool) INHERITS (i165);
INSERT INTO i165 (time) VALUES
(generate_series(TIMESTAMP '2019-08-01', TIMESTAMP '2019-08-10', INTERVAL '10 minutes'));
\set VERBOSITY default
SELECT * FROM i165;
\set VERBOSITY terse
14 changes: 0 additions & 14 deletions tsl/test/sql/compression_insert.sql.in
Original file line number Diff line number Diff line change
Expand Up @@ -550,17 +550,3 @@ SELECT compress_chunk(format('%I.%I',chunk_schema,chunk_name), true) FROM timesc

-- should be ordered append
:PREFIX SELECT * FROM test_ordering ORDER BY 1;

-- Create a hypertable and add a rogue inherited table to it.
CREATE TABLE i165 (time timestamptz PRIMARY KEY);
SELECT create_hypertable('i165','time');
ALTER TABLE i165 SET (timescaledb.compress);
SELECT compress_chunk(show_chunks('i165'));
CREATE TABLE extras (more_magic bool) INHERITS (i165);
INSERT INTO i165 (time) VALUES
(generate_series(TIMESTAMP '2019-08-01', TIMESTAMP '2019-08-10', INTERVAL '10 minutes'));
\set ON_ERROR_STOP 0
\set VERBOSITY default
SELECT * FROM i165;
\set VERBOSITY terse
\set ON_ERROR_STOP 1

0 comments on commit c2e4f31

Please sign in to comment.