Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

Remove reindex_relation from recompression #6529

Merged
merged 1 commit into from
Jan 16, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Jump to
Jump to file
Failed to load files.
Diff view
Diff view
1 change: 1 addition & 0 deletions .unreleased/pr_6529
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Implements: #6529 Remove reindex_relation from recompression
8 changes: 0 additions & 8 deletions tsl/src/compression/api.c
Original file line number Diff line number Diff line change
Expand Up @@ -1388,14 +1388,6 @@ tsl_recompress_chunk_segmentwise(PG_FUNCTION_ARGS)
index_close(index_rel, AccessExclusiveLock);
row_decompressor_close(&decompressor);

#if PG14_LT
int options = 0;
#else
ReindexParams params = { 0 };
ReindexParams *options = &params;
#endif
reindex_relation(compressed_chunk->table_id, 0, options);

/* changed chunk status, so invalidate any plans involving this chunk */
CacheInvalidateRelcacheByRelid(uncompressed_chunk_id);
table_close(uncompressed_chunk_rel, ExclusiveLock);
Expand Down
28 changes: 26 additions & 2 deletions tsl/test/expected/recompress_chunk_segmentwise.out
Original file line number Diff line number Diff line change
Expand Up @@ -394,6 +394,30 @@ select :'compressed_chunk_name_before_recompression' as before_segmentwise_recom
compress_hyper_12_13_chunk | compress_hyper_12_13_chunk
(1 row)

INSERT INTO mytab
SELECT t, a, 3, 2
FROM generate_series('2023-01-01'::timestamptz, '2023-01-02'::timestamptz, '1 hour'::interval) t
CROSS JOIN generate_series(1, 10, 1) a;
-- recompress will insert the newly added tuples into the compressed chunk and also into the compressed chunk's index
CALL recompress_chunk(:'chunk_to_compress_mytab');
-- make sure we are hitting the index and that the index contains the tuples
SET enable_seqscan TO off;
EXPLAIN (COSTS OFF) SELECT count(*) FROM mytab where a = 2;
QUERY PLAN
----------------------------------------------------------------------------------------------------------------------------
Aggregate
-> Custom Scan (DecompressChunk) on _hyper_11_12_chunk
-> Index Scan using compress_hyper_12_13_chunk__compressed_hypertable_12_a_c__ts_me on compress_hyper_12_13_chunk
Index Cond: (a = 2)
(4 rows)

SELECT count(*) FROM mytab where a = 2;
count
-------
28
(1 row)

RESET enable_seqscan;
SELECT decompress_chunk(show_chunks('mytab'));
decompress_chunk
------------------------------------------
Expand Down Expand Up @@ -449,7 +473,7 @@ select * from :compressed_chunk_name;
BAAAAneAR/JEAAACd4BH8kQAAAAAAQAAAAEAAAAAAAAADgAE7wCP5IgA | | BAAAAAAAAAAABAAAAAAAAAAEAAAAAQAAAAEAAAAAAAAABAAAAAAAAAAI | 1 | 10 | Sat Jan 01 01:00:00 2022 PST | Sat Jan 01 01:00:00 2022 PST
(3 rows)

-- insert again, check both reindex works and NULL values properly handled
-- insert again, check both index insertion works and NULL values properly handled
insert into nullseg_one values (:'start_time', NULL, 4);
call recompress_chunk(:'chunk_to_compress');
select * from :compressed_chunk_name;
Expand Down Expand Up @@ -492,7 +516,7 @@ select * from :compressed_chunk_name;
BAAAAneAR/JEAAACd4BH8kQAAAAAAQAAAAEAAAAAAAAADgAE7wCP5IgA | 1 | BAAAAAAAAAAABAAAAAAAAAAEAAAAAQAAAAEAAAAAAAAABAAAAAAAAAAI | | 1 | 10 | Sat Jan 01 01:00:00 2022 PST | Sat Jan 01 01:00:00 2022 PST
(5 rows)

-- insert again, check both reindex works and NULL values properly handled
-- insert again, check both index insertion works and NULL values properly handled
-- should match existing segment (1, NULL)
insert into nullseg_many values (:'start_time', 1, NULL, NULL);
call recompress_chunk(:'chunk_to_compress');
Expand Down
16 changes: 14 additions & 2 deletions tsl/test/sql/recompress_chunk_segmentwise.sql
Original file line number Diff line number Diff line change
Expand Up @@ -211,6 +211,18 @@ call recompress_chunk(:'chunk_to_compress_mytab');
select compressed_chunk_name as compressed_chunk_name_after_recompression from compressed_chunk_info_view where hypertable_name = 'mytab' \gset
select :'compressed_chunk_name_before_recompression' as before_segmentwise_recompression, :'compressed_chunk_name_after_recompression' as after_segmentwise_recompression;

INSERT INTO mytab
SELECT t, a, 3, 2
FROM generate_series('2023-01-01'::timestamptz, '2023-01-02'::timestamptz, '1 hour'::interval) t
CROSS JOIN generate_series(1, 10, 1) a;
-- recompress will insert the newly added tuples into the compressed chunk and also into the compressed chunk's index
CALL recompress_chunk(:'chunk_to_compress_mytab');
-- make sure we are hitting the index and that the index contains the tuples
SET enable_seqscan TO off;
EXPLAIN (COSTS OFF) SELECT count(*) FROM mytab where a = 2;
SELECT count(*) FROM mytab where a = 2;
RESET enable_seqscan;

SELECT decompress_chunk(show_chunks('mytab'));
alter table mytab set (timescaledb.compress = false);
alter table mytab set (timescaledb.compress);
Expand Down Expand Up @@ -241,7 +253,7 @@ select compressed_chunk_schema || '.' || compressed_chunk_name as compressed_chu
call recompress_chunk(:'chunk_to_compress');

select * from :compressed_chunk_name;
-- insert again, check both reindex works and NULL values properly handled
-- insert again, check both index insertion works and NULL values properly handled
insert into nullseg_one values (:'start_time', NULL, 4);
call recompress_chunk(:'chunk_to_compress');
select * from :compressed_chunk_name;
Expand All @@ -264,7 +276,7 @@ select compressed_chunk_schema || '.' || compressed_chunk_name as compressed_chu
call recompress_chunk(:'chunk_to_compress');

select * from :compressed_chunk_name;
-- insert again, check both reindex works and NULL values properly handled
-- insert again, check both index insertion works and NULL values properly handled
-- should match existing segment (1, NULL)
insert into nullseg_many values (:'start_time', 1, NULL, NULL);
call recompress_chunk(:'chunk_to_compress');
Expand Down