Skip to content

Commit

Permalink
reference REL_16_4-111-g925b3aa857 compression_sorted_merge-* transparent_decompress_chunk-*
Browse files Browse the repository at this point in the history
  • Loading branch information
akuzm committed Jan 10, 2025
1 parent 4ef39ab commit 11a7e7e
Show file tree
Hide file tree
Showing 2 changed files with 42 additions and 22 deletions.
20 changes: 10 additions & 10 deletions tsl/test/expected/compression_sorted_merge-16.out
Original file line number Diff line number Diff line change
Expand Up @@ -825,18 +825,18 @@ SELECT time,x3 FROM test1 ORDER BY time DESC;
(4 rows)

-- Test with projection and constants
EXPLAIN (verbose) SELECT 1 as one, 2 as two, 3 as three, time, x2 FROM test1 ORDER BY time DESC;
EXPLAIN (verbose, costs off) SELECT 1 as one, 2 as two, 3 as three, time, x2 FROM test1 ORDER BY time DESC;
QUERY PLAN
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
Result (cost=1.06..93.44 rows=3000 width=24)
Result
Output: 1, 2, 3, _hyper_1_1_chunk."time", _hyper_1_1_chunk.x2
-> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_1_chunk (cost=1.06..63.44 rows=3000 width=12)
-> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_1_chunk
Output: _hyper_1_1_chunk."time", _hyper_1_1_chunk.x2
Batch Sorted Merge: true
-> Sort (cost=1.05..1.06 rows=3 width=56)
-> Sort
Output: compress_hyper_2_2_chunk._ts_meta_count, compress_hyper_2_2_chunk.x1, compress_hyper_2_2_chunk.x2, compress_hyper_2_2_chunk.x5, compress_hyper_2_2_chunk._ts_meta_min_1, compress_hyper_2_2_chunk._ts_meta_max_1, compress_hyper_2_2_chunk."time", compress_hyper_2_2_chunk._ts_meta_min_2, compress_hyper_2_2_chunk._ts_meta_max_2, compress_hyper_2_2_chunk.x3, compress_hyper_2_2_chunk._ts_meta_min_3, compress_hyper_2_2_chunk._ts_meta_max_3, compress_hyper_2_2_chunk.x4
Sort Key: compress_hyper_2_2_chunk._ts_meta_max_1 DESC
-> Seq Scan on _timescaledb_internal.compress_hyper_2_2_chunk (cost=0.00..1.03 rows=3 width=56)
-> Seq Scan on _timescaledb_internal.compress_hyper_2_2_chunk
Output: compress_hyper_2_2_chunk._ts_meta_count, compress_hyper_2_2_chunk.x1, compress_hyper_2_2_chunk.x2, compress_hyper_2_2_chunk.x5, compress_hyper_2_2_chunk._ts_meta_min_1, compress_hyper_2_2_chunk._ts_meta_max_1, compress_hyper_2_2_chunk."time", compress_hyper_2_2_chunk._ts_meta_min_2, compress_hyper_2_2_chunk._ts_meta_max_2, compress_hyper_2_2_chunk.x3, compress_hyper_2_2_chunk._ts_meta_min_3, compress_hyper_2_2_chunk._ts_meta_max_3, compress_hyper_2_2_chunk.x4
(10 rows)

Expand All @@ -850,18 +850,18 @@ SELECT 1 as one, 2 as two, 3 as three, time, x2 FROM test1 ORDER BY time DESC;
(4 rows)

-- Test with projection and constants
EXPLAIN (verbose) SELECT 1 as one, 2 as two, 3 as three, x2, time FROM test1 ORDER BY time DESC;
EXPLAIN (verbose, costs off) SELECT 1 as one, 2 as two, 3 as three, x2, time FROM test1 ORDER BY time DESC;
QUERY PLAN
-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
Result (cost=1.06..93.44 rows=3000 width=24)
Result
Output: 1, 2, 3, _hyper_1_1_chunk.x2, _hyper_1_1_chunk."time"
-> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_1_chunk (cost=1.06..63.44 rows=3000 width=12)
-> Custom Scan (DecompressChunk) on _timescaledb_internal._hyper_1_1_chunk
Output: _hyper_1_1_chunk.x2, _hyper_1_1_chunk."time"
Batch Sorted Merge: true
-> Sort (cost=1.05..1.06 rows=3 width=56)
-> Sort
Output: compress_hyper_2_2_chunk._ts_meta_count, compress_hyper_2_2_chunk.x1, compress_hyper_2_2_chunk.x2, compress_hyper_2_2_chunk.x5, compress_hyper_2_2_chunk._ts_meta_min_1, compress_hyper_2_2_chunk._ts_meta_max_1, compress_hyper_2_2_chunk."time", compress_hyper_2_2_chunk._ts_meta_min_2, compress_hyper_2_2_chunk._ts_meta_max_2, compress_hyper_2_2_chunk.x3, compress_hyper_2_2_chunk._ts_meta_min_3, compress_hyper_2_2_chunk._ts_meta_max_3, compress_hyper_2_2_chunk.x4
Sort Key: compress_hyper_2_2_chunk._ts_meta_max_1 DESC
-> Seq Scan on _timescaledb_internal.compress_hyper_2_2_chunk (cost=0.00..1.03 rows=3 width=56)
-> Seq Scan on _timescaledb_internal.compress_hyper_2_2_chunk
Output: compress_hyper_2_2_chunk._ts_meta_count, compress_hyper_2_2_chunk.x1, compress_hyper_2_2_chunk.x2, compress_hyper_2_2_chunk.x5, compress_hyper_2_2_chunk._ts_meta_min_1, compress_hyper_2_2_chunk._ts_meta_max_1, compress_hyper_2_2_chunk."time", compress_hyper_2_2_chunk._ts_meta_min_2, compress_hyper_2_2_chunk._ts_meta_max_2, compress_hyper_2_2_chunk.x3, compress_hyper_2_2_chunk._ts_meta_min_3, compress_hyper_2_2_chunk._ts_meta_max_3, compress_hyper_2_2_chunk.x4
(10 rows)

Expand Down
44 changes: 32 additions & 12 deletions tsl/test/shared/expected/transparent_decompress_chunk-16.out
Original file line number Diff line number Diff line change
Expand Up @@ -448,29 +448,49 @@ QUERY PLAN
-- test aggregate
:PREFIX SELECT count(*) FROM :TEST_TABLE;
QUERY PLAN
Aggregate (actual rows=1 loops=1)
-> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=17990 loops=1)
-> Seq Scan on compress_hyper_X_X_chunk (actual rows=20 loops=1)
(3 rows)
Finalize Aggregate (actual rows=1 loops=1)
-> Gather (actual rows=1 loops=1)
Workers Planned: 2
Workers Launched: 2
-> Custom Scan (VectorAgg) (actual rows=0 loops=2)
-> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=17990 loops=1)
-> Parallel Seq Scan on compress_hyper_X_X_chunk (actual rows=10 loops=2)
(7 rows)

-- test aggregate with GROUP BY
:PREFIX SELECT count(*) FROM :TEST_TABLE GROUP BY device_id ORDER BY device_id;
QUERY PLAN
GroupAggregate (actual rows=5 loops=1)
Finalize GroupAggregate (actual rows=5 loops=1)
Group Key: _hyper_X_X_chunk.device_id
-> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=17990 loops=1)
-> Index Scan using compress_hyper_X_X_chunk_device_id__ts_meta_min_1__ts_meta_idx on compress_hyper_X_X_chunk (actual rows=20 loops=1)
(4 rows)
-> Gather Merge (actual rows=20 loops=1)
Workers Planned: 2
Workers Launched: 2
-> Sort (actual rows=10 loops=2)
Sort Key: _hyper_X_X_chunk.device_id
Worker 0: Sort Method: quicksort
Worker 1: Sort Method: quicksort
-> Custom Scan (VectorAgg) (actual rows=10 loops=2)
-> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=17990 loops=1)
-> Parallel Seq Scan on compress_hyper_X_X_chunk (actual rows=10 loops=2)
(12 rows)

-- test window functions with GROUP BY
:PREFIX SELECT sum(count(*)) OVER () FROM :TEST_TABLE GROUP BY device_id ORDER BY device_id;
QUERY PLAN
WindowAgg (actual rows=5 loops=1)
-> GroupAggregate (actual rows=5 loops=1)
-> Finalize GroupAggregate (actual rows=5 loops=1)
Group Key: _hyper_X_X_chunk.device_id
-> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=17990 loops=1)
-> Index Scan using compress_hyper_X_X_chunk_device_id__ts_meta_min_1__ts_meta_idx on compress_hyper_X_X_chunk (actual rows=20 loops=1)
(5 rows)
-> Gather Merge (actual rows=20 loops=1)
Workers Planned: 2
Workers Launched: 2
-> Sort (actual rows=10 loops=2)
Sort Key: _hyper_X_X_chunk.device_id
Worker 0: Sort Method: quicksort
Worker 1: Sort Method: quicksort
-> Custom Scan (VectorAgg) (actual rows=10 loops=2)
-> Custom Scan (DecompressChunk) on _hyper_X_X_chunk (actual rows=17990 loops=1)
-> Parallel Seq Scan on compress_hyper_X_X_chunk (actual rows=10 loops=2)
(13 rows)

-- test CTE
:PREFIX WITH q AS (
Expand Down

0 comments on commit 11a7e7e

Please sign in to comment.