| author | Marius Bakke <marius@gnu.org> | 2022-06-14 17:58:08 +0200 |
|---|---|---|
| committer | Marius Bakke <marius@gnu.org> | 2022-06-14 23:33:52 +0200 |
| commit | d73b88d82650f8e38327dcd1c7a0c9da4fc96e0f (patch) | |
| tree | aef6cad0df887a7090e82f56fd73f2dfb63031aa /gnu/packages/patches | |
| parent | b98a61a8f87c6b0cdca8816157b58c7d58be90ee (diff) | |
gnu: TimescaleDB: Update to 2.7.0.
* gnu/packages/databases.scm (timescaledb): Update to 2.7.0.
[source](snippet): Remove more files.
[source](patches): New field.
* gnu/packages/patches/timescaledb-flaky-test.patch: New file.
* gnu/local.mk (dist_patch_DATA): Adjust accordingly.
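(The diffstat below is limited to 'gnu/packages/patches', so the edits to gnu/packages/databases.scm and gnu/local.mk referenced above are not shown. By Guix convention they look roughly as follows; this is a sketch of the idiom, not the verbatim change, and the origin's other fields are omitted.)

```scheme
;; gnu/packages/databases.scm: the package's origin gains a patches
;; field; search-patches resolves the file name on Guix's patch search
;; path.
(patches (search-patches "timescaledb-flaky-test.patch"))
```

```make
# gnu/local.mk: the new file is registered under dist_patch_DATA so it
# is shipped in release tarballs.
%D%/packages/patches/timescaledb-flaky-test.patch \
```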
Diffstat (limited to 'gnu/packages/patches')
-rw-r--r-- | gnu/packages/patches/timescaledb-flaky-test.patch | 107 |
1 file changed, 107 insertions, 0 deletions
diff --git a/gnu/packages/patches/timescaledb-flaky-test.patch b/gnu/packages/patches/timescaledb-flaky-test.patch
new file mode 100644
index 0000000000..6268bcecad
--- /dev/null
+++ b/gnu/packages/patches/timescaledb-flaky-test.patch
@@ -0,0 +1,107 @@
+Use fixed dates in test for consistent results.
+
+Taken from upstream:
+
+  https://github.com/timescale/timescaledb/commit/1d0670e703862b284c241ab797404f851b25b5df
+
+diff --git a/test/expected/copy-12.out b/test/expected/copy-12.out
+index 5cb28a45a2..37abf6f6ff 100644
+--- a/test/expected/copy-12.out
++++ b/test/expected/copy-12.out
+@@ -324,13 +324,12 @@ INSERT INTO hyper_copy_large
+ SELECT time,
+     random() AS value
+ FROM
+-generate_series(now() - INTERVAL '1 months', now() - INTERVAL '1 day',
+-    INTERVAL '1 hour') AS g1(time)
++generate_series('2022-01-01', '2022-01-31', INTERVAL '1 hour') AS g1(time)
+ ORDER BY time;
+ SELECT COUNT(*) FROM hyper_copy_large;
+  count
+ -------
+-   697
++   721
+ (1 row)
+
+ -- Migrate data to chunks by using copy
+@@ -345,7 +344,7 @@ NOTICE: migrating data to chunks
+ SELECT COUNT(*) FROM hyper_copy_large;
+  count
+ -------
+-   697
++   721
+ (1 row)
+
+ ----------------------------------------------------------------
+diff --git a/test/expected/copy-13.out b/test/expected/copy-13.out
+index 02bf913eff..89e16fe8e2 100644
+--- a/test/expected/copy-13.out
++++ b/test/expected/copy-13.out
+@@ -324,13 +324,12 @@ INSERT INTO hyper_copy_large
+ SELECT time,
+     random() AS value
+ FROM
+-generate_series(now() - INTERVAL '1 months', now() - INTERVAL '1 day',
+-    INTERVAL '1 hour') AS g1(time)
++generate_series('2022-01-01', '2022-01-31', INTERVAL '1 hour') AS g1(time)
+ ORDER BY time;
+ SELECT COUNT(*) FROM hyper_copy_large;
+  count
+ -------
+-   697
++   721
+ (1 row)
+
+ -- Migrate data to chunks by using copy
+@@ -345,7 +344,7 @@ NOTICE: migrating data to chunks
+ SELECT COUNT(*) FROM hyper_copy_large;
+  count
+ -------
+-   697
++   721
+ (1 row)
+
+ ----------------------------------------------------------------
+diff --git a/test/expected/copy-14.out b/test/expected/copy-14.out
+index 02bf913eff..89e16fe8e2 100644
+--- a/test/expected/copy-14.out
++++ b/test/expected/copy-14.out
+@@ -324,13 +324,12 @@ INSERT INTO hyper_copy_large
+ SELECT time,
+     random() AS value
+ FROM
+-generate_series(now() - INTERVAL '1 months', now() - INTERVAL '1 day',
+-    INTERVAL '1 hour') AS g1(time)
++generate_series('2022-01-01', '2022-01-31', INTERVAL '1 hour') AS g1(time)
+ ORDER BY time;
+ SELECT COUNT(*) FROM hyper_copy_large;
+  count
+ -------
+-   697
++   721
+ (1 row)
+
+ -- Migrate data to chunks by using copy
+@@ -345,7 +344,7 @@ NOTICE: migrating data to chunks
+ SELECT COUNT(*) FROM hyper_copy_large;
+  count
+ -------
+-   697
++   721
+ (1 row)
+
+ ----------------------------------------------------------------
+diff --git a/test/sql/copy.sql.in b/test/sql/copy.sql.in
+index 91402c2ab8..bba4265064 100644
+--- a/test/sql/copy.sql.in
++++ b/test/sql/copy.sql.in
+@@ -276,8 +276,7 @@ INSERT INTO hyper_copy_large
+ SELECT time,
+     random() AS value
+ FROM
+-generate_series(now() - INTERVAL '1 months', now() - INTERVAL '1 day',
+-    INTERVAL '1 hour') AS g1(time)
++generate_series('2022-01-01', '2022-01-31', INTERVAL '1 hour') AS g1(time)
+ ORDER BY time;
+
+ SELECT COUNT(*) FROM hyper_copy_large;
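Why the expected count moves from 697 to 721: with now()-relative bounds the series covers "one month minus one day", and the number of hourly samples in that window depends on when the test runs (697 rows corresponds to a 30-day month), while the fixed window is always exactly 30 days. A standalone illustration in plain PostgreSQL, separate from the patch itself:

```sql
-- Fixed endpoints: 2022-01-01 to 2022-01-31 is exactly 30 days,
-- sampled hourly and inclusive of both ends: 30 * 24 + 1 = 721 rows.
SELECT count(*)
FROM generate_series('2022-01-01'::timestamptz,
                     '2022-01-31'::timestamptz,
                     INTERVAL '1 hour') AS g1(time);
-- Always returns 721.

-- The old bounds span 27 to 30 days depending on the current month,
-- so the "expected" row count drifted over the course of the year.
SELECT count(*)
FROM generate_series(now() - INTERVAL '1 month',
                     now() - INTERVAL '1 day',
                     INTERVAL '1 hour') AS g1(time);
```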