From: jgart
Subject: [bug#59187] [PATCH] gnu: timescaledb: Update to 2.8.1.
Date: Thu, 10 Nov 2022 19:57:37 -0600

* gnu/packages/databases.scm (timescaledb): Update to 2.8.1.
[source]: Remove failing patch.
---
 gnu/packages/databases.scm                    |   5 +-
 .../patches/timescaledb-flaky-test.patch      | 107 ------------------
 2 files changed, 2 insertions(+), 110 deletions(-)
 delete mode 100644 gnu/packages/patches/timescaledb-flaky-test.patch

diff --git a/gnu/packages/databases.scm b/gnu/packages/databases.scm
index e2aabc1363..1117ef6e19 100644
--- a/gnu/packages/databases.scm
+++ b/gnu/packages/databases.scm
@@ -1333,7 +1333,7 @@ (define-public postgresql postgresql-14)
 (define-public timescaledb
   (package
     (name "timescaledb")
-    (version "2.7.0")
+    (version "2.8.1")
     (source (origin
               (method git-fetch)
               (uri (git-reference
@@ -1342,8 +1342,7 @@ (define-public timescaledb
               (file-name (git-file-name name version))
               (sha256
                (base32
-                "18wszj8ia5rs4y4zkyfb0f5z4y1g7ac3jym748nbkbszhxmq7nc7"))
-              (patches (search-patches "timescaledb-flaky-test.patch"))
+                "1gbadna0ilmqad7sbrixm12wd71h43njhsbp1kh5lispb6drdb6r"))
               (modules '((guix build utils)))
               (snippet
                ;; Remove files carrying the proprietary TIMESCALE license.
diff --git a/gnu/packages/patches/timescaledb-flaky-test.patch b/gnu/packages/patches/timescaledb-flaky-test.patch
deleted file mode 100644
index 6268bcecad..0000000000
--- a/gnu/packages/patches/timescaledb-flaky-test.patch
+++ /dev/null
@@ -1,107 +0,0 @@
-Use fixed dates in test for consistent results.
-
-Taken from upstream:
-
-  https://github.com/timescale/timescaledb/commit/1d0670e703862b284c241ab797404f851b25b5df
-
-diff --git a/test/expected/copy-12.out b/test/expected/copy-12.out
-index 5cb28a45a2..37abf6f6ff 100644
---- a/test/expected/copy-12.out
-+++ b/test/expected/copy-12.out
-@@ -324,13 +324,12 @@ INSERT INTO hyper_copy_large
- SELECT time,
- random() AS value
- FROM
--generate_series(now() - INTERVAL '1 months', now() - INTERVAL '1 day',
--  INTERVAL '1 hour') AS g1(time)
-+generate_series('2022-01-01', '2022-01-31', INTERVAL '1 hour') AS g1(time)
- ORDER BY time;
- SELECT COUNT(*) FROM hyper_copy_large;
-  count 
- -------
--   697
-+   721
- (1 row)
- 
- -- Migrate data to chunks by using copy
-@@ -345,7 +344,7 @@ NOTICE:  migrating data to chunks
- SELECT COUNT(*) FROM hyper_copy_large;
-  count 
- -------
--   697
-+   721
- (1 row)
- 
- ----------------------------------------------------------------
-diff --git a/test/expected/copy-13.out b/test/expected/copy-13.out
-index 02bf913eff..89e16fe8e2 100644
---- a/test/expected/copy-13.out
-+++ b/test/expected/copy-13.out
-@@ -324,13 +324,12 @@ INSERT INTO hyper_copy_large
- SELECT time,
- random() AS value
- FROM
--generate_series(now() - INTERVAL '1 months', now() - INTERVAL '1 day',
--  INTERVAL '1 hour') AS g1(time)
-+generate_series('2022-01-01', '2022-01-31', INTERVAL '1 hour') AS g1(time)
- ORDER BY time;
- SELECT COUNT(*) FROM hyper_copy_large;
-  count 
- -------
--   697
-+   721
- (1 row)
- 
- -- Migrate data to chunks by using copy
-@@ -345,7 +344,7 @@ NOTICE:  migrating data to chunks
- SELECT COUNT(*) FROM hyper_copy_large;
-  count 
- -------
--   697
-+   721
- (1 row)
- 
- ----------------------------------------------------------------
-diff --git a/test/expected/copy-14.out b/test/expected/copy-14.out
-index 02bf913eff..89e16fe8e2 100644
---- a/test/expected/copy-14.out
-+++ b/test/expected/copy-14.out
-@@ -324,13 +324,12 @@ INSERT INTO hyper_copy_large
- SELECT time,
- random() AS value
- FROM
--generate_series(now() - INTERVAL '1 months', now() - INTERVAL '1 day',
--  INTERVAL '1 hour') AS g1(time)
-+generate_series('2022-01-01', '2022-01-31', INTERVAL '1 hour') AS g1(time)
- ORDER BY time;
- SELECT COUNT(*) FROM hyper_copy_large;
-  count 
- -------
--   697
-+   721
- (1 row)
- 
- -- Migrate data to chunks by using copy
-@@ -345,7 +344,7 @@ NOTICE:  migrating data to chunks
- SELECT COUNT(*) FROM hyper_copy_large;
-  count 
- -------
--   697
-+   721
- (1 row)
- 
- ----------------------------------------------------------------
-diff --git a/test/sql/copy.sql.in b/test/sql/copy.sql.in
-index 91402c2ab8..bba4265064 100644
---- a/test/sql/copy.sql.in
-+++ b/test/sql/copy.sql.in
-@@ -276,8 +276,7 @@ INSERT INTO hyper_copy_large
- SELECT time,
- random() AS value
- FROM
--generate_series(now() - INTERVAL '1 months', now() - INTERVAL '1 day',
--  INTERVAL '1 hour') AS g1(time)
-+generate_series('2022-01-01', '2022-01-31', INTERVAL '1 hour') AS g1(time)
- ORDER BY time;
- 
- SELECT COUNT(*) FROM hyper_copy_large;
-- 
2.38.1
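
For reference, the removed patch backported upstream commit
1d0670e703862b284c241ab797404f851b25b5df, which replaces the now()-relative
range in the COPY test with fixed dates so that the expected row count no
longer depends on when the test runs; with the 2.8.1 update the patch stopped
applying, presumably because that fix is already part of the release.  The
SQL sketch below (an illustration only, not part of this patch) shows the
arithmetic behind the 697 versus 721 counts in the expected output:

  -- Old, flaky range: "1 month" back from now() spans 28 to 31 calendar days
  -- depending on the current month, so the hourly series has a varying
  -- length (697 rows corresponds to a 29-day window: 29 * 24 + 1).
  SELECT count(*)
  FROM generate_series(now() - INTERVAL '1 months',
                       now() - INTERVAL '1 day',
                       INTERVAL '1 hour');

  -- New, fixed range: 30 days of hourly samples with both endpoints
  -- included, so the count is always 30 * 24 + 1 = 721.
  SELECT count(*)
  FROM generate_series(TIMESTAMP '2022-01-01',
                       TIMESTAMP '2022-01-31',
                       INTERVAL '1 hour');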